UNCLASSIFIED

Verified Commit 5598658c authored by DJ Mountney
Browse files

Refactor to new CHT pipeline pattern

- Removes gitlab versioned folders prior to 12.8
- Unwraps the 12.8 folder into the root
- Removes the prebuild.sh script in favor of download.json
- Drops the build script for now
- Update prepare script to use local downloaded artifact
parent 98ed7563
# Task Runner image (CI build): GitLab rails base plus the python tooling
# (s3cmd / gsutil) needed by the backup-utility script.
ARG CI_REGISTRY_IMAGE=
ARG FROM_IMAGE=gitlab-rails-ee
ARG TAG=ubi
# NOTE(review): PYTHON_TAG is declared but never used; PYTHON_IMAGE below is
# hard-coded to the same value instead of referencing it — confirm intent.
ARG PYTHON_TAG=gitlab-python:ubi
ARG RAILS_IMAGE=${FROM_IMAGE}:${TAG}
ARG PYTHON_IMAGE=gitlab-python:ubi
# Stage holding the prebuilt python runtime to copy from.
FROM ${PYTHON_IMAGE} AS python
FROM ${RAILS_IMAGE}
ARG S3CMD_VERSION=2.0.1
ARG GSUTIL_VERSION=4.42
ARG GITLAB_USER=git
# Bring the python toolchain into the rails image.
COPY --from=python /usr/local/bin /usr/local/bin/
COPY --from=python /usr/local/lib /usr/local/lib/
COPY --from=python /usr/local/include /usr/local/include/
# TLS roots plus the object-storage CLIs used by backup-utility.
RUN dnf --disableplugin=subscription-manager install -yb --nodocs ca-certificates openssl \
&& pip3 install s3cmd==${S3CMD_VERSION} gsutil==${GSUTIL_VERSION} crcmod
COPY scripts/bin/* /usr/local/bin/
COPY scripts/lib/* /usr/lib/ruby/vendor_ruby
# Run as the unprivileged git user.
USER ${GITLAB_USER}:${GITLAB_USER}
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
#!/bin/bash
# backup-utility: create or restore a full GitLab backup (rails components
# plus object-storage buckets) from inside the task-runner container.
set -e
# Default action; overridden to "restore" (or "none" for --help) by the CLI.
ACTION="backup"
# Destination bucket and backend ('s3' or 'gcs') for the final packed tarball.
export BACKUP_BUCKET_NAME=${BACKUP_BUCKET_NAME-gitlab-backups}
export BACKUP_BACKEND=${BACKUP_BACKEND-s3}
rails_dir=/srv/gitlab
# Staging area for individual component dumps.
backups_path=$rails_dir/tmp/backups
# Where the final combined tarball is written before upload.
backup_tars_path=$rails_dir/tmp/backup_tars
# Object-storage components handled via object-storage-backup/-restore.
object_storage_backends=( registry uploads artifacts lfs packages )
# Components the user asked to skip (filled by --skip).
skipping_backup_for=()
# Print CLI usage to stdout. The heredoc body below is emitted verbatim;
# keep the HEREDOC terminator unindented.
function usage()
{
cat << HEREDOC
Usage: backup-utility [--restore] [-f URL] [-t TIMESTAMP] [--skip COMPONENT] [--backend BACKEND]
Options:
-h, --help Show this help message and exit.
--restore [-t TIMESTAMP | -f URL] When specified, utility restores from an existing backup specified
as url or timestamp in object storage.
-f URL http(s):/ftp:/file: URL with backup location. Use with --restore.
-t TIMESTAMP Timestamp (part before '_gitlab_backup.tar' in archive name),
can be used to specify backup source or target name.
--rsyncable Pass the '--rsyncable' parameter to gzip for artifact compression.
--skip COMPONENT When specified, utility will skip the backup of COMPONENT.
May be defined multiple times. Valid values for COMPONENT are
db, repositories, and any of the object storages (e.g. 'lfs').
--backend BACKEND Object storage backend to use for backups.
Can be either 's3' or 'gcs'.
HEREDOC
}
# Succeed when $1 looks like an http(s)/ftp/file URL (i.e. a downloadable
# backup location); fail when it should be treated as a backup timestamp.
function is_url() {
  local url_pattern='(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]'
  [[ "$1" =~ $url_pattern ]]
}
# Download a backup tarball into $backups_path and print its local path.
#
# $1 - either a URL (http/https/ftp/file) or a backup timestamp to pull
#      from the configured object storage backend.
# The caller captures stdout with $(...), so all diagnostics must go to
# stderr and an unknown backend must fail instead of echoing garbage.
function fetch_remote_backup(){
  mkdir -p "$backups_path"
  output_path=$backups_path/0_gitlab_backup.tar
  if is_url "$1"; then
    >&2 echo "Downloading from $1";
    curl --progress-bar -o "$output_path" "$1"
  else # It's a timestamp
    file_name="$1_gitlab_backup.tar"
    if [ "${BACKUP_BACKEND}" = "s3" ]; then
      # 'get' (single-object download) matches the later revisions of this
      # script; 'sync' needlessly compares against bucket listings.
      s3cmd get "s3://$BACKUP_BUCKET_NAME/$file_name" "$output_path" > /dev/null
    elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
      gsutil cp "gs://$BACKUP_BUCKET_NAME/$file_name" "$output_path" > /dev/null
    else
      # Previously this message went to stdout and was captured as the
      # "path" by restore(); report on stderr and abort instead.
      >&2 echo "Unknown backend: ${BACKUP_BACKEND}"
      exit 1
    fi
  fi
  echo "$output_path"
}
# Extract a backup tarball in place (inside its own directory).
# $1 - path to the *_gitlab_backup.tar file.
# Exits non-zero when the tarball does not exist; checks before cd-ing so
# we never change directory on a bogus path. Expansions are quoted so
# paths containing spaces cannot word-split (SC2086).
function unpack_backup(){
  local file_path=$1
  if [ ! -f "$file_path" ]; then
    echo "$file_path not found"
    exit 1
  fi
  cd "$(dirname "$file_path")"
  echo "Unpacking backup"
  tar -xf "$file_path"
}
# Pack the contents of $backups_path into ${backup_tars_path}/<name>.tar.
# $1 - base name (without the .tar extension) for the archive.
# Expansions are quoted so unusual paths cannot word-split (SC2086).
function pack_backup(){
  echo "Packing up backup tar"
  local backup_name=$1
  tar -cf "${backup_tars_path}/${backup_name}.tar" -C "$backups_path" .
}
# Print the GitLab version string recorded in the rails root's VERSION file.
function get_version(){
  local version_file="$rails_dir/VERSION"
  cat "$version_file"
}
# Compose the backup archive base name:
#   <BACKUP_TIMESTAMP>_gitlab_backup                when a timestamp is forced,
#   <epoch_Y_m_d>_<gitlab_version>_gitlab_backup    otherwise.
function get_backup_name(){
  if [ -z "$BACKUP_TIMESTAMP" ]; then
    now_timestamp=$(date +%s_%Y_%m_%d)
    gitlab_version=$(get_version)
    echo "${now_timestamp}_${gitlab_version}_gitlab_backup"
  else
    echo "${BACKUP_TIMESTAMP}_gitlab_backup"
  fi
}
# Remove all intermediate backup artifacts from the staging directories.
# The ${var:?} guards abort the script when either path variable is unset or
# empty — without them an empty variable would expand to 'rm -rf /*'.
function cleanup(){
  rm -rf "${backups_path:?}"/*
  rm -rf "${backup_tars_path:?}"/*
}
# Write backup_information.yml into the staging dir. The restore path greps
# this file for ':skipped:' and ':installation_type:'.
# $1 - comma-separated list of skipped components (from get_skipped).
# Everything between the EOF markers is emitted verbatim after command
# substitution; db_version is obtained by a rails runner one-liner that
# writes to /tmp/db_version and is then read back.
function write_backup_info(){
cat << EOF > $backups_path/backup_information.yml
:db_version: $($rails_dir/bin/rails runner "File.write('/tmp/db_version', ActiveRecord::Migrator.current_version.to_s)" && cat /tmp/db_version)
:backup_created_at: $(date "+%Y-%m-%d %H:%M:%S %z")
:gitlab_version: $(get_version)
:tar_version: $(tar --version | head -n 1)
:installation_type: gitlab-helm-chart
:skipped: $1
EOF
}
# Emit a comma-separated list of components for which no dump file exists in
# $backups_path — i.e. everything that was skipped during this run.
function get_skipped(){
  local candidates=( artifacts.tar.gz uploads.tar.gz builds.tar.gz db lfs.tar.gz registry.tar.gz pages.tar.gz packages.tar.gz )
  local missing=()
  local item
  for item in "${candidates[@]}"; do
    if [ ! -e "$backups_path/$item" ]; then
      missing+=( "${item%.tar.gz}" )
    fi
  done
  local IFS=,
  echo "${missing[*]}"
}
# Run a full backup: rails components (db, repositories), then every object
# storage bucket, then pack the staging dir into one tarball and upload it
# to the configured backend bucket. Honors the global skipping_backup_for
# array populated by --skip.
# NOTE(review): the =~ checks are substring matches — a --skip value that is
# a substring of another component name would over-match; confirm inputs.
function backup(){
  backup_name=$(get_backup_name)
  mkdir -p "$backup_tars_path"
  if ! [[ ${skipping_backup_for[@]} =~ "db" ]]; then
    gitlab-rake gitlab:backup:db:create
  fi
  if ! [[ ${skipping_backup_for[@]} =~ "repositories" ]]; then
    gitlab-rake gitlab:backup:repo:create
  fi
  for backup_item in ${object_storage_backends[@]}; do
    if ! [[ ${skipping_backup_for[@]} =~ $backup_item ]]; then
      object-storage-backup $backup_item $backups_path/${backup_item}.tar.gz
    fi
  done
  skipped=$(get_skipped $backup_name)
  write_backup_info $skipped
  pack_backup $backup_name
  if [ "${BACKUP_BACKEND}" = "s3" ]; then
    # 'put' (single-object upload) matches the later revisions of this
    # script; 'sync' needlessly compares against the whole bucket listing.
    s3cmd put ${backup_tars_path}/${backup_name}.tar s3://$BACKUP_BUCKET_NAME > /dev/null
    echo "[DONE] Backup can be found at s3://$BACKUP_BUCKET_NAME/${backup_name}.tar"
  elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
    gsutil cp -n ${backup_tars_path}/${backup_name}.tar gs://$BACKUP_BUCKET_NAME > /dev/null
    echo "[DONE] Backup can be found at gs://$BACKUP_BUCKET_NAME/${backup_name}.tar"
  else
    # Fail before cleanup: the old fall-through deleted the packed tarball
    # even though it was never uploaded anywhere.
    >&2 echo "Unknown backend for backup: ${BACKUP_BACKEND}"
    exit 1
  fi
  cleanup
}
# Return success when component $1 appears in the $SKIPPED list that restore()
# extracted from backup_information.yml.
function is_skipped() {
  local component=$1
  [[ "$SKIPPED" =~ $component ]]
}
# Restore a backup identified by BACKUP_URL or BACKUP_TIMESTAMP: download,
# unpack, then restore db/repositories/builds via rake and (for Helm-chart
# backups only) re-upload the object-storage dumps.
function restore(){
if [ -z "$BACKUP_URL" ] && [ -z "$BACKUP_TIMESTAMP" ]; then
echo "You need to set BACKUP_URL or BACKUP_TIMESTAMP variable"
exit 1
fi
# URL wins over timestamp when both are provided.
BACKUP=${BACKUP_URL-}
if [ -z "$BACKUP" ]; then
BACKUP=$BACKUP_TIMESTAMP
fi
file=$(fetch_remote_backup $BACKUP)
dir_name=$(dirname $file)
file_name=$(basename $file)
# Leading component of the file name (before the first '_') is the timestamp;
# exported as BACKUP for the rake restore tasks.
timestamp="${file_name%%_*}"
export BACKUP=$timestamp
unpack_backup $file
# Recover the skip list and installation type recorded at backup time.
skipped_line=$(grep skipped $(dirname $file)/backup_information.yml)
export SKIPPED=$(echo ${skipped_line#:skipped:})
installation_type_line=$(grep installation_type $(dirname $file)/backup_information.yml || echo ":installation_type: unknown")
export INSTALLATION_TYPE=$(echo ${installation_type_line#:installation_type: })
# The leading '!' keeps set -e from aborting when a component is skipped
# (a failing '! is_skipped' short-circuits the && list without exiting).
! is_skipped "db" && gitlab-rake gitlab:db:drop_tables
! is_skipped "db" && gitlab-rake gitlab:backup:db:restore
! is_skipped "repositories" && gitlab-rake gitlab:backup:repo:restore
! is_skipped "builds" && gitlab-rake gitlab:backup:builds:restore
if [ "$INSTALLATION_TYPE" = "gitlab-helm-chart" ]; then
for restore_item in ${object_storage_backends[@]}; do
if [ -f $backups_path/${restore_item}.tar.gz ]; then
! is_skipped $restore_item && object-storage-restore $restore_item $backups_path/${restore_item}.tar.gz
fi
done
else
echo "Backup tarball not from a Helm chart based installation. Not processing files in object storage."
fi
gitlab-rake cache:clear
}
# ---- CLI argument parsing -------------------------------------------------
# Value-taking options consume two positionals (hence the double 'shift').
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
usage
ACTION="none"
break
;;
-f|--file)
BACKUP_URL="$2"
shift
shift
;;
-t|--timestamp)
BACKUP_TIMESTAMP="$2"
shift
shift
;;
--backend)
export BACKUP_BACKEND="$2"
shift
shift
;;
--restore)
ACTION="restore"
shift
;;
--rsyncable)
# presumably consumed by the gzip compression step of the dump helpers —
# NOTE(review): confirm which downstream tool reads GZIP_RSYNCABLE.
export GZIP_RSYNCABLE="yes"
shift
;;
--skip)
skipping_backup_for+=( "$2" )
shift
shift
;;
*)
usage
echo "Unexpected parameter: $key"
exit 1
;;
esac
done
# Dispatch the selected action; ACTION="none" (from --help) runs nothing.
if [ "$ACTION" = "restore" ]; then
restore
elif [ "$ACTION" = "backup" ]; then
backup
fi
require 'open3'
require 'fileutils'
# ANSI colour helpers for log output: wrap the receiver in the given SGR code.
class String
  { red: 31, green: 32, blue: 34 }.each do |colour, sgr_code|
    define_method(colour) { "\e[#{sgr_code}m#{self}\e[0m" }
  end
end
# Backup/restore driver for a single object-storage bucket (registry, uploads,
# artifacts, lfs, packages). Shells out to the s3cmd CLI for S3 and the
# gsutil CLI for GCS; dumps are staged under /srv/gitlab/tmp/<name>.
class ObjectStorageBackup
attr_accessor :name, :local_tar_path, :remote_bucket_name, :tmp_bucket_name, :backend
# name               - component name; also used as the staging directory name
# local_tar_path     - tarball written by #backup / read by #restore
# remote_bucket_name - bucket holding the live objects
# tmp_bucket_name    - bucket that receives a safety copy before a restore
# backend            - 's3' or 'gcs'. NOTE(review): any other value leaves the
#                      command variables nil and run_cmd would then fail.
def initialize(name, local_tar_path, remote_bucket_name, tmp_bucket_name = 'tmp', backend = 's3')
@name = name
@local_tar_path = local_tar_path
@remote_bucket_name = remote_bucket_name
@tmp_bucket_name = tmp_bucket_name
@backend = backend
end
# Mirror the whole bucket into the staging dir, then archive it into
# @local_tar_path (gzip, optionally --rsyncable). A missing bucket or an
# empty bucket is treated as success, not an error.
def backup
if @backend == "s3"
check_bucket_cmd = %W(s3cmd ls s3://#{@remote_bucket_name})
cmd = %W(s3cmd --stop-on-error sync s3://#{@remote_bucket_name}/ /srv/gitlab/tmp/#{@name}/)
elsif @backend == "gcs"
check_bucket_cmd = %W(gsutil ls gs://#{@remote_bucket_name})
cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} /srv/gitlab/tmp/#{@name})
end
# Check if the bucket exists
output, status = run_cmd(check_bucket_cmd)
unless status.zero?
puts "Bucket not found: #{@remote_bucket_name}. Skipping backup of #{@name} ...".blue
return
end
puts "Dumping #{@name} ...".blue
# create the destination: gsutil requires it to exist, s3cmd does not
FileUtils.mkdir_p("/srv/gitlab/tmp/#{@name}", mode: 0700)
output, status = run_cmd(cmd)
failure_abort(output) unless status.zero?
# check the destination for contents. Bucket may have been empty.
if Dir.empty? "/srv/gitlab/tmp/#{@name}"
puts "empty".green
return
end
# build gzip command used for tar compression
gzip_cmd = 'gzip' + (ENV['GZIP_RSYNCABLE'] == 'yes' ? ' --rsyncable' : '')
cmd = %W(tar -cf #{@local_tar_path} -I #{gzip_cmd} -C /srv/gitlab/tmp/#{@name} . )
output, status = run_cmd(cmd)
failure_abort(output) unless status.zero?
puts "done".green
end
# Restore the bucket from @local_tar_path: keep a safety copy of the current
# contents in the tmp bucket, empty the live bucket, then re-upload.
def restore
puts "Restoring #{@name} ...".blue
backup_existing
cleanup
restore_from_backup
puts "done".green
end
# Print the failing command output and abort the process.
# NOTE(review): the message always says "Restore ... failed" even though this
# helper is also called from the backup path.
def failure_abort(error_message)
puts "[Error] #{error_message}".red
abort "Restore #{@name} failed"
end
# Sync one extracted directory (source_path) back into the live bucket.
def upload_to_object_storage(source_path)
if @backend == "s3"
# s3cmd treats `-` as a special filename for using stdin, as a result
# we need a slightly different syntax to support syncing the `-` directory (used for system uploads)
if File.basename(source_path) == '-'
cmd = %W(s3cmd --stop-on-error sync #{source_path}/ s3://#{@remote_bucket_name}/-/)
else
cmd = %W(s3cmd --stop-on-error sync #{source_path} s3://#{@remote_bucket_name})
end
elsif @backend == "gcs"
cmd = %W(gsutil -m rsync -r #{source_path}/ gs://#{@remote_bucket_name})
end
output, status = run_cmd(cmd)
failure_abort(output) unless status.zero?
end
# Copy the current live bucket into the tmp bucket under a timestamped
# prefix so a failed restore can be recovered manually.
def backup_existing
backup_file_name = "#{@name}.#{Time.now.to_i}"
if @backend == "s3"
cmd = %W(s3cmd sync s3://#{@remote_bucket_name} s3://#{@tmp_bucket_name}/#{backup_file_name}/)
elsif @backend == "gcs"
cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} gs://#{@tmp_bucket_name}/#{backup_file_name}/)
end
output, status = run_cmd(cmd)
failure_abort(output) unless status.zero?
end
# Delete every object in the live bucket before restoring into it.
def cleanup
if @backend == "s3"
cmd = %W(s3cmd --stop-on-error del --force --recursive s3://#{@remote_bucket_name})
elsif @backend == "gcs"
# Check if the bucket has any objects
list_objects_cmd = %W(gsutil ls gs://#{@remote_bucket_name}/)
output, status = run_cmd(list_objects_cmd)
failure_abort(output) unless status.zero?
# There are no objects in the bucket so skip the cleanup
# (gsutil rm would otherwise fail on an empty match)
if output.length == 0
return
end
cmd = %W(gsutil rm -f -r gs://#{@remote_bucket_name}/*)
end
output, status = run_cmd(cmd)
failure_abort(output) unless status.zero?
end
# Untar @local_tar_path into a staging dir and upload each top-level entry.
# NOTE(review): File.join with an absolute second component yields
# "<dirname>/srv/gitlab/tmp/<name>" — confirm this nesting is intended.
def restore_from_backup
extracted_tar_path = File.join(File.dirname(@local_tar_path), "/srv/gitlab/tmp/#{@name}")
FileUtils.mkdir_p(extracted_tar_path, mode: 0700)
failure_abort("#{@local_tar_path} not found") unless File.exist?(@local_tar_path)
untar_cmd = %W(tar -xf #{@local_tar_path} -C #{extracted_tar_path})
output, status = run_cmd(untar_cmd)
failure_abort(output) unless status.zero?
Dir.glob("#{extracted_tar_path}/*").each do |file|
upload_to_object_storage(file)
end
end
# Run cmd (an argv array), returning [combined stdout+stderr, exit status].
def run_cmd(cmd)
_, stdout, wait_thr = Open3.popen2e(*cmd)
return stdout.read, wait_thr.value.exitstatus
end
end
# Task Runner image (UBI registry build), pulling bases from the hardened
# registry instead of CI-built images.
ARG BASE_REGISTRY=registry.access.redhat.com
ARG BASE_IMAGE=ubi8/ubi
ARG BASE_TAG=8.0
# NOTE(review): RAILS_IMAGE is declared twice — the second declaration
# (gitlab-ruby) overrides the first (gitlab-rails). Confirm which base is
# intended and delete the stale line.
ARG RAILS_IMAGE=${BASE_REGISTRY}/gitlab/gitlab/gitlab-rails:12.4
ARG RAILS_IMAGE=${BASE_REGISTRY}/gitlab/gitlab/gitlab-ruby:12.4
ARG PYTHON_IMAGE=${BASE_REGISTRY}/gitlab/gitlab/gitlab-python:12.4
# Stage holding the prebuilt python runtime to copy from.
FROM ${PYTHON_IMAGE} AS python
FROM ${RAILS_IMAGE}
ARG S3CMD_VERSION=2.0.1
ARG GSUTIL_VERSION=4.42
ARG GITLAB_VERSION
ARG GITLAB_USER=git
# NOTE(review): "intract" in the description label is a typo for "interact".
LABEL source="https://gitlab.com/gitlab-org/gitlab" \
name="GitLab Task Runner" \
maintainer="GitLab Distribution Team" \
vendor="GitLab" \
version=${GITLAB_VERSION} \
release=${GITLAB_VERSION} \
summary="Task Runner is an entry point for interaction with other containers in the cluster." \
description="Task Runner is an entry point for interaction with other containers in the cluster. It contains scripts for running Rake tasks, backup, restore, and tools to intract with object storage."
# Bring the python toolchain into the rails image.
COPY --from=python /usr/local/bin /usr/local/bin/
COPY --from=python /usr/local/lib /usr/local/lib/
COPY --from=python /usr/local/include /usr/local/include/
# TLS roots plus the object-storage CLIs used by backup-utility.
RUN dnf install -yb --nodocs ca-certificates openssl libedit \
&& pip3 install s3cmd==${S3CMD_VERSION} gsutil==${GSUTIL_VERSION} crcmod
COPY scripts/bin/* /usr/local/bin/
COPY scripts/lib/* /usr/lib/ruby/vendor_ruby
# Run as the unprivileged git user.
USER ${GITLAB_USER}:${GITLAB_USER}
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
#!/bin/bash
# backup-utility: create or restore a full GitLab backup (rails components
# plus object-storage buckets) from inside the task-runner container.
# This revision uses single-object 's3cmd get'/'s3cmd put' transfers.
set -e
# Default action; overridden to "restore" (or "none" for --help) by the CLI.
ACTION="backup"
export BACKUP_BUCKET_NAME=${BACKUP_BUCKET_NAME-gitlab-backups}
export BACKUP_BACKEND=${BACKUP_BACKEND-s3}
rails_dir=/srv/gitlab
# Staging dir for component dumps / final tarball respectively.
backups_path=$rails_dir/tmp/backups
backup_tars_path=$rails_dir/tmp/backup_tars
object_storage_backends=( registry uploads artifacts lfs packages )
skipping_backup_for=()
# Print CLI usage; the heredoc body is emitted verbatim.
function usage()
{
cat << HEREDOC
Usage: backup-utility [--restore] [-f URL] [-t TIMESTAMP] [--skip COMPONENT] [--backend BACKEND]
Options:
-h, --help Show this help message and exit.
--restore [-t TIMESTAMP | -f URL] When specified, utility restores from an existing backup specified
as url or timestamp in object storage.
-f URL http(s):/ftp:/file: URL with backup location. Use with --restore.
-t TIMESTAMP Timestamp (part before '_gitlab_backup.tar' in archive name),
can be used to specify backup source or target name.
--rsyncable Pass the '--rsyncable' parameter to gzip for artifact compression.
--skip COMPONENT When specified, utility will skip the backup of COMPONENT.
May be defined multiple times. Valid values for COMPONENT are
db, repositories, and any of the object storages (e.g. 'lfs').
--backend BACKEND Object storage backend to use for backups.
Can be either 's3' or 'gcs'.
HEREDOC
}
# Checks if provided argument is a url for downloading it
function is_url() {
regex='(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]'
[[ $1 =~ $regex ]]
}
# Download a backup by URL or timestamp into $backups_path; prints the
# resulting local path on stdout for the caller to capture.
# NOTE(review): the unknown-backend message goes to stdout and is therefore
# captured as the "path" by restore(); consider stderr + a non-zero exit.
function fetch_remote_backup(){
mkdir -p $backups_path
output_path=$backups_path/0_gitlab_backup.tar
if is_url $1; then
>&2 echo "Downloading from $1";
curl --progress-bar -o $output_path $1
else # It's a timestamp
file_name="$1_gitlab_backup.tar"
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd get "s3://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
gsutil cp "gs://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
else
echo "Unknown backend: ${BACKUP_BACKEND}"
fi
fi
echo $output_path
}
# Extract the tarball ($1) inside its own directory; exits if it is missing.
function unpack_backup(){
local file_path=$1
cd $(dirname $file_path)
echo "Unpacking backup"
if [ ! -f $file_path ]; then
echo $file_path not found
exit 1
fi
tar -xf $file_path
}
# Pack the staging dir into ${backup_tars_path}/<$1>.tar.
function pack_backup(){
echo "Packing up backup tar"
local backup_name=$1
tar -cf ${backup_tars_path}/${backup_name}.tar -C $backups_path .
}
# Print the GitLab version from the rails VERSION file.
function get_version(){
cat $rails_dir/VERSION
}
# Archive base name: forced timestamp, or <epoch_date>_<version>_gitlab_backup.
function get_backup_name(){
if [ -n "$BACKUP_TIMESTAMP" ]; then
echo ${BACKUP_TIMESTAMP}_gitlab_backup
else
now_timestamp=$(date +%s_%Y_%m_%d)
gitlab_version=$(get_version)
echo ${now_timestamp}_${gitlab_version}_gitlab_backup
fi
}
# Drop all staging artifacts.
function cleanup(){
rm -rf $backups_path/*
rm -rf $backup_tars_path/*
}
# Emit backup_information.yml metadata; the heredoc body is written verbatim
# after command substitution. $1 is the comma-separated skip list.
function write_backup_info(){
cat << EOF > $backups_path/backup_information.yml
:db_version: $($rails_dir/bin/rails runner "File.write('/tmp/db_version', ActiveRecord::Migrator.current_version.to_s)" && cat /tmp/db_version)
:backup_created_at: $(date "+%Y-%m-%d %H:%M:%S %z")
:gitlab_version: $(get_version)
:tar_version: $(tar --version | head -n 1)
:installation_type: gitlab-helm-chart
:skipped: $1
EOF
}
# Comma-separated list of components with no dump file present in staging.
function get_skipped(){
all=( artifacts.tar.gz uploads.tar.gz builds.tar.gz db lfs.tar.gz registry.tar.gz pages.tar.gz packages.tar.gz )
skipped_string=""
for backup_item in ${all[@]}; do
if [ ! -e $backups_path/$backup_item ]; then
skipped_string="$skipped_string,${backup_item%.tar.gz}";
fi;
done;
echo ${skipped_string#,}
}
# Full backup: rails dumps, bucket dumps, pack, then upload to the backend.
function backup(){
backup_name=$(get_backup_name)
mkdir -p $backup_tars_path
if ! [[ ${skipping_backup_for[@]} =~ "db" ]]; then
gitlab-rake gitlab:backup:db:create
fi
if ! [[ ${skipping_backup_for[@]} =~ "repositories" ]]; then
gitlab-rake gitlab:backup:repo:create
fi
for backup_item in ${object_storage_backends[@]}; do
if ! [[ ${skipping_backup_for[@]} =~ $backup_item ]]; then
object-storage-backup $backup_item $backups_path/${backup_item}.tar.gz
fi
done
skipped=$(get_skipped $backup_name)
write_backup_info $skipped
pack_backup $backup_name
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd put ${backup_tars_path}/${backup_name}.tar s3://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at s3://$BACKUP_BUCKET_NAME/${backup_name}.tar"
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
gsutil cp -n ${backup_tars_path}/${backup_name}.tar gs://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at gs://$BACKUP_BUCKET_NAME/${backup_name}.tar"
else
echo "Unknown backend for backup: ${BACKUP_BACKEND}"
fi
cleanup
}
# True when component $1 is listed in $SKIPPED (from backup_information.yml).
function is_skipped() {
[[ $SKIPPED =~ $1 ]]
}
# Full restore from URL/timestamp; object-storage files are only processed
# for backups recorded as gitlab-helm-chart installations.
function restore(){
if [ -z "$BACKUP_URL" ] && [ -z "$BACKUP_TIMESTAMP" ]; then
echo "You need to set BACKUP_URL or BACKUP_TIMESTAMP variable"
exit 1
fi
BACKUP=${BACKUP_URL-}
if [ -z "$BACKUP" ]; then
BACKUP=$BACKUP_TIMESTAMP
fi
file=$(fetch_remote_backup $BACKUP)
dir_name=$(dirname $file)
file_name=$(basename $file)
timestamp="${file_name%%_*}"
export BACKUP=$timestamp
unpack_backup $file
skipped_line=$(grep skipped $(dirname $file)/backup_information.yml)
export SKIPPED=$(echo ${skipped_line#:skipped:})
installation_type_line=$(grep installation_type $(dirname $file)/backup_information.yml || echo ":installation_type: unknown")
export INSTALLATION_TYPE=$(echo ${installation_type_line#:installation_type: })
# Leading '!' keeps set -e from aborting when a component is skipped.
! is_skipped "db" && gitlab-rake gitlab:db:drop_tables
! is_skipped "db" && gitlab-rake gitlab:backup:db:restore
! is_skipped "repositories" && gitlab-rake gitlab:backup:repo:restore
! is_skipped "builds" && gitlab-rake gitlab:backup:builds:restore
if [ "$INSTALLATION_TYPE" = "gitlab-helm-chart" ]; then
for restore_item in ${object_storage_backends[@]}; do
if [ -f $backups_path/${restore_item}.tar.gz ]; then
! is_skipped $restore_item && object-storage-restore $restore_item $backups_path/${restore_item}.tar.gz
fi
done
else
echo "Backup tarball not from a Helm chart based installation. Not processing files in object storage."
fi
gitlab-rake cache:clear
}
# ---- CLI argument parsing -------------------------------------------------
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
usage
ACTION="none"
break
;;
-f|--file)
BACKUP_URL="$2"
shift
shift
;;
-t|--timestamp)
BACKUP_TIMESTAMP="$2"
shift
shift
;;
--backend)
export BACKUP_BACKEND="$2"
shift
shift
;;
--restore)
ACTION="restore"
shift
;;
--rsyncable)
export GZIP_RSYNCABLE="yes"
shift
;;
--skip)
skipping_backup_for+=( "$2" )
shift
shift
;;
*)
usage
echo "Unexpected parameter: $key"
exit 1
;;
esac
done
# Dispatch the selected action; "none" (from --help) runs nothing.
if [ "$ACTION" = "restore" ]; then
restore
elif [ "$ACTION" = "backup" ]; then
backup
fi
#!/bin/bash
# Container entrypoint: render configuration from the template directory,
# then exec the requested command from the rails root so it becomes PID 1.
set -e
# CONFIG_DIRECTORY defaults to the template directory when unset.
/scripts/set-config "${CONFIG_TEMPLATE_DIRECTORY}" "${CONFIG_DIRECTORY:=$CONFIG_TEMPLATE_DIRECTORY}"
cd /srv/gitlab;
echo "Attempting to run '$@' as a main process";
exec "$@";
#!/bin/bash
# Wrapper: run a rails command from the GitLab application root via bundler.
rails_dir=/srv/gitlab
cd $rails_dir
$rails_dir/bin/bundle exec rails "$@"
#!/bin/bash
# Wrapper: run a rake task against the GitLab Rakefile via bundler.
rails_dir=/srv/gitlab
cd $rails_dir
$rails_dir/bin/bundle exec rake -f $rails_dir/Rakefile "$@"
#!/usr/bin/env ruby
# CLI: object-storage-backup <component> <output_tar_path>
# Bucket comes from <COMPONENT>_BUCKET_NAME, defaulting to gitlab-<component>.
require 'object_storage_backup'
abort ("backup_item and output_tar_path arguments needs to be passed to the script") unless ARGV.length == 2
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).backup
#!/usr/bin/env ruby
# CLI: object-storage-restore <component> <input_tar_path>
# Bucket comes from <COMPONENT>_BUCKET_NAME, defaulting to gitlab-<component>.
require 'object_storage_backup'
abort("restore_item and tar path needs to be passed as arguments to the script") unless ARGV.length == 2
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).restore
# Task Runner image (hardened 12.5 build): a builder stage downloads and
# verifies the offline dependency bundle, the final stage copies it in.
ARG BASE_REGISTRY=registry.access.redhat.com
ARG BASE_IMAGE=ubi8/ubi
ARG BASE_TAG=8.0
ARG RAILS_IMAGE=${BASE_REGISTRY}/gitlab/gitlab/gitlab-rails:12.5
ARG GITLAB_VERSION=v12.5.2-ubi8
# Builder stage: fetch + verify + unpack the dependency tarball from Nexus.
FROM ${RAILS_IMAGE} as builder
ARG NEXUS_SERVER
ARG GITLAB_VERSION
ARG VENDOR=gitlab
ARG PACKAGE_NAME=ubi8-build-dependencies-${GITLAB_VERSION}.tar
ARG PACKAGE_URL=https://${NEXUS_SERVER}/repository/dsop/${VENDOR}/gitlab-task-runner/${PACKAGE_NAME}
ADD build-scripts/ /build-scripts/
RUN /build-scripts/prepare.sh ${PACKAGE_URL}
# Final stage.
FROM ${RAILS_IMAGE}
ARG GITLAB_VERSION
ARG GITLAB_USER=git
# NOTE(review): "intract" in the description label is a typo for "interact".
LABEL source="https://gitlab.com/gitlab-org/gitlab" \
name="GitLab Task Runner" \
maintainer="GitLab Distribution Team" \
vendor="GitLab" \
version=${GITLAB_VERSION} \
release=${GITLAB_VERSION} \
summary="Task Runner is an entry point for interaction with other containers in the cluster." \
description="Task Runner is an entry point for interaction with other containers in the cluster. It contains scripts for running Rake tasks, backup, restore, and tools to intract with object storage."
# Unpacked dependency payload from the builder stage is laid over /.
COPY --from=builder /prepare/dependencies /
COPY scripts/bin/* /usr/local/bin/
COPY scripts/lib/* /usr/lib/ruby/vendor_ruby
# NOTE(review): --nogpgcheck disables package signature verification —
# confirm this is acceptable for the hardened build.
RUN dnf clean all \
&& rm -r /var/cache/dnf \
&& dnf --disableplugin=subscription-manager --nogpgcheck install -yb --nodocs ca-certificates openssl
# Run as the unprivileged git user.
USER ${GITLAB_USER}:${GITLAB_USER}
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
# gitlab-task-runner-container
Task Runner is an entry point for interaction with other containers in the cluster. It contains scripts for running Rake tasks, backup, restore, and tools to interact with object storage.
## GitLab 12.5.0 release
The hardened containers for GitLab 12.5 require the gitlab 12.5 blob to be available for download.
There are some included scripts to make this easier for user building images offline on their machine.
## Building
1. Switch to the 12.5 directory
2. Run `./build-scripts/build.sh`
* Runs docker build, and takes care of setting the appropriate build args for users running locally
* Uses the `NEXUS_SERVER` variable from your environment, or full `PACKAGE_URL`.
`build-scripts/build.sh` is provided as an example and is helpful for building locally. You can also instead call docker build and pass build-args directly.
## Build Phases
Some of the GitLab containers are built on top of previous containers, so building the containers in ordered phases is necessary to build all containers.
- Phase One
* kubectl
* gitlab-ruby
- Phase Two
* git-base
* gitlab-exporter
* gitlab-mailroom
* gitlab-shell
* gitlab-rails
* gitlab-workhorse
- Phase 3
* gitaly
- Phase 4
* gitlab-sidekiq
* gitlab-task-runner
* gitlab-unicorn
#!/bin/bash
# Build the gitlab-task-runner image locally.
# NOTICE: This script requires `docker`.
#
# Usage: build.sh [TAG] [REPOSITORY-PREFIX]
# Env:   NEXUS_SERVER and/or PACKAGE_URL are forwarded as --build-arg;
#        DOCKER_OPTS may carry additional docker-build flags.
set -euxo pipefail

TAG=${1:-12.5}
REPOSITORY=${2:-}
NEXUS_SERVER=${NEXUS_SERVER:-}
PACKAGE_URL=${PACKAGE_URL:-}
DOCKER_OPTS=${DOCKER_OPTS:-""}

# Print the fully-qualified image name for image $1 (no trailing newline).
imageName() {
  # Fixed format string: expanding variables inside printf's format would
  # misinterpret any '%' contained in REPOSITORY or TAG.
  printf '%s%s:%s' "${REPOSITORY}" "${1}" "${TAG}"
}

# Build image $1; tee build output to <context>.out and record a failure in
# failed.log instead of aborting, so multi-image builds can continue.
buildImage() {
  IMAGE="${1}"
  CONTEXT="${IMAGE%*-ee}"
  {
    # DOCKER_OPTS is intentionally unquoted: it carries multiple words.
    docker build \
      -t "$(imageName ${IMAGE})" . \
      ${DOCKER_OPTS:-} | tee ${CONTEXT}.out
  } || {
    echo "${CONTEXT}" >> failed.log
  }
}

# Cleanup log outputs from previous build
rm -f *.out failed.log

# Forward optional download configuration to the Dockerfile. Quoted -n tests
# replace the original unquoted '[ $VAR ]', which broke on values with spaces.
if [ -n "${NEXUS_SERVER}" ]; then
  DOCKER_OPTS="$DOCKER_OPTS --build-arg NEXUS_SERVER=${NEXUS_SERVER}"
fi
if [ -n "${PACKAGE_URL}" ]; then
  DOCKER_OPTS="$DOCKER_OPTS --build-arg PACKAGE_URL=${PACKAGE_URL}"
fi
DOCKER_OPTS="$DOCKER_OPTS --build-arg RAILS_IMAGE=$(imageName gitlab-rails)"

buildImage gitlab-task-runner
#!/bin/bash
# Download, verify, and unpack the UBI build-dependencies bundle.
#
# $1        - URL of the dependencies tarball; its digest is at "$1.sha256".
# WORKSPACE - working directory (default /prepare).
set -euxo pipefail

PACKAGE_URL=$1
WORKSPACE="${WORKSPACE:-/prepare}"
PACKAGE_NAME="${PACKAGE_NAME:-ubi8-build-dependencies.tar}"

# Download UBI dependencies package and its sha256.
# NOTE(review): -k skips TLS verification — confirm this is intended for the
# internal Nexus endpoint.
curl -fLk --create-dirs "${PACKAGE_URL}.sha256" -o "${WORKSPACE}/${PACKAGE_NAME}.sha256"
curl -fLk --create-dirs "${PACKAGE_URL}" -o "${WORKSPACE}/${PACKAGE_NAME}"

# Verify package integrity and ABORT on mismatch. The original looked for the
# files in the current directory (not ${WORKSPACE}) and fell through without
# failing when the check did not pass.
# NOTE(review): assumes the .sha256 file contains only the hex digest —
# confirm against the publishing pipeline.
if echo "$(cat "${WORKSPACE}/${PACKAGE_NAME}.sha256")  ${WORKSPACE}/${PACKAGE_NAME}" | sha256sum --check --status; then
  printf '\nSHA256 check for %s succeeded\n\n' "${PACKAGE_NAME}"
else
  printf '\nSHA256 check for %s failed\n\n' "${PACKAGE_NAME}" >&2
  exit 1
fi

# Extract UBI dependencies
tar -xvf "${WORKSPACE}/${PACKAGE_NAME}" -C "${WORKSPACE}"

# Extract the specific dependencies needed for this container
mkdir -p "${WORKSPACE}/dependencies"
tar -xvf "${WORKSPACE}/gitlab-python.tar.gz" -C "${WORKSPACE}/dependencies"
tar -xvf "${WORKSPACE}/gitlab-task-runner-ee.tar.gz" -C "${WORKSPACE}/dependencies"
#!/bin/bash
# backup-utility: create or restore a full GitLab backup (rails components
# plus object-storage buckets) from inside the task-runner container.
# Uses single-object 's3cmd get'/'s3cmd put' transfers.
set -e
# Default action; overridden to "restore" (or "none" for --help) by the CLI.
ACTION="backup"
export BACKUP_BUCKET_NAME=${BACKUP_BUCKET_NAME-gitlab-backups}
export BACKUP_BACKEND=${BACKUP_BACKEND-s3}
rails_dir=/srv/gitlab
# Staging dir for component dumps / final tarball respectively.
backups_path=$rails_dir/tmp/backups
backup_tars_path=$rails_dir/tmp/backup_tars
object_storage_backends=( registry uploads artifacts lfs packages )
skipping_backup_for=()
# Print CLI usage; the heredoc body is emitted verbatim.
function usage()
{
cat << HEREDOC
Usage: backup-utility [--restore] [-f URL] [-t TIMESTAMP] [--skip COMPONENT] [--backend BACKEND]
Options:
-h, --help Show this help message and exit.
--restore [-t TIMESTAMP | -f URL] When specified, utility restores from an existing backup specified
as url or timestamp in object storage.
-f URL http(s):/ftp:/file: URL with backup location. Use with --restore.
-t TIMESTAMP Timestamp (part before '_gitlab_backup.tar' in archive name),
can be used to specify backup source or target name.
--rsyncable Pass the '--rsyncable' parameter to gzip for artifact compression.
--skip COMPONENT When specified, utility will skip the backup of COMPONENT.
May be defined multiple times. Valid values for COMPONENT are
db, repositories, and any of the object storages (e.g. 'lfs').
--backend BACKEND Object storage backend to use for backups.
Can be either 's3' or 'gcs'.
HEREDOC
}
# Checks if provided argument is a url for downloading it
function is_url() {
regex='(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]'
[[ $1 =~ $regex ]]
}
# Download a backup by URL or timestamp into $backups_path; prints the
# resulting local path on stdout for the caller to capture.
# NOTE(review): the unknown-backend message goes to stdout and is therefore
# captured as the "path" by restore(); consider stderr + a non-zero exit.
function fetch_remote_backup(){
mkdir -p $backups_path
output_path=$backups_path/0_gitlab_backup.tar
if is_url $1; then
>&2 echo "Downloading from $1";
curl --progress-bar -o $output_path $1
else # It's a timestamp
file_name="$1_gitlab_backup.tar"
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd get "s3://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
gsutil cp "gs://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
else
echo "Unknown backend: ${BACKUP_BACKEND}"
fi
fi
echo $output_path
}
# Extract the tarball ($1) inside its own directory; exits if it is missing.
function unpack_backup(){
local file_path=$1
cd $(dirname $file_path)
echo "Unpacking backup"
if [ ! -f $file_path ]; then
echo $file_path not found
exit 1
fi
tar -xf $file_path
}
# Pack the staging dir into ${backup_tars_path}/<$1>.tar.
function pack_backup(){
echo "Packing up backup tar"
local backup_name=$1
tar -cf ${backup_tars_path}/${backup_name}.tar -C $backups_path .
}
# Print the GitLab version from the rails VERSION file.
function get_version(){
cat $rails_dir/VERSION
}
# Archive base name: forced timestamp, or <epoch_date>_<version>_gitlab_backup.
function get_backup_name(){
if [ -n "$BACKUP_TIMESTAMP" ]; then
echo ${BACKUP_TIMESTAMP}_gitlab_backup
else
now_timestamp=$(date +%s_%Y_%m_%d)
gitlab_version=$(get_version)
echo ${now_timestamp}_${gitlab_version}_gitlab_backup
fi
}
# Drop all staging artifacts.
function cleanup(){
rm -rf $backups_path/*
rm -rf $backup_tars_path/*
}
# Emit backup_information.yml metadata; the heredoc body is written verbatim
# after command substitution. $1 is the comma-separated skip list.
function write_backup_info(){
cat << EOF > $backups_path/backup_information.yml
:db_version: $($rails_dir/bin/rails runner "File.write('/tmp/db_version', ActiveRecord::Migrator.current_version.to_s)" && cat /tmp/db_version)
:backup_created_at: $(date "+%Y-%m-%d %H:%M:%S %z")
:gitlab_version: $(get_version)
:tar_version: $(tar --version | head -n 1)
:installation_type: gitlab-helm-chart
:skipped: $1
EOF
}
# Comma-separated list of components with no dump file present in staging.
function get_skipped(){
all=( artifacts.tar.gz uploads.tar.gz builds.tar.gz db lfs.tar.gz registry.tar.gz pages.tar.gz packages.tar.gz )
skipped_string=""
for backup_item in ${all[@]}; do
if [ ! -e $backups_path/$backup_item ]; then
skipped_string="$skipped_string,${backup_item%.tar.gz}";
fi;
done;
echo ${skipped_string#,}
}
# Full backup: rails dumps, bucket dumps, pack, then upload to the backend.
function backup(){
backup_name=$(get_backup_name)
mkdir -p $backup_tars_path
if ! [[ ${skipping_backup_for[@]} =~ "db" ]]; then
gitlab-rake gitlab:backup:db:create
fi
if ! [[ ${skipping_backup_for[@]} =~ "repositories" ]]; then
gitlab-rake gitlab:backup:repo:create
fi
for backup_item in ${object_storage_backends[@]}; do
if ! [[ ${skipping_backup_for[@]} =~ $backup_item ]]; then
object-storage-backup $backup_item $backups_path/${backup_item}.tar.gz
fi
done
skipped=$(get_skipped $backup_name)
write_backup_info $skipped
pack_backup $backup_name
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd put ${backup_tars_path}/${backup_name}.tar s3://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at s3://$BACKUP_BUCKET_NAME/${backup_name}.tar"
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
gsutil cp -n ${backup_tars_path}/${backup_name}.tar gs://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at gs://$BACKUP_BUCKET_NAME/${backup_name}.tar"
else
echo "Unknown backend for backup: ${BACKUP_BACKEND}"
fi
cleanup
}
# True when component $1 is listed in $SKIPPED (from backup_information.yml).
function is_skipped() {
[[ $SKIPPED =~ $1 ]]
}
# Full restore from URL/timestamp; object-storage files are only processed
# for backups recorded as gitlab-helm-chart installations.
function restore(){
if [ -z "$BACKUP_URL" ] && [ -z "$BACKUP_TIMESTAMP" ]; then
echo "You need to set BACKUP_URL or BACKUP_TIMESTAMP variable"
exit 1
fi
BACKUP=${BACKUP_URL-}
if [ -z "$BACKUP" ]; then
BACKUP=$BACKUP_TIMESTAMP
fi
file=$(fetch_remote_backup $BACKUP)
dir_name=$(dirname $file)
file_name=$(basename $file)
timestamp="${file_name%%_*}"
export BACKUP=$timestamp
unpack_backup $file
skipped_line=$(grep skipped $(dirname $file)/backup_information.yml)
export SKIPPED=$(echo ${skipped_line#:skipped:})
installation_type_line=$(grep installation_type $(dirname $file)/backup_information.yml || echo ":installation_type: unknown")
export INSTALLATION_TYPE=$(echo ${installation_type_line#:installation_type: })
# Leading '!' keeps set -e from aborting when a component is skipped.
! is_skipped "db" && gitlab-rake gitlab:db:drop_tables
! is_skipped "db" && gitlab-rake gitlab:backup:db:restore
! is_skipped "repositories" && gitlab-rake gitlab:backup:repo:restore
! is_skipped "builds" && gitlab-rake gitlab:backup:builds:restore
if [ "$INSTALLATION_TYPE" = "gitlab-helm-chart" ]; then
for restore_item in ${object_storage_backends[@]}; do
if [ -f $backups_path/${restore_item}.tar.gz ]; then
! is_skipped $restore_item && object-storage-restore $restore_item $backups_path/${restore_item}.tar.gz
fi
done
else
echo "Backup tarball not from a Helm chart based installation. Not processing files in object storage."
fi
gitlab-rake cache:clear
}
# ---- CLI argument parsing -------------------------------------------------
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
usage
ACTION="none"
break
;;
-f|--file)
BACKUP_URL="$2"
shift
shift
;;
-t|--timestamp)
BACKUP_TIMESTAMP="$2"
shift
shift
;;
--backend)
export BACKUP_BACKEND="$2"
shift
shift
;;
--restore)
ACTION="restore"
shift
;;
--rsyncable)
export GZIP_RSYNCABLE="yes"
shift
;;
--skip)
skipping_backup_for+=( "$2" )
shift
shift
;;
*)
usage
echo "Unexpected parameter: $key"
exit 1
;;
esac
done
# Dispatch the selected action; "none" (from --help) runs nothing.
if [ "$ACTION" = "restore" ]; then
restore
elif [ "$ACTION" = "backup" ]; then
backup
fi
#!/bin/bash
# Container entrypoint: render configuration from the template directory,
# then exec the requested command from the rails root so it becomes PID 1.
set -e
# CONFIG_DIRECTORY defaults to the template directory when unset.
/scripts/set-config "${CONFIG_TEMPLATE_DIRECTORY}" "${CONFIG_DIRECTORY:=$CONFIG_TEMPLATE_DIRECTORY}"
cd /srv/gitlab;
echo "Attempting to run '$@' as a main process";
exec "$@";
#!/bin/bash
# Wrapper: run a rails command from the GitLab application root via bundler.
rails_dir=/srv/gitlab
cd $rails_dir
$rails_dir/bin/bundle exec rails "$@"
#!/bin/bash
# Wrapper: run a rake task against the GitLab Rakefile via bundler.
rails_dir=/srv/gitlab
cd $rails_dir
$rails_dir/bin/bundle exec rake -f $rails_dir/Rakefile "$@"
#!/usr/bin/env ruby
# CLI: object-storage-backup <component> <output_tar_path>
# Bucket comes from <COMPONENT>_BUCKET_NAME, defaulting to gitlab-<component>.
require 'object_storage_backup'
abort ("backup_item and output_tar_path arguments needs to be passed to the script") unless ARGV.length == 2
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).backup
#!/usr/bin/env ruby
# CLI: object-storage-restore <component> <input_tar_path>
# Bucket comes from <COMPONENT>_BUCKET_NAME, defaulting to gitlab-<component>.
require 'object_storage_backup'
abort("restore_item and tar path needs to be passed as arguments to the script") unless ARGV.length == 2
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).restore
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment