UNCLASSIFIED

Verified Commit 5598658c authored by DJ Mountney's avatar DJ Mountney
Browse files

Refactor to new CHT pipeline pattern

- Removes gitlab versioned folders prior to 12.8
- Unwraps the 12.8 folder into the root
- Removes the prebuild.sh script in favor of download.json
- Drops the build script for now
- Update prepare script to use local downloaded artifact
parent 98ed7563
#!/bin/bash
# Remove build artifacts (tarballs, output files and the failure log) from the
# current directory. -f suppresses errors for globs that match nothing.
set -euxo pipefail
rm -f *.tar.gz *.out failed.log
#!/bin/bash
# Container entrypoint: render configuration templates, then exec the requested
# command so it becomes the main process and receives signals directly.
set -e
# ':=' assigns the default into CONFIG_DIRECTORY when it is unset/empty, so the
# same value is used here and by any later consumer of the variable.
/scripts/set-config "${CONFIG_TEMPLATE_DIRECTORY}" "${CONFIG_DIRECTORY:=$CONFIG_TEMPLATE_DIRECTORY}"
cd /srv/gitlab;
echo "Attempting to run '$@' as a main process";
exec "$@";
#!/bin/bash
# Wrapper: run `rails` through the application bundle from the GitLab app
# directory, forwarding all arguments verbatim.
# Fixes: fail fast on errors (set -e), quote the path, and exec so the rails
# process replaces this shell (correct signal delivery and exit status).
set -e
rails_dir=/srv/gitlab
cd "$rails_dir"
exec "$rails_dir/bin/bundle" exec rails "$@"
#!/bin/bash
# Wrapper: run `rake` through the application bundle against the GitLab
# Rakefile, forwarding all arguments verbatim.
# Fixes: fail fast on errors (set -e), quote the path, and exec so the rake
# process replaces this shell (correct signal delivery and exit status).
set -e
rails_dir=/srv/gitlab
cd "$rails_dir"
exec "$rails_dir/bin/bundle" exec rake -f "$rails_dir/Rakefile" "$@"
#!/usr/bin/env ruby
# CLI: back up one object-storage component into a local tarball.
# Usage: object-storage-backup <backup_item> <output_tar_path>
require 'object_storage_backup'

# Fixed: `abort (...)` had a space before the parentheses, which Ruby parses
# but warns about ("don't put space before argument parentheses").
abort("backup_item and output_tar_path arguments needs to be passed to the script") unless ARGV.length == 2

# Per-component bucket override, e.g. LFS_BUCKET_NAME; defaults to gitlab-<item>.
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).backup
#!/usr/bin/env ruby
# CLI: restore one object-storage component from a local tarball.
# Usage: object-storage-restore <restore_item> <tar_path>
require 'object_storage_backup'

abort("restore_item and tar path needs to be passed as arguments to the script") unless ARGV.length == 2

item, tar_path = ARGV
# Per-component bucket override, e.g. LFS_BUCKET_NAME; defaults to gitlab-<item>.
bucket_name = ENV["#{item.upcase}_BUCKET_NAME"] || "gitlab-#{item}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(item, tar_path, bucket_name, tmp_bucket, backend_type).restore
require 'open3'
require 'fileutils'
# ANSI colour helpers used for console status output.
# NOTE: this monkey-patches String; kept for compatibility with existing callers.
class String
  { red: 31, green: 32, blue: 34 }.each do |colour, code|
    define_method(colour) { "\e[#{code}m#{self}\e[0m" }
  end
end
# Backs up and restores the contents of a single object-storage bucket using
# the s3cmd (S3) or gsutil (GCS) command-line clients.
class ObjectStorageBackup
  attr_accessor :name, :local_tar_path, :remote_bucket_name, :tmp_bucket_name, :backend

  # name               - component name (e.g. "lfs"); also names the scratch dir
  # local_tar_path     - tarball written by #backup / read by #restore
  # remote_bucket_name - bucket to back up from or restore into
  # tmp_bucket_name    - bucket that receives the pre-restore safety snapshot
  # backend            - "s3" or "gcs"
  def initialize(name, local_tar_path, remote_bucket_name, tmp_bucket_name = 'tmp', backend = 's3')
    @name = name
    @local_tar_path = local_tar_path
    @remote_bucket_name = remote_bucket_name
    @tmp_bucket_name = tmp_bucket_name
    @backend = backend
  end

  # Sync the remote bucket into /srv/gitlab/tmp/<name> and pack it into
  # @local_tar_path. Returns early (without failing) when the bucket is
  # missing or empty.
  def backup
    if @backend == "s3"
      check_bucket_cmd = %W(s3cmd ls s3://#{@remote_bucket_name})
      cmd = %W(s3cmd --stop-on-error --delete-removed sync s3://#{@remote_bucket_name}/ /srv/gitlab/tmp/#{@name}/)
    elsif @backend == "gcs"
      check_bucket_cmd = %W(gsutil ls gs://#{@remote_bucket_name})
      cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} /srv/gitlab/tmp/#{@name})
    end

    # Check if the bucket exists
    output, status = run_cmd(check_bucket_cmd)
    unless status.zero?
      puts "Bucket not found: #{@remote_bucket_name}. Skipping backup of #{@name} ...".blue
      return
    end

    puts "Dumping #{@name} ...".blue

    # create the destination: gsutil requires it to exist, s3cmd does not
    FileUtils.mkdir_p("/srv/gitlab/tmp/#{@name}", mode: 0700)

    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?

    # check the destination for contents: the bucket may have been empty
    if Dir.empty? "/srv/gitlab/tmp/#{@name}"
      puts "empty".green
      return
    end

    # --rsyncable makes the archive delta-friendly for rsync-style transfers
    gzip_cmd = 'gzip' + (ENV['GZIP_RSYNCABLE'] == 'yes' ? ' --rsyncable' : '')
    cmd = %W(tar -cf #{@local_tar_path} -I #{gzip_cmd} -C /srv/gitlab/tmp/#{@name} . )
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?

    puts "done".green
  end

  # Restore @local_tar_path into the remote bucket: snapshot the current
  # bucket contents into the tmp bucket, empty the bucket, then upload.
  def restore
    puts "Restoring #{@name} ...".blue
    backup_existing
    cleanup
    restore_from_backup
    puts "done".green
  end

  # Print the captured command output and abort the process.
  def failure_abort(error_message)
    puts "[Error] #{error_message}".red
    # Fixed: the old message said "Restore ... failed" even when aborting
    # from the backup path.
    abort "Backup/restore of #{@name} failed"
  end

  # Upload one local directory into the remote bucket.
  def upload_to_object_storage(source_path)
    if @backend == "s3"
      dir_name = File.basename(source_path)
      cmd = %W(s3cmd --stop-on-error sync #{source_path}/ s3://#{@remote_bucket_name}/#{dir_name}/)
    elsif @backend == "gcs"
      cmd = %W(gsutil -m rsync -r #{source_path}/ gs://#{@remote_bucket_name})
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Copy the bucket's current contents into the tmp bucket under a
  # timestamped prefix so a failed restore remains recoverable.
  def backup_existing
    backup_file_name = "#{@name}.#{Time.now.to_i}"
    if @backend == "s3"
      cmd = %W(s3cmd sync s3://#{@remote_bucket_name} s3://#{@tmp_bucket_name}/#{backup_file_name}/)
    elsif @backend == "gcs"
      cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} gs://#{@tmp_bucket_name}/#{backup_file_name}/)
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Delete every object in the remote bucket. For GCS, an empty bucket is
  # skipped because `gsutil rm gs://bucket/*` fails when nothing matches.
  def cleanup
    if @backend == "s3"
      cmd = %W(s3cmd --stop-on-error del --force --recursive s3://#{@remote_bucket_name})
    elsif @backend == "gcs"
      # Check if the bucket has any objects
      list_objects_cmd = %W(gsutil ls gs://#{@remote_bucket_name}/)
      output, status = run_cmd(list_objects_cmd)
      failure_abort(output) unless status.zero?
      # There are no objects in the bucket so skip the cleanup
      return if output.length == 0
      cmd = %W(gsutil rm -f -r gs://#{@remote_bucket_name}/*)
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Unpack @local_tar_path and upload each top-level entry to the bucket.
  def restore_from_backup
    # NOTE(review): File.join with an absolute-looking second segment yields
    # "<dirname>/srv/gitlab/tmp/<name>", which looks unintended — preserved
    # because the same path is used consistently for extract and upload.
    extracted_tar_path = File.join(File.dirname(@local_tar_path), "/srv/gitlab/tmp/#{@name}")
    FileUtils.mkdir_p(extracted_tar_path, mode: 0700)

    failure_abort("#{@local_tar_path} not found") unless File.exist?(@local_tar_path)

    untar_cmd = %W(tar -xf #{@local_tar_path} -C #{extracted_tar_path})
    output, status = run_cmd(untar_cmd)
    failure_abort(output) unless status.zero?

    Dir.glob("#{extracted_tar_path}/*").each do |file|
      upload_to_object_storage(file)
    end
  end

  # Run cmd (an argv array), returning [combined stdout+stderr, exit status].
  # Fixed: capture2e closes the child's pipes; the previous Open3.popen2e call
  # leaked the stdin/stdout IO objects on every invocation.
  def run_cmd(cmd)
    output, status = Open3.capture2e(*cmd)
    return output, status.exitstatus
  end
end
#!/bin/bash
# Fetch the GitLab UBI build-dependency bundle, verify it (GPG + SHA256), and
# push the tarball, checksum and signature to the Nexus repository.
set -e

### Environment Variables ###
GITLAB_VERSION=v12.7.0-ubi8
VENDOR=gitlab
TARBALL=ubi8-build-dependencies-${GITLAB_VERSION}.tar
# These three variables are required to push whatever outside binaries your container needs at build time to our Nexus repo
NEXUS_SERVER=${NEXUS_SERVER}
NEXUS_USERNAME=${NEXUS_USERNAME}
NEXUS_PASSWORD=${NEXUS_PASSWORD}

## Download variables
DOWNLOAD_DIR=tmp/${VENDOR}
UPLOAD_URL=https://${NEXUS_SERVER}/repository/dsop/${VENDOR}/kubectl

### Download files/dependencies ###
# temporarily place your binaries locally in the download directory
# (-f makes curl fail on HTTP errors instead of saving the error page)
curl -f --create-dirs https://gitlab-ubi.s3.us-east-2.amazonaws.com/${TARBALL} -o ${DOWNLOAD_DIR}/${TARBALL}

### GPG Signature Check ###
# GPG signature verification is a requirement in the case that the downloaded files have a GPG signature
# For more information on GPG keys visit https://access.redhat.com/solutions/1541303 or https://gnupg.org/gph/en/manual.html
curl -f --create-dirs https://gitlab-ubi.s3.us-east-2.amazonaws.com/${TARBALL}.asc -o ${DOWNLOAD_DIR}/${TARBALL}.asc
# Try key servers in random order until one serves the signing key.
for server in $(shuf -e ha.pool.sks-keyservers.net \
                        hkp://p80.pool.sks-keyservers.net:80 \
                        keyserver.ubuntu.com \
                        hkp://keyserver.ubuntu.com:80 \
                        pgp.mit.edu) ;
do
    gpg --batch --keyserver "$server" --recv-keys 8040EEFCCED8C668EF27F7C61DC5606C0C7E9A9B && break || : ;
done
gpg --verify ${DOWNLOAD_DIR}/${TARBALL}.asc ${DOWNLOAD_DIR}/${TARBALL}

### SHA256 Verification ###
# Create the checksum locally (never download it), verify it, and abort on a
# mismatch. The previous `&& if [ $? == '0' ]` chain could never reach its
# failure branch and never stopped the script on a bad checksum.
cd ${DOWNLOAD_DIR}
sha256sum ${TARBALL} | awk '{print $1}' > ${TARBALL}.sha256
if echo "$(cat ${TARBALL}.sha256) ${TARBALL}" | sha256sum --check --status; then
    printf "\nSHA256 check for ${TARBALL} succeeded\n\n"
else
    printf "SHA256 check for ${TARBALL} failed\n\n"
    exit 1
fi
cd -

### Nexus Repo Upload ###
# Push whatever binaries you need to ${NEXUS_SERVER}/dsop/vendor/project/ as you see in the example below. Follow the same
# format as in Gitlab. You will also need to push the GPG signature file and SHA256 file
for package in ${TARBALL} ${TARBALL}.sha256 ${TARBALL}.asc
do
    curl -kfS -u ${NEXUS_USERNAME}:${NEXUS_PASSWORD} -T ${DOWNLOAD_DIR}/${package} https://${NEXUS_SERVER}/repository/dsop/${VENDOR}/gitlab-task-runner/${package}
done
MIT License
Copyright (c) 2017
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# gitlab-task-runner-container
Task Runner is an entry point for interaction with other containers in the cluster. It contains scripts for running Rake tasks, backup, restore, and tools to interact with object storage.
## GitLab Build
The hardened containers for GitLab require the correctly versioned GitLab assets blob to be available for download.
There are some included scripts to make this easier for users building images offline on their machines.
## Building
1. Switch to the desired versioned directory
2. Run `./build-scripts/build.sh`
* Runs docker build, and takes care of setting the appropriate build args for users running locally
* Uses the `NEXUS_SERVER` variable from your environment, or full `PACKAGE_URL`.
`build-scripts/build.sh` is provided as an example and is helpful for building locally. You can also instead call docker build and pass build-args directly.
## Build Phases
Some of the GitLab containers are built on top of previous containers, so building the containers in ordered phases is necessary to build all of them.
- Phase One
* kubectl
* gitlab-ruby
* gitlab-container-registry
- Phase Two
* git-base
* gitlab-exporter
* gitlab-mailroom
* gitlab-shell
* gitlab-rails
* gitlab-workhorse
- Phase 3
* gitaly
- Phase 4
* gitlab-sidekiq
* gitlab-task-runner
* gitlab-unicorn
#!/bin/bash
# Remove build artifacts (tarballs, output files and the failure log) from the
# current directory. -f suppresses errors for globs that match nothing.
set -euxo pipefail
rm -f *.tar.gz *.out failed.log
#!/bin/bash
# Prepare script: download the UBI dependency bundle from PACKAGE_URL, verify
# its SHA256 checksum, and extract the dependencies this container needs.
set -euxo pipefail

PACKAGE_URL=$1
WORKSPACE="${WORKSPACE:-/prepare}"
PACKAGE_NAME="${PACKAGE_NAME:-ubi8-build-dependencies.tar}"

# Download UBI dependencies package and its sha256
# NOTE(review): -k disables TLS certificate verification — confirm this is
# intentional for the internal mirror before keeping it.
curl -fLk --create-dirs "${PACKAGE_URL}.sha256" -o "${WORKSPACE}/${PACKAGE_NAME}.sha256"
curl -fLk --create-dirs "${PACKAGE_URL}" -o "${WORKSPACE}/${PACKAGE_NAME}"

# Verify the package integrity.
# Fixed: the checksum was previously verified against files in the current
# directory instead of ${WORKSPACE}, and the `&& if [ $? == '0' ]` chain could
# never take its failure branch. Verify inside ${WORKSPACE} and exit non-zero
# on mismatch.
if (cd "${WORKSPACE}" && echo "$(cat ${PACKAGE_NAME}.sha256) ${PACKAGE_NAME}" | sha256sum --check --status); then
    printf "\nSHA256 check for ${PACKAGE_NAME} succeeded\n\n"
else
    printf "SHA256 check for ${PACKAGE_NAME} failed\n\n"
    exit 1
fi

# Extract UBI dependencies
tar -xvf "${WORKSPACE}/${PACKAGE_NAME}" -C "${WORKSPACE}"

# Extract the specific dependencies needed for this container
# (-p: do not fail if the directory already exists on a re-run)
mkdir -p "${WORKSPACE}/dependencies"
tar -xvf "${WORKSPACE}/gitlab-python.tar.gz" -C "${WORKSPACE}/dependencies"
tar -xvf "${WORKSPACE}/gitlab-task-runner-ee.tar.gz" -C "${WORKSPACE}/dependencies"
#!/bin/bash
# backup-utility: create or restore full GitLab backups (database, repos and
# object-storage components) from inside the task-runner container.
set -e
ACTION="backup"
# ${VAR-default}: only substitute when the variable is completely unset.
export BACKUP_BUCKET_NAME=${BACKUP_BUCKET_NAME-gitlab-backups}
export BACKUP_BACKEND=${BACKUP_BACKEND-s3}
rails_dir=/srv/gitlab
backups_path=$rails_dir/tmp/backups
backup_tars_path=$rails_dir/tmp/backup_tars
# Components handled via the object-storage-backup/-restore helper scripts.
object_storage_backends=( registry uploads artifacts lfs packages )
skipping_backup_for=()
# Print CLI usage to stdout. The heredoc body is user-facing text; keep it
# unindented and byte-exact.
function usage()
{
cat << HEREDOC
Usage: backup-utility [--restore] [-f URL] [-t TIMESTAMP] [--skip COMPONENT] [--backend BACKEND]
Options:
-h, --help Show this help message and exit.
--restore [-t TIMESTAMP | -f URL] When specified, utility restores from an existing backup specified
as url or timestamp in object storage.
-f URL http(s):/ftp:/file: URL with backup location. Use with --restore.
-t TIMESTAMP Timestamp (part before '_gitlab_backup.tar' in archive name),
can be used to specify backup source or target name.
--rsyncable Pass the '--rsyncable' parameter to gzip for artifact compression.
--skip COMPONENT When specified, utility will skip the backup of COMPONENT.
May be defined multiple times. Valid values for COMPONENT are
db, repositories, and any of the object storages (e.g. 'lfs').
--backend BACKEND Object storage backend to use for backups.
Can be either 's3' or 'gcs'.
HEREDOC
}
# Checks if provided argument is a url for downloading it
# Returns success (exit 0) when $1 matches an http(s)/ftp/file URL pattern.
function is_url() {
regex='(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]'
[[ $1 =~ $regex ]]
}
# Fetch a backup archive into $backups_path and echo its local path on stdout.
# $1 is either a URL (downloaded with curl) or a timestamp (fetched from the
# configured object-storage bucket).
function fetch_remote_backup(){
mkdir -p $backups_path
output_path=$backups_path/0_gitlab_backup.tar
if is_url $1; then
>&2 echo "Downloading from $1";
# -f: fail on HTTP errors instead of saving the error body as the backup
curl -f --progress-bar -o $output_path $1
else # It's a timestamp
file_name="$1_gitlab_backup.tar"
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd get "s3://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
gsutil cp "gs://$BACKUP_BUCKET_NAME/$file_name" $output_path > /dev/null
else
# Fixed: report on stderr (stdout is this function's return value) and
# fail instead of echoing a path that was never downloaded.
>&2 echo "Unknown backend: ${BACKUP_BACKEND}"
exit 1
fi
fi
echo $output_path
}
# Unpack a backup tarball in place (into the directory that contains it).
# Exits 1 when the tarball does not exist.
function unpack_backup(){
local tarball=$1
cd "$(dirname "$tarball")"
echo "Unpacking backup"
if [ ! -f "$tarball" ]; then
echo $tarball not found
exit 1
fi
tar -xf "$tarball"
}
# Pack everything under $backups_path into ${backup_tars_path}/<name>.tar.
function pack_backup(){
echo "Packing up backup tar"
local backup_name=$1
tar -cf ${backup_tars_path}/${backup_name}.tar -C $backups_path .
}
# Print the GitLab version shipped in the image.
function get_version(){
cat $rails_dir/VERSION
}
# Derive the backup archive base name: either the user-supplied
# BACKUP_TIMESTAMP or "<epoch_date>_<gitlab_version>_gitlab_backup".
function get_backup_name(){
if [ -n "$BACKUP_TIMESTAMP" ]; then
echo ${BACKUP_TIMESTAMP}_gitlab_backup
else
now_timestamp=$(date +%s_%Y_%m_%d)
gitlab_version=$(get_version)
echo ${now_timestamp}_${gitlab_version}_gitlab_backup
fi
}
# Remove all intermediate backup artifacts.
function cleanup(){
rm -rf $backups_path/*
rm -rf $backup_tars_path/*
}
# Write backup_information.yml into the backup (read back by restore).
# $1 is the comma-separated list of skipped components.
function write_backup_info(){
cat << EOF > $backups_path/backup_information.yml
:db_version: $($rails_dir/bin/rails runner "File.write('/tmp/db_version', ActiveRecord::Migrator.current_version.to_s)" && cat /tmp/db_version)
:backup_created_at: $(date "+%Y-%m-%d %H:%M:%S %z")
:gitlab_version: $(get_version)
:tar_version: $(tar --version | head -n 1)
:installation_type: gitlab-helm-chart
:skipped: $1
EOF
}
# Print a comma-separated list of components that produced no artifact under
# $backups_path (i.e. were skipped or had nothing to back up).
# NOTE(review): the caller passes $backup_name but the argument is unused.
function get_skipped(){
all=( artifacts.tar.gz uploads.tar.gz builds.tar.gz db repositories lfs.tar.gz registry.tar.gz pages.tar.gz packages.tar.gz )
skipped_string=""
for backup_item in ${all[@]}; do
if [ ! -e $backups_path/$backup_item ]; then
skipped_string="$skipped_string,${backup_item%.tar.gz}";
fi;
done;
# strip the leading comma
echo ${skipped_string#,}
}
# Run a full backup: db, repositories and each object-storage component
# (honouring --skip), then pack everything into one tar and upload it.
function backup(){
backup_name=$(get_backup_name)
mkdir -p $backup_tars_path
# NOTE(review): `=~` does substring matching against the joined array, so a
# --skip value that is a substring of another component would also match.
if ! [[ ${skipping_backup_for[@]} =~ "db" ]]; then
gitlab-rake gitlab:backup:db:create
fi
if ! [[ ${skipping_backup_for[@]} =~ "repositories" ]]; then
gitlab-rake gitlab:backup:repo:create
fi
for backup_item in ${object_storage_backends[@]}; do
if ! [[ ${skipping_backup_for[@]} =~ $backup_item ]]; then
object-storage-backup $backup_item $backups_path/${backup_item}.tar.gz
fi
done
skipped=$(get_skipped $backup_name)
write_backup_info $skipped
pack_backup $backup_name
if [ "${BACKUP_BACKEND}" = "s3" ]; then
s3cmd put ${backup_tars_path}/${backup_name}.tar s3://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at s3://$BACKUP_BUCKET_NAME/${backup_name}.tar"
elif [ "${BACKUP_BACKEND}" = "gcs" ]; then
# -n: do not overwrite an existing object with the same name
gsutil cp -n ${backup_tars_path}/${backup_name}.tar gs://$BACKUP_BUCKET_NAME > /dev/null
echo "[DONE] Backup can be found at gs://$BACKUP_BUCKET_NAME/${backup_name}.tar"
else
echo "Unknown backend for backup: ${BACKUP_BACKEND}"
fi
cleanup
}
# True when component $1 appears in the restored archive's skipped list.
function is_skipped() {
[[ $SKIPPED =~ $1 ]]
}
# Restore from a backup archive identified by BACKUP_URL or BACKUP_TIMESTAMP.
function restore(){
if [ -z "$BACKUP_URL" ] && [ -z "$BACKUP_TIMESTAMP" ]; then
echo "You need to set BACKUP_URL or BACKUP_TIMESTAMP variable"
exit 1
fi
# Prefer the URL; fall back to the timestamp.
BACKUP=${BACKUP_URL-}
if [ -z "$BACKUP" ]; then
BACKUP=$BACKUP_TIMESTAMP
fi
file=$(fetch_remote_backup $BACKUP)
dir_name=$(dirname $file)
file_name=$(basename $file)
# Everything before the first '_' in the archive name is the timestamp;
# gitlab-rake reads it from the exported $BACKUP variable.
timestamp="${file_name%%_*}"
export BACKUP=$timestamp
unpack_backup $file
# Recover the skip list and installation type recorded at backup time.
skipped_line=$(grep skipped $(dirname $file)/backup_information.yml)
export SKIPPED=$(echo ${skipped_line#:skipped:})
installation_type_line=$(grep installation_type $(dirname $file)/backup_information.yml || echo ":installation_type: unknown")
export INSTALLATION_TYPE=$(echo ${installation_type_line#:installation_type: })
! is_skipped "db" && gitlab-rake gitlab:db:drop_tables
! is_skipped "db" && gitlab-rake gitlab:backup:db:restore
# Previous versions of the dump failed to mark the repos as skipped, so we additionally check for the directory
if [ -e $backups_path/repositories ]; then
! is_skipped "repositories" && gitlab-rake gitlab:backup:repo:restore
fi
! is_skipped "builds" && gitlab-rake gitlab:backup:builds:restore
# Object-storage restores only apply to Helm-chart-produced backups.
if [ "$INSTALLATION_TYPE" = "gitlab-helm-chart" ]; then
for restore_item in ${object_storage_backends[@]}; do
if [ -f $backups_path/${restore_item}.tar.gz ]; then
! is_skipped $restore_item && object-storage-restore $restore_item $backups_path/${restore_item}.tar.gz
fi
done
else
echo "Backup tarball not from a Helm chart based installation. Not processing files in object storage."
fi
gitlab-rake cache:clear
}
# Parse CLI flags (see usage above) and dispatch to backup or restore.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
usage
ACTION="none"
break
;;
-f|--file)
BACKUP_URL="$2"
shift
shift
;;
-t|--timestamp)
BACKUP_TIMESTAMP="$2"
shift
shift
;;
--backend)
export BACKUP_BACKEND="$2"
shift
shift
;;
--restore)
ACTION="restore"
shift
;;
--rsyncable)
# Consumed by object-storage-backup when it builds the gzip command.
export GZIP_RSYNCABLE="yes"
shift
;;
--skip)
skipping_backup_for+=( "$2" )
shift
shift
;;
*)
usage
echo "Unexpected parameter: $key"
exit 1
;;
esac
done
if [ "$ACTION" = "restore" ]; then
restore
elif [ "$ACTION" = "backup" ]; then
backup
fi
#!/bin/bash
# Container entrypoint: render configuration templates, then exec the requested
# command so it becomes the main process and receives signals directly.
set -e
# ':=' assigns the default into CONFIG_DIRECTORY when it is unset/empty, so the
# same value is used here and by any later consumer of the variable.
/scripts/set-config "${CONFIG_TEMPLATE_DIRECTORY}" "${CONFIG_DIRECTORY:=$CONFIG_TEMPLATE_DIRECTORY}"
cd /srv/gitlab;
echo "Attempting to run '$@' as a main process";
exec "$@";
#!/bin/bash
# Wrapper: run `rails` through the application bundle from the GitLab app
# directory, forwarding all arguments verbatim.
# Fixes: fail fast on errors (set -e), quote the path, and exec so the rails
# process replaces this shell (correct signal delivery and exit status).
set -e
rails_dir=/srv/gitlab
cd "$rails_dir"
exec "$rails_dir/bin/bundle" exec rails "$@"
#!/bin/bash
# Wrapper: run `rake` through the application bundle against the GitLab
# Rakefile, forwarding all arguments verbatim.
# Fixes: fail fast on errors (set -e), quote the path, and exec so the rake
# process replaces this shell (correct signal delivery and exit status).
set -e
rails_dir=/srv/gitlab
cd "$rails_dir"
exec "$rails_dir/bin/bundle" exec rake -f "$rails_dir/Rakefile" "$@"
#!/usr/bin/env ruby
# CLI: back up one object-storage component into a local tarball.
# Usage: object-storage-backup <backup_item> <output_tar_path>
require 'object_storage_backup'

# Fixed: `abort (...)` had a space before the parentheses, which Ruby parses
# but warns about ("don't put space before argument parentheses").
abort("backup_item and output_tar_path arguments needs to be passed to the script") unless ARGV.length == 2

# Per-component bucket override, e.g. LFS_BUCKET_NAME; defaults to gitlab-<item>.
bucket_name = ENV["#{ARGV[0].upcase}_BUCKET_NAME"] || "gitlab-#{ARGV[0]}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(ARGV[0], ARGV[1], bucket_name, tmp_bucket, backend_type).backup
#!/usr/bin/env ruby
# CLI: restore one object-storage component from a local tarball.
# Usage: object-storage-restore <restore_item> <tar_path>
require 'object_storage_backup'

abort("restore_item and tar path needs to be passed as arguments to the script") unless ARGV.length == 2

item, tar_path = ARGV
# Per-component bucket override, e.g. LFS_BUCKET_NAME; defaults to gitlab-<item>.
bucket_name = ENV["#{item.upcase}_BUCKET_NAME"] || "gitlab-#{item}"
tmp_bucket = ENV['TMP_BUCKET_NAME'] || 'tmp'
backend_type = ENV['BACKUP_BACKEND'] || 's3'
ObjectStorageBackup.new(item, tar_path, bucket_name, tmp_bucket, backend_type).restore
require 'open3'
require 'fileutils'
# ANSI colour helpers used for console status output.
# NOTE: this monkey-patches String; kept for compatibility with existing callers.
class String
  { red: 31, green: 32, blue: 34 }.each do |colour, code|
    define_method(colour) { "\e[#{code}m#{self}\e[0m" }
  end
end
# Backs up and restores the contents of a single object-storage bucket using
# the s3cmd (S3) or gsutil (GCS) command-line clients.
class ObjectStorageBackup
  attr_accessor :name, :local_tar_path, :remote_bucket_name, :tmp_bucket_name, :backend

  # name               - component name (e.g. "lfs"); also names the scratch dir
  # local_tar_path     - tarball written by #backup / read by #restore
  # remote_bucket_name - bucket to back up from or restore into
  # tmp_bucket_name    - bucket that receives the pre-restore safety snapshot
  # backend            - "s3" or "gcs"
  def initialize(name, local_tar_path, remote_bucket_name, tmp_bucket_name = 'tmp', backend = 's3')
    @name = name
    @local_tar_path = local_tar_path
    @remote_bucket_name = remote_bucket_name
    @tmp_bucket_name = tmp_bucket_name
    @backend = backend
  end

  # Sync the remote bucket into /srv/gitlab/tmp/<name> and pack it into
  # @local_tar_path. Returns early (without failing) when the bucket is
  # missing or empty.
  def backup
    if @backend == "s3"
      check_bucket_cmd = %W(s3cmd ls s3://#{@remote_bucket_name})
      cmd = %W(s3cmd --stop-on-error --delete-removed sync s3://#{@remote_bucket_name}/ /srv/gitlab/tmp/#{@name}/)
    elsif @backend == "gcs"
      check_bucket_cmd = %W(gsutil ls gs://#{@remote_bucket_name})
      cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} /srv/gitlab/tmp/#{@name})
    end

    # Check if the bucket exists
    output, status = run_cmd(check_bucket_cmd)
    unless status.zero?
      puts "Bucket not found: #{@remote_bucket_name}. Skipping backup of #{@name} ...".blue
      return
    end

    puts "Dumping #{@name} ...".blue

    # create the destination: gsutil requires it to exist, s3cmd does not
    FileUtils.mkdir_p("/srv/gitlab/tmp/#{@name}", mode: 0700)

    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?

    # check the destination for contents: the bucket may have been empty
    if Dir.empty? "/srv/gitlab/tmp/#{@name}"
      puts "empty".green
      return
    end

    # --rsyncable makes the archive delta-friendly for rsync-style transfers
    gzip_cmd = 'gzip' + (ENV['GZIP_RSYNCABLE'] == 'yes' ? ' --rsyncable' : '')
    cmd = %W(tar -cf #{@local_tar_path} -I #{gzip_cmd} -C /srv/gitlab/tmp/#{@name} . )
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?

    puts "done".green
  end

  # Restore @local_tar_path into the remote bucket: snapshot the current
  # bucket contents into the tmp bucket, empty the bucket, then upload.
  def restore
    puts "Restoring #{@name} ...".blue
    backup_existing
    cleanup
    restore_from_backup
    puts "done".green
  end

  # Print the captured command output and abort the process.
  def failure_abort(error_message)
    puts "[Error] #{error_message}".red
    # Fixed: the old message said "Restore ... failed" even when aborting
    # from the backup path.
    abort "Backup/restore of #{@name} failed"
  end

  # Upload one local directory into the remote bucket.
  def upload_to_object_storage(source_path)
    if @backend == "s3"
      dir_name = File.basename(source_path)
      cmd = %W(s3cmd --stop-on-error sync #{source_path}/ s3://#{@remote_bucket_name}/#{dir_name}/)
    elsif @backend == "gcs"
      cmd = %W(gsutil -m rsync -r #{source_path}/ gs://#{@remote_bucket_name})
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Copy the bucket's current contents into the tmp bucket under a
  # timestamped prefix so a failed restore remains recoverable.
  def backup_existing
    backup_file_name = "#{@name}.#{Time.now.to_i}"
    if @backend == "s3"
      cmd = %W(s3cmd sync s3://#{@remote_bucket_name} s3://#{@tmp_bucket_name}/#{backup_file_name}/)
    elsif @backend == "gcs"
      cmd = %W(gsutil -m rsync -r gs://#{@remote_bucket_name} gs://#{@tmp_bucket_name}/#{backup_file_name}/)
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Delete every object in the remote bucket. For GCS, an empty bucket is
  # skipped because `gsutil rm gs://bucket/*` fails when nothing matches.
  def cleanup
    if @backend == "s3"
      cmd = %W(s3cmd --stop-on-error del --force --recursive s3://#{@remote_bucket_name})
    elsif @backend == "gcs"
      # Check if the bucket has any objects
      list_objects_cmd = %W(gsutil ls gs://#{@remote_bucket_name}/)
      output, status = run_cmd(list_objects_cmd)
      failure_abort(output) unless status.zero?
      # There are no objects in the bucket so skip the cleanup
      return if output.length == 0
      cmd = %W(gsutil rm -f -r gs://#{@remote_bucket_name}/*)
    end
    output, status = run_cmd(cmd)
    failure_abort(output) unless status.zero?
  end

  # Unpack @local_tar_path and upload each top-level entry to the bucket.
  def restore_from_backup
    # NOTE(review): File.join with an absolute-looking second segment yields
    # "<dirname>/srv/gitlab/tmp/<name>", which looks unintended — preserved
    # because the same path is used consistently for extract and upload.
    extracted_tar_path = File.join(File.dirname(@local_tar_path), "/srv/gitlab/tmp/#{@name}")
    FileUtils.mkdir_p(extracted_tar_path, mode: 0700)

    failure_abort("#{@local_tar_path} not found") unless File.exist?(@local_tar_path)

    untar_cmd = %W(tar -xf #{@local_tar_path} -C #{extracted_tar_path})
    output, status = run_cmd(untar_cmd)
    failure_abort(output) unless status.zero?

    Dir.glob("#{extracted_tar_path}/*").each do |file|
      upload_to_object_storage(file)
    end
  end

  # Run cmd (an argv array), returning [combined stdout+stderr, exit status].
  # Fixed: capture2e closes the child's pipes; the previous Open3.popen2e call
  # leaked the stdin/stdout IO objects on every invocation.
  def run_cmd(cmd)
    output, status = Open3.capture2e(*cmd)
    return output, status.exitstatus
  end
end
#!/bin/bash
# Fetch the GitLab UBI build-dependency bundle, verify it (GPG + SHA256), and
# push the tarball, checksum and signature to the Nexus repository.
set -e

### Environment Variables ###
GITLAB_VERSION=v12.8.1-ubi8
VENDOR=gitlab
TARBALL=ubi8-build-dependencies-${GITLAB_VERSION}.tar
# These three variables are required to push whatever outside binaries your container needs at build time to our Nexus repo
NEXUS_SERVER=${NEXUS_SERVER}
NEXUS_USERNAME=${NEXUS_USERNAME}
NEXUS_PASSWORD=${NEXUS_PASSWORD}

## Download variables
DOWNLOAD_DIR=tmp/${VENDOR}
UPLOAD_URL=https://${NEXUS_SERVER}/repository/dsop/${VENDOR}/kubectl

### Download files/dependencies ###
# temporarily place your binaries locally in the download directory
# (-f makes curl fail on HTTP errors instead of saving the error page)
curl -f --create-dirs https://gitlab-ubi.s3.us-east-2.amazonaws.com/${TARBALL} -o ${DOWNLOAD_DIR}/${TARBALL}

### GPG Signature Check ###
# GPG signature verification is a requirement in the case that the downloaded files have a GPG signature
# For more information on GPG keys visit https://access.redhat.com/solutions/1541303 or https://gnupg.org/gph/en/manual.html
curl -f --create-dirs https://gitlab-ubi.s3.us-east-2.amazonaws.com/${TARBALL}.asc -o ${DOWNLOAD_DIR}/${TARBALL}.asc
# Try key servers in random order until one serves the signing key.
for server in $(shuf -e ha.pool.sks-keyservers.net \
                        hkp://p80.pool.sks-keyservers.net:80 \
                        keyserver.ubuntu.com \
                        hkp://keyserver.ubuntu.com:80 \
                        pgp.mit.edu) ;
do
    gpg --batch --keyserver "$server" --recv-keys 5c7738cc4840f93f6e9170ff5a0e20d5f9706778 && break || : ;
done
gpg --verify ${DOWNLOAD_DIR}/${TARBALL}.asc ${DOWNLOAD_DIR}/${TARBALL}

### SHA256 Verification ###
# Create the checksum locally (never download it), verify it, and abort on a
# mismatch. The previous `&& if [ $? == '0' ]` chain could never reach its
# failure branch and never stopped the script on a bad checksum.
cd ${DOWNLOAD_DIR}
sha256sum ${TARBALL} | awk '{print $1}' > ${TARBALL}.sha256
if echo "$(cat ${TARBALL}.sha256) ${TARBALL}" | sha256sum --check --status; then
    printf "\nSHA256 check for ${TARBALL} succeeded\n\n"
else
    printf "SHA256 check for ${TARBALL} failed\n\n"
    exit 1
fi
cd -

### Nexus Repo Upload ###
# Push whatever binaries you need to ${NEXUS_SERVER}/dsop/vendor/project/ as you see in the example below. Follow the same
# format as in Gitlab. You will also need to push the GPG signature file and SHA256 file
for package in ${TARBALL} ${TARBALL}.sha256 ${TARBALL}.asc
do
    curl -kfS -u ${NEXUS_USERNAME}:${NEXUS_PASSWORD} -T ${DOWNLOAD_DIR}/${package} https://${NEXUS_SERVER}/repository/dsop/${VENDOR}/gitlab-task-runner/${package}
done
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment