diff --git a/stages/check-cves/pipeline_wl_compare.py b/stages/check-cves/pipeline_wl_compare.py index 22982fc6abbb807083c4b8a2c4cb8f15df613898..210edb4364b5e815f9d08676e1d31293544bc15a 100644 --- a/stages/check-cves/pipeline_wl_compare.py +++ b/stages/check-cves/pipeline_wl_compare.py @@ -366,11 +366,16 @@ def _get_complete_whitelist_for_image(image_name, whitelist_branch, hardening_ma with open("variables.env", "w") as f: # all cves for container have container approval at ind 2 if check_container_approval[2].lower() == "approve": - f.write(f"IMAGE_APPROVAL_STATUS='approved'\n") + f.write("IMAGE_APPROVAL_STATUS=approved\n") + logging.debug("IMAGE_APPROVAL_STATUS=approved") else: - f.write(f"IMAGE_APPROVAL_STATUS='notapproved'\n") + f.write("IMAGE_APPROVAL_STATUS=notapproved\n") + logging.debug("IMAGE_APPROVAL_STATUS=notapproved") f.write(f"BASE_IMAGE={hardening_manifest['args']['BASE_IMAGE']}\n") f.write(f"BASE_TAG={hardening_manifest['args']['BASE_TAG']}") + logging.debug( + f"BASE_IMAGE={hardening_manifest['args']['BASE_IMAGE']}\nBASE_TAG={hardening_manifest['args']['BASE_TAG']}" + ) # # Use the local hardening manifest to get the first parent. From here *only* the # the master branch should be used for the ancestry. 
diff --git a/stages/publish/create_repo_map_default.py b/stages/publish/create_repo_map_default.py index 435164505df2111d497f005b5484f3960d99c835..15b2db1a095134a5d514b96f079c99291064879e 100644 --- a/stages/publish/create_repo_map_default.py +++ b/stages/publish/create_repo_map_default.py @@ -26,6 +26,7 @@ def get_repomap(object_name, bucket="ironbank-pipeline-artifacts"): print(object_name) try: response = s3_client.download_file(bucket, object_name, "repo_map.json") + logging.debug(f"Downloaded {object_name} from bucket {bucket} to repo_map.json") except ClientError as e: logging.error(e) print("Existing repo_map.json not found, creating new repo_map.json") diff --git a/stages/publish/create_repo_map_other.py b/stages/publish/create_repo_map_other.py index ca482e1ba261b9168aae1822ed59298c681b71ee..3f105be35807c870a1b4922af525889c0761606c 100644 --- a/stages/publish/create_repo_map_other.py +++ b/stages/publish/create_repo_map_other.py @@ -26,6 +26,7 @@ def get_repomap(object_name, bucket="ironbank-pipeline-artifacts"): print(object_name) try: response = s3_client.download_file(bucket, object_name, "repo_map.json") + logging.debug(f"Downloaded {object_name} from bucket {bucket} to repo_map.json") except ClientError as e: logging.error(e) print("Existing repo_map.json not found, creating new repo_map.json") diff --git a/stages/publish/repo_map_vars.sh b/stages/publish/repo_map_vars.sh index 772438c9822cf646667c242a0339c3734efbebb8..c538f9b3cbed7ec0252c9d93d5fbc58aa0ecadc9 100644 --- a/stages/publish/repo_map_vars.sh +++ b/stages/publish/repo_map_vars.sh @@ -20,7 +20,7 @@ export pgp_signature="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/${SIG_FILE}.sig export signature_name="${SIG_FILE}.sig" export version_documentation="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/${DOCUMENTATION_FILENAME}.json" export tar_location="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/${REPORT_TAR_NAME}" -export tar_name="${IMAGE_FILE}.tar" +export tar_name="${REPORT_TAR_NAME}" export 
openscap_compliance_results="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/csvs/oscap.csv" export openscap_oval_results="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/csvs/oval.csv" export twistlock_results="${S3_HTML_LINK}/${REMOTE_REPORT_DIRECTORY}/csvs/tl.csv" diff --git a/stages/publish/s3_upload.py b/stages/publish/s3_upload.py index a7abccf96ebd8f0f23d3d282ef82a5b3be8f0107..02959b03c227820c5d4173ae12b6dc58d39a405a 100644 --- a/stages/publish/s3_upload.py +++ b/stages/publish/s3_upload.py @@ -5,6 +5,7 @@ import argparse import datetime from botocore.exceptions import ClientError import logging +import mimetypes def upload_file(file_name, bucket, object_name=None): @@ -19,9 +20,34 @@ def upload_file(file_name, bucket, object_name=None): access_key = os.environ["S3_ACCESS_KEY"] secret_key = os.environ["S3_SECRET_KEY"] + filetype = mimetypes.guess_type(file_name) + + if not filetype[0]: + # If mimetype is NoneType use default value + mimetype = "application/octet-stream" + elif filetype[1] == "gzip": + # mimetypes returns 'application/x-tar' + # but for S3 to properly serve gzip we need to set to the following + mimetype = "application/x-compressed-tar" + else: + mimetype = filetype[0] + # TODO: Add signature - extra_args = {"ContentType": "application/octet-stream", "ACL": "private"} + # If there is not an encoding-type value we don't add it to the extra args + if not filetype[1]: + extra_args = { + "ContentType": mimetype, + "ACL": "private", + } + else: + encoding = filetype[1] + extra_args = { + "ContentType": mimetype, + "ACL": "private", + "ContentEncoding": encoding, + } + logging.debug(f"extra_args for {file_name}: {extra_args}") # If S3 object_name was not specified, use file_name if object_name is None: object_name = file_name @@ -35,6 +61,7 @@ def upload_file(file_name, bucket, object_name=None): ) try: response = s3_client.upload_file(file_name, bucket, object_name, extra_args) + logging.debug(f"Uploaded {file_name} to bucket {bucket} as {object_name}") except ClientError as e: 
logging.error(e) return False