diff --git a/Dockerfile b/Dockerfile index eec0c807b3ad9884feeabb49421107a514c24d38..e019f0bbb600e32c058cb5db9d4ec7bc5656815f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ ARG BASE_REGISTRY=registry1.dsop.io ARG BASE_IMAGE=ironbank/redhat/ubi/ubi8 ARG BASE_TAG=8.3 -FROM bitnami/airflow-worker:2.0.1-debian-10-r54 as base +FROM bitnami/airflow-worker:2.0.2-debian-10-r17 as base FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG} ARG BITNAMI_HOME=/opt/bitnami ARG BITNAMI_DIR=/bitnami @@ -16,6 +16,7 @@ ENV BITNAMI_PKG_EXTRA_DIRS="/opt/bitnami/airflow/dags" \ COPY --from=base ${BITNAMI_HOME} ${BITNAMI_HOME} COPY --from=base ${BITNAMI_DIR} ${BITNAMI_DIR} COPY --from=base \ + /usr/lib/x86_64-linux-gnu/libmariadb.so.3 \ /lib/x86_64-linux-gnu/libbz2.so.1.0 \ /usr/lib64/ @@ -48,7 +49,7 @@ RUN /opt/bitnami/scripts/airflow-worker/postunpack.sh && \ ENV AIRFLOW_HOME="/opt/bitnami/airflow" \ BITNAMI_APP_NAME="airflow-worker" \ - BITNAMI_IMAGE_VERSION="2.0.1-debian-10-r54" \ + BITNAMI_IMAGE_VERSION="2.0.2-debian-10-r17" \ C_FORCE_ROOT="True" \ LANG="en_US.UTF-8" \ LANGUAGE="en_US:en" \ diff --git a/README.md b/README.md index 33b62ab1222e1ba3b43710732e420057de299e2a..935cd7171ea892afa8db47dc800ce9c69114ced4 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# What is Apache Airflow? +# What is Apache Airflow Worker? -> Airflow is a platform to programmatically author, schedule and monitor workflows. +> Airflow is a platform to programmatically author, schedule and monitor workflows. Airflow Worker is one of the required components when the CeleryExecutor is configured. 
https://airflow.apache.org/ ### Ironbank Hardened Image Notes: @@ -10,7 +10,7 @@ apache-airflow-providers-elasticsearch will not work on this image due to vulner ## Docker Compose ```console -$ curl -LO https://raw.githubusercontent.com/bitnami/bitnami-docker-airflow/master/docker-compose.yml +$ curl -LO https://raw.githubusercontent.com/bitnami/bitnami-docker-airflow-worker/master/docker-compose.yml $ docker-compose up ``` @@ -26,7 +26,7 @@ You can find the default credentials and available configuration options in the * Bitnami container images are released daily with the latest distribution packages available. -> This [CVE scan report](https://quay.io/repository/bitnami/airflow?tab=tags) contains a security report with all open CVEs. To get the list of actionable security issues, find the "latest" tag, click the vulnerability report link under the corresponding "Security scan" field and then select the "Only show fixable" filter on the next page. +> This [CVE scan report](https://quay.io/repository/bitnami/airflow-worker?tab=tags) contains a security report with all open CVEs. To get the list of actionable security issues, find the "latest" tag, click the vulnerability report link under the corresponding "Security scan" field and then select the "Only show fixable" filter on the next page. # Supported tags and respective `Dockerfile` links @@ -34,10 +34,10 @@ You can find the default credentials and available configuration options in the Learn more about the Bitnami tagging policy and the difference between rolling tags and immutable tags [in our documentation page](https://docs.bitnami.com/tutorials/understand-rolling-tags-containers/). 
-* [`2`, `2-debian-10`, `2.0.1`, `2.0.1-debian-10-r51`, `latest` (2/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-airflow/blob/2.0.1-debian-10-r51/2/debian-10/Dockerfile) -* [`1`, `1-debian-10`, `1.10.15`, `1.10.15-debian-10-r17` (1/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-airflow/blob/1.10.15-debian-10-r17/1/debian-10/Dockerfile) +* [`2`, `2-debian-10`, `2.0.2`, `2.0.2-debian-10-r17`, `latest` (2/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-airflow-worker/blob/2.0.2-debian-10-r17/2/debian-10/Dockerfile) +* [`1`, `1-debian-10`, `1.10.15`, `1.10.15-debian-10-r49` (1/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-airflow-worker/blob/1.10.15-debian-10-r49/1/debian-10/Dockerfile) -Subscribe to project updates by watching the [bitnami/airflow GitHub repo](https://github.com/bitnami/bitnami-docker-airflow). +Subscribe to project updates by watching the [bitnami/airflow-worker GitHub repo](https://github.com/bitnami/bitnami-docker-airflow-worker). # Prerequisites @@ -45,14 +45,15 @@ To run this application you need [Docker Engine](https://www.docker.com/products # How to use this image -Airflow requires access to a PostgreSQL database to store information. We will use our very own [PostgreSQL image](https://www.github.com/bitnami/bitnami-docker-postgresql) for the database requirements. Additionally, if you pretend to use the `CeleryExecutor`, you will also need an [Airflow Scheduler](https://www.github.com/bitnami/bitnami-docker-airflow-scheduler), one or more [Airflow Workers](https://www.github.com/bitnami/bitnami-docker-airflow-worker) and a [Redis(TM) server](https://www.github.com/bitnami/bitnami-docker-redis). +Airflow Worker is a component of an Airflow solution configured with the `CeleryExecutor`. Hence, you will need the rest of the Airflow components for this image to work. 
+You will need an [Airflow Webserver](https://www.github.com/bitnami/bitnami-docker-airflow), an [Airflow Scheduler](https://www.github.com/bitnami/bitnami-docker-airflow-scheduler), a [PostgreSQL database](https://www.github.com/bitnami/bitnami-docker-postgresql) and a [Redis(TM) server](https://www.github.com/bitnami/bitnami-docker-redis). ## Using Docker Compose -The main folder of this repository contains a functional [`docker-compose.yml`](https://github.com/bitnami/bitnami-docker-airflow/blob/master/docker-compose.yml) file. Run the application using it as shown below: +The main folder of this repository contains a functional [`docker-compose.yml`](https://github.com/bitnami/bitnami-docker-airflow-worker/blob/master/docker-compose.yml) file. Run the application using it as shown below: ```console -$ curl -sSL https://raw.githubusercontent.com/bitnami/bitnami-docker-airflow/master/docker-compose.yml > docker-compose.yml +$ curl -sSL https://raw.githubusercontent.com/bitnami/bitnami-docker-airflow-worker/master/docker-compose.yml > docker-compose.yml $ docker-compose up -d ``` @@ -135,6 +136,7 @@ If you want to run the application manually instead of using `docker-compose`, t -e AIRFLOW_DATABASE_NAME=bitnami_airflow \ -e AIRFLOW_DATABASE_USERNAME=bn_airflow \ -e AIRFLOW_DATABASE_PASSWORD=bitnami1 \ + -e AIRFLOW_QUEUE=new_queue \ --net airflow-tier \ --volume airflow_worker_data:/bitnami \ bitnami/airflow-worker:latest @@ -148,7 +150,7 @@ If you remove the container all your data and configurations will be lost, and t For persistence you should mount a volume at the `/bitnami` path. 
Additionally you should mount volumes for persistence of [PostgreSQL data](https://github.com/bitnami/bitnami-docker-mariadb#persisting-your-database) and [Redis(TM) data](https://github.com/bitnami/bitnami-docker-mariadb#persisting-your-database) -The above examples define docker volumes namely `postgresql_data`, `redis_data`, `airflow_data`, `airflow_scheduler_data` and `airflow_worker_data`. The Airflow application state will persist as long as these volumes are not removed. +The above examples define docker volumes namely `postgresql_data`, `redis_data`, `airflow_data`, `airflow_scheduler_data` and `airflow_worker_data`. The Airflow Worker application state will persist as long as these volumes are not removed. To avoid inadvertent removal of these volumes you can [mount host directories as data volumes](https://docs.docker.com/engine/tutorials/dockervolumes/). Alternatively you can make use of volume plugins to host the volume data. @@ -166,7 +168,7 @@ services: - POSTGRESQL_USERNAME=bn_airflow - POSTGRESQL_PASSWORD=bitnami1 volumes: - - /path/to/airflow-persistence:/bitnami/postgresql + - /path/to/airflow-persistence:/bitnami redis: image: 'bitnami/redis:latest' environment: @@ -291,63 +293,36 @@ services: # Configuration -## Load DAG files - -Custom DAG files can be mounted to `/opt/bitnami/airflow/dags`. - ## Installing additional python modules This container supports the installation of additional python modules at start-up time. In order to do that, you can mount a `requirements.txt` file with your specific needs under the path `/bitnami/python/requirements.txt`. ## Environment variables -The Airflow instance can be customized by specifying environment variables on the first run. The following environment values are provided to customize Airflow: - -##### User configuration - -- `AIRFLOW_USERNAME`: Airflow application username. Default: **user** -- `AIRFLOW_PASSWORD`: Airflow application password. 
Default: **bitnami** -- `AIRFLOW_EMAIL`: Airflow application email. Default: **user@example.com** +The Airflow Worker instance can be customized by specifying environment variables on the first run. The following environment values are provided to customize Airflow Worker: -##### Airflow configuration +##### Airflow Worker configuration -- `AIRFLOW_EXECUTOR`: Airflow executor. Default: **SequentialExecutor** -- `AIRFLOW_FERNET_KEY`: Airflow Fernet key. No defaults. -- `AIRFLOW_WEBSERVER_HOST`: Airflow webserver host. Default: **127.0.0.1** -- `AIRFLOW_WEBSERVER_PORT_NUMBER`: Airflow webserver port. Default: **8080** -- `AIRFLOW_LOAD_EXAMPLES`: To load example tasks into the application. Default: **yes** -- `AIRFLOW_BASE_URL`: Airflow webserver base URL. No defaults. +- `AIRFLOW_EXECUTOR`: Airflow Worker executor. Default: **SequentialExecutor** +- `AIRFLOW_FERNET_KEY`: Airflow Worker Fernet key. No defaults. +- `AIRFLOW_WEBSERVER_HOST`: Airflow Worker webserver host. Default: **airflow** +- `AIRFLOW_WEBSERVER_PORT_NUMBER`: Airflow Worker webserver port. Default: **8080** - `AIRFLOW_HOSTNAME_CALLABLE`: Method to obtain the hostname. No defaults. -- `AIRFLOW_POOL_NAME`: Pool name. No defaults. -- `AIRFLOW_POOL_SIZE`: Pool size, required with `AIRFLOW_POOL_NAME`. No defaults. -- `AIRFLOW_POOL_DESC`: Pool description, required with `AIRFLOW_POOL_NAME`. No defaults. +- `AIRFLOW_QUEUE`: A queue for the worker to pull tasks from. ##### Use an existing database - `AIRFLOW_DATABASE_HOST`: Hostname for PostgreSQL server. Default: **postgresql** - `AIRFLOW_DATABASE_PORT_NUMBER`: Port used by PostgreSQL server. Default: **5432** -- `AIRFLOW_DATABASE_NAME`: Database name that Airflow will use to connect with the database. Default: **bitnami_airflow** -- `AIRFLOW_DATABASE_USERNAME`: Database user that Airflow will use to connect with the database. Default: **bn_airflow** -- `AIRFLOW_DATABASE_PASSWORD`: Database password that Airflow will use to connect with the database. 
No defaults. -- `AIRFLOW_DATABASE_USE_SSL`: Set to yes if the database is using SSL. Default: **no** +- `AIRFLOW_DATABASE_NAME`: Database name that Airflow Worker will use to connect with the database. Default: **bitnami_airflow** +- `AIRFLOW_DATABASE_USERNAME`: Database user that Airflow Worker will use to connect with the database. Default: **bn_airflow** +- `AIRFLOW_DATABASE_PASSWORD`: Database password that Airflow Worker will use to connect with the database. No defaults. +- `AIRFLOW_DATABASE_USE_SSL`: Set to yes if the database uses SSL. Default: **no** - `AIRFLOW_REDIS_USE_SSL`: Set to yes if Redis(TM) uses SSL. Default: **no** - `REDIS_HOST`: Hostname for Redis(TM) server. Default: **redis** - `REDIS_PORT_NUMBER`: Port used by Redis(TM) server. Default: **6379** -- `REDIS_USER`: User that Airflow will use to connect with Redis(TM). No defaults. -- `REDIS_PASSWORD`: Password that Airflow will use to connect with Redis(TM). No defaults. - -##### Airflow LDAP authentication - -- `AIRFLOW_LDAP_ENABLE`: Enable LDAP authentication. Default: **no** -- `AIRFLOW_LDAP_URI`: LDAP server URI. No defaults. -- `AIRFLOW_LDAP_SEARCH`: LDAP search base. No defaults. -- `AIRFLOW_LDAP_BIND_USER`: LDAP user name. No defaults. -- `AIRFLOW_LDAP_BIND_PASSWORD`: LDAP user password. No defaults. -- `AIRFLOW_LDAP_UID_FIELD`: LDAP field used for uid. Default: **uid**. -- `AIRFLOW_LDAP_USE_TLS`: Use LDAP SSL. Defaults: **False**. -- `AIRFLOW_LDAP_ALLOW_SELF_SIGNED`: Allow self signed certicates in LDAP ssl. Default: **True**. -- `AIRFLOW_LDAP_TLS_CA_CERTIFICATE`: File that store the CA for LDAP ssl. No defaults. -- `AIRFLOW_USER_REGISTRATION_ROLE`: Role for the created user. Default: **Public** +- `REDIS_USER`: User that Airflow Worker will use to connect with Redis(TM). No defaults. +- `REDIS_PASSWORD`: Password that Airflow Worker will use to connect with Redis(TM). No defaults. 
> In addition to the previous environment variables, all the parameters from the configuration file can be overwritten by using environment variables with this format: `AIRFLOW__{SECTION}__{KEY}`. Note the double underscores. @@ -386,78 +361,20 @@ $ docker run -d --name airflow -p 8080:8080 \ bitnami/airflow:latest ``` -### SMTP Configuration - -To configure Airflow to send email using SMTP you can set the following environment variables: - -- `AIRFLOW__SMTP__SMTP_HOST`: Host for outgoing SMTP email. Default: **localhost** -- `AIRFLOW__SMTP__SMTP_PORT`: Port for outgoing SMTP email. Default: **25** -- `AIRFLOW__SMTP__SMTP_STARTTLS`: To use TLS communication. Default: **True** -- `AIRFLOW__SMTP__SMTP_SSL`: To use SSL communication. Default: **False** -- `AIRFLOW__SMTP__SMTP_USER`: User of SMTP used for authentication (likely email). No defaults. -- `AIRFLOW__SMTP__SMTP_PASSWORD`: Password for SMTP. No defaults. -- `AIRFLOW__SMTP__SMTP_MAIL_FROM`: To modify the "from email address". Default: **airflow@example.com** - -This would be an example of SMTP configuration using a GMail account: - - * docker-compose (application part): - -```yaml - airflow: - image: bitnami/airflow:latest - environment: - - AIRFLOW_FERNET_KEY=46BKJoQYlPPOexq0OhDZnIlNepKFf87WFwLbfzqDDho= - - AIRFLOW_EXECUTOR=CeleryExecutor - - AIRFLOW_DATABASE_NAME=bitnami_airflow - - AIRFLOW_DATABASE_USERNAME=bn_airflow - - AIRFLOW_DATABASE_PASSWORD=bitnami1 - - AIRFLOW_PASSWORD=bitnami - - AIRFLOW_USERNAME=user - - AIRFLOW_EMAIL=user@email.com - - AIRFLOW__SMTP__SMTP_HOST=smtp@gmail.com - - AIRFLOW__SMTP__SMTP_USER=your_email@gmail.com - - AIRFLOW__SMTP__SMTP_PASSWORD=your_password - - AIRFLOW__SMTP__SMTP_PORT=587 - ports: - - '8080:8080' - volumes: - - airflow_data:/bitnami -``` - -* For manual execution: - -```console -$ docker run -d --name airflow -p 8080:8080 \ - -e AIRFLOW_FERNET_KEY=46BKJoQYlPPOexq0OhDZnIlNepKFf87WFwLbfzqDDho= \ - -e AIRFLOW_EXECUTOR=CeleryExecutor \ - -e 
AIRFLOW_DATABASE_NAME=bitnami_airflow \ - -e AIRFLOW_DATABASE_USERNAME=bn_airflow \ - -e AIRFLOW_DATABASE_PASSWORD=bitnami1 \ - -e AIRFLOW_PASSWORD=bitnami123 \ - -e AIRFLOW_USERNAME=user \ - -e AIRFLOW_EMAIL=user@example.com \ - -e AIRFLOW__SMTP__SMTP_HOST=smtp@gmail.com \ - -e AIRFLOW__SMTP__SMTP_USER=your_email@gmail.com \ - -e AIRFLOW__SMTP__SMTP_PASSWORD=your_password \ - -e AIRFLOW__SMTP__SMTP_PORT=587 \ - --volume airflow_data:/bitnami \ - bitnami/airflow:latest -``` - # Notable Changes -## 1.10.15-debian-10-r17 and 2.0.1-debian-10-r50 +## 1.10.15-debian-10-r18 and 2.0.1-debian-10-r51 - The size of the container image has been decreased. - The configuration logic is now based on Bash scripts in the *rootfs/* folder. # Contributing -We'd love for you to contribute to this container. You can request new features by creating an [issue](https://github.com/bitnami/bitnami-docker-airflow/issues), or submit a [pull request](https://github.com/bitnami/bitnami-docker-airflow/pulls) with your contribution. +We'd love for you to contribute to this container. You can request new features by creating an [issue](https://github.com/bitnami/bitnami-docker-airflow-worker/issues), or submit a [pull request](https://github.com/bitnami/bitnami-docker-airflow-worker/pulls) with your contribution. # Issues -If you encountered a problem running this container, you can file an [issue](https://github.com/bitnami/bitnami-docker-airflow/issues/new). For us to provide better support, be sure to include the following information in your issue: +If you encountered a problem running this container, you can file an [issue](https://github.com/bitnami/bitnami-docker-airflow-worker/issues/new). 
For us to provide better support, be sure to include the following information in your issue: - Host OS and version - Docker version (`$ docker version`) diff --git a/hardening_manifest.yaml b/hardening_manifest.yaml index dd4956dd2d6e1f974f84b52c0f5f538551461be2..8596ad9f804f791c2c455e834f8a5bb8379c82fa 100644 --- a/hardening_manifest.yaml +++ b/hardening_manifest.yaml @@ -8,7 +8,7 @@ name: "bitnami/airflow-worker" # The most specific version should be the first tag and will be shown # on ironbank.dsop.io tags: -- "2.0.1" +- "2.0.2" # Build args passed to Dockerfile ARGs args: @@ -26,7 +26,7 @@ labels: org.opencontainers.image.url: "https://airflow.apache.org" ## Name of the distributing entity, organization or individual org.opencontainers.image.vendor: "Bitnami" - org.opencontainers.image.version: "2.0.1" + org.opencontainers.image.version: "2.0.2" ## Keywords to help with search (ex. "cicd,gitops,golang") mil.dso.ironbank.image.keywords: "workflows,dags,tasks" ## This value can be "opensource" or "commercial" @@ -36,48 +36,18 @@ labels: # List of resources to make available to the offline build context resources: -- tag: bitnami/airflow-worker:2.0.1-debian-10-r54 - url: docker://docker.io/bitnami/airflow-worker@sha256:224ad64fbbf7cae7930fb3241b8e515499cde298cae639a4b037c0c3f8d8cd2c +- tag: bitnami/airflow-worker:2.0.2-debian-10-r17 + url: docker://docker.io/bitnami/airflow-worker@sha256:a07d81ee9af4f3ed40a20482f4ee5070108500c15d614d3c9ff56d2e9229abd7 - filename: thrift-0.14.1.tar.gz url: https://apache.osuosl.org/thrift/0.14.1/thrift-0.14.1.tar.gz validation: type: sha256 value: 13da5e1cd9c8a3bb89778c0337cc57eb0c29b08f3090b41cf6ab78594b410ca5 -- filename: pylint-2.7.2-py3-none-any.whl - url: https://files.pythonhosted.org/packages/b3/66/af8f80d4fa77dcd4cba9e56e136522838920a2eaf6794b784e1f377f84d9/pylint-2.7.2-py3-none-any.whl +- filename: httplib2-0.19.0-py3-none-any.whl + url: 
https://files.pythonhosted.org/packages/15/7e/51e5bd333c0afa1c7bdbf98eb3b0ccf5167e2b1ecc8b4d13e9cc29291f81/httplib2-0.19.0-py3-none-any.whl validation: type: sha256 - value: d09b0b07ba06bcdff463958f53f23df25e740ecd81895f7d2699ec04bbd8dc3b -- filename: networkx-2.5.1-py3-none-any.whl - url: https://files.pythonhosted.org/packages/f3/b7/c7f488101c0bb5e4178f3cde416004280fd40262433496830de8a8c21613/networkx-2.5.1-py3-none-any.whl - validation: - type: sha256 - value: 0635858ed7e989f4c574c2328380b452df892ae85084144c73d8cd819f0c4e06 -- filename: decorator-4.4.2-py2.py3-none-any.whl - url: https://files.pythonhosted.org/packages/ed/1b/72a1821152d07cf1d8b6fce298aeb06a7eb90f4d6d41acec9861e7cc6df0/decorator-4.4.2-py2.py3-none-any.whl - validation: - type: sha256 - value: 41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760 -- filename: aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl - url: https://files.pythonhosted.org/packages/a6/76/f18138b0ff84fcd939667a2efc2e1b49c871299f9091f84c06bb4c350c01/aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl - validation: - type: sha256 - value: 79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 -- filename: azure_storage_blob-12.8.0-py2.py3-none-any.whl - url: https://files.pythonhosted.org/packages/09/14/4ca417a9c92b0fb93516575dd7be9b058bf13d531dcc21239b5f8f216a69/azure_storage_blob-12.8.0-py2.py3-none-any.whl - validation: - type: sha256 - value: 46999df6e2cde8773739f7c3bd1eb5846d4b7dc1ef6e2161f3b6d1d0f21726ba -- filename: pytest-6.2.3-py3-none-any.whl - url: https://files.pythonhosted.org/packages/76/4d/9c00146923da9f1cabd1878209d71b1380d537ec331a1a613e8f4b9d7985/pytest-6.2.3-py3-none-any.whl - validation: - type: sha256 - value: 6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc -- filename: astroid-2.5.1-py3-none-any.whl - url: https://files.pythonhosted.org/packages/f1/49/d51e5ce77ea234ee416966e489283512a9852f78d9ff125747eae29e7b69/astroid-2.5.1-py3-none-any.whl - validation: - 
type: sha256 - value: 21d735aab248253531bb0f1e1e6d068f0ee23533e18ae8a6171ff892b98297cf + value: 749c32603f9bf16c1277f59531d502e8f1c2ca19901ae653b49c4ed698f0820e - filename: Babel-2.9.1-py2.py3-none-any.whl url: https://files.pythonhosted.org/packages/aa/96/4ba93c5f40459dc850d25f9ba93f869a623e77aaecc7a9344e19c01942cf/Babel-2.9.1-py2.py3-none-any.whl validation: diff --git a/prebuildfs/opt/bitnami/.bitnami_components.json b/prebuildfs/opt/bitnami/.bitnami_components.json index 1c30dae19506c34eb5bd65b32a12d18cdee57653..d764d01414d6317694e9e7e433baa8df6761435a 100644 --- a/prebuildfs/opt/bitnami/.bitnami_components.json +++ b/prebuildfs/opt/bitnami/.bitnami_components.json @@ -1,10 +1,10 @@ { "airflow-worker": { "arch": "amd64", - "digest": "cf754bdc8e231d408275c1971d9a732576e346e0cd4af1ea426739bf3e62538d", + "digest": "b88e99ebecdbf6586b58e07cbe8b67a04fc729499b1daa8673ec284a6a07fc9e", "distro": "debian-10", "type": "NAMI", - "version": "2.0.1-2" + "version": "2.0.2-1" }, "gosu": { "arch": "amd64", @@ -29,10 +29,10 @@ }, "python": { "arch": "amd64", - "digest": "4f1f6b81a3617dfaaa2c579510118ef6df07119977a5d6ca7df3cf485fca709a", + "digest": "b7a37a0590eff13717c191c90dc277f26706196c5fbf2a6b79019bd9f1032f68", "distro": "debian-10", "type": "NAMI", - "version": "3.8.9-0" + "version": "3.8.10-2" }, "wait-for-port": { "arch": "amd64", diff --git a/prebuildfs/opt/bitnami/scripts/libvalidations.sh b/prebuildfs/opt/bitnami/scripts/libvalidations.sh index 8d827924afe45f3e0ca361c081947fa5ac940bd6..ca5afc91c7b5e89a6e7593c1547e345082d7c115 100644 --- a/prebuildfs/opt/bitnami/scripts/libvalidations.sh +++ b/prebuildfs/opt/bitnami/scripts/libvalidations.sh @@ -181,7 +181,7 @@ validate_ipv4() { local stat=1 if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - read -r -a ip_array <<< "$(tr '.' ' ' <<< "$ip")" + read -r -a ip_array <<< "$(tr '.' 
' ' <<< "$ip")" [[ ${ip_array[0]} -le 255 && ${ip_array[1]} -le 255 \ && ${ip_array[2]} -le 255 && ${ip_array[3]} -le 255 ]] stat=$? diff --git a/prebuildfs/opt/bitnami/scripts/libwebserver.sh b/prebuildfs/opt/bitnami/scripts/libwebserver.sh index 25bff4a05105695386164251442995542fb9ddc7..1280134f015c62ebe31a5427130177f526f48b61 100644 --- a/prebuildfs/opt/bitnami/scripts/libwebserver.sh +++ b/prebuildfs/opt/bitnami/scripts/libwebserver.sh @@ -180,6 +180,7 @@ web_server_reload() { # --apache-move-htaccess - Move .htaccess files to a common place so they can be loaded during Apache startup # NGINX-specific flags: # --nginx-additional-configuration - Additional server block configuration (no default) +# --nginx-external-configuration - Configuration external to server block (no default) # Returns: # true if the configuration was enabled, false otherwise ######################## @@ -212,6 +213,7 @@ ensure_web_server_app_configuration_exists() { | --apache-before-vhost-configuration \ | --apache-allow-override \ | --apache-extra-directory-configuration \ + | --apache-proxy-address \ | --apache-move-htaccess \ ) apache_args+=("${1//apache-/}" "${2:?missing value}") @@ -219,7 +221,8 @@ ensure_web_server_app_configuration_exists() { ;; # Specific NGINX flags - --nginx-additional-configuration) + --nginx-additional-configuration \ + | --nginx-external-configuration) nginx_args+=("${1//nginx-/}" "${2:?missing value}") shift ;; diff --git a/rootfs/opt/bitnami/scripts/airflow-worker/entrypoint.sh b/rootfs/opt/bitnami/scripts/airflow-worker/entrypoint.sh index 3f04f774f1692070804bef7468a190168e96e316..8b625a358f102d4fd9be18f7f53723537f062ba1 100755 --- a/rootfs/opt/bitnami/scripts/airflow-worker/entrypoint.sh +++ b/rootfs/opt/bitnami/scripts/airflow-worker/entrypoint.sh @@ -16,6 +16,21 @@ set -o pipefail print_welcome_page +if ! 
am_i_root && [[ -e "$LIBNSS_WRAPPER_PATH" ]]; then + info "Enabling non-root system user with nss_wrapper" + echo "airflow:x:$(id -u):$(id -g):Airflow:$AIRFLOW_HOME:/bin/false" > "$NSS_WRAPPER_PASSWD" + echo "airflow:x:$(id -g):" > "$NSS_WRAPPER_GROUP" + + export LD_PRELOAD="$LIBNSS_WRAPPER_PATH" +fi + +# Install custom python package if requirements.txt is present +if [[ -f "/bitnami/python/requirements.txt" ]]; then + . /opt/bitnami/airflow/venv/bin/activate + pip install -r /bitnami/python/requirements.txt + deactivate +fi + if [[ "$*" = *"/opt/bitnami/scripts/airflow-worker/run.sh"* || "$*" = *"/run.sh"* ]]; then info "** Starting Airflow setup **" /opt/bitnami/scripts/airflow-worker/setup.sh diff --git a/rootfs/opt/bitnami/scripts/libairflow.sh b/rootfs/opt/bitnami/scripts/libairflow.sh index 7a6cde93e21fd80077caec15c7408fd7ef33188b..92549cfb95b1d29a525705106e295f8bdd832cad 100644 --- a/rootfs/opt/bitnami/scripts/libairflow.sh +++ b/rootfs/opt/bitnami/scripts/libairflow.sh @@ -2,8 +2,7 @@ # Bitnami Airflow library -# shellcheck disable=SC1091 -# shellcheck disable=SC2153 +# shellcheck disable=SC1091,SC2153 # Load Generic Libraries . /opt/bitnami/scripts/libfile.sh @@ -23,9 +22,17 @@ # Arguments: # None # Returns: -# None +# 0 if the validation succeeded, 1 otherwise ######################### airflow_validate() { + local error_code=0 + + # Auxiliary functions + print_validation_error() { + error "$1" + error_code=1 + } + # Check postgresql host [[ -z "$AIRFLOW_DATABASE_HOST" ]] && print_validation_error "Missing AIRFLOW_DATABASE_HOST" @@ -46,6 +53,8 @@ airflow_validate() { [[ -z "$AIRFLOW_POOL_DESC" ]] && print_validation_error "Provided AIRFLOW_POOL_NAME but missing AIRFLOW_POOL_DESC" [[ -z "$AIRFLOW_POOL_SIZE" ]] && print_validation_error "Provided AIRFLOW_POOL_NAME but missing AIRFLOW_POOL_SIZE" fi + + return "$error_code" } ######################## @@ -429,4 +438,4 @@ is_airflow_not_running() { airflow_stop() { info "Stopping Airflow..." 
stop_service_using_pid "$AIRFLOW_PID_FILE" -} \ No newline at end of file +} diff --git a/rootfs/opt/bitnami/scripts/libairflowworker.sh b/rootfs/opt/bitnami/scripts/libairflowworker.sh index e440bd813ce28b834566b348d040904bb5d5f325..938d258041318c47ba9fe43cf7e46c678d2424b3 100644 --- a/rootfs/opt/bitnami/scripts/libairflowworker.sh +++ b/rootfs/opt/bitnami/scripts/libairflowworker.sh @@ -3,7 +3,6 @@ # Bitnami Airflow library # shellcheck disable=SC1091 -# shellcheck disable=SC2153 # Load Generic Libraries . /opt/bitnami/scripts/libfile.sh