UNCLASSIFIED - NO CUI

Skip to content
Snippets Groups Projects
Commit 2acd6c47 authored by Scott Stroud's avatar Scott Stroud
Browse files

init

parent d0e199e7
No related branches found
No related tags found
2 merge requests!24Development,!23Q2 2024 jdk17
Pipeline #3505446 failed
Showing
with 632 additions and 34 deletions
ibscripts/build-image.sh
ibscripts
scripts/build-image.sh
archive.key
ARG CP_BASE_NEW_IMAGE=confluentinc/cp-base-new
ARG CP_BASE_NEW_TAG=7.7.0
ARG BASE_REGISTRY=registry1.dso.mil
# Iron Bank hardened OpenJDK 17 base (supersedes the earlier redhat/ubi/ubi8:8.7
# pair; the duplicate ARG declarations have been removed — the last one won anyway).
ARG BASE_IMAGE=redhat/openjdk/openjdk17
ARG BASE_TAG=1.17

# Upstream Confluent image: used ONLY as a COPY --from source for the
# cp-base-new utility jars, licenses and configs; it is not the runtime base.
FROM ${CP_BASE_NEW_IMAGE}:${CP_BASE_NEW_TAG} AS base

FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}

# Apply pending security updates and drop the package cache in the same layer.
RUN dnf update -y --nodocs && \
    dnf clean all && \
    rm -rf /var/cache/dnf

# This affects how strings in Java class files are interpreted.
# We want UTF-8 and this is the only locale in the base image that supports it
ENV LANG="C.UTF-8"

# The base image JDK does not expose all the FIPS ciphers, like HmacSHA256, when FIPS is enabled.
# This causes issues with keytool & connect, so disable it for ONLY those java processes
# (not other processes, the OS, or the K8S host).
ARG JAVA_OPTS="-Dsecurity.useSystemPropertiesFile=false -Dcom.redhat.fips=false"
ENV JDK_JAVA_OPTIONS=$JAVA_OPTS
ENV JAVA_TOOL_OPTIONS=$JAVA_OPTS
ENV CUB_CLASSPATH='"/usr/share/java/cp-base-new/*"'

USER root

## Confluent package-signing key; imported by rpm in the RUN step below.
COPY archive.key /tmp

## For auditing & debugging
COPY LICENSE /licenses/cp-base-new_LICENSE
COPY Dockerfile /etc/confluent/ironbank/cp-base-new_Dockerfile
## BUG FIX: destination previously duplicated cp-base-new_Dockerfile, which
## silently overwrote the copied Dockerfile with the manifest.
COPY hardening_manifest.yaml /etc/confluent/ironbank/cp-base-new_hardening_manifest.yaml
COPY --chmod=755 scripts/dnfupdate /usr/bin/
COPY --chmod=755 scripts/replacejar /usr/bin/

## Add the Confluent Docker Utility Belt which helps with starting the proper applications
## https://github.com/confluentinc/common-docker/tree/master/utility-belt
## copy from dockerhub image
COPY --from=base /etc/confluent /etc/confluent
COPY --from=base /etc/cp-base-new /etc/cp-base-new
COPY --from=base /licenses /licenses
COPY --from=base /usr/share/java/cp-base-new /usr/share/java/cp-base-new
COPY --from=base /usr/share/doc /usr/share/doc
COPY --from=base /usr/local/lib /usr/local/lib
COPY --from=base /usr/local/lib64 /usr/local/lib64
COPY --from=base /usr/local/bin /usr/local/bin

# Runtime dependencies, python aliasing, hardening, and the unprivileged user.
# NOTE: the former `alias python=python3.9` / `alias pip=pip3` lines were
# removed — shell aliases do not persist beyond the RUN shell; the symlinks
# and `alternatives` below provide the same effect.
RUN echo "===> dnf installs" \
    && dnf install -y --nodocs python39 openssl tar procps iputils hostname \
    && dnf install -y --nodocs glibc glibc-common glibc-minimal-langpack \
    && dnf install -y --nodocs tzdata libgcc libstdc++ cyrus-sasl-lib libsolv \
    && rpm --import /tmp/archive.key \
    && echo "===> python aliasing" \
    && ln -s /usr/bin/python3.9 /usr/bin/python \
    && ln -s /usr/bin/pip3 /usr/bin/pip \
    && alternatives --set python /usr/bin/python3.9 \
    && echo "===> manual hardening" \
    && python3 -m pip install --upgrade setuptools \
    && pip3 install urllib3 --upgrade \
    && dnf erase -y procps-ng gdb-gdbserver \
    && echo "===> appuser setup" \
    && mkdir -p /usr/logs /licenses \
    && useradd --no-log-init --create-home --shell /bin/bash appuser \
    && chown appuser:appuser -R /usr/logs \
    && dnfupdate

USER appuser

## healthcheck not applicable since this is ONLY used as a base image
HEALTHCHECK NONE

# NOTE(review): both USER appuser and USER nobody appear in the original;
# nobody is the effective final user — confirm downstream images expect this.
USER nobody
# cp-base-new-jdk17
This is the base image used to support the Confluent Platform images.
Architecture: https://www.confluent.io/resources/apache-kafka-confluent-enterprise-reference-architecture/
Operator Deployment: https://docs.confluent.io/operator/current/co-deployment.html
For questions on this container please contact: confluent-fed@confluent.io
This repository is based on the project template for all Iron Bank container repositories.
\ No newline at end of file
---
apiVersion: v1

# The repository name in registry1, excluding /ironbank/
# (template placeholder "opensource/foo/bar" removed — duplicate YAML keys are
# invalid and the placeholder would shadow the real value in lenient parsers)
name: "confluentinc/cp-base-new"

# List of tags to push for the repository in registry1
# The most specific version should be the first tag and will be shown
# on ironbank.dso.mil
tags:
- "Q2_2024_jdk17"
- "2024-07-29"
- "7.7.0"
- "latest-ubi8.amd64"
- "latest.amd64"
- "latest_jdk17"
- "latest"

# Build args passed to Dockerfile ARGs
# (duplicate ubi8 BASE_IMAGE/BASE_TAG keys removed; openjdk17 is the effective pair)
args:
  CP_BASE_NEW_IMAGE: "confluentinc/cp-base-new"
  CP_BASE_NEW_TAG: "7.7.0"
  BASE_IMAGE: "redhat/openjdk/openjdk17"
  BASE_TAG: "1.17"

# Docker image labels (template placeholder label set removed)
labels:
  org.opencontainers.image.title: "cp-base-new"
  org.opencontainers.image.description: "Official Confluent Docker base image for other Confluent Platform images"
  org.opencontainers.image.licenses: "CONFLUENT ENTERPRISE LICENSE"
  org.opencontainers.image.url: "https://docs.confluent.io/platform/current/overview.html"
  org.opencontainers.image.vendor: "Confluent"
  org.opencontainers.image.version: "Q2_2024_jdk17"
  mil.dso.ironbank.image.keywords: "confluent,cflt,kafka,operator,cfk"
  mil.dso.ironbank.image.type: "commercial"
  mil.dso.ironbank.product.name: "Confluent Platform"

# List of resources to make available to the offline build context
# (empty template resource entry removed)
resources:
- url: "docker://docker.io/confluentinc/cp-base-new@sha256:470903bc7130ae4a7fb366f5e48ccd0aeb7065a3873704beab2cfc25449d2f88"
  tag: "confluentinc/cp-base-new:7.7.0"
- filename: archive.key
  url: https://packages.confluent.io/rpm/7.0/archive.key
  validation:
    type: sha256
    value: b569e9d80bc08b65d342491f8b94e47eb7032701ce17b091f212e1072672a4d5

# List of project maintainers (example placeholder entry removed; its
# "cht_member: true/false" literal was not a valid boolean)
maintainers:
- name: "Scott Stroud"
  username: "scottstroud"
  email: "confluent-fed@confluent.io"
  cht_member: false
- name: "Preston McGowan"
  username: "preston.mcgowan"
  email: "confluent-fed@confluent.io"
  cht_member: false
- name: "Gary Hott"
  username: "garyhott86"
  email: "confluent-fed@confluent.io"
  cht_member: false
{
"assignees": [
"@scottstroud"
],
"baseBranches": [
"development"
],
"automerge": false,
"gitLabAutomerge": false,
"regexManagers": [
{
"fileMatch": [
"^hardening_manifest.yaml$"
],
"matchStrings": [
"org\\.opencontainers\\.image\\.version:\\s+(\\s|\"|')?(?<currentValue>.+?)(\\s|\"|'|$)",
"tags:\\s+-(\\s|\"|')+(?<currentValue>.+?)(\\s|\"|'|$)+"
],
"depNameTemplate": "confluentinc/cp-base-new",
"datasourceTemplate": "docker"
}
]
}
\ No newline at end of file
#!/bin/bash
# dnfupdate: hardening helper installed to /usr/bin by the Dockerfile.
# Applies pending package updates, purges package-manager caches and /tmp,
# and drops the setuid/setgid bits from /usr/bin/write.
# NOTE(review): `yum clean all` after `dnf clean all` is presumably redundant
# on a dnf-based image — confirm before removing.
echo "===> Dependency update" \
&& dnf update -y --nodocs \
&& echo "===> Clean up, Clean up" \
&& dnf clean all \
&& yum clean all \
&& rm -rf /tmp/* /var/cache/dnf \
&& chmod gu-s /usr/bin/write
#!/bin/bash
# build-image.sh: pre-stages the offline build context for the Iron Bank
# pipeline by collecting every artifact referenced in hardening_manifest.yaml
# into a shared resource cache and next to the manifest.
start=`date +%s`
# Optionally load overrides from a local .env file (comment lines ignored).
if [[ -f .env ]]; then
echo "Loading in '.env' for values ..."
export $(grep -v '^#' .env | xargs)
echo " "
fi
# Resolve script, project, and shared resource-cache directories.
# PROJECT_HOME / RESOURCE_HOME may be pre-set by the caller or .env.
SCRIPTS_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_HOME=${PROJECT_HOME:-'../'}
PROJECT_HOME=$(realpath $PROJECT_HOME)
RESOURCE_HOME=${RESOURCE_HOME:-'../../resources'}
RESOURCE_HOME=$(realpath $RESOURCE_HOME)
## importArtifacts <manifestPath> <httpdomain> <resourceDir>
## For every "url:" line in the manifest matching <httpdomain>: reuse a copy
## already present in <resourceDir> or anywhere under RESOURCE_HOME, otherwise
## download it with wget; then ensure the file also exists next to the
## manifest so it lands in the docker build context.
importArtifacts() {
local manifestPath=$1
local httpdomain=$2
local resourceDir=$3
echo -e "\nImporting ${httpdomain} artifacts ..."
local maniDir
maniDir=$(dirname "$manifestPath")
mkdir -p "$resourceDir"
local prefix="url: "
pushd "$resourceDir"
grep "$httpdomain" "$manifestPath" | while read -r line ; do
# Strip everything up to and including "url: " to get the bare URL.
url=${line##*$prefix}
filename=$(basename "$url")
# Renamed from `local` (a bash keyword) to avoid shadowing the builtin.
cached=$(find "$RESOURCE_HOME" -name "$filename")
if [[ -f "$filename" ]]; then
echo "Using existing $filename. Run ./clean.sh if thats not desired."
elif [[ -n "$cached" ]]; then
echo "Using existing $cached. Run ./clean.sh if thats not desired."
cp "$cached" "$maniDir"/
else
echo "downloading ${url} ..."
wget "$url"
fi
if [[ ! -f "$maniDir/$filename" ]]; then
# BUG FIX: was "$(unknown)", which tried to execute a nonexistent command.
echo "copying ${filename} into ${maniDir} ..."
cp "$filename" "$maniDir"/
fi
done
popd
}
# Absolute path to this repo's hardening manifest.
mani=$(realpath "../hardening_manifest.yaml")
# Stage artifacts per upstream host into its slice of the shared cache.
importArtifacts $mani 'ironbank-files.s3.amazonaws.com' "$RESOURCE_HOME/ironbank-files"
importArtifacts $mani 'packages.confluent.io' "$RESOURCE_HOME/confluent"
importArtifacts $mani 's3.us-west-2.amazonaws.com/staging-confluent-packages' "$RESOURCE_HOME/confluent"
importArtifacts $mani 'files.pythonhosted.org' "$RESOURCE_HOME/pythonhosted"
importArtifacts $mani 'repo1.maven.org' "$RESOURCE_HOME/maven"
# echo "Generating hardening_manifest_template.yaml ..."
# $SCRIPTS_HOME/genManifestTemplate.sh "$RESOURCE_HOME/ironbank-files"
# $SCRIPTS_HOME/genManifestTemplate.sh "$RESOURCE_HOME/confluent"
# $SCRIPTS_HOME/genManifestTemplate.sh "$RESOURCE_HOME/pythonhosted"
# Report elapsed wall-clock time.
end=`date +%s`
duration=$((end-start))
echo "#########################"
echo "All done (${duration} seconds)"
echo "#########################"
#
# Copyright 2018 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Shared shell options sourced by the docker scripts: fail fast on unset
# variables and on any command error.
set -o nounset \
-o errexit
# Trace may expose passwords/credentials by printing them to stdout, so turn on with care.
if [ "${TRACE:-}" == "true" ]; then
set -o verbose \
-o xtrace
fi
#!/usr/bin/env bash
#
# Copyright 2019 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Validates required environment and renders the component's property and
# log4j templates before launch. bash-config enables nounset/errexit.
. /etc/confluent/docker/bash-config
# Hard requirements for a broker container.
dub ensure KAFKA_ZOOKEEPER_CONNECT
dub ensure KAFKA_ADVERTISED_LISTENERS
# By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing
# hosts with 0.0.0.0. This is good default as it ensures that the broker
# process listens on all ports.
if [[ -z "${KAFKA_LISTENERS-}" ]]
then
export KAFKA_LISTENERS
KAFKA_LISTENERS=$(cub listeners "$KAFKA_ADVERTISED_LISTENERS")
fi
dub path /etc/kafka/ writable
if [[ -z "${KAFKA_LOG_DIRS-}" ]]
then
export KAFKA_LOG_DIRS
KAFKA_LOG_DIRS="/var/lib/kafka/data"
fi
# advertised.host, advertised.port, host and port are deprecated. Exit if these properties are set.
if [[ -n "${KAFKA_ADVERTISED_PORT-}" ]]
then
echo "advertised.port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
exit 1
fi
if [[ -n "${KAFKA_ADVERTISED_HOST-}" ]]
then
echo "advertised.host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
exit 1
fi
if [[ -n "${KAFKA_HOST-}" ]]
then
echo "host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
exit 1
fi
if [[ -n "${KAFKA_PORT-}" ]]
then
echo "port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
exit 1
fi
# Set if ADVERTISED_LISTENERS has SSL:// or SASL_SSL:// endpoints.
if [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]]
then
echo "SSL is enabled."
# Keystore file plus its key and store passwords (read from credential files).
dub ensure KAFKA_SSL_KEYSTORE_FILENAME
export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME"
dub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists
dub ensure KAFKA_SSL_KEY_CREDENTIALS
KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS"
dub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_KEY_PASSWORD
KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION")
dub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS
KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS"
dub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_KEYSTORE_PASSWORD
KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION")
# A truststore is only required when client authentication is requested/required.
if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] )
then
dub ensure KAFKA_SSL_TRUSTSTORE_FILENAME
export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME"
dub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists
dub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS
KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS"
dub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_TRUSTSTORE_PASSWORD
KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION")
fi
fi
# Set if KAFKA_ADVERTISED_LISTENERS has SASL_PLAINTEXT:// or SASL_SSL:// endpoints.
if [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]]
then
# FIX: quoting was `echo "SASL" is enabled.` — tidy, same output.
echo "SASL is enabled."
dub ensure KAFKA_OPTS
if [[ ! $KAFKA_OPTS == *"java.security.auth.login.config"* ]]
then
echo "KAFKA_OPTS should contain 'java.security.auth.login.config' property."
fi
fi
if [[ -n "${KAFKA_JMX_OPTS-}" ]]
then
if [[ ! $KAFKA_JMX_OPTS == *"com.sun.management.jmxremote.rmi.port"* ]]
then
# BUG FIX: message previously named KAFKA_OPTS, but the check is on KAFKA_JMX_OPTS.
echo "KAFKA_JMX_OPTS should contain 'com.sun.management.jmxremote.rmi.port' property. It is required for accessing the JMX metrics externally."
fi
fi
# Confluent self-balancing clusters default to enabled.
if [[ -z "${KAFKA_CONFLUENT_BALANCER_ENABLE-}" ]]
then
export KAFKA_CONFLUENT_BALANCER_ENABLE
KAFKA_CONFLUENT_BALANCER_ENABLE="true"
fi
# Render the component's properties and log4j configuration from templates.
dub template "/etc/confluent/docker/${COMPONENT}.properties.template" "/etc/${COMPONENT}/${COMPONENT}.properties"
dub template "/etc/confluent/docker/log4j.properties.template" "/etc/${COMPONENT}/log4j.properties"
dub template "/etc/confluent/docker/tools-log4j.properties.template" "/etc/${COMPONENT}/tools-log4j.properties"
#!/usr/bin/env bash
#
# Copyright 2020 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Preflight checks: writable data directory and (for non-SSL client
# connections) Zookeeper reachability.
. /etc/confluent/docker/bash-config
export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
dub path "$KAFKA_DATA_DIRS" writable
# NOTE(review): the zk-ready probe is skipped for SSL client connections —
# presumably cub zk-ready cannot negotiate TLS; confirm against cub docs.
if [[ -n "${KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE-}" ]] && [[ $KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE == "true" ]]
then
echo "===> Skipping Zookeeper health check for SSL connections..."
else
echo "===> Check if Zookeeper is healthy ..."
cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
fi
{#- Renders ${COMPONENT}.properties from KAFKA_* environment variables.
    excluded_props are vars that configure the JVM / logging rather than the
    broker, so they must not be written to the properties file.
    BUG FIX: restored the missing comma after 'KAFKA_HEAP_OPTS' — Jinja
    concatenates adjacent string literals, so without it neither
    KAFKA_HEAP_OPTS nor KAFKA_LOG4J_OPTS was actually excluded. -#}
{% set excluded_props = ['KAFKA_VERSION',
'KAFKA_HEAP_OPTS',
'KAFKA_LOG4J_OPTS',
'KAFKA_OPTS',
'KAFKA_JMX_OPTS',
'KAFKA_JVM_PERFORMANCE_OPTS',
'KAFKA_GC_LOG_OPTS',
'KAFKA_LOG4J_ROOT_LOGLEVEL',
'KAFKA_LOG4J_LOGGERS',
'KAFKA_TOOLS_LOG4J_LOGLEVEL',
'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET']
-%}
{# properties that don't fit the standard format #}
{% set other_props = {
'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET' : 'zookeeper.clientCnxnSocket'
} -%}
{% set kafka_props = env_to_props('KAFKA_', '', exclude=excluded_props) -%}
{% for name, value in kafka_props.items() -%}
{{name}}={{value}}
{% endfor -%}
{% for k, property in other_props.items() -%}
{% if env.get(k) != None -%}
{{property}}={{env[k]}}
{% endif -%}
{% endfor -%}
{% set confluent_support_props = env_to_props('CONFLUENT_SUPPORT_', 'confluent.support.') -%}
{% for name, value in confluent_support_props.items() -%}
{{name}}={{value}}
{% endfor -%}
{% set confluent_metric_props = env_to_props('CONFLUENT_METRICS_', 'confluent.metrics.') -%}
{% for name, value in confluent_metric_props.items() -%}
{{name}}={{value}}
{% endfor -%}
#!/usr/bin/env bash
#
# Copyright 2019 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Configures JMX remote access (when requested) and execs the component server.
# Override this section from the script to include the com.sun.management.jmxremote.rmi.port property.
if [ -z "$KAFKA_JMX_OPTS" ]; then
export KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
fi
# The JMX client needs to be able to connect to java.rmi.server.hostname.
# The default for bridged n/w is the bridged IP so you will only be able to connect from another docker container.
# For host n/w, this is the IP that the hostname on the host resolves to.
# If you have more that one n/w configured, hostname -i gives you all the IPs,
# the default is to pick the first IP (or network).
export KAFKA_JMX_HOSTNAME=${KAFKA_JMX_HOSTNAME:-$(hostname -i | cut -d" " -f1)}
if [ "$KAFKA_JMX_PORT" ]; then
# This ensures that the "if" section for JMX_PORT in kafka launch script does not trigger.
export JMX_PORT=$KAFKA_JMX_PORT
export KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Djava.rmi.server.hostname=$KAFKA_JMX_HOSTNAME -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT -Dcom.sun.management.jmxremote.port=$JMX_PORT"
fi
echo "===> Launching ${COMPONENT} ... "
# exec: the server process replaces this shell so it runs as PID 1 and
# receives container stop signals directly.
exec "${COMPONENT}"-server-start /etc/"${COMPONENT}"/"${COMPONENT}".properties
{#- Renders the broker's log4j.properties. Root level defaults to INFO and is
    overridable via KAFKA_LOG4J_ROOT_LOGLEVEL; all output goes to stdout for
    container log collection. -#}
log4j.rootLogger={{ env["KAFKA_LOG4J_ROOT_LOGLEVEL"] | default('INFO') }}, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
{# Default per-logger levels; KAFKA_LOG4J_LOGGERS entries override these. -#}
{% set loggers = {
'kafka': 'INFO',
'kafka.network.RequestChannel$': 'WARN',
'kafka.producer.async.DefaultEventHandler': 'DEBUG',
'kafka.request.logger': 'WARN',
'kafka.controller': 'TRACE',
'kafka.log.LogCleaner': 'INFO',
'state.change.logger': 'TRACE',
'kafka.authorizer.logger': 'WARN'
} -%}
{% if env['KAFKA_LOG4J_LOGGERS'] %}
{% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
{% endif %}
{% for logger,loglevel in loggers.items() %}
log4j.logger.{{logger}}={{loglevel}}
{% endfor %}
#!/usr/bin/env bash
# Mesos auto-setup: when running in a Mesos sandbox that provides scheduler
# TLS material, convert it into a Java keystore/truststore under /tmp.
# Opt out by setting SKIP_MESOS_AUTO_SETUP.
set +o nounset
# BUG FIX: the original tested unquoted, possibly-unset variables with
# single-bracket [ ... ]; `[ -n $MESOS_SANDBOX ]` collapses to `[ -n ]`,
# which is always true when the variable is unset.
if [[ -z "${SKIP_MESOS_AUTO_SETUP:-}" ]]; then
if [[ -n "${MESOS_SANDBOX:-}" && -e "$MESOS_SANDBOX/.ssl/scheduler.crt" && -e "$MESOS_SANDBOX/.ssl/scheduler.key" ]]; then
echo "Entering Mesos auto setup for Java SSL truststore. You should not see this if you are not on mesos ..."
# Bundle cert + key into a PKCS12 keypair.
openssl pkcs12 -export -in "$MESOS_SANDBOX/.ssl/scheduler.crt" -inkey "$MESOS_SANDBOX/.ssl/scheduler.key" \
-out /tmp/keypair.p12 -name keypair \
-CAfile "$MESOS_SANDBOX/.ssl/ca-bundle.crt" -caname root -passout pass:export
# Import the keypair into a Java keystore.
keytool -importkeystore \
-deststorepass changeit -destkeypass changeit -destkeystore /tmp/kafka-keystore.jks \
-srckeystore /tmp/keypair.p12 -srcstoretype PKCS12 -srcstorepass export \
-alias keypair
# Trust the sandbox CA bundle in a separate truststore.
keytool -import \
-trustcacerts \
-alias root \
-file "$MESOS_SANDBOX/.ssl/ca-bundle.crt" \
-storepass changeit \
-keystore /tmp/kafka-truststore.jks -noprompt
fi
fi
set -o nounset
#!/usr/bin/env bash
#
# Copyright 2019 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Container entrypoint: apply CLI env overrides, render configuration, run
# preflight checks, then exec the launcher so it becomes PID 1.
. /etc/confluent/docker/bash-config
# Set environment values if they exist as arguments
if [ $# -ne 0 ]; then
echo "===> Overriding env params with args ..."
for var in "$@"
do
export "$var"
done
fi
echo "===> User"
id
echo "===> Configuring ..."
/etc/confluent/docker/configure
echo "===> Running preflight checks ... "
/etc/confluent/docker/ensure
echo "===> Launching ... "
# exec so the launch script (and the server it execs) receives signals directly.
exec /etc/confluent/docker/launch
{#- Log4j config for CLI tools: default WARN, overridable via
    KAFKA_TOOLS_LOG4J_LOGLEVEL; logs go to stderr so tool stdout stays clean. -#}
log4j.rootLogger={{ env["KAFKA_TOOLS_LOG4J_LOGLEVEL"] | default('WARN') }}, stderr
log4j.appender.stderr=org.apache.log4j.ConsoleAppender
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.stderr.Target=System.err
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Root logging is disabled; only the explicit loggers below emit output.
log4j.rootLogger=OFF
# Only log errors from Kafka and ZKClient
log4j.logger.org.apache.kafka=ERROR
log4j.logger.org.I0Itec.zkclient.ZkClient=ERROR
# Log informational messages from the CLI and Zookeeper
log4j.logger.io.confluent.admin.utils=INFO, stderr
log4j.logger.org.apache.zookeeper=INFO, stderr
# STDERR Appender
log4j.appender.stderr=org.apache.log4j.ConsoleAppender
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.Target=System.err
log4j.appender.stderr.layout.ConversionPattern=%m%n
#!/bin/bash
# replacejar <jar-prefix>
# Replaces every jar under /usr/share/java whose name starts with <jar-prefix>
# with the freshly staged copy in /tmp (e.g. to swap in a patched jar).
# BUG FIX: added nullglob — without it, a non-matching pattern iterated the
# loop once with the literal glob text and ran cp against a bogus path.
shopt -s globstar nullglob
SRC_DIR=/tmp
DEST_DIR=/usr/share/java
echo "===> Replacing instances for $1 jars under ${DEST_DIR}"
chown appuser:appuser "$SRC_DIR/$1"*.jar
for i in "$DEST_DIR"/**/"$1"*.jar; do # Whitespace-safe and recursive
rm -f "$i"
cp "$SRC_DIR/$1"*.jar "$(dirname "$i")/"
done
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment