2.5.1-debian-10-r0 release

This commit is contained in:
Bitnami Bot
2020-06-16 13:55:13 +00:00
parent 23923a7ae5
commit b18114f4a1
23 changed files with 2341 additions and 0 deletions

View File

@@ -0,0 +1,28 @@
FROM docker.io/bitnami/minideb:buster
# Use the key=value LABEL form; the space-separated form is legacy syntax
LABEL maintainer="Bitnami <containers@bitnami.com>"
ENV HOME="/" \
    OS_ARCH="amd64" \
    OS_FLAVOUR="debian-10" \
    OS_NAME="linux"
COPY prebuildfs /
# Install required system packages and dependencies
RUN install_packages ca-certificates curl gzip libc6 libgcc1 procps tar
# Unpack pre-built Bitnami components, verifying each tarball's SHA256 checksum
RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "java" "1.8.252-3" --checksum 8631fe0cc0887a566e878939cf8cd58650be5d5de23f3c6f94fffb258aadeb3a
RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "yq" "3.3.2-0" --checksum 50cac57ffd984455e7321d1f13380f94b6bda2a16b7e2547ba33aad347d5e9eb
RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "spring-cloud-dataflow" "2.5.1-0" --checksum a3e04a29d806126cb0b276c384b6168c20cbd95f426c2eedad758dae118827ee
RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "gosu" "1.12.0-0" --checksum 582d501eeb6b338a24f417fededbf14295903d6be55c52d66c52e616c81bcd8c
# Apply the latest Debian security updates and drop the apt caches in the same layer
RUN apt-get update && apt-get upgrade -y && \
    rm -r /var/lib/apt/lists /var/cache/apt/archives
COPY rootfs /
RUN /opt/bitnami/scripts/spring-cloud-dataflow/postunpack.sh
ENV BITNAMI_APP_NAME="spring-cloud-dataflow" \
    BITNAMI_IMAGE_VERSION="2.5.1-debian-10-r0" \
    JAVA_TOOL_OPTIONS="-Duser.home=/bitnami/spring-cloud-dataflow" \
    PATH="/opt/bitnami/java/bin:/opt/bitnami/common/bin:$PATH"
# Run as an unprivileged, arbitrary UID (OpenShift compatible)
USER 1001
ENTRYPOINT [ "/opt/bitnami/scripts/spring-cloud-dataflow/entrypoint.sh" ]
CMD [ "/opt/bitnami/scripts/spring-cloud-dataflow/run.sh" ]

View File

@@ -0,0 +1,81 @@
version: '2'
services:
spring-cloud-dataflow:
image: 'docker.io/bitnami/spring-cloud-dataflow:2-debian-10'
restart: always
environment:
- SERVER_PORT=9393
- SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
- SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
# enable advanced features
- SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true
- SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED=true
# configure dataflow stream
- SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI=http://spring-cloud-skipper:7577/api
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
ports:
- '9393:9393'
- '9000-9099:9000-9099'
depends_on:
- mariadb-dataflow
- spring-cloud-skipper
spring-cloud-skipper:
image: 'docker.io/bitnami/spring-cloud-skipper:2-debian-10'
restart: always
environment:
- SERVER_PORT=7577
- SPRING_CLOUD_SKIPPER_DATABASE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
- SPRING_CLOUD_SKIPPER_DATABASE_USERNAME=bn_skipper
- SPRING_CLOUD_SKIPPER_DATABASE_PASSWORD=bn_skipper
ports:
- '9100-9199:9100-9199'
depends_on:
- mariadb-skipper
- rabbitmq
mariadb-dataflow:
image: 'docker.io/bitnami/mariadb:10.3-debian-10'
environment:
- MARIADB_ROOT_PASSWORD=root_password
- MARIADB_USER=bn_dataflow
- MARIADB_PASSWORD=bn_dataflow
- MARIADB_DATABASE=dataflow
volumes:
- 'mariadb_dataflow_data:/bitnami'
mariadb-skipper:
image: 'docker.io/bitnami/mariadb:10.3-debian-10'
environment:
- MARIADB_ROOT_PASSWORD=root_password
- MARIADB_USER=bn_skipper
- MARIADB_PASSWORD=bn_skipper
- MARIADB_DATABASE=skipper
volumes:
- 'mariadb_skipper_data:/bitnami'
mariadb-test:
image: 'docker.io/bitnami/mariadb:10.3-debian-10'
environment:
- MARIADB_ROOT_PASSWORD=root_password
- MARIADB_USER=bn_test
- MARIADB_PASSWORD=bn_test
- MARIADB_DATABASE=test
rabbitmq:
image: 'docker.io/bitnami/rabbitmq:3.8-debian-10'
volumes:
- 'rabbitmq_data:/bitnami'
volumes:
mariadb_dataflow_data:
driver: local
mariadb_skipper_data:
driver: local
rabbitmq_data:
driver: local

View File

@@ -0,0 +1,3 @@
Bitnami containers ship with software bundles. You can find the licenses under:
/opt/bitnami/nami/COPYING
/opt/bitnami/[name-of-bundle]/licenses/[bundle-version].txt

View File

@@ -0,0 +1,51 @@
#!/bin/bash
#
# Bitnami custom library
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Constants
BOLD='\033[1m'
# Functions
########################
# Print the welcome page
# Globals:
# DISABLE_WELCOME_MESSAGE
# BITNAMI_APP_NAME
# Arguments:
# None
# Returns:
# None
#########################
print_welcome_page() {
    # Skip the banner entirely when the user opted out via DISABLE_WELCOME_MESSAGE
    [[ -n "${DISABLE_WELCOME_MESSAGE:-}" ]] && return
    # Only images that define BITNAMI_APP_NAME have a welcome banner to show
    if [[ -n "$BITNAMI_APP_NAME" ]]; then
        print_image_welcome_page
    fi
}
########################
# Print the welcome page for a Bitnami Docker image
# Globals:
# BITNAMI_APP_NAME
# Arguments:
# None
# Returns:
# None
#########################
print_image_welcome_page() {
    # Banner printed on container start, pointing users at the image's GitHub project
    local github_url="https://github.com/bitnami/bitnami-docker-${BITNAMI_APP_NAME}"
    # 'log' writes to stderr via stderr_print; BOLD/RESET are ANSI escapes from this
    # file and liblog.sh respectively
    log ""
    log "${BOLD}Welcome to the Bitnami ${BITNAMI_APP_NAME} container${RESET}"
    log "Subscribe to project updates by watching ${BOLD}${github_url}${RESET}"
    log "Submit issues and feature requests at ${BOLD}${github_url}/issues${RESET}"
    log ""
}

View File

@@ -0,0 +1,69 @@
#!/bin/bash
#
# Library for managing Bitnami components
# Constants
CACHE_ROOT="/tmp/bitnami/pkg/cache"
DOWNLOAD_URL="https://downloads.bitnami.com/files/stacksmith"
# Functions
########################
# Download and unpack a Bitnami package
# Globals:
# OS_NAME
# OS_ARCH
# OS_FLAVOUR
# Arguments:
# $1 - component's name
# $2 - component's version
# Returns:
# None
#########################
########################
# Download and unpack a Bitnami package into /opt/bitnami
# Globals:
#   OS_NAME, OS_ARCH, OS_FLAVOUR, CACHE_ROOT, DOWNLOAD_URL
# Arguments:
#   $1 - component's name
#   $2 - component's version
# Flags:
#   -c|--checksum - expected SHA256 of the tarball
# Returns:
#   None
#########################
component_unpack() {
    local name="${1:?name is required}"
    local version="${2:?version is required}"
    local base_name="${name}-${version}-${OS_NAME}-${OS_ARCH}-${OS_FLAVOUR}"
    local package_sha256=""
    local directory="/opt/bitnami"
    # Validate arguments
    shift 2
    while [ "$#" -gt 0 ]; do
        case "$1" in
            -c|--checksum)
                shift
                package_sha256="${1:?missing package checksum}"
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    echo "Downloading $base_name package"
    if [ -f "${CACHE_ROOT}/${base_name}.tar.gz" ]; then
        # Reuse (and consume) a pre-seeded tarball from the build cache
        echo "${CACHE_ROOT}/${base_name}.tar.gz already exists, skipping download."
        cp "${CACHE_ROOT}/${base_name}.tar.gz" .
        rm "${CACHE_ROOT}/${base_name}.tar.gz"
        if [ -f "${CACHE_ROOT}/${base_name}.tar.gz.sha256" ]; then
            echo "Using the local sha256 from ${CACHE_ROOT}/${base_name}.tar.gz.sha256"
            package_sha256="$(< "${CACHE_ROOT}/${base_name}.tar.gz.sha256")"
            rm "${CACHE_ROOT}/${base_name}.tar.gz.sha256"
        fi
    else
        # --fail makes curl exit non-zero on HTTP 4xx/5xx instead of silently
        # saving the server's error page as if it were the package tarball
        curl --remote-name --silent --fail "${DOWNLOAD_URL}/${base_name}.tar.gz"
    fi
    if [ -n "$package_sha256" ]; then
        echo "Verifying package integrity"
        # sha256sum --check expects "HASH  FILE" (two spaces) on its input line
        echo "$package_sha256  ${base_name}.tar.gz" | sha256sum --check -
    fi
    # Strip "<base_name>/files/" so the payload lands directly under /opt/bitnami
    tar --directory "${directory}" --extract --gunzip --file "${base_name}.tar.gz" --no-same-owner --strip-components=2 "${base_name}/files/"
    rm "${base_name}.tar.gz"
    # Include metadata about the package
    touch "${directory}/.bitnami_packages"
    echo "$base_name" >> "${directory}/.bitnami_packages"
}

View File

@@ -0,0 +1,80 @@
#!/bin/bash
#
# Library for managing files
# Functions
########################
# Replace a regex in a file
# Arguments:
# $1 - filename
# $2 - match regex
# $3 - substitute regex
# $4 - use POSIX regex. Default: true
# Returns:
# None
#########################
########################
# Replace a regex in a file (whole-file rewrite; see note below)
# Arguments:
#   $1 - filename
#   $2 - match regex
#   $3 - substitute regex
#   $4 - use POSIX extended regex. Default: true
# Returns:
#   None
#########################
replace_in_file() {
    local filename="${1:?filename is required}"
    local match_regex="${2:?match regex is required}"
    local substitute_regex="${3:?substitute regex is required}"
    local posix_regex=${4:-true}
    local result
    # We should avoid using 'sed in-place' substitutions
    # 1) They are not compatible with files mounted from ConfigMap(s)
    # 2) We found incompatibility issues with Debian10 and "in-place" substitutions
    # Use a non-printable character as the 'sed' delimiter to avoid clashes with
    # the regexes; declared 'local' so it does not leak into the caller's scope
    local -r del=$'\001'
    if [[ $posix_regex = true ]]; then
        result="$(sed -E "s${del}${match_regex}${del}${substitute_regex}${del}g" "$filename")"
    else
        result="$(sed "s${del}${match_regex}${del}${substitute_regex}${del}g" "$filename")"
    fi
    echo "$result" > "$filename"
}
########################
# Remove a line in a file based on a regex
# Arguments:
# $1 - filename
# $2 - match regex
# $3 - use POSIX regex. Default: true
# Returns:
# None
#########################
########################
# Delete every line of a file that matches a regex
# Arguments:
#   $1 - filename
#   $2 - match regex
#   $3 - use POSIX extended regex. Default: true
# Returns:
#   None
#########################
remove_in_file() {
    local filename="${1:?filename is required}"
    local match_regex="${2:?match regex is required}"
    local posix_regex=${3:-true}
    local filtered
    local -a sed_opts=()
    # In-place sed breaks ConfigMap-mounted files and misbehaves on Debian 10,
    # so filter into a variable and rewrite the file afterwards
    [[ "$posix_regex" = true ]] && sed_opts+=("-E")
    filtered="$(sed "${sed_opts[@]}" "/$match_regex/d" "$filename")"
    echo "$filtered" > "$filename"
}
########################
# Appends text after the last line matching a pattern
# Arguments:
# $1 - file
# $2 - match regex
# $3 - contents to add
# Returns:
# None
#########################
########################
# Appends text after the last line matching a pattern
# Arguments:
#   $1 - file
#   $2 - match regex
#   $3 - contents to add
# Returns:
#   None
#########################
append_file_after_last_match() {
    local file="${1:?missing file}"
    local match_regex="${2:?missing pattern}"
    local value="${3:?missing value}"
    # Declared local so the intermediate buffer does not leak into the caller
    local result
    # We read the file in reverse, replace the first match (0,/pattern/s) and then reverse the results again
    result="$(tac "$file" | sed -E "0,/($match_regex)/s||${value}\n\1|" | tac)"
    echo "$result" > "$file"
}

View File

@@ -0,0 +1,150 @@
#!/bin/bash
#
# Library for file system actions
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Ensure a file/directory is owned (user and group) but the given user
# Arguments:
# $1 - filepath
# $2 - owner
# Returns:
# None
#########################
########################
# Set both the user and group owner of a path to the given name
# Arguments:
#   $1 - filepath
#   $2 - owner (used as user AND group)
# Returns:
#   None
#########################
owned_by() {
    local -r path="${1:?path is missing}"
    local -r owner="${2:?owner is missing}"
    chown "${owner}:${owner}" "$path"
}
########################
# Ensure a directory exists and, optionally, is owned by the given user
# Arguments:
# $1 - directory
# $2 - owner
# Returns:
# None
#########################
########################
# Ensure a directory exists and, optionally, is owned by the given user
# Arguments:
#   $1 - directory
#   $2 - owner (optional)
# Returns:
#   None
#########################
ensure_dir_exists() {
    local -r dir="${1:?directory is missing}"
    local -r owner="${2:-}"
    mkdir -p "$dir"
    # Only touch ownership when an owner was explicitly requested
    [[ -z "$owner" ]] || owned_by "$dir" "$owner"
}
########################
# Checks whether a directory is empty or not
# arguments:
# $1 - directory
# returns:
# boolean
#########################
########################
# Checks whether a directory is empty (or does not exist at all)
# Arguments:
#   $1 - directory
# Returns:
#   Boolean (exit status)
#########################
is_dir_empty() {
    local -r dir="${1:?missing directory}"
    # A missing path counts as empty; otherwise 'ls -A' must print nothing
    [[ ! -e "$dir" || -z "$(ls -A "$dir")" ]]
}
########################
# Checks whether a file can be written to or not
# arguments:
# $1 - file
# returns:
# boolean
#########################
########################
# Checks whether a file can be written to (or created)
# Arguments:
#   $1 - file
# Returns:
#   Boolean (exit status)
#########################
is_file_writable() {
    local -r file="${1:?missing file}"
    local parent
    parent="$(dirname "$file")"
    # Existing file: need write permission on the file itself.
    # Missing file: need an existing, writable parent directory.
    [[ ( -f "$file" && -w "$file" ) || ( ! -f "$file" && -d "$parent" && -w "$parent" ) ]]
}
########################
# Configure permisions and ownership recursively
# Globals:
# None
# Arguments:
# $1 - paths (as a string).
# Flags:
# -f|--file-mode - mode for directories.
# -d|--dir-mode - mode for files.
# -u|--user - user
# -g|--group - group
# Returns:
# None
#########################
configure_permissions_ownership() {
    # Space-separated list of paths to process
    local -r paths="${1:?paths is missing}"
    local dir_mode=""
    local file_mode=""
    local user=""
    local group=""
    # Validate arguments
    shift 1
    while [ "$#" -gt 0 ]; do
        case "$1" in
            -f|--file-mode)
                shift
                file_mode="${1:?missing mode for files}"
                ;;
            -d|--dir-mode)
                shift
                dir_mode="${1:?missing mode for directories}"
                ;;
            -u|--user)
                shift
                user="${1:?missing user}"
                ;;
            -g|--group)
                shift
                group="${1:?missing group}"
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    # Split the space-separated path list into an array
    read -r -a filepaths <<< "$paths"
    for p in "${filepaths[@]}"; do
        if [[ -e "$p" ]]; then
            # -L follows symlinks so modes apply to the targets, not the links
            if [[ -n $dir_mode ]]; then
                find -L "$p" -type d -exec chmod "$dir_mode" {} \;
            fi
            if [[ -n $file_mode ]]; then
                find -L "$p" -type f -exec chmod "$file_mode" {} \;
            fi
            # Apply user and/or group ownership recursively, depending on which were given
            if [[ -n $user ]] && [[ -n $group ]]; then
                chown -LR "$user":"$group" "$p"
            elif [[ -n $user ]] && [[ -z $group ]]; then
                chown -LR "$user" "$p"
            elif [[ -z $user ]] && [[ -n $group ]]; then
                chgrp -LR "$group" "$p"
            fi
        else
            # Missing paths are reported but do not abort the loop
            stderr_print "$p does not exist"
        fi
    done
}

View File

@@ -0,0 +1,110 @@
#!/bin/bash
#
# Library for logging functions
# Constants
RESET='\033[0m'
RED='\033[38;5;1m'
GREEN='\033[38;5;2m'
YELLOW='\033[38;5;3m'
MAGENTA='\033[38;5;5m'
CYAN='\033[38;5;6m'
# Functions
########################
# Print to STDERR
# Arguments:
# Message to print
# Returns:
# None
#########################
########################
# Print to STDERR unless BITNAMI_QUIET is enabled
# Globals:
#   BITNAMI_QUIET - accepts 1/yes/true (case-insensitive) to silence output
# Arguments:
#   Message to print
# Returns:
#   None
#########################
stderr_print() {
    # 'is_boolean_yes' is defined in libvalidations.sh, but depends on this file so we cannot source it
    local quiet="${BITNAMI_QUIET:-false}"
    # comparison is performed without regard to the case of alphabetic characters
    shopt -s nocasematch
    [[ "$quiet" = 1 || "$quiet" =~ ^(yes|true)$ ]] && return
    printf "%b\\n" "${*}" >&2
}
########################
# Log message, prefixed with the module name and a timestamp
# Globals:
#   MODULE - optional module tag included in every line
# Arguments:
#   Message to log
# Returns:
#   None
#########################
log() {
    # %T.%2N = HH:MM:SS plus two digits of fractional seconds
    stderr_print "${CYAN}${MODULE:-} ${MAGENTA}$(date "+%T.%2N ")${RESET}${*}"
}
########################
# Log an 'info' message
# Arguments:
#   Message to log
# Returns:
#   None
#########################
info() {
    log "${GREEN}INFO ${RESET} ==> ${*}"
}
########################
# Log a 'warn' message
# Arguments:
#   Message to log
# Returns:
#   None
#########################
warn() {
    log "${YELLOW}WARN ${RESET} ==> ${*}"
}
########################
# Log an 'error' message
# Arguments:
#   Message to log
# Returns:
#   None
#########################
error() {
    log "${RED}ERROR${RESET} ==> ${*}"
}
########################
# Log a 'debug' message, only when BITNAMI_DEBUG is enabled
# Globals:
#   BITNAMI_DEBUG - accepts 1/yes/true (case-insensitive)
# Arguments:
#   Message to log
# Returns:
#   None
#########################
debug() {
    # 'is_boolean_yes' is defined in libvalidations.sh, but depends on this file so we cannot source it
    local bool="${BITNAMI_DEBUG:-false}"
    # comparison is performed without regard to the case of alphabetic characters
    shopt -s nocasematch
    if [[ "$bool" = 1 || "$bool" =~ ^(yes|true)$ ]]; then
        log "${MAGENTA}DEBUG${RESET} ==> ${*}"
    fi
}
########################
# Indent a string
# Arguments:
# $1 - string
# $2 - number of indentation characters (default: 4)
# $3 - indentation character (default: " ")
# Returns:
# None
#########################
########################
# Indent every line of a string
# Arguments:
#   $1 - string
#   $2 - number of indentation characters (default: 4)
#   $3 - indentation character (default: " ")
# Returns:
#   None (prints the indented string)
#########################
indent() {
    local -r text="${1:-}"
    local -r num="${2:?missing num}"
    local -r char="${3:-" "}"
    local prefix=""
    local i
    # Repeat the indentation character 'num' times
    for ((i = 0; i < num; i++)); do
        prefix+="$char"
    done
    sed "s/^/${prefix}/" <<< "$text"
}

View File

@@ -0,0 +1,140 @@
#!/bin/bash
#
# Library for network functions
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Resolve dns
# Arguments:
# $1 - Hostname to resolve
# Returns:
# IP
#########################
dns_lookup() {
    # Resolve a hostname to its IP address(es), one per line
    local host="${1:?host is missing}"
    # 'getent ahosts' emits one line per address/socket-type pair; keeping only
    # STREAM entries de-duplicates the list, and $1 is the address column
    getent ahosts "$host" | awk '/STREAM/ {print $1 }'
}
#########################
# Wait for a hostname and return the IP
# Arguments:
# $1 - hostname
# $2 - number of retries
# $3 - seconds to wait between retries
# Returns:
# - IP address that corresponds to the hostname
#########################
wait_for_dns_lookup() {
    local hostname="${1:?hostname is missing}"
    local retries="${2:-5}"
    local seconds="${3:-1}"
    # NOTE(review): check_host ignores its arguments and reads $hostname from the
    # enclosing scope; the "${hostname}" appended in the retry_while call below is
    # therefore redundant (and check_host leaks into the caller's shell)
    check_host() {
        if [[ $(dns_lookup "$hostname") == "" ]]; then
            false
        else
            true
        fi
    }
    # Wait for the host to be ready
    retry_while "check_host ${hostname}" "$retries" "$seconds"
    # Print the resolved address(es); runs even if all retries failed
    dns_lookup "$hostname"
}
########################
# Get machine's IP
# Arguments:
# None
# Returns:
# Machine IP
#########################
get_machine_ip() {
    local -a ip_addresses
    local hostname
    hostname="$(hostname)"
    # Collapse the one-per-line dns_lookup output into an array of addresses
    read -r -a ip_addresses <<< "$(dns_lookup "$hostname" | xargs echo)"
    if [[ "${#ip_addresses[@]}" -gt 1 ]]; then
        # Multi-homed host: warn and deterministically pick the first address
        warn "Found more than one IP address associated to hostname ${hostname}: ${ip_addresses[*]}, will use ${ip_addresses[0]}"
    elif [[ "${#ip_addresses[@]}" -lt 1 ]]; then
        # Unresolvable own hostname is fatal
        error "Could not find any IP address associated to hostname ${hostname}"
        exit 1
    fi
    echo "${ip_addresses[0]}"
}
########################
# Check if the provided argument is a resolved hostname
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided hostname resolves to at least one address
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_hostname_resolved() {
    local -r host="${1:?missing value}"
    # Resolved when dns_lookup prints anything at all
    [[ -n "$(dns_lookup "$host")" ]]
}
########################
# Parse URL
# Globals:
# None
# Arguments:
# $1 - uri - String
# $2 - component to obtain. Valid options (scheme, authority, userinfo, host, port, path, query or fragment) - String
# Returns:
# String
########################
# Parse URI and print a single component of it
# Arguments:
#   $1 - uri - String
#   $2 - component to obtain. Valid options (scheme, authority, userinfo, host, port, path, query or fragment) - String
# Returns:
#   String
#########################
parse_uri() {
    local uri="${1:?uri is missing}"
    local component="${2:?component is missing}"
    # Solution based on https://tools.ietf.org/html/rfc3986#appendix-B with
    # additional sub-expressions to split authority into userinfo, host and port
    # Credits to Patryk Obara (see https://stackoverflow.com/a/45977232/6694969)
    local -r URI_REGEX='^(([^:/?#]+):)?(//((([^@/?#]+)@)?([^:/?#]+)(:([0-9]+))?))?(/([^?#]*))?(\?([^#]*))?(#(.*))?'
    # Capture-group number for each supported component of the regex above
    local -rA group_for=(
        [scheme]=2
        [authority]=4
        [userinfo]=6
        [host]=7
        [port]=9
        [path]=10
        [query]=13
        [fragment]=14
    )
    if [[ -z "${group_for[$component]:-}" ]]; then
        stderr_print "unrecognized component $component"
        return 1
    fi
    [[ "$uri" =~ $URI_REGEX ]] && echo "${BASH_REMATCH[${group_for[$component]}]}"
}

View File

@@ -0,0 +1,263 @@
#!/bin/bash
#
# Library for operating system actions
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Check if an user exists in the system
# Arguments:
# $1 - user
# Returns:
# Boolean
#########################
########################
# Check if a user exists in the system
# Arguments:
#   $1 - user
# Returns:
#   Boolean (exit status)
#########################
user_exists() {
    local -r user="${1:?user is missing}"
    # 'id' exits non-zero when the user is unknown; all output is discarded
    id "$user" >/dev/null 2>&1
}
########################
# Check if a group exists in the system
# Arguments:
# $1 - group
# Returns:
# Boolean
#########################
########################
# Check if a group exists in the system
# Arguments:
#   $1 - group
# Returns:
#   Boolean (exit status)
#########################
group_exists() {
    local -r group="${1:?group is missing}"
    # getent exits non-zero when the group database has no such entry
    getent group "$group" >/dev/null 2>&1
}
########################
# Create a group in the system if it does not exist already
# Arguments:
# $1 - group
# Returns:
# None
#########################
########################
# Create a group in the system if it does not exist already
# Arguments:
#   $1 - group
# Returns:
#   None
#########################
ensure_group_exists() {
    local -r group="${1:?group is missing}"
    # groupadd needs root privileges; its output/errors are silenced like the original
    group_exists "$group" || groupadd "$group" >/dev/null 2>&1
}
########################
# Create an user in the system if it does not exist already
# Arguments:
# $1 - user
# $2 - group
# Returns:
# None
#########################
ensure_user_exists() {
    local user="${1:?user is missing}"
    local group="${2:-}"
    # Create the user if missing; errors (e.g. when not running as root) are silenced
    if ! user_exists "$user"; then
        useradd "$user" >/dev/null 2>&1
    fi
    # Optionally ensure the user is a member of the given group, creating it first
    if [[ -n "$group" ]]; then
        ensure_group_exists "$group"
        usermod -a -G "$group" "$user" >/dev/null 2>&1
    fi
}
########################
# Check if the script is currently running as root
# Arguments:
# $1 - user
# $2 - group
# Returns:
# Boolean
#########################
########################
# Check if the script is currently running as root
# Arguments:
#   None
# Returns:
#   Boolean (exit status)
#########################
am_i_root() {
    # Effective UID 0 means root
    [[ "$(id -u)" -eq 0 ]]
}
########################
# Get total memory available
# Arguments:
#   None
# Returns:
#   Memory in megabytes
#########################
get_total_memory() {
    # MemTotal in /proc/meminfo is expressed in kB; dividing by 1024 yields MB
    echo $(($(grep MemTotal /proc/meminfo | awk '{print $2}') / 1024))
}
########################
# Get machine size depending on specified memory
# Globals:
# None
# Arguments:
# $1 - memory size (optional)
# Returns:
# Detected instance size
#########################
########################
# Get machine size depending on specified memory
# Globals:
#   None
# Arguments:
#   $1 - memory size in MB or with M/G suffix (optional; autodetected if empty)
# Returns:
#   Detected instance size (micro..2xlarge)
#########################
get_machine_size() {
    local memory="${1:-}"
    # Declared local so the value does not leak into the caller's scope
    local sanitized_memory
    if [[ -z "$memory" ]]; then
        debug "Memory was not specified, detecting available memory automatically"
        memory="$(get_total_memory)"
    fi
    sanitized_memory=$(convert_to_mb "$memory")
    # Thresholds are in megabytes
    if [[ "$sanitized_memory" -gt 26000 ]]; then
        echo 2xlarge
    elif [[ "$sanitized_memory" -gt 13000 ]]; then
        echo xlarge
    elif [[ "$sanitized_memory" -gt 6000 ]]; then
        echo large
    elif [[ "$sanitized_memory" -gt 3000 ]]; then
        echo medium
    elif [[ "$sanitized_memory" -gt 1500 ]]; then
        echo small
    else
        echo micro
    fi
}
########################
# List the machine sizes supported by get_machine_size
# Globals:
#   None
# Arguments:
#   None
# Returns:
#   Space-separated list of supported sizes
#########################
get_supported_machine_sizes() {
    echo micro small medium large xlarge 2xlarge
}
########################
# Convert memory size from string to amount of megabytes (i.e. 2G -> 2048)
# Globals:
# None
# Arguments:
# $1 - memory size
# Returns:
# Result of the conversion
#########################
########################
# Convert memory size from string to amount of megabytes (i.e. 2G -> 2048)
# Globals:
#   None
# Arguments:
#   $1 - memory size, optionally suffixed with M/G (case-insensitive);
#        unsuffixed values are returned unchanged
# Returns:
#   Result of the conversion
#########################
convert_to_mb() {
    local amount="${1:-}"
    # Declared local so the intermediate values do not leak into the caller
    local size unit
    # Accept both uppercase and lowercase unit suffixes (e.g. 2G, 512m)
    if [[ $amount =~ ^([0-9]+)([MGmg]) ]]; then
        size="${BASH_REMATCH[1]}"
        unit="${BASH_REMATCH[2]^^}"
        if [[ "$unit" = "G" ]]; then
            amount="$((size * 1024))"
        else
            amount="$size"
        fi
    fi
    echo "$amount"
}
#########################
# Redirects output to /dev/null if debug mode is disabled
# Globals:
# BITNAMI_DEBUG
# Arguments:
# $@ - Command to execute
# Returns:
# None
#########################
#########################
# Redirects output to /dev/null if debug mode is disabled
# Globals:
#   BITNAMI_DEBUG
# Arguments:
#   $@ - Command to execute
# Returns:
#   None
#########################
debug_execute() {
    # BITNAMI_DEBUG expands to the 'true' or 'false' builtin
    if ${BITNAMI_DEBUG:-false}; then
        "$@"
        return
    fi
    "$@" >/dev/null 2>&1
}
########################
# Retries a command a given number of times
# Arguments:
# $1 - cmd (as a string)
# $2 - max retries. Default: 12
# $3 - sleep between retries (in seconds). Default: 5
# Returns:
# Boolean
#########################
########################
# Retries a command a given number of times
# Arguments:
#   $1 - cmd (as a string)
#   $2 - max retries. Default: 12
#   $3 - sleep between retries (in seconds). Default: 5
# Returns:
#   Boolean (exit status)
#########################
retry_while() {
    local -r cmd="${1:?cmd is missing}"
    local -r retries="${2:-12}"
    local -r sleep_time="${3:-5}"
    local -a command_words
    local attempt
    # Split the command string into words for safe execution
    read -r -a command_words <<< "$cmd"
    for ((attempt = 1; attempt <= retries; attempt++)); do
        if "${command_words[@]}"; then
            return 0
        fi
        sleep "$sleep_time"
    done
    return 1
}
########################
# Generate a random string
# Arguments:
# -t|--type - String type (ascii, alphanumeric, numeric), defaults to ascii
# -c|--count - Number of characters, defaults to 32
# Arguments:
# None
# Returns:
# None
# Returns:
# String
#########################
########################
# Generate a random string
# Flags:
#   -t|--type - String type (ascii, alphanumeric, numeric), defaults to ascii
#   -c|--count - Number of characters, defaults to 32
# Returns:
#   String
#########################
generate_random_string() {
    local char_type="ascii"
    local length="32"
    local allowed_chars
    local random_chars
    # Parse CLI flags
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            -t|--type)
                shift
                char_type="$1"
                ;;
            -c|--count)
                shift
                length="$1"
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    # Map the requested type to a 'tr' character class
    case "$char_type" in
        ascii)
            allowed_chars="[:print:]"
            ;;
        alphanumeric)
            allowed_chars="a-zA-Z0-9"
            ;;
        numeric)
            allowed_chars="0-9"
            ;;
        *)
            echo "Invalid type ${char_type}" >&2
            return 1
    esac
    # Obtain count + 10 lines from /dev/urandom to ensure that the resulting string has the expected size
    # Note there is a very small chance of strings starting with EOL character
    # Therefore, the higher amount of lines read, this will happen less frequently
    random_chars="$(head -n "$((length + 10))" /dev/urandom | tr -dc "$allowed_chars" | head -c "$length")"
    echo "$random_chars"
}

View File

@@ -0,0 +1,147 @@
#!/bin/bash
#
# Library for managing services
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/libvalidations.sh
# Functions
########################
# Read the provided pid file and returns a PID
# Arguments:
# $1 - Pid file
# Returns:
# PID
#########################
########################
# Read the provided pid file and print the PID it contains
# Arguments:
#   $1 - Pid file
# Returns:
#   PID (prints nothing when the file is missing, empty, or non-positive)
#########################
get_pid_from_file() {
    local -r pid_file="${1:?pid file is missing}"
    local pid
    [[ -f "$pid_file" ]] || return 0
    pid="$(< "$pid_file")"
    # Only report strictly positive numeric PIDs
    if [[ -n "$pid" ]] && [[ "$pid" -gt 0 ]]; then
        echo "$pid"
    fi
}
########################
# Check if a provided PID corresponds to a running service
# Arguments:
# $1 - PID
# Returns:
# Boolean
#########################
########################
# Check if a provided PID corresponds to a running process
# Arguments:
#   $1 - PID
# Returns:
#   Boolean (exit status)
#########################
is_service_running() {
    local -r pid="${1:?pid is missing}"
    # Signal 0 performs an existence/permission check without delivering a signal
    kill -0 "$pid" 2>/dev/null
}
########################
# Stop a service by sending a termination signal to its pid
# Arguments:
# $1 - Pid file
# $2 - Signal number (optional)
# Returns:
# None
#########################
stop_service_using_pid() {
    local pid_file="${1:?pid file is missing}"
    local signal="${2:-}"
    local pid
    pid="$(get_pid_from_file "$pid_file")"
    # Nothing to do when there is no pid or the process is already gone.
    # Note bash grouping here: ([[ -z pid ]] || ! running) && return
    [[ -z "$pid" ]] || ! is_service_running "$pid" && return
    # Deliver the requested signal, or SIGTERM by default
    if [[ -n "$signal" ]]; then
        kill "-${signal}" "$pid"
    else
        kill "$pid"
    fi
    # Poll for up to ~10 seconds; the process is NOT force-killed if it survives
    local counter=10
    while [[ "$counter" -ne 0 ]] && is_service_running "$pid"; do
        sleep 1
        counter=$((counter - 1))
    done
}
########################
# Generate a monit configuration file for a given service
# Arguments:
# $1 - Service name
# $2 - Pid file
# $3 - Start command
# $4 - Stop command
# Flags:
# --disabled - Whether to disable the monit configuration
# Returns:
# None
#########################
generate_monit_conf() {
    local service_name="${1:?service name is missing}"
    local pid_file="${2:?pid file is missing}"
    local start_command="${3:?start command is missing}"
    local stop_command="${4:?stop command is missing}"
    local monit_conf_dir="/etc/monit/conf.d"
    local disabled="no"
    # Parse optional CLI flags
    shift 4
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            --disabled)
                shift
                disabled="$1"
                ;;
            *)
                echo "Invalid command line flag ${1}" >&2
                return 1
                ;;
        esac
        shift
    done
    # Disabled entries get a ".disabled" suffix so monit skips them.
    # NOTE(review): conf_suffix is not declared 'local' and leaks into the caller's scope
    is_boolean_yes "$disabled" && conf_suffix=".disabled"
    mkdir -p "$monit_conf_dir"
    # Write the monit "check process" stanza for the service
    cat >"${monit_conf_dir}/${service_name}.conf${conf_suffix:-}" <<EOF
check process ${service_name}
with pidfile "${pid_file}"
start program = "${start_command}" with timeout 90 seconds
stop program = "${stop_command}" with timeout 90 seconds
EOF
}
########################
# Generate a logrotate configuration file
# Arguments:
#   $1 - Service name
#   $2 - Log path
#   $3 - Period (default: weekly)
#   $4 - Number of rotations to store (default: 150)
#   $5 - Extra options (Optional)
# Returns:
#   None
#########################
generate_logrotate_conf() {
    local service_name="${1:?service name is missing}"
    local log_path="${2:?log path is missing}"
    local period="${3:-weekly}"
    local rotations="${4:-150}"
    local extra_options="${5:-}"
    local logrotate_conf_dir="/etc/logrotate.d"
    mkdir -p "$logrotate_conf_dir"
    # Write one logrotate stanza per service, named after the service
    cat >"${logrotate_conf_dir}/${service_name}" <<EOF
${log_path} {
${period}
rotate ${rotations}
dateext
compress
copytruncate
missingok
${extra_options}
}
EOF
}

View File

@@ -0,0 +1,248 @@
#!/bin/bash
#
# Validation functions library
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Check if the provided argument is an integer
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument is an integer
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_int() {
    local -r int="${1:?missing value}"
    # Anchored at BOTH ends: the original regex lacked the trailing '$', so
    # values like "12abc" or "1.5" were wrongly accepted as integers
    [[ "$int" =~ ^-?[0-9]+$ ]]
}
########################
# Check if the provided argument is a positive integer
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument is a non-negative integer (zero included)
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_positive_int() {
    local -r int="${1:?missing value}"
    is_int "$int" && (( int >= 0 ))
}
########################
# Check if the provided argument is a boolean or is the string 'yes/true'
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument means "yes": 1, yes or true (case-insensitive)
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_boolean_yes() {
    local -r bool="${1:-}"
    # comparison is performed without regard to the case of alphabetic characters
    shopt -s nocasematch
    [[ "$bool" = 1 || "$bool" =~ ^(yes|true)$ ]]
}
########################
# Check if the provided argument is a boolean yes/no value
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument is exactly "yes" or "no"
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_yes_no_value() {
    local -r bool="${1:-}"
    [[ "$bool" =~ ^(yes|no)$ ]]
}
########################
# Check if the provided argument is a boolean true/false value
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument is exactly "true" or "false"
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_true_false_value() {
    local -r bool="${1:-}"
    [[ "$bool" =~ ^(true|false)$ ]]
}
########################
# Check if the provided argument is an empty string or not defined
# Arguments:
# $1 - Value to check
# Returns:
# Boolean
#########################
########################
# Check if the provided argument is an empty string or not defined
# Arguments:
#   $1 - Value to check
# Returns:
#   Boolean (exit status)
#########################
is_empty_value() {
    [[ -z "${1:-}" ]]
}
########################
# Validate if the provided argument is a valid port
# Arguments:
# $1 - Port to validate
# Returns:
# Boolean and error message
#########################
validate_port() {
    local value
    local unprivileged=0
    # Parse flags
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            -unprivileged)
                # Require an unprivileged port (>= 1024)
                unprivileged=1
                ;;
            --)
                shift
                break
                ;;
            -*)
                stderr_print "unrecognized flag $1"
                return 1
                ;;
            *)
                break
                ;;
        esac
        shift
    done
    # Exactly one positional argument (the port) must remain after flag parsing
    if [[ "$#" -gt 1 ]]; then
        echo "too many arguments provided"
        return 2
    elif [[ "$#" -eq 0 ]]; then
        stderr_print "missing port argument"
        return 1
    else
        value=$1
    fi
    # Validation errors below are printed on STDOUT so callers can capture the
    # message; return codes: 1 = usage/empty, 2 = invalid value, 3 = privileged port
    if [[ -z "$value" ]]; then
        echo "the value is empty"
        return 1
    else
        if ! is_int "$value"; then
            echo "value is not an integer"
            return 2
        elif [[ "$value" -lt 0 ]]; then
            echo "negative value provided"
            return 2
        elif [[ "$value" -gt 65535 ]]; then
            echo "requested port is greater than 65535"
            return 2
        elif [[ "$unprivileged" = 1 && "$value" -lt 1024 ]]; then
            echo "privileged port requested"
            return 3
        fi
    fi
}
########################
# Validate if the provided argument is a valid IPv4 address
# Arguments:
# $1 - IP to validate
# Returns:
# Boolean
#########################
########################
# Validate if the provided argument is a valid IPv4 address
# Arguments:
#   $1 - IP to validate
# Returns:
#   Boolean (exit status)
#########################
validate_ipv4() {
    local -r ip="${1:?ip is missing}"
    local -a octets
    # Must be four dot-separated groups of 1-3 digits
    [[ "$ip" =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]] || return 1
    IFS='.' read -r -a octets <<< "$ip"
    # Every octet must fit in 0-255
    [[ "${octets[0]}" -le 255 && "${octets[1]}" -le 255 && "${octets[2]}" -le 255 && "${octets[3]}" -le 255 ]]
}
########################
# Validate a string format
# Arguments:
# $1 - String to validate
# Returns:
# Boolean
#########################
########################
# Validate a string's length
# Flags:
#   -min-length - minimum accepted length (inclusive)
#   -max-length - maximum accepted length (inclusive)
# Arguments:
#   $1 - String to validate (after optional "--")
# Returns:
#   Boolean; a failure message is printed on STDOUT for the caller to capture
#########################
validate_string() {
    local string
    local min_length=-1
    local max_length=-1
    # Parse flags
    while [ "$#" -gt 0 ]; do
        case "$1" in
            -min-length)
                shift
                min_length=${1:-}
                ;;
            -max-length)
                shift
                max_length=${1:-}
                ;;
            --)
                shift
                break
                ;;
            -*)
                stderr_print "unrecognized flag $1"
                return 1
                ;;
            *)
                break
                ;;
        esac
        shift
    done
    # Exactly one positional argument (the string) must remain
    if [ "$#" -gt 1 ]; then
        stderr_print "too many arguments provided"
        return 2
    elif [ "$#" -eq 0 ]; then
        stderr_print "missing string"
        return 1
    else
        string=$1
    fi
    if [[ "$min_length" -ge 0 ]] && [[ "${#string}" -lt "$min_length" ]]; then
        echo "string length is less than $min_length"
        return 1
    fi
    if [[ "$max_length" -ge 0 ]] && [[ "${#string}" -gt "$max_length" ]]; then
        # Message typo fixed: was "great than"
        echo "string length is greater than $max_length"
        return 1
    fi
}

View File

@@ -0,0 +1,49 @@
#!/bin/bash
#
# Library for managing versions strings
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Gets semantic version
# Arguments:
# $1 - version: string to extract major.minor.patch
# $2 - section: 1 to extract major, 2 to extract minor, 3 to extract patch
# Returns:
# array with the major, minor and release
#########################
get_sematic_version () {
    # NOTE(review): function name keeps the original (misspelled) identifier
    # "sematic" since callers elsewhere depend on it
    local version="${1:?version is required}"
    local section="${2:?section is required}"
    local -a version_sections
    #Regex to parse versions: x.y.z
    local -r regex='([0-9]+)(\.([0-9]+)(\.([0-9]+))?)?'
    if [[ "$version" =~ $regex ]]; then
        local i=1
        local j=1
        local n=${#BASH_REMATCH[*]}
        # Walk all capture groups, keeping only the purely numeric ones (groups
        # starting with '.' are the optional ".y"/".z" wrappers) as sections 1..3
        while [[ $i -lt $n ]]; do
            if [[ -n "${BASH_REMATCH[$i]}" ]] && [[ "${BASH_REMATCH[$i]:0:1}" != '.' ]]; then
                version_sections[$j]=${BASH_REMATCH[$i]}
                ((j++))
            fi
            ((i++))
        done
        # Only sections 1 (major), 2 (minor) and 3 (patch) are addressable
        local number_regex='^[0-9]+$'
        if [[ "$section" =~ $number_regex ]] && (( $section > 0 )) && (( $section <= 3 )); then
            echo "${version_sections[$section]}"
            return
        else
            stderr_print "Section allowed values are: 1, 2, and 3"
            return 1
        fi
    fi
}

View File

@@ -0,0 +1,320 @@
#!/bin/bash
#
# Bitnami web server handler library
# shellcheck disable=SC1091
# Load generic libraries
. /opt/bitnami/scripts/liblog.sh
# Load web server libraries
[[ -f "/opt/bitnami/scripts/libapache.sh" ]] && . /opt/bitnami/scripts/libapache.sh
[[ -f "/opt/bitnami/scripts/libnginx.sh" ]] && . /opt/bitnami/scripts/libnginx.sh
# Load environment for all configured web servers
[[ -f "/opt/bitnami/scripts/apache-env.sh" ]] && . /opt/bitnami/scripts/apache-env.sh
[[ -f "/opt/bitnami/scripts/nginx-env.sh" ]] && . /opt/bitnami/scripts/nginx-env.sh
########################
# Prints the currently-enabled web server type
# Globals:
# WEB_SERVER_TYPE
# Arguments:
# None
# Returns:
# None
#########################
web_server_type() {
    # Print the currently-enabled web server type, taken from $WEB_SERVER_TYPE
    printf '%s\n' "$WEB_SERVER_TYPE"
}
########################
# Validate that a supported web server is configured
# Globals:
# WEB_SERVER_*
# Arguments:
# None
# Returns:
# None
#########################
web_server_validate() {
    # Ensure the configured web server type is supported (apache or nginx) and
    # that its helper library (providing is_<type>_running) has been loaded.
    local error_code=0
    local supported_web_servers=("apache" "nginx")
    # Log the problem and mark the validation as failed
    print_validation_error() {
        error "$1"
        error_code=1
    }
    local -r detected_type="$(web_server_type)"
    if [[ -z "$detected_type" || ! " ${supported_web_servers[*]} " == *" ${detected_type} "* ]]; then
        print_validation_error "Could not detect any supported web servers. It must be one of: ${supported_web_servers[*]}"
    elif ! type -t "is_${detected_type}_running" >/dev/null; then
        print_validation_error "Could not load the ${detected_type} web server library from /opt/bitnami/scripts. Check that it exists and is readable."
    fi
    return "$error_code"
}
########################
# Check whether the web server is running
# Globals:
# *
# Arguments:
# None
# Returns:
# true if the web server is running, false otherwise
#########################
is_web_server_running() {
    # Delegate to the web-server-specific status check
    # (is_apache_running or is_nginx_running)
    local -r server="$(web_server_type)"
    "is_${server}_running"
}
########################
# Start web server
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_start() {
    # Invoke the start script shipped with the active web server
    local -r script_path="${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/start.sh"
    "$script_path"
}
########################
# Stop web server
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_stop() {
    # Invoke the stop script shipped with the active web server
    local -r script_path="${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/stop.sh"
    "$script_path"
}
########################
# Restart web server
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_restart() {
    # Invoke the restart script shipped with the active web server
    local -r script_path="${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/restart.sh"
    "$script_path"
}
########################
# Reload web server
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_reload() {
    # Invoke the reload script shipped with the active web server
    local -r script_path="${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/reload.sh"
    "$script_path"
}
########################
# Ensure a web server application configuration exists (i.e. Apache virtual host format or NGINX server block)
# It serves as a wrapper for the specific web server function
# Globals:
# *
# Arguments:
# $1 - App name
# Flags:
# --hosts - Hosts to enable
# --type - Application type, which has an effect on which configuration template to use
# --allow-remote-connections - Whether to allow remote connections or to require local connections
# --disabled - Whether to render the file with a .disabled prefix
# --enable-https - Enable app configuration on HTTPS port
# --http-port - HTTP port number
# --https-port - HTTPS port number
# --document-root - Path to document root directory
# Apache-specific flags:
# --apache-additional-configuration - Additional vhost configuration (no default)
# --apache-allow-override - Whether to allow .htaccess files (only allowed when --move-htaccess is set to 'no')
# --apache-extra-directory-configuration - Extra configuration for the document root directory
# --apache-move-htaccess - Move .htaccess files to a common place so they can be loaded during Apache startup
# NGINX-specific flags:
# --nginx-additional-configuration - Additional server block configuration (no default)
# Returns:
# true if the configuration was enabled, false otherwise
########################
ensure_web_server_app_configuration_exists() {
    # Create the app configuration (Apache virtual host / NGINX server block)
    # by forwarding the supported flags to the active web server's own
    # implementation. Flags namespaced to the non-active server are dropped.
    local app="${1:?missing app}"
    local -a args=()
    # Validate arguments
    shift
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            # Common flags
            --hosts \
            | --type \
            | --allow-remote-connections \
            | --disabled \
            | --enable-https \
            | --http-port \
            | --https-port \
            | --document-root \
            )
                args+=("$1" "$2")
                shift
                ;;
            # Specific Apache flags
            --apache-additional-configuration \
            | --apache-allow-override \
            | --apache-extra-directory-configuration \
            | --apache-move-htaccess \
            )
                # Forward only when Apache is active, stripping the "apache-" prefix
                # so e.g. --apache-allow-override becomes --allow-override
                [[ "$(web_server_type)" == "apache" ]] && args+=("${1//apache-/}" "$2")
                shift
                ;;
            # Specific NGINX flags
            --nginx-additional-configuration)
                # Forward only when NGINX is active, stripping the "nginx-" prefix
                [[ "$(web_server_type)" == "nginx" ]] && args+=("${1//nginx-/}" "$2")
                shift
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    # Delegate to ensure_apache_app_configuration_exists / ensure_nginx_app_configuration_exists
    "ensure_$(web_server_type)_app_configuration_exists" "$app" "${args[@]}"
}
########################
# Ensure a web server application configuration does not exist anymore (i.e. Apache virtual host format or NGINX server block)
# It serves as a wrapper for the specific web server function
# Globals:
# *
# Arguments:
# $1 - App name
# Returns:
# true if the configuration was disabled, false otherwise
########################
ensure_web_server_app_configuration_not_exists() {
    # Remove the app configuration via the web-server-specific implementation
    local app="${1:?missing app}"
    local -r remover="ensure_$(web_server_type)_app_configuration_not_exists"
    "$remover" "$app"
}
########################
# Ensure the web server loads the configuration for an application in a URL prefix
# It serves as a wrapper for the specific web server function
# Globals:
# *
# Arguments:
# $1 - App name
# Flags:
# --allow-remote-connections - Whether to allow remote connections or to require local connections
# --document-root - Path to document root directory
# --prefix - URL prefix from where it will be accessible (i.e. /myapp)
# --type - Application type, which has an effect on what configuration template will be used
# Apache-specific flags:
# --apache-additional-configuration - Additional vhost configuration (no default)
# --apache-allow-override - Whether to allow .htaccess files (only allowed when --move-htaccess is set to 'no')
# --apache-extra-directory-configuration - Extra configuration for the document root directory
# --apache-move-htaccess - Move .htaccess files to a common place so they can be loaded during Apache startup
# NGINX-specific flags:
# --nginx-additional-configuration - Additional server block configuration (no default)
# Returns:
# true if the configuration was enabled, false otherwise
########################
ensure_web_server_prefix_configuration_exists() {
    # Mount an application under a URL prefix (e.g. /myapp) by forwarding the
    # supported flags to the active web server's own implementation. Flags
    # namespaced to the non-active server are dropped.
    local app="${1:?missing app}"
    local -a args=()
    # Validate arguments
    shift
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            # Common flags
            --allow-remote-connections \
            | --document-root \
            | --prefix \
            | --type \
            )
                args+=("$1" "$2")
                shift
                ;;
            # Specific Apache flags
            --apache-additional-configuration \
            | --apache-allow-override \
            | --apache-extra-directory-configuration \
            | --apache-move-htaccess \
            )
                # Forward only when Apache is active, stripping the "apache-" prefix
                [[ "$(web_server_type)" == "apache" ]] && args+=("${1//apache-/}" "$2")
                shift
                ;;
            # Specific NGINX flags
            --nginx-additional-configuration)
                # Forward only when NGINX is active, stripping the "nginx-" prefix
                [[ "$(web_server_type)" == "nginx" ]] && args+=("${1//nginx-/}" "$2")
                shift
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    # Delegate to ensure_apache_prefix_configuration_exists / ensure_nginx_prefix_configuration_exists
    "ensure_$(web_server_type)_prefix_configuration_exists" "$app" "${args[@]}"
}
########################
# Enable loading page, which shows users that the initialization process is not yet completed
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_enable_loading_page() {
    # Install a catch-all "__loading" app configuration that answers every
    # request with HTTP 503 and serves index.html, signalling to users that
    # initialization is still in progress; then reload the server to apply it.
    ensure_web_server_app_configuration_exists "__loading" --hosts "_default_" \
        --apache-additional-configuration "
# Show a HTTP 503 Service Unavailable page by default
RedirectMatch 503 ^/$
# Show index.html if server is answering with 404 Not Found or 503 Service Unavailable status codes
ErrorDocument 404 /index.html
ErrorDocument 503 /index.html" \
        --nginx-additional-configuration "
# Show a HTTP 503 Service Unavailable page by default
location / {
  return 503;
}
# Show index.html if server is answering with 404 Not Found or 503 Service Unavailable status codes
error_page 404 @installing;
error_page 503 @installing;
location @installing {
  rewrite ^(.*)$ /index.html break;
}"
    # Apply the new configuration
    web_server_reload
}
########################
# Disable the loading page once the initialization process has completed
# Globals:
# *
# Arguments:
# None
# Returns:
# None
#########################
web_server_disable_install_page() {
    # Remove the "__loading" placeholder configuration installed by
    # web_server_enable_loading_page and reload the server to apply the change
    ensure_web_server_app_configuration_not_exists "__loading"
    web_server_reload
}

View File

@@ -0,0 +1,24 @@
#!/bin/sh
# Install the requested apt packages (without recommends), retrying the whole
# update+install transaction to ride out transient mirror/network failures,
# then drop the apt caches so the image layer stays small.
set -e
set -u
export DEBIAN_FRONTEND=noninteractive
attempt=0
max_retries=2
while [ "$attempt" -le "$max_retries" ]; do
    # Temporarily allow failure so the exit code can be captured and inspected
    set +e
    (
        apt-get update -qq &&
        apt-get install -y --no-install-recommends "$@"
    )
    CODE=$?
    set -e
    if [ "$CODE" -eq 0 ]; then
        break
    fi
    if [ "$attempt" -eq "$max_retries" ]; then
        # Out of retries: propagate the last apt exit code
        exit "$CODE"
    fi
    echo "apt failed, retrying"
    attempt=$((attempt + 1))
done
rm -r /var/lib/apt/lists /var/cache/apt/archives

View File

@@ -0,0 +1,162 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow library
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/libfile.sh
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/liblog.sh
. /opt/bitnami/scripts/libnet.sh
. /opt/bitnami/scripts/libservice.sh
. /opt/bitnami/scripts/libvalidations.sh
########################
# Validate settings in SPRING_CLOUD_DATAFLOW_* environment variables
# Globals:
# SPRING_CLOUD_DATAFLOW_*
# Arguments:
# None
# Returns:
# None
#########################
dataflow_validate() {
    # Validate settings in SPRING_CLOUD_DATAFLOW_* / SPRING_CLOUD_KUBERNETES_*
    # environment variables, logging every problem found.
    # Returns:
    #   0 when all checks pass, 1 otherwise
    info "Validating settings in SPRING_CLOUD_DATAFLOW_* env vars"
    local error_code=0
    # Log the error and flag the validation as failed
    print_validation_error() {
        error "$1"
        error_code=1
    }
    if [[ "$SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API" = "true" ]]; then
        # Fixed typo in user-facing message: "secrect" -> "secret"
        if is_empty_value "$SPRING_CLOUD_KUBERNETES_SECRETS_PATHS"; then
            print_validation_error "You set the environment variable SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true. A Kubernetes secret is expected to be mounted in SPRING_CLOUD_KUBERNETES_SECRETS_PATHS."
        else
            warn "Using Kubernetes Secrets."
        fi
        is_empty_value "$SPRING_CLOUD_KUBERNETES_CONFIG_NAME" && print_validation_error "If SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true, you must set a ConfigMap name in SPRING_CLOUD_KUBERNETES_CONFIG_NAME."
    fi
    if [[ "$SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED" = "true" ]]; then
        # Streams are deployed through Skipper, so its endpoint is mandatory
        is_empty_value "$SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI" && print_validation_error "If SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true then you must set a skipper server URI in SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI"
    fi
    ! is_empty_value "$SERVER_PORT" && ! validate_port -unprivileged "$SERVER_PORT" && print_validation_error "SERVER_PORT with value = ${SERVER_PORT} is not a valid port."
    [[ "$error_code" -eq 0 ]] || return "$error_code"
}
########################
# Creates Spring Cloud Data Flow default configuration file
# Globals:
# SPRING_CLOUD_DATAFLOW_*
# Arguments:
# None
# Returns:
# None
#########################
dataflow_create_default_config() {
    # Write the build-time default application.yml; these values may be
    # overridden at runtime by dataflow_update_custom_config
    info "Creating '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' as the main configuration file with default values"
    cat > "$SPRING_CLOUD_DATAFLOW_CONF_FILE" <<EOF
spring:
  cloud:
    config:
      enabled: ${SPRING_CLOUD_CONFIG_ENABLED_DEFAULT}
  datasource:
    testOnBorrow: true
    validationQuery: SELECT 1
maven:
  localRepository: ${SPRING_CLOUD_DATAFLOW_VOLUME_DIR}/.m2/repository/
EOF
}
########################
# Update Spring Cloud Data Flow configuration file with user custom inputs
# Globals:
# SPRING_CLOUD_DATAFLOW_*
# Arguments:
# None
# Returns:
# None
#########################
dataflow_update_custom_config() {
    # Overlay user-supplied SPRING_CLOUD_DATAFLOW_* settings onto the config
    # file. Database and messaging settings are only applied when Kubernetes
    # Secrets are NOT in use (otherwise those values come from the mounted secret).
    ! is_empty_value "$SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED" && dataflow_conf_set "spring.cloud.config.enabled" "$SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED"
    if [[ "$SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API" = "false" ]]; then
        # Database setting
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_URL" && dataflow_conf_set "spring.datasource.url" "$SPRING_CLOUD_DATAFLOW_DATABASE_URL"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME" && dataflow_conf_set "spring.datasource.username" "$SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD" && dataflow_conf_set "spring.datasource.password" "$SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER" && dataflow_conf_set "spring.datasource.driver-class-name" "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"
        # When a database URL is given without an explicit driver, default to the
        # MariaDB driver and use the matching Hibernate dialect
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_URL"; then
            is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER" && dataflow_conf_set "spring.datasource.driver-class-name" "org.mariadb.jdbc.Driver"
            if [[ "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER" = "org.mariadb.jdbc.Driver" ]] || is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"; then
                dataflow_conf_set "spring.jpa.properties.hibernate.dialect" "org.hibernate.dialect.MariaDB102Dialect"
            fi
        fi
        # Stream settings are injected as common application properties
        local -r spring_stream_prop="spring.cloud.dataflow.applicationProperties.stream"
        # Kafka settings
        local -r kafka_prop="${spring_stream_prop}.spring.cloud.stream.kafka"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI" && dataflow_conf_set "${kafka_prop}.binder.brokers" "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI" && \
            dataflow_conf_set "${kafka_prop}.streams.binder.brokers" "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI" && dataflow_conf_set "${kafka_prop}.binder.zkNodes" "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI" && \
            dataflow_conf_set "${kafka_prop}.streams.binder.zkNodes" "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI"
        # RabbitMQ settings
        local -r rabbitmq_prop="${spring_stream_prop}.spring.rabbitmq"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST" && dataflow_conf_set "${rabbitmq_prop}.host" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT" && dataflow_conf_set "${rabbitmq_prop}.port" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME" && dataflow_conf_set "${rabbitmq_prop}.username" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME"
        ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD" && dataflow_conf_set "${rabbitmq_prop}.password" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD"
    fi
    # Avoid exit code of previous commands to affect the result of this function
    true
}
########################
# Add or modify an entry in the Spring Cloud Data Flow configuration file ("$SPRING_CLOUD_DATAFLOW_CONF_FILE")
# Globals:
# SPRING_CLOUD_DATAFLOW_*
# Arguments:
# $1 - Spring Cloud Data Flow variable name
# $2 - Value to assign to the Spring Cloud Data Flow variable
# Note: the value is written as-is via yq; no quoting/literal flag is supported
# Returns:
# None
#########################
dataflow_conf_set() {
    # Add or modify a key in the Data Flow YAML configuration file using yq
    # Arguments:
    #   $1 - configuration key (dotted path)
    #   $2 - value to assign
    local -r setting_key="${1:?key missing}"
    local -r setting_value="${2:?value missing}"
    info "Setting ${setting_key} option"
    debug "Setting ${setting_key} to '${setting_value}' in dataflow configuration"
    # yq v3 syntax: "w" (write) with "-i" for in-place edits
    yq w -i "$SPRING_CLOUD_DATAFLOW_CONF_FILE" "$setting_key" "$setting_value"
}
########################
# Ensure Spring Cloud Data Flow is initialized
# Globals:
# SPRING_CLOUD_DATAFLOW_*
# Arguments:
# None
# Returns:
# None
#########################
dataflow_initialize() {
    # Apply environment-driven configuration when the config file is writable;
    # otherwise only warn and leave the file untouched (e.g. read-only mounts).
    if ! is_file_writable "$SPRING_CLOUD_DATAFLOW_CONF_FILE"; then
        warn "The Spring Cloud Data Flow configuration file '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' is not writable. Configurations based on environment variables will not be applied for this file."
        return 0
    fi
    info "Updating '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' with custom configuration"
    dataflow_update_custom_config
}

View File

@@ -0,0 +1,93 @@
#!/bin/bash
#
# Environment configuration for spring-cloud-dataflow
# The values for all environment variables will be set in the below order of precedence
# 1. Custom environment variables defined below after Bitnami defaults
# 2. Constants defined in this file (environment variables with no default), i.e. BITNAMI_ROOT_DIR
# 3. Environment variables overridden via external files using *_FILE variables (see below)
# 4. Environment variables set externally (i.e. current Bash context/Dockerfile/userdata)
export BITNAMI_ROOT_DIR="/opt/bitnami"
export BITNAMI_VOLUME_DIR="/bitnami"
# Logging configuration
export MODULE="${MODULE:-spring-cloud-dataflow}"
export BITNAMI_DEBUG="${BITNAMI_DEBUG:-false}"
# By setting an environment variable matching *_FILE to a file path, the prefixed environment
# variable will be overridden with the value specified in that file
# (useful for mounting credentials as files, e.g. Docker/Kubernetes secrets)
spring_cloud_dataflow_env_vars=(
    SERVER_PORT
    SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED
    SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API
    SPRING_CLOUD_KUBERNETES_CONFIG_NAME
    SPRING_CLOUD_KUBERNETES_SECRETS_PATHS
    SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED
    SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED
    SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED
    SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI
    SPRING_CLOUD_DATAFLOW_DATABASE_URL
    SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME
    SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD
    SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER
    SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI
    SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI
    SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST
    SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT
    SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME
    SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD
)
for env_var in "${spring_cloud_dataflow_env_vars[@]}"; do
    file_env_var="${env_var}_FILE"
    if [[ -n "${!file_env_var:-}" ]]; then
        # Override the variable with the contents of the referenced file
        export "${env_var}=$(< "${!file_env_var}")"
        unset "${file_env_var}"
    fi
done
unset spring_cloud_dataflow_env_vars
# Paths
export SPRING_CLOUD_DATAFLOW_BASE_DIR="${BITNAMI_ROOT_DIR}/spring-cloud-dataflow"
export SPRING_CLOUD_DATAFLOW_VOLUME_DIR="${BITNAMI_VOLUME_DIR}/spring-cloud-dataflow"
export SPRING_CLOUD_DATAFLOW_CONF_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/conf"
export SPRING_CLOUD_DATAFLOW_LOGS_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/logs"
export SPRING_CLOUD_DATAFLOW_TMP_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/tmp"
export SPRING_CLOUD_DATAFLOW_CONF_FILE="${SPRING_CLOUD_DATAFLOW_CONF_DIR}/application.yml"
# System users (when running with a privileged user)
export SPRING_CLOUD_DATAFLOW_DAEMON_USER="dataflow"
export SPRING_CLOUD_DATAFLOW_DAEMON_GROUP="dataflow"
# Spring Cloud Data Flow build-time defaults; these variables are used to create the default config file at build time
export SPRING_CLOUD_CONFIG_ENABLED_DEFAULT="false"
# Dataflow settings
export SERVER_PORT="${SERVER_PORT:-}"
export SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED="${SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED:-}"
export SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API="${SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API:-false}"
export SPRING_CLOUD_KUBERNETES_CONFIG_NAME="${SPRING_CLOUD_KUBERNETES_CONFIG_NAME:-}"
export SPRING_CLOUD_KUBERNETES_SECRETS_PATHS="${SPRING_CLOUD_KUBERNETES_SECRETS_PATHS:-}"
export SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED="${SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED:-false}"
export SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED="${SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED:-false}"
export SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED="${SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED:-false}"
export SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI="${SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI:-}"
# Database settings
export SPRING_CLOUD_DATAFLOW_DATABASE_URL="${SPRING_CLOUD_DATAFLOW_DATABASE_URL:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME="${SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD="${SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER="${SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER:-}"
# Messaging settings
export SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI="${SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI="${SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD:-}"
# Custom environment variables may be defined below

View File

@@ -0,0 +1,28 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow entrypoint
# shellcheck disable=SC1091
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace # Uncomment this line for debugging purpose
# Load libraries
. /opt/bitnami/scripts/libbitnami.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
print_welcome_page
# Run the setup phase only when the container was started with the default run
# command; a custom command (e.g. an interactive shell) skips it
if [[ "$*" = *"/opt/bitnami/scripts/spring-cloud-dataflow/run.sh"* || "$*" = "/run.sh" ]]; then
    info "** Starting Spring Cloud Data Flow setup **"
    /opt/bitnami/scripts/spring-cloud-dataflow/setup.sh
    info "** Spring Cloud Data Flow setup finished! **"
fi
echo ""
# Replace the shell with the requested command so it runs as PID 1
exec "$@"

View File

@@ -0,0 +1,27 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow postunpack
# shellcheck disable=SC1091
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace # Uncomment this line for debugging purpose
# Load libraries
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
# Configure Spring Cloud Data Flow options based on build-time defaults
info "Configuring default Spring Cloud Data Flow options"
# The conf dir must exist before the default config file is written into it
ensure_dir_exists "$SPRING_CLOUD_DATAFLOW_CONF_DIR"
dataflow_create_default_config
# Make the app directories group-writable so the container can also run as an
# unprivileged/non-root user
for dir in "${SPRING_CLOUD_DATAFLOW_VOLUME_DIR}" "${SPRING_CLOUD_DATAFLOW_CONF_DIR}" "${SPRING_CLOUD_DATAFLOW_LOGS_DIR}" "${SPRING_CLOUD_DATAFLOW_TMP_DIR}"; do
    ensure_dir_exists "$dir"
    chmod -R g+rwX "$dir"
done

View File

@@ -0,0 +1,29 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow run
# shellcheck disable=SC1091
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace # Uncomment this line for debugging purpose
# Load libraries
. /opt/bitnami/scripts/liblog.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
. /opt/bitnami/scripts/libos.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
info "** Starting Spring Cloud Data Flow **"
# Launch the server jar, pointing Spring at the generated configuration file;
# any extra arguments passed to this script are forwarded to the JVM invocation
__run_cmd="java"
__run_flags=("-jar" "${SPRING_CLOUD_DATAFLOW_BASE_DIR}/spring-cloud-dataflow.jar" "--spring.config.additional-location=${SPRING_CLOUD_DATAFLOW_CONF_FILE}" "$@")
if am_i_root; then
    # Drop privileges to the dataflow daemon user when running as root
    exec gosu "$SPRING_CLOUD_DATAFLOW_DAEMON_USER" "${__run_cmd}" "${__run_flags[@]}"
else
    exec "${__run_cmd}" "${__run_flags[@]}"
fi

View File

@@ -0,0 +1,25 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow setup
# shellcheck disable=SC1091
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace # Uncomment this line for debugging purpose
# Load Generic Libraries
. /opt/bitnami/scripts/libvalidations.sh
. /opt/bitnami/scripts/libos.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
# Ensure Spring Cloud Data Flow environment variables settings are valid
dataflow_validate
# Ensure 'daemon' user exists when running as 'root'
am_i_root && ensure_user_exists "$SPRING_CLOUD_DATAFLOW_DAEMON_USER" "$SPRING_CLOUD_DATAFLOW_DAEMON_GROUP"
# Ensure Spring Cloud Data Flow is initialized (apply env-based configuration)
dataflow_initialize

View File

@@ -0,0 +1,133 @@
# What is Spring Cloud Data Flow?
> Spring Cloud Data Flow is a microservices-based toolkit for building streaming and batch data processing pipelines in Cloud Foundry and Kubernetes.
[Overview of spring cloud data flow](https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/#getting-started)
# TL;DR;
## Docker Compose
```console
$ curl -sSL https://raw.githubusercontent.com/bitnami/bitnami-docker-spring-cloud-dataflow/master/docker-compose.yml > docker-compose.yml
$ docker-compose up -d
```
# Why use Bitnami Images?
* Bitnami closely tracks upstream source changes and promptly publishes new versions of this image using our automated systems.
* With Bitnami images the latest bug fixes and features are available as soon as possible.
* Bitnami containers, virtual machines and cloud images use the same components and configuration approach - making it easy to switch between formats based on your project needs.
* All our images are based on [minideb](https://github.com/bitnami/minideb) a minimalist Debian based container image which gives you a small base container image and the familiarity of a leading linux distribution.
* All Bitnami images available in Docker Hub are signed with [Docker Content Trust (DCT)](https://docs.docker.com/engine/security/trust/content_trust/). You can use `DOCKER_CONTENT_TRUST=1` to verify the integrity of the images.
* Bitnami container images are released daily with the latest distribution packages available.
> This [CVE scan report](https://quay.io/repository/bitnami/spring-cloud-dataflow?tab=tags) contains a security report with all open CVEs. To get the list of actionable security issues, find the "latest" tag, click the vulnerability report link under the corresponding "Security scan" field and then select the "Only show fixable" filter on the next page.
# How to deploy Spring Cloud Data Flow in Kubernetes?
Deploying Bitnami applications as Helm Charts is the easiest way to get started with our applications on Kubernetes. Read more about the installation in the [Bitnami Spring Cloud Data Flow Chart GitHub repository](https://github.com/bitnami/charts/tree/master/bitnami/spring-cloud-dataflow).
# Why use a non-root container?
Non-root container images add an extra layer of security and are generally recommended for production environments. However, because they run as a non-root user, privileged tasks are typically off-limits. Learn more about non-root containers [in our docs](https://docs.bitnami.com/tutorials/work-with-non-root-containers/).
# Supported tags and respective `Dockerfile` links
Learn more about the Bitnami tagging policy and the difference between rolling tags and immutable tags [in our documentation page](https://docs.bitnami.com/tutorials/understand-rolling-tags-containers/).
* [`2-debian-10`, `2.5.1-debian-10-r0`, `2`, `2.5.1`, `latest` (2/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/blob/2.5.1-debian-10-r0/2/debian-10/Dockerfile)
Subscribe to project updates by watching the [bitnami/spring-cloud-dataflow GitHub repo](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow).
# Get this image
The recommended way to get the Bitnami spring-cloud-dataflow Docker Image is to pull the prebuilt image from the [Docker Hub Registry](https://hub.docker.com/r/bitnami/spring-cloud-dataflow).
```console
$ docker pull bitnami/spring-cloud-dataflow:latest
```
To use a specific version, you can pull a versioned tag. You can view the [list of available versions](https://hub.docker.com/r/bitnami/spring-cloud-dataflow/tags/) in the Docker Hub Registry.
```console
$ docker pull bitnami/spring-cloud-dataflow:[TAG]
```
If you wish, you can also build the image yourself.
```console
$ docker build -t bitnami/spring-cloud-dataflow:latest 'https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow.git#master:2/debian-10'
```
# Configuration
You can use the following environment variables to configure the deployment of Spring Cloud Data Flow.
## Configuring database
A relational database is used to store stream and task definitions as well as the state of executed tasks. Spring Cloud Data Flow provides schemas for H2, MySQL, Oracle, PostgreSQL, Db2, and SQL Server. Use the following environment to configure the connection.
- SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
- SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
## Configuring advanced features
Spring Cloud Data Flow Server offers a specific set of features that can be enabled/disabled when launching.
- SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true. If you enable streams, you will need to configure the stream platform, see [Configuring stream platform](#configuring-stream-platform).
- SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED=true
## Configuring stream platform
In order to deploy streams using data flow you will require [Spring Cloud Skipper](https://github.com/bitnami/bitnami-docker-spring-cloud-skipper) and one of the following messaging platforms. Please add the following environment variable to point to a different skipper endpoint.
- SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI=http://spring-cloud-skipper:7577/api
### Using RabbitMQ
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
### Using Kafka
- SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI=PLAINTEXT://kafka-broker:9092
- SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI=zookeeper:2181
Consult the [spring-cloud-dataflow Reference Documentation](https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/#configuration-local) to find the complete list of configuration options.
# Contributing
We'd love for you to contribute to this container. You can request new features by creating an [issue](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/issues), or submit a [pull request](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/pulls) with your contribution.
# Issues
If you encounter a problem running this container, you can file an [issue](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/issues/new). For us to provide better support, be sure to include the following information in your issue:
- Host OS and version
- Docker version (`docker version`)
- Output of `docker info`
- Version of this container
- The command you used to run the container, and any relevant output you saw (masking any sensitive information)
# License
Copyright 2020 Bitnami
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,81 @@
# docker-compose topology: Data Flow server + Skipper, each backed by its own
# MariaDB instance, with RabbitMQ as the stream binder.
# NOTE(review): credentials below (bn_dataflow/bn_skipper/root_password) are
# development defaults visible in plain text — override them for production.
version: '2'
services:
  # Spring Cloud Data Flow server (UI/API on 9393); delegates stream
  # deployments to the Skipper server and persists state in mariadb-dataflow.
  spring-cloud-dataflow:
    image: 'docker.io/bitnami/spring-cloud-dataflow:2-debian-10'
    restart: always
    environment:
      - SERVER_PORT=9393
      # useMysqlMetadata=true lets the MariaDB JDBC driver report MySQL
      # metadata, which Data Flow's schema tooling expects.
      - SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
      - SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
      - SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
      # enable advanced features
      - SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true
      - SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED=true
      # configure dataflow stream
      - SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI=http://spring-cloud-skipper:7577/api
      - SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
      - SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
      - SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
      - SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
    ports:
      - '9393:9393'
      # 9000-9099: port range for locally deployed stream applications
      - '9000-9099:9000-9099'
    depends_on:
      - mariadb-dataflow
      - spring-cloud-skipper
  # Skipper server (API on 7577): handles stream application lifecycle
  # (deploy/upgrade/rollback) on behalf of the Data Flow server.
  spring-cloud-skipper:
    image: 'docker.io/bitnami/spring-cloud-skipper:2-debian-10'
    restart: always
    environment:
      - SERVER_PORT=7577
      - SPRING_CLOUD_SKIPPER_DATABASE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
      - SPRING_CLOUD_SKIPPER_DATABASE_USERNAME=bn_skipper
      - SPRING_CLOUD_SKIPPER_DATABASE_PASSWORD=bn_skipper
    ports:
      # 9100-9199: port range for applications deployed by Skipper
      - '9100-9199:9100-9199'
    depends_on:
      - mariadb-skipper
      - rabbitmq
  # Dedicated database for the Data Flow server.
  mariadb-dataflow:
    image: 'docker.io/bitnami/mariadb:10.3-debian-10'
    environment:
      - MARIADB_ROOT_PASSWORD=root_password
      - MARIADB_USER=bn_dataflow
      - MARIADB_PASSWORD=bn_dataflow
      - MARIADB_DATABASE=dataflow
    volumes:
      - 'mariadb_dataflow_data:/bitnami'
  # Dedicated database for the Skipper server.
  mariadb-skipper:
    image: 'docker.io/bitnami/mariadb:10.3-debian-10'
    environment:
      - MARIADB_ROOT_PASSWORD=root_password
      - MARIADB_USER=bn_skipper
      - MARIADB_PASSWORD=bn_skipper
      - MARIADB_DATABASE=skipper
    volumes:
      - 'mariadb_skipper_data:/bitnami'
  # NOTE(review): no other service in this file references mariadb-test and it
  # declares no persistent volume — presumably used by launched tasks or tests;
  # confirm it is still needed.
  mariadb-test:
    image: 'docker.io/bitnami/mariadb:10.3-debian-10'
    environment:
      - MARIADB_ROOT_PASSWORD=root_password
      - MARIADB_USER=bn_test
      - MARIADB_PASSWORD=bn_test
      - MARIADB_DATABASE=test
  # RabbitMQ message broker used as the stream binder.
  rabbitmq:
    image: 'docker.io/bitnami/rabbitmq:3.8-debian-10'
    volumes:
      - 'rabbitmq_data:/bitnami'
# Named volumes so database and broker state survive container re-creation.
volumes:
  mariadb_dataflow_data:
    driver: local
  mariadb_skipper_data:
    driver: local
  rabbitmq_data:
    driver: local