fix: add env variable KAFKA_CFG_MESSAGE_MAX_BYTES (#54458)

* fix: add env variable KAFKA_CFG_MESSAGE_MAX_BYTES

Signed-off-by: jk2K <4025839+jk2K@users.noreply.github.com>

* Update bitnami/kafka/3.3/debian-11/rootfs/opt/bitnami/scripts/kafka-env.sh

Co-authored-by: Ibone González Mauraza <ibone.gonzalez-mauraza@broadcom.com>
Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>

* Update bitnami/kafka/3.2/debian-11/rootfs/opt/bitnami/scripts/kafka-env.sh

Co-authored-by: Ibone González Mauraza <ibone.gonzalez-mauraza@broadcom.com>
Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>

* Update bitnami/kafka/3.5/debian-11/rootfs/opt/bitnami/scripts/kafka-env.sh

Co-authored-by: Ibone González Mauraza <ibone.gonzalez-mauraza@broadcom.com>
Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>

* Update bitnami/kafka/3.6/debian-11/rootfs/opt/bitnami/scripts/kafka-env.sh

Co-authored-by: Ibone González Mauraza <ibone.gonzalez-mauraza@broadcom.com>
Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>

* Update kafka-env.sh

Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>

---------

Signed-off-by: jk2K <4025839+jk2K@users.noreply.github.com>
Signed-off-by: Meng Ye <4025839+jk2K@users.noreply.github.com>
Co-authored-by: Ibone González Mauraza <ibone.gonzalez-mauraza@broadcom.com>
Author: Meng Ye
Date: 2024-01-15 20:19:08 +08:00
Committed by: GitHub
Parent: 6d1e08769f
Commit: 0b8731370e
10 changed files with 20 additions and 0 deletions
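
With this change, the broker-side message.max.bytes limit can be driven by an environment variable, following the pattern already used for KAFKA_CFG_MAX_REQUEST_SIZE (producer) and KAFKA_CFG_MAX_PARTITION_FETCH_BYTES (consumer). A minimal usage sketch; the image tag, container name and value are illustrative, and the listener/KRaft settings a real deployment needs are omitted here:

  docker run -d --name kafka \
    -e KAFKA_CFG_MESSAGE_MAX_BYTES=5242880 \
    bitnami/kafka:3.6

If the variable is left unset (the default), the scripts below skip the property entirely and the broker keeps Kafka's built-in message.max.bytes default.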

@@ -95,6 +95,9 @@ export KAFKA_OPTS="${KAFKA_OPTS:-}"
export KAFKA_CFG_SASL_ENABLED_MECHANISMS="${KAFKA_CFG_SASL_ENABLED_MECHANISMS:-PLAIN,SCRAM-SHA-256,SCRAM-SHA-512}"
export KAFKA_KRAFT_CLUSTER_ID="${KAFKA_KRAFT_CLUSTER_ID:-}"
export KAFKA_SKIP_KRAFT_STORAGE_INIT="${KAFKA_SKIP_KRAFT_STORAGE_INIT:-false}"
export KAFKA_CFG_MAX_REQUEST_SIZE="${KAFKA_CFG_MAX_REQUEST_SIZE:-}"
export KAFKA_CFG_MESSAGE_MAX_BYTES="${KAFKA_CFG_MESSAGE_MAX_BYTES:-}"
export KAFKA_CFG_MAX_PARTITION_FETCH_BYTES="${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}"
export KAFKA_CLIENT_LISTENER_NAME="${KAFKA_CLIENT_LISTENER_NAME:-}"
# ZooKeeper connection settings
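
The new export uses the same "${VAR:-}" expansion as the surrounding lines: an unset variable becomes an empty string rather than an error when nounset is in effect, and an empty value is later treated as "not configured". A standalone illustration of the idiom (the variable name is made up for the example):

  set -u
  unset DEMO_MAX_BYTES
  export DEMO_MAX_BYTES="${DEMO_MAX_BYTES:-}"   # unset -> empty string, no unbound-variable error
  [ -z "$DEMO_MAX_BYTES" ] && echo "DEMO_MAX_BYTES not configured"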

@@ -937,6 +937,7 @@ kafka_initialize() {
kafka_configure_from_environment_variables
# Configure Kafka producer/consumer to set up message sizes
! is_empty_value "${KAFKA_CFG_MAX_REQUEST_SIZE:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/producer.properties" max.request.size "$KAFKA_CFG_MAX_REQUEST_SIZE"
! is_empty_value "${KAFKA_CFG_MESSAGE_MAX_BYTES:-}" && kafka_server_conf_set message.max.bytes "$KAFKA_CFG_MESSAGE_MAX_BYTES"
! is_empty_value "${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/consumer.properties" max.partition.fetch.bytes "$KAFKA_CFG_MAX_PARTITION_FETCH_BYTES"
# Zookeeper mode additional settings
if ! is_empty_value "${KAFKA_CFG_ZOOKEEPER_CONNECT:-}"; then
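
The added guard mirrors the producer/consumer handling around it: when KAFKA_CFG_MESSAGE_MAX_BYTES is non-empty, kafka_server_conf_set writes message.max.bytes into the broker configuration (as the helper name suggests), while the neighboring lines keep writing max.request.size and max.partition.fetch.bytes into producer.properties and consumer.properties. One way to check the effect from a running container; the container name, config path and value are assumptions for illustration, not part of this commit:

  docker exec kafka grep '^message.max.bytes' /opt/bitnami/kafka/config/server.properties
  # expected when KAFKA_CFG_MESSAGE_MAX_BYTES=5242880 was set: message.max.bytes=5242880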

@@ -95,6 +95,9 @@ export KAFKA_OPTS="${KAFKA_OPTS:-}"
export KAFKA_CFG_SASL_ENABLED_MECHANISMS="${KAFKA_CFG_SASL_ENABLED_MECHANISMS:-PLAIN,SCRAM-SHA-256,SCRAM-SHA-512}"
export KAFKA_KRAFT_CLUSTER_ID="${KAFKA_KRAFT_CLUSTER_ID:-}"
export KAFKA_SKIP_KRAFT_STORAGE_INIT="${KAFKA_SKIP_KRAFT_STORAGE_INIT:-false}"
export KAFKA_CFG_MAX_REQUEST_SIZE="${KAFKA_CFG_MAX_REQUEST_SIZE:-}"
export KAFKA_CFG_MESSAGE_MAX_BYTES="${KAFKA_CFG_MESSAGE_MAX_BYTES:-}"
export KAFKA_CFG_MAX_PARTITION_FETCH_BYTES="${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}"
export KAFKA_CLIENT_LISTENER_NAME="${KAFKA_CLIENT_LISTENER_NAME:-}"
# ZooKeeper connection settings

@@ -937,6 +937,7 @@ kafka_initialize() {
kafka_configure_from_environment_variables
# Configure Kafka producer/consumer to set up message sizes
! is_empty_value "${KAFKA_CFG_MAX_REQUEST_SIZE:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/producer.properties" max.request.size "$KAFKA_CFG_MAX_REQUEST_SIZE"
! is_empty_value "${KAFKA_CFG_MESSAGE_MAX_BYTES:-}" && kafka_server_conf_set message.max.bytes "$KAFKA_CFG_MESSAGE_MAX_BYTES"
! is_empty_value "${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/consumer.properties" max.partition.fetch.bytes "$KAFKA_CFG_MAX_PARTITION_FETCH_BYTES"
# Zookeeper mode additional settings
if ! is_empty_value "${KAFKA_CFG_ZOOKEEPER_CONNECT:-}"; then

@@ -95,6 +95,9 @@ export KAFKA_OPTS="${KAFKA_OPTS:-}"
export KAFKA_CFG_SASL_ENABLED_MECHANISMS="${KAFKA_CFG_SASL_ENABLED_MECHANISMS:-PLAIN,SCRAM-SHA-256,SCRAM-SHA-512}"
export KAFKA_KRAFT_CLUSTER_ID="${KAFKA_KRAFT_CLUSTER_ID:-}"
export KAFKA_SKIP_KRAFT_STORAGE_INIT="${KAFKA_SKIP_KRAFT_STORAGE_INIT:-false}"
export KAFKA_CFG_MAX_REQUEST_SIZE="${KAFKA_CFG_MAX_REQUEST_SIZE:-}"
export KAFKA_CFG_MESSAGE_MAX_BYTES="${KAFKA_CFG_MESSAGE_MAX_BYTES:-}"
export KAFKA_CFG_MAX_PARTITION_FETCH_BYTES="${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}"
export KAFKA_CLIENT_LISTENER_NAME="${KAFKA_CLIENT_LISTENER_NAME:-}"
# ZooKeeper connection settings

@@ -937,6 +937,7 @@ kafka_initialize() {
kafka_configure_from_environment_variables
# Configure Kafka producer/consumer to set up message sizes
! is_empty_value "${KAFKA_CFG_MAX_REQUEST_SIZE:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/producer.properties" max.request.size "$KAFKA_CFG_MAX_REQUEST_SIZE"
! is_empty_value "${KAFKA_CFG_MESSAGE_MAX_BYTES:-}" && kafka_server_conf_set message.max.bytes "$KAFKA_CFG_MESSAGE_MAX_BYTES"
! is_empty_value "${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/consumer.properties" max.partition.fetch.bytes "$KAFKA_CFG_MAX_PARTITION_FETCH_BYTES"
# Zookeeper mode additional settings
if ! is_empty_value "${KAFKA_CFG_ZOOKEEPER_CONNECT:-}"; then

@@ -95,6 +95,9 @@ export KAFKA_OPTS="${KAFKA_OPTS:-}"
export KAFKA_CFG_SASL_ENABLED_MECHANISMS="${KAFKA_CFG_SASL_ENABLED_MECHANISMS:-PLAIN,SCRAM-SHA-256,SCRAM-SHA-512}"
export KAFKA_KRAFT_CLUSTER_ID="${KAFKA_KRAFT_CLUSTER_ID:-}"
export KAFKA_SKIP_KRAFT_STORAGE_INIT="${KAFKA_SKIP_KRAFT_STORAGE_INIT:-false}"
export KAFKA_CFG_MAX_REQUEST_SIZE="${KAFKA_CFG_MAX_REQUEST_SIZE:-}"
export KAFKA_CFG_MESSAGE_MAX_BYTES="${KAFKA_CFG_MESSAGE_MAX_BYTES:-}"
export KAFKA_CFG_MAX_PARTITION_FETCH_BYTES="${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}"
export KAFKA_CLIENT_LISTENER_NAME="${KAFKA_CLIENT_LISTENER_NAME:-}"
# ZooKeeper connection settings

@@ -937,6 +937,7 @@ kafka_initialize() {
kafka_configure_from_environment_variables
# Configure Kafka producer/consumer to set up message sizes
! is_empty_value "${KAFKA_CFG_MAX_REQUEST_SIZE:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/producer.properties" max.request.size "$KAFKA_CFG_MAX_REQUEST_SIZE"
! is_empty_value "${KAFKA_CFG_MESSAGE_MAX_BYTES:-}" && kafka_server_conf_set message.max.bytes "$KAFKA_CFG_MESSAGE_MAX_BYTES"
! is_empty_value "${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/consumer.properties" max.partition.fetch.bytes "$KAFKA_CFG_MAX_PARTITION_FETCH_BYTES"
# Zookeeper mode additional settings
if ! is_empty_value "${KAFKA_CFG_ZOOKEEPER_CONNECT:-}"; then

@@ -95,6 +95,9 @@ export KAFKA_OPTS="${KAFKA_OPTS:-}"
export KAFKA_CFG_SASL_ENABLED_MECHANISMS="${KAFKA_CFG_SASL_ENABLED_MECHANISMS:-PLAIN,SCRAM-SHA-256,SCRAM-SHA-512}"
export KAFKA_KRAFT_CLUSTER_ID="${KAFKA_KRAFT_CLUSTER_ID:-}"
export KAFKA_SKIP_KRAFT_STORAGE_INIT="${KAFKA_SKIP_KRAFT_STORAGE_INIT:-false}"
export KAFKA_CFG_MAX_REQUEST_SIZE="${KAFKA_CFG_MAX_REQUEST_SIZE:-}"
export KAFKA_CFG_MESSAGE_MAX_BYTES="${KAFKA_CFG_MESSAGE_MAX_BYTES:-}"
export KAFKA_CFG_MAX_PARTITION_FETCH_BYTES="${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}"
export KAFKA_CLIENT_LISTENER_NAME="${KAFKA_CLIENT_LISTENER_NAME:-}"
# ZooKeeper connection settings

@@ -937,6 +937,7 @@ kafka_initialize() {
kafka_configure_from_environment_variables
# Configure Kafka producer/consumer to set up message sizes
! is_empty_value "${KAFKA_CFG_MAX_REQUEST_SIZE:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/producer.properties" max.request.size "$KAFKA_CFG_MAX_REQUEST_SIZE"
! is_empty_value "${KAFKA_CFG_MESSAGE_MAX_BYTES:-}" && kafka_server_conf_set message.max.bytes "$KAFKA_CFG_MESSAGE_MAX_BYTES"
! is_empty_value "${KAFKA_CFG_MAX_PARTITION_FETCH_BYTES:-}" && kafka_common_conf_set "$KAFKA_CONF_DIR/consumer.properties" max.partition.fetch.bytes "$KAFKA_CFG_MAX_PARTITION_FETCH_BYTES"
# Zookeeper mode additional settings
if ! is_empty_value "${KAFKA_CFG_ZOOKEEPER_CONNECT:-}"; then