[DPE-5109] Split Kraft mode into own service (#45)
* split kraft mode into its own service
zmraul authored Aug 14, 2024
1 parent 56e59b1 commit 7de181b
Showing 6 changed files with 209 additions and 3 deletions.
10 changes: 8 additions & 2 deletions .github/workflows/ci.yaml
@@ -89,7 +89,7 @@ jobs:
curl -s http://localhost:9090/kafkacruisecontrol/state | grep "state: RUNNING"
test-with-kraft:
name: Test Snap with Kraft
name: Test Snap in Kraft mode
runs-on: ubuntu-latest
needs:
- build
@@ -106,14 +106,20 @@
- name: Set default kraft config
run: |
sudo cp /snap/charmed-kafka/current/opt/kafka/config/kraft/server.properties /var/snap/charmed-kafka/current/etc/kafka
sudo cp /snap/charmed-kafka/current/opt/kafka/config/kraft/broker.properties /var/snap/charmed-kafka/current/etc/kafka/server.properties
sudo cp /snap/charmed-kafka/current/opt/kafka/config/kraft/controller.properties /var/snap/charmed-kafka/current/etc/kraft/controller.properties
sudo sed -i '/log.dirs=/c\log.dirs=/var/snap/charmed-kafka/common/var/log/kafka' /var/snap/charmed-kafka/current/etc/kafka/server.properties
sudo sed -i '/log.dirs=/c\log.dirs=/var/snap/charmed-kafka/common/var/log/kraft' /var/snap/charmed-kafka/current/etc/kraft/controller.properties
uuid=$(sudo charmed-kafka.storage random-uuid)
sudo charmed-kafka.storage format --cluster-id $uuid -c /var/snap/charmed-kafka/current/etc/kafka/server.properties
sudo charmed-kafka.storage format --cluster-id $uuid -c /var/snap/charmed-kafka/current/etc/kraft/controller.properties
- name: Start snap services
run: |
sudo snap start charmed-kafka.controller
sleep 5
sudo snap start charmed-kafka.daemon
sleep 5
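With the controller split into its own service, the workflow now starts the two services in sequence. A minimal follow-up health check could look like the sketch below, assuming only the service names that appear in this diff; the retry loop and timings are illustrative and not part of this commit:

    # Wait for both KRaft services to report "active" in the Current column.
    for svc in charmed-kafka.controller charmed-kafka.daemon; do
        for attempt in $(seq 1 10); do
            if sudo snap services "$svc" | grep -q ' active'; then
                echo "$svc is active"
                break
            fi
            sleep 3
        done
    done
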
2 changes: 1 addition & 1 deletion snap/hooks/install
@@ -10,7 +10,7 @@

set -eux

for COMPONENT in kafka cruise-control
for COMPONENT in kafka kraft cruise-control
do
export CONF="${SNAP_DATA}"/etc/$COMPONENT
export LOGS="${SNAP_COMMON}"/var/log/$COMPONENT
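For the new kraft component, these variables resolve to the paths below (a sketch using the standard snapd values of SNAP_DATA and SNAP_COMMON; the directory setup itself lives in the collapsed remainder of the hook):

    # COMPONENT=kraft, with SNAP_DATA=/var/snap/charmed-kafka/current
    # and SNAP_COMMON=/var/snap/charmed-kafka/common:
    CONF=/var/snap/charmed-kafka/current/etc/kraft
    LOGS=/var/snap/charmed-kafka/common/var/log/kraft
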
69 changes: 69 additions & 0 deletions snap/local/etc/kraft/jmx_prometheus.yaml
@@ -0,0 +1,69 @@
lowercaseOutputName: true
rules:
# Special cases and very specific rules
- pattern : kafka.server<type=(.+), name=(.+), clientId=(.+), topic=(.+), partition=(.*)><>Value
name: controller_server_$1_$2
type: GAUGE
labels:
clientId: "$3"
topic: "$4"
partition: "$5"
- pattern : kafka.server<type=(.+), name=(.+), clientId=(.+), brokerHost=(.+), brokerPort=(.+)><>Value
name: controller_server_$1_$2
type: GAUGE
labels:
clientId: "$3"
broker: "$4:$5"

- pattern : kafka.server<type=KafkaRequestHandlerPool, name=RequestHandlerAvgIdlePercent><>OneMinuteRate
name: controller_server_kafkarequesthandlerpool_requesthandleravgidlepercent_total
type: GAUGE

- pattern : kafka.server<type=socket-server-metrics, clientSoftwareName=(.+), clientSoftwareVersion=(.+), listener=(.+), networkProcessor=(.+)><>connections
name: controller_server_socketservermetrics_connections
type: GAUGE
labels:
client_software_name: "$1"
client_software_version: "$2"
listener: "$3"
network_processor: "$4"

- pattern : 'kafka.server<type=socket-server-metrics, listener=(.+), networkProcessor=(.+)><>(.+):'
name: controller_server_socketservermetrics_$3
type: GAUGE
labels:
listener: "$1"
network_processor: "$2"

# Count and Value
- pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+), (.+)=(.+)><>(Count|Value)
name: controller_$1_$2_$3
labels:
"$4": "$5"
"$6": "$7"
- pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+)><>(Count|Value)
name: controller_$1_$2_$3
labels:
"$4": "$5"
- pattern: kafka.(.*)<type=(.+), name=(.+)><>(Count|Value)
name: controller_$1_$2_$3

# Percentile
- pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*), (.+)=(.+)><>(\d+)thPercentile
name: controller_$1_$2_$3
type: GAUGE
labels:
"$4": "$5"
"$6": "$7"
quantile: "0.$8"
- pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*)><>(\d+)thPercentile
name: controller_$1_$2_$3
type: GAUGE
labels:
"$4": "$5"
quantile: "0.$6"
- pattern: kafka.(.*)<type=(.+), name=(.+)><>(\d+)thPercentile
name: controller_$1_$2_$3
type: GAUGE
labels:
quantile: "0.$4"
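
Every rule above prefixes the remapped metric name with controller_, keeping the controller's metrics distinct from the broker's when both exporters run on one machine. A quick way to inspect the result, assuming an illustrative exporter port of 9101 (the actual port is whatever KRAFT_JMX_OPTS configures; see the start wrapper below):

    # List the controller metrics produced by the rules in this file.
    curl -s http://localhost:9101/metrics | grep '^controller_'
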
94 changes: 94 additions & 0 deletions snap/local/etc/kraft/log4j.properties
@@ -0,0 +1,94 @@
charmed.kafka.log.level=INFO
charmed.kafka.log.maxfilesize=100MB
charmed.kafka.log.retention=10

log4j.rootLogger=${charmed.kafka.log.level}, kafkaAppender

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.kafkaAppender=org.apache.log4j.RollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${controller.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.kafkaAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.kafkaAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.stateChangeAppender=org.apache.log4j.RollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${controller.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.stateChangeAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.stateChangeAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.requestAppender=org.apache.log4j.RollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${controller.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.requestAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.requestAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.cleanerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${controller.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.cleanerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.cleanerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.controllerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${controller.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.controllerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.controllerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.authorizerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${controller.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.authorizerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.authorizerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

# Change the line below to adjust ZK client logging
log4j.logger.org.apache.zookeeper=${charmed.kafka.log.level}

# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=${charmed.kafka.log.level}
log4j.logger.org.apache.kafka=${charmed.kafka.log.level}

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

# Change the line below to adjust KRaft mode controller logging
log4j.logger.org.apache.kafka.controller=${charmed.kafka.log.level}, controllerAppender
log4j.additivity.org.apache.kafka.controller=false

# Change the line below to adjust ZK mode controller logging
log4j.logger.kafka.controller=${charmed.kafka.log.level}, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=${charmed.kafka.log.level}, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=${charmed.kafka.log.level}, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=${charmed.kafka.log.level}, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
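
The charmed.kafka.log.level and controller.logs.dir properties are injected as JVM system properties by start-wrapper.bash below. A sketch of a one-off run with DEBUG logging, overriding KAFKA_LOG4J_OPTS before the wrapper builds its default (the properties file path matches this commit; the /tmp log directory is illustrative):

    export KAFKA_LOG4J_OPTS="-Dcontroller.logs.dir=/tmp/kraft-logs \
      -Dlog4j.configuration=file:/var/snap/charmed-kafka/current/etc/kraft/log4j.properties \
      -Dcharmed.kafka.log.level=DEBUG"
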
21 changes: 21 additions & 0 deletions snap/local/opt/kraft/bin/start-wrapper.bash
@@ -0,0 +1,21 @@
#!/bin/bash

set -e

# The Kafka launch scripts read the same KAFKA_JMX_OPTS variable for both the broker
# and the controller. To let both services expose JMX metrics on the same machine,
# use a Kraft-specific variable and apply it here during startup.
if [ "x$KRAFT_JMX_OPTS" != "x" ]; then
export KAFKA_JMX_OPTS=${KRAFT_JMX_OPTS}
else
unset KAFKA_JMX_OPTS
fi

if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
export KAFKA_LOG4J_OPTS="-Dcontroller.logs.dir=${LOG_DIR} -Dlog4j.configuration=file:${SNAP_DATA}/etc/kraft/log4j.properties -Dcharmed.kafka.log.level=${KAFKA_CFG_LOGLEVEL:-INFO}"
fi

"${SNAP}"/usr/bin/setpriv \
--clear-groups \
--reuid snap_daemon \
--regid snap_daemon -- \
"${SNAP}/opt/kafka/bin/kafka-server-start.sh" "${SNAP_DATA}"/etc/kraft/controller.properties
16 changes: 16 additions & 0 deletions snap/snapcraft.yaml
@@ -26,6 +26,11 @@ slots:
source:
read:
- $SNAP_COMMON/var/log/kafka
kraft-logs:
interface: content
source:
read:
- $SNAP_COMMON/var/log/kraft
cc-logs:
interface: content
source:
@@ -44,6 +49,17 @@
- network-bind
- removable-media
- mount-observe
controller:
command: opt/kraft/bin/start-wrapper.bash
daemon: simple
install-mode: disable
environment:
LOG_DIR: $SNAP_COMMON/var/log/kraft
plugs:
- network
- network-bind
- removable-media
- mount-observe
cruise-control:
command: opt/cruise-control/bin/start-wrapper.bash
daemon: simple
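The new kraft-logs content slot mirrors the existing log slots, exposing the controller's log directory read-only to other snaps. A sketch of a consumer connecting to it ("log-reader" and its "logs" plug are hypothetical; the slot name comes from this file):

    sudo snap connect log-reader:logs charmed-kafka:kraft-logs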
