-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[DPE-5109] Split Kraft mode into own service (#45)
* split raft mode into own service
- Loading branch information
Showing
6 changed files
with
209 additions
and
3 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
# Prometheus JMX-exporter rules for the Kafka KRaft controller service.
# All metric names are prefixed "controller_" so that a broker and a
# controller running on the same machine can be scraped side by side
# without metric-name collisions.
lowercaseOutputName: true
rules:
  # Special cases and very specific rules
  - pattern: kafka.server<type=(.+), name=(.+), clientId=(.+), topic=(.+), partition=(.*)><>Value
    name: controller_server_$1_$2
    type: GAUGE
    labels:
      clientId: "$3"
      topic: "$4"
      partition: "$5"
  - pattern: kafka.server<type=(.+), name=(.+), clientId=(.+), brokerHost=(.+), brokerPort=(.+)><>Value
    name: controller_server_$1_$2
    type: GAUGE
    labels:
      clientId: "$3"
      broker: "$4:$5"

  - pattern: kafka.server<type=KafkaRequestHandlerPool, name=RequestHandlerAvgIdlePercent><>OneMinuteRate
    name: controller_server_kafkarequesthandlerpool_requesthandleravgidlepercent_total
    type: GAUGE

  - pattern: kafka.server<type=socket-server-metrics, clientSoftwareName=(.+), clientSoftwareVersion=(.+), listener=(.+), networkProcessor=(.+)><>connections
    name: controller_server_socketservermetrics_connections
    type: GAUGE
    labels:
      client_software_name: "$1"
      client_software_version: "$2"
      listener: "$3"
      network_processor: "$4"

  # Quoted because the MBean attribute capture group ends with ':'.
  - pattern: 'kafka.server<type=socket-server-metrics, listener=(.+), networkProcessor=(.+)><>(.+):'
    name: controller_server_socketservermetrics_$3
    type: GAUGE
    labels:
      listener: "$1"
      network_processor: "$2"

  # Count and Value
  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+), (.+)=(.+)><>(Count|Value)
    name: controller_$1_$2_$3
    labels:
      "$4": "$5"
      "$6": "$7"
  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+)><>(Count|Value)
    name: controller_$1_$2_$3
    labels:
      "$4": "$5"
  - pattern: kafka.(.*)<type=(.+), name=(.+)><>(Count|Value)
    name: controller_$1_$2_$3

  # Percentile
  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*), (.+)=(.+)><>(\d+)thPercentile
    name: controller_$1_$2_$3
    type: GAUGE
    labels:
      "$4": "$5"
      "$6": "$7"
      quantile: "0.$8"
  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*)><>(\d+)thPercentile
    name: controller_$1_$2_$3
    type: GAUGE
    labels:
      "$4": "$5"
      quantile: "0.$6"
  - pattern: kafka.(.*)<type=(.+), name=(.+)><>(\d+)thPercentile
    name: controller_$1_$2_$3
    type: GAUGE
    labels:
      quantile: "0.$4"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,94 @@ | ||
# log4j 1.x configuration for the Charmed Kafka KRaft controller service.
#
# The charmed.kafka.* properties below are defaults; the startup wrapper
# overrides charmed.kafka.log.level and injects controller.logs.dir via
# KAFKA_LOG4J_OPTS (-Dcontroller.logs.dir=... -Dcharmed.kafka.log.level=...).
charmed.kafka.log.level=INFO
charmed.kafka.log.maxfilesize=100MB
charmed.kafka.log.retention=10

log4j.rootLogger=${charmed.kafka.log.level}, kafkaAppender

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

# NOTE(review): DatePattern is a DailyRollingFileAppender setting; the
# RollingFileAppender used throughout this file ignores it and rolls on
# MaxFileSize/MaxBackupIndex only — confirm whether date-based rolling
# was intended.
log4j.appender.kafkaAppender=org.apache.log4j.RollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${controller.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.kafkaAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.kafkaAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.stateChangeAppender=org.apache.log4j.RollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${controller.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.stateChangeAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.stateChangeAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.requestAppender=org.apache.log4j.RollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${controller.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.requestAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.requestAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.cleanerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${controller.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.cleanerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.cleanerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.controllerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${controller.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.controllerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.controllerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

log4j.appender.authorizerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${controller.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.authorizerAppender.MaxFileSize=${charmed.kafka.log.maxfilesize}
log4j.appender.authorizerAppender.MaxBackupIndex=${charmed.kafka.log.retention}

# Change the line below to adjust ZK client logging
log4j.logger.org.apache.zookeeper=${charmed.kafka.log.level}

# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=${charmed.kafka.log.level}
log4j.logger.org.apache.kafka=${charmed.kafka.log.level}

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

# Change the line below to adjust KRaft mode controller logging
log4j.logger.org.apache.kafka.controller=${charmed.kafka.log.level}, controllerAppender
log4j.additivity.org.apache.kafka.controller=false

# Change the line below to adjust ZK mode controller logging
log4j.logger.kafka.controller=${charmed.kafka.log.level}, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=${charmed.kafka.log.level}, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=${charmed.kafka.log.level}, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=${charmed.kafka.log.level}, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
#!/bin/bash
# Startup wrapper for the Charmed Kafka KRaft controller snap service.
#
# Required environment (provided by snapd / the service definition):
#   SNAP       - read-only snap mount point
#   SNAP_DATA  - writable snap data directory (holds etc/kraft/*)
#   LOG_DIR    - directory the controller writes its logs to
# Optional:
#   KRAFT_JMX_OPTS     - JMX options for the controller (mapped to KAFKA_JMX_OPTS)
#   KAFKA_LOG4J_OPTS   - log4j options; a default is supplied if unset
#   KAFKA_CFG_LOGLEVEL - log level for the default log4j options (default: INFO)

set -euo pipefail

# Kafka's start scripts read KAFKA_JMX_OPTS for JMX metrics. To allow a broker
# and a KRaft controller to coexist on the same machine with independent JMX
# settings, the controller service is configured via KRAFT_JMX_OPTS and the
# value is mapped over here. If it is not provided, make sure no stale
# KAFKA_JMX_OPTS leaks into the controller process.
if [ -n "${KRAFT_JMX_OPTS:-}" ]; then
  export KAFKA_JMX_OPTS="${KRAFT_JMX_OPTS}"
else
  unset KAFKA_JMX_OPTS
fi

# Supply default log4j settings unless the caller provided their own.
# The :? guards abort with a clear message if required snap vars are missing.
if [ -z "${KAFKA_LOG4J_OPTS:-}" ]; then
  export KAFKA_LOG4J_OPTS="-Dcontroller.logs.dir=${LOG_DIR:?LOG_DIR must be set} -Dlog4j.configuration=file:${SNAP_DATA:?SNAP_DATA must be set}/etc/kraft/log4j.properties -Dcharmed.kafka.log.level=${KAFKA_CFG_LOGLEVEL:-INFO}"
fi

# Drop privileges to the unprivileged snap_daemon user, then start the
# controller with its dedicated properties file.
"${SNAP:?SNAP must be set}/usr/bin/setpriv" \
  --clear-groups \
  --reuid snap_daemon \
  --regid snap_daemon -- \
  "${SNAP}/opt/kafka/bin/kafka-server-start.sh" "${SNAP_DATA:?SNAP_DATA must be set}/etc/kraft/controller.properties"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters