This repository has been archived by the owner on Mar 17, 2024. It is now read-only.
-
-
Notifications
You must be signed in to change notification settings - Fork 197
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Changes to support pushing lag metrics into InfluxDB
Merge 3e2f0bfaa3e646c27cb4158c0b547a2fcdd8fdea Merge Cosmetics Syntax Removing files Refactored dependencies Create db test Removed unused import Remove unused Syntax Cosmetics Syntax Removing files Refactored dependencies Create db test Removed unused import Changes to support pushing lag metrics into InfluxDB Removed libraryDependencies from build.sbt
- Loading branch information
1 parent
fb0aaaa
commit 2841caf
Showing
18 changed files
with
424 additions
and
114 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
7 changes: 0 additions & 7 deletions
7
src/main/scala/com/lightbend/kafkalagexporter/GraphiteConfig.scala
This file was deleted.
Oops, something went wrong.
25 changes: 25 additions & 0 deletions
25
src/main/scala/com/lightbend/kafkalagexporter/GraphiteEndpointConfig.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
/* | ||
* Copyright (C) 2019 Lightbend Inc. <http://www.lightbend.com> | ||
*/ | ||
|
||
package com.lightbend.kafkalagexporter | ||
|
||
import com.typesafe.config.Config | ||
import scala.util.Try | ||
|
||
/** Configuration for the Graphite reporting endpoint.
  *
  * Reads the target host and port, plus an optional metric-name prefix,
  * from the `reporters.graphite.*` section of the supplied config.
  *
  * @param sinkType        identifier of the sink type (forwarded to SinkConfig)
  * @param metricWhitelist regex patterns selecting which metrics to report
  * @param config          application config containing `reporters.graphite.*`
  */
class GraphiteEndpointConfig(sinkType: String, metricWhitelist: List[String], config: Config)
    extends SinkConfig(sinkType, metricWhitelist, config) {
  val port: Int = config.getInt("reporters.graphite.port")
  val host: String = config.getString("reporters.graphite.host")
  // hasPath distinguishes a genuinely absent prefix from a misconfigured one;
  // the previous Try(...).toOption silently turned type errors into None.
  val prefix: Option[String] =
    if (config.hasPath("reporters.graphite.prefix"))
      Some(config.getString("reporters.graphite.prefix"))
    else None

  override def toString(): String = {
    // stripMargin is required so the '|' margin markers do not appear in output.
    s"""
       |Graphite:
       |  host: ${host}
       |  port: ${port}
       |  prefix: ${prefix}
    """.stripMargin
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
143 changes: 143 additions & 0 deletions
143
src/main/scala/com/lightbend/kafkalagexporter/InfluxDBPusherSink.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,143 @@ | ||
/* | ||
* Copyright (C) 2019 Lightbend Inc. <http://www.lightbend.com> | ||
*/ | ||
|
||
package com.lightbend.kafkalagexporter | ||
|
||
import java.io.IOException
import java.lang.Iterable
import java.util.function.{BiConsumer, Consumer}

import scala.util.Try
import scala.util.control.NonFatal

import com.lightbend.kafkalagexporter.EndpointSink.ClusterGlobalLabels
import com.lightbend.kafkalagexporter.MetricsSink._
import com.typesafe.scalalogging.Logger
import org.influxdb.InfluxDB.ConsistencyLevel
import org.influxdb.dto.{BatchPoints, Point, Query, QueryResult}
import org.influxdb.{BatchOptions, InfluxDB, InfluxDBFactory}
|
||
/** Factory for [[InfluxDBPusherSink]]. */
object InfluxDBPusherSink {

  /** Builds the sink, translating any construction failure into an IOException.
    *
    * @param sinkConfig          InfluxDB connection and behaviour settings
    * @param clusterGlobalLabels per-cluster global labels forwarded to the sink
    * @return the constructed sink
    * @throws java.io.IOException when the sink cannot be created
    */
  def apply(sinkConfig: InfluxDBPusherSinkConfig, clusterGlobalLabels: ClusterGlobalLabels): MetricsSink =
    Try(new InfluxDBPusherSink(sinkConfig, clusterGlobalLabels))
      .fold(cause => throw new IOException("Could not create Influx DB Pusher Sink", cause), identity)
}
|
||
/** Pushes lag metrics into InfluxDB.
  *
  * On construction the sink connects to the server, issues a
  * `CREATE DATABASE` for the configured database, and — when
  * `sinkConfig.async` is set — enables the client's batched writes.
  * Any non-fatal failure is treated as unrecoverable: the sink is
  * stopped and the failure rethrown.
  */
class InfluxDBPusherSink private (sinkConfig: InfluxDBPusherSinkConfig, clusterGlobalLabels: ClusterGlobalLabels)
    extends EndpointSink(clusterGlobalLabels) {

  val logger = Logger("InfluxDBPusherSink")
  val influxDB: InfluxDB = connect()
  createDatabase()
  enableBatching()

  /** Writes the metric if it matches the whitelist and its value is defined (not NaN). */
  override def report(m: MetricValue): Unit = {
    if (sinkConfig.metricWhitelist.exists(m.definition.name.matches) && !m.value.isNaN) {
      write(m)
    }
  }

  /** Converts the metric to a point and writes it sync or async per config.
    *
    * NonFatal (not Throwable) so fatal errors such as OutOfMemoryError
    * propagate untouched instead of triggering a sink shutdown first.
    * Non-fatal failures are escalated via handlingFailure, which stops
    * the sink and rethrows.
    */
  def write(m: MetricValue): Unit = {
    try {
      val point = buildPoint(m)
      if (sinkConfig.async)
        writeAsync(point)
      else
        writeSync(point)
    } catch {
      case NonFatal(t) =>
        handlingFailure(t)
    }
  }

  /** Hands the point to the client's batch buffer (requires enableBatching()). */
  def writeAsync(point: Point): Unit = {
    influxDB.write(point)
  }

  /** Writes the point immediately as a single-point batch with ALL consistency. */
  def writeSync(point: Point): Unit = {
    val batchPoints = BatchPoints
      .database(sinkConfig.database)
      .consistency(ConsistencyLevel.ALL)
      .build()
    batchPoints.point(point)
    influxDB.write(batchPoints)
  }

  /** Builds an InfluxDB point: each label becomes a tag, the metric value the single field.
    *
    * Label names and values are positionally aligned lists, hence the zip.
    */
  def buildPoint(m: MetricValue): Point = {
    val point = Point.measurement(m.definition.name)
    (m.definition.labels zip m.labels).foreach { case (name, value) =>
      point.tag(name, value)
    }
    point.addField("value", m.value)
    point.build()
  }

  /** Metric removal is not supported by this sink; warns for whitelisted metrics. */
  override def remove(m: RemoveMetric): Unit = {
    if (sinkConfig.metricWhitelist.exists(m.definition.name.matches))
      logger.warn("Remove is not supported by InfluxDBPusherSink")
  }

  /** In async mode, targets the configured database and enables client-side batching. */
  def enableBatching(): Unit = {
    if (sinkConfig.async) {
      influxDB.setDatabase(sinkConfig.database)
      influxDB.enableBatch(BatchOptions.DEFAULTS.exceptionHandler(createExceptionHandler()))
    }
  }

  /** Connects to InfluxDB, authenticating when a username is configured. */
  def connect(): InfluxDB = {
    val url = s"${sinkConfig.endpoint}:${sinkConfig.port}"
    if (sinkConfig.username.nonEmpty)
      InfluxDBFactory.connect(url, sinkConfig.username, sinkConfig.password)
    else
      InfluxDBFactory.connect(url)
  }

  /** Creates the target database (a server-side no-op if it already exists). */
  def createDatabase(): Unit = {
    influxDB.query(
      new Query("CREATE DATABASE " + sinkConfig.database, sinkConfig.database),
      successQueryHandler(),
      failQueryHandler()
    )
  }

  /** Callback that logs successful query results. */
  def successQueryHandler(): Consumer[QueryResult] =
    new Consumer[QueryResult] {
      override def accept(result: QueryResult): Unit = {
        logger.info(result.toString())
      }
    }

  /** Callback that escalates query failures via handlingFailure. */
  def failQueryHandler(): Consumer[Throwable] =
    new Consumer[Throwable] {
      override def accept(throwable: Throwable): Unit = {
        handlingFailure(throwable)
      }
    }

  /** Batch exception handler that escalates write failures via handlingFailure. */
  def createExceptionHandler(): BiConsumer[Iterable[Point], Throwable] =
    new BiConsumer[Iterable[Point], Throwable] {
      override def accept(failedPoints: Iterable[Point], throwable: Throwable): Unit = {
        handlingFailure(throwable)
      }
    }

  /** Treats the failure as unrecoverable: logs it, stops the sink, rethrows. */
  def handlingFailure(t: Throwable): Unit = {
    logger.error("Unrecoverable exception, will stop ", t)
    stop()
    throw t
  }

  /** Flushes and disables batching if enabled, then closes the connection. */
  override def stop(): Unit = {
    if (influxDB.isBatchEnabled()) {
      influxDB.disableBatch()
    }
    influxDB.close()
  }
}
Oops, something went wrong.