-
Notifications
You must be signed in to change notification settings - Fork 305
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
atlas-eval: expose data rate per datasource via diagnostic messages #1192
Changes from all commits
0c4245c
935c40e
e53c4c9
daa0c51
51dbdee
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
/* | ||
* Copyright 2014-2020 Netflix, Inc. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
package com.netflix.atlas.eval.model | ||
|
||
import com.netflix.atlas.json.JsonSupport | ||
|
||
/**
  * Diagnostic message summarizing the data flow rate for a single data source
  * over one evaluation interval.
  *
  * @param timestamp
  *     Start time of the interval the rates were collected for.
  * @param step
  *     Step size, in milliseconds, of the evaluation interval.
  * @param inputSize
  *     Number of raw input datapoints received, with a breakdown per data expression.
  * @param intermediateSize
  *     Number of intermediate aggregate datapoints, with a breakdown per data expression.
  * @param outputSize
  *     Number of final output datapoints produced for the data source.
  */
case class EvalDataRate(
  timestamp: Long,
  step: Long,
  inputSize: EvalDataSize,
  intermediateSize: EvalDataSize,
  outputSize: EvalDataSize
) extends JsonSupport {
  // Fixed type tag so consumers of the message stream can identify rate messages
  val `type`: String = "rate"
}
|
||
case class EvalDataSize(total: Int, details: Map[String, Int] = Map.empty[String, Int]) |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
/* | ||
* Copyright 2014-2020 Netflix, Inc. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
package com.netflix.atlas.eval.stream | ||
|
||
import com.netflix.atlas.core.model.DataExpr | ||
import com.netflix.atlas.core.util.RefIntHashMap | ||
import com.netflix.atlas.eval.model.EvalDataRate | ||
import com.netflix.atlas.eval.model.EvalDataSize | ||
|
||
import scala.collection.mutable | ||
|
||
/**
  * Collects input, intermediate, and output datapoint counts per data source id
  * for a single evaluation interval and exposes them as [[EvalDataRate]] values.
  *
  * NOTE(review): behavior should be covered by FinalExprEvalSuite.
  *
  * @param timestamp
  *     Start time of the interval the counts are collected for.
  * @param step
  *     Step size, in milliseconds, of the evaluation interval.
  */
class EvalDataRateCollector(timestamp: Long, step: Long) {

  // Input and intermediate counts are tracked per data source id with a breakdown
  // by the data expression that produced them. Output counts only need a total.
  private val inputCounts = mutable.Map.empty[String, RefIntHashMap[DataExpr]]
  private val intermediateCounts = mutable.Map.empty[String, RefIntHashMap[DataExpr]]
  private val outputCounts = new RefIntHashMap[String]

  /** Increment the number of final output datapoints for a data source. */
  def incrementOutput(id: String, amount: Int): Unit = {
    outputCounts.increment(id, amount)
  }

  /** Increment the number of intermediate aggregate datapoints for a data source. */
  def incrementIntermediate(id: String, dataExpr: DataExpr, amount: Int): Unit = {
    increment(intermediateCounts, id, dataExpr, amount)
  }

  /** Increment the number of raw input datapoints for a data source. */
  def incrementInput(id: String, dataExpr: DataExpr, amount: Int): Unit = {
    increment(inputCounts, id, dataExpr, amount)
  }

  /**
    * Returns the collected rates keyed by data source id. Only ids that received
    * some input data are included.
    */
  def getAll: Map[String, EvalDataRate] = {
    inputCounts.map {
      case (id, _) =>
        id -> EvalDataRate(
          timestamp,
          step,
          getDataRate(inputCounts, id),
          getDataRate(intermediateCounts, id),
          EvalDataSize(outputCounts.get(id, 0))
        )
    }.toMap
  }

  /** Summarize the per-expression counts for an id into an EvalDataSize. */
  private def getDataRate(
    counts: mutable.Map[String, RefIntHashMap[DataExpr]],
    id: String
  ): EvalDataSize = {
    counts.get(id) match {
      // Plain binding pattern: the element type of RefIntHashMap is erased at
      // runtime, so a type pattern like `Some(v: RefIntHashMap[DataExpr])` would
      // be an unchecked (and misleading) match.
      case Some(exprCounts) =>
        var total = 0
        val builder = Map.newBuilder[String, Int]
        exprCounts.foreach { (dataExpr, count) =>
          builder += dataExpr.toString -> count
          total += count
        }
        EvalDataSize(total, builder.result())
      case None => EvalDataRateCollector.EmptyRate
    }
  }

  /** Increment the count for (id, dataExpr), creating the nested map if needed. */
  private def increment(
    counts: mutable.Map[String, RefIntHashMap[DataExpr]],
    id: String,
    dataExpr: DataExpr,
    amount: Int
  ): Unit = {
    counts.getOrElseUpdate(id, new RefIntHashMap[DataExpr]).increment(dataExpr, amount)
  }
}

object EvalDataRateCollector {

  /** Shared empty size used when there is no data for an id. */
  val EmptyRate = EvalDataSize(0)
}
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -15,8 +15,6 @@ | |
*/ | ||
package com.netflix.atlas.eval.stream | ||
|
||
import java.time.Instant | ||
|
||
import akka.NotUsed | ||
import akka.http.scaladsl.model.Uri | ||
import akka.stream.Attributes | ||
|
@@ -42,6 +40,8 @@ import com.netflix.atlas.eval.stream.Evaluator.DataSources | |
import com.netflix.atlas.eval.stream.Evaluator.MessageEnvelope | ||
import com.typesafe.scalalogging.StrictLogging | ||
|
||
import scala.collection.mutable | ||
|
||
/** | ||
* Takes the set of data sources and time grouped partial aggregates as input and performs | ||
* the final evaluation step. | ||
|
@@ -73,6 +73,9 @@ private[stream] class FinalExprEval(interpreter: ExprInterpreter) | |
// the data for it | ||
private var recipients = List.empty[(StyleExpr, List[String])] | ||
|
||
// Track the set of DataExprs per DataSource | ||
private var dataSourceIdToDataExprs = Map.empty[String, Set[DataExpr]] | ||
|
||
// Empty data map used as base to account for expressions that do not have any | ||
// matches for a given time interval | ||
private var noData = Map.empty[DataExpr, List[TimeSeries]] | ||
|
@@ -110,9 +113,23 @@ private[stream] class FinalExprEval(interpreter: ExprInterpreter) | |
} | ||
} | ||
.groupBy(_._1) | ||
.map(t => t._1 -> t._2.map(_._2).toList) | ||
.map(t => t._1 -> t._2.map(_._2)) | ||
.toList | ||
|
||
dataSourceIdToDataExprs = recipients | ||
.flatMap(styleExprAndIds => | ||
styleExprAndIds._2.map(id => id -> styleExprAndIds._1.expr.dataExprs.toSet) | ||
) | ||
// Fold to mutable map to avoid creating new Map on every update | ||
.foldLeft(mutable.Map.empty[String, Set[DataExpr]]) { | ||
case (map, (id, dataExprs)) => { | ||
map += map.get(id).fold(id -> dataExprs) { vs => | ||
id -> (dataExprs ++ vs) | ||
} | ||
} | ||
} | ||
.toMap | ||
|
||
// Cleanup state for any expressions that are no longer needed | ||
val removed = previous.keySet -- recipients.map(_._1).toSet | ||
removed.foreach { expr => | ||
|
@@ -149,50 +166,62 @@ private[stream] class FinalExprEval(interpreter: ExprInterpreter) | |
/**
  * Perform the final evaluation step for one time group: aggregate the partial
  * results for each data expression, evaluate every subscribed style expression,
  * and emit the time series messages along with per-data-source rate diagnostics.
  */
private def handleData(group: TimeGroup): Unit = {
  // Finalize the DataExprs, needed as input for further evaluation
  val timestamp = group.timestamp
  val groupedDatapoints = group.dataExprValues

  val dataExprToDatapoints = noData ++ groupedDatapoints.map {
    case (k, vs) =>
      k -> AggrDatapoint.aggregate(vs.values).map(_.toTimeSeries)
  }

  // Collect input and intermediate data sizes per DataSource
  val rateCollector = new EvalDataRateCollector(timestamp, step)
  dataSourceIdToDataExprs.foreach {
    case (id, dataExprSet) =>
      dataExprSet.foreach { dataExpr =>
        group.dataExprValues.get(dataExpr).foreach { info =>
          rateCollector.incrementInput(id, dataExpr, info.numRawDatapoints)
          rateCollector.incrementIntermediate(id, dataExpr, info.values.size)
        }
      }
  }

  // Generate the time series and diagnostic output. The binding is named
  // styleExpr (rather than expr) to avoid confusion in styleExpr.expr.dataExprs.
  val output = recipients.flatMap {
    case (styleExpr, ids) =>
      // Use an identity map for the state to ensure that multiple equivalent stateful
      // expressions, e.g. derivative(a) + derivative(a), will have isolated state.
      val state = states.getOrElse(styleExpr, IdentityMap.empty[StatefulExpr, Any])
      val context = EvalContext(timestamp, timestamp + step, step, state)
      try {
        val result = styleExpr.expr.eval(context, dataExprToDatapoints)
        states(styleExpr) = result.state
        val data = if (result.data.isEmpty) List(noData(styleExpr)) else result.data
        val msgs = data.map { t =>
          TimeSeriesMessage(styleExpr, context, t.withLabel(styleExpr.legend(t)))
        }

        // Collect final data size per DataSource
        ids.foreach(rateCollector.incrementOutput(_, data.size))

        ids.flatMap { id =>
          msgs.map(msg => new MessageEnvelope(id, msg))
        }
      } catch {
        case e: Exception =>
          // Evaluation failures are reported back to every subscriber of the expression
          val msg = error(styleExpr.toString, "final eval failed", e)
          ids.map(id => new MessageEnvelope(id, msg))
      }
  }

  val rateMessages = rateCollector.getAll.map {
    case (id, rate) => new MessageEnvelope(id, rate)
  }.toList

  push(out, Source(output ++ rateMessages))
}
|
||
override def onPush(): Unit = { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Need to increment here because the constructor takes one datapoint.