Support custom histogram buckets for Prometheus Timer Metrics
Ashwin Tumma committed Jan 31, 2025
1 parent a895aae commit 25e4758
Showing 8 changed files with 53 additions and 10 deletions.
4 changes: 3 additions & 1 deletion docs/development/extensions-contrib/prometheus.md
@@ -40,7 +40,7 @@ All the configuration parameters for the Prometheus emitter are under `druid.emi
| `druid.emitter.prometheus.strategy` | The strategy to expose prometheus metrics. <br/>Should be one of `exporter` and `pushgateway`. Default strategy `exporter` would expose metrics for scraping purpose. Peon tasks (short-lived jobs) should use `pushgateway` strategy. | yes | exporter |
| `druid.emitter.prometheus.port` | The port on which to expose the prometheus HTTPServer. Required if using `exporter` strategy. | no | none |
| `druid.emitter.prometheus.namespace` | Optional metric namespace. Must match the regex `[a-zA-Z_:][a-zA-Z0-9_:]*` | no | druid |
-| `druid.emitter.prometheus.dimensionMapPath` | JSON file defining the Prometheus metric type, desired dimensions, help text, and conversionFactor for every Druid metric. | no | Default mapping provided. See below. |
+| `druid.emitter.prometheus.dimensionMapPath` | JSON file defining the Prometheus metric type, desired dimensions, conversionFactor, histogram buckets and help text for every Druid metric. | no | Default mapping provided. See below. |
| `druid.emitter.prometheus.addHostAsLabel` | Flag to include the hostname as a prometheus label. | no | false |
| `druid.emitter.prometheus.addServiceAsLabel` | Flag to include the druid service name (e.g. `druid/broker`, `druid/coordinator`, etc.) as a prometheus label. | no | false |
| `druid.emitter.prometheus.pushGatewayAddress` | Pushgateway address. Required if using `pushgateway` strategy. | no | none |
@@ -90,6 +90,7 @@ Prometheus metric path is organized using the following schema:
"dimensions" : <dimension list>,
"type" : <timer|counter|gauge>,
"conversionFactor": <conversionFactor>,
+"histogramBuckets": <bucket values>,
"help" : <help text>
}
```
@@ -100,6 +101,7 @@ For example:
"dimensions" : ["dataSource", "type"],
"type" : "timer",
"conversionFactor": 1000.0,
+"histogramBuckets": [0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0],
"help": "Seconds taken to complete a query."
}
```
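A note on units: the `histogramBuckets` boundaries apply to the metric value after division by `conversionFactor`. Druid reports `query/time` in milliseconds, and the factor of 1000.0 converts that to seconds, so the buckets above are in seconds. The standalone sketch below (plain Prometheus simpleclient, not Druid code; the metric name is hypothetical) shows where a sample lands:

```java
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Histogram;

// Standalone sketch: bucket boundaries are compared against the *converted*
// value, so a 1500 ms query observed with conversionFactor 1000.0 becomes
// 1.5 s and is counted under the le="2.5" bucket.
public class BucketUnitsSketch
{
  public static void main(String[] args)
  {
    Histogram queryTime = Histogram.build()
        .name("query_time_seconds") // hypothetical metric name
        .help("Seconds taken to complete a query.")
        .buckets(0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0)
        .register(new CollectorRegistry());

    double rawMillis = 1500.0;             // value as emitted by Druid
    queryTime.observe(rawMillis / 1000.0); // 1.5 s -> counted under le="2.5"
  }
}
```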
DimensionsAndCollector.java
@@ -26,12 +26,14 @@ public class DimensionsAndCollector
private final String[] dimensions;
private final SimpleCollector collector;
private final double conversionFactor;
+private final double[] histogramBuckets;

-DimensionsAndCollector(String[] dimensions, SimpleCollector collector, double conversionFactor)
+DimensionsAndCollector(String[] dimensions, SimpleCollector collector, double conversionFactor, double[] histogramBuckets)
{
this.dimensions = dimensions;
this.collector = collector;
this.conversionFactor = conversionFactor;
+this.histogramBuckets = histogramBuckets;
}

public String[] getDimensions()
@@ -48,4 +50,9 @@ public double getConversionFactor()
{
return conversionFactor;
}
+
+public double[] getHistogramBuckets()
+{
+return histogramBuckets;
+}
}
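The new getter exposes the bucket configuration alongside the existing dimension and conversion-factor accessors. A hypothetical consumer, sketched below under the assumption that the emitter scales raw timer values by `conversionFactor` before observing them (the emitter's event path is not part of this diff; lookup keys and label values are illustrative only):

```java
import io.prometheus.client.Histogram;

public class TimerEmitSketch
{
  // Hypothetical helper, not part of this commit.
  static void emitTimer(Metrics metrics, double rawMillis)
  {
    DimensionsAndCollector dac = metrics.getByName("query/time", "historical");
    if (dac != null && dac.getCollector() instanceof Histogram) {
      // The Histogram was registered with dac.getHistogramBuckets(), so the
      // scaled value falls into one of those buckets.
      ((Histogram) dac.getCollector())
          .labels("wikipedia", "druid/historical", "localhost", "timeseries")
          .observe(rawMillis / dac.getConversionFactor());
    }
  }
}
```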
Metrics.java
@@ -38,6 +38,7 @@
import java.io.InputStream;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.regex.Pattern;
@@ -108,15 +109,15 @@ public Metrics(String namespace, String path, boolean isAddHostAsLabel, boolean
.namespace(namespace)
.name(formattedName)
.labelNames(dimensions)
-.buckets(.1, .25, .5, .75, 1, 2.5, 5, 7.5, 10, 30, 60, 120, 300)
+.buckets(metric.histogramBuckets)
.help(metric.help)
.register();
} else {
log.error("Unrecognized metric type [%s]", type);
}

if (collector != null) {
-parsedRegisteredMetrics.put(name, new DimensionsAndCollector(dimensions, collector, metric.conversionFactor));
+parsedRegisteredMetrics.put(name, new DimensionsAndCollector(dimensions, collector, metric.conversionFactor, metric.histogramBuckets));
}
}
this.registeredMetrics = Collections.unmodifiableMap(parsedRegisteredMetrics);
@@ -153,19 +154,26 @@ public static class Metric
public final Type type;
public final String help;
public final double conversionFactor;
+public final double[] histogramBuckets;

@JsonCreator
public Metric(
@JsonProperty("dimensions") SortedSet<String> dimensions,
@JsonProperty("type") Type type,
@JsonProperty("help") String help,
-@JsonProperty("conversionFactor") double conversionFactor
+@JsonProperty("conversionFactor") double conversionFactor,
+@JsonProperty("histogramBuckets") List<Double> histogramBuckets
)
{
this.dimensions = dimensions;
this.type = type;
this.help = help;
this.conversionFactor = conversionFactor;
+if (histogramBuckets != null && !histogramBuckets.isEmpty()) {
+this.histogramBuckets = histogramBuckets.stream().mapToDouble(Double::doubleValue).toArray();
+} else {
+this.histogramBuckets = new double[] {0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0};
+}
}

public enum Type
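The constructor change above gives every timer a bucket list even when the mapping file omits `histogramBuckets`; the hard-coded fallback matches the values the shipped default map (next file) now spells out explicitly for `query/time`. A minimal sketch of that fallback, assuming `Metrics.Metric` is importable from the emitter package and that the `Type` enum constants match the lowercase names ("timer", "count", "gauge") used in the mapping files:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class BucketDefaultsSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // No "histogramBuckets" key: the @JsonCreator constructor falls back to
    // the default bucket array.
    Metrics.Metric metric = mapper.readValue(
        "{\"dimensions\": [\"dataSource\"], \"type\": \"timer\","
        + " \"conversionFactor\": 1000.0, \"help\": \"Query time.\"}",
        Metrics.Metric.class
    );
    for (double bucket : metric.histogramBuckets) {
      System.out.println(bucket); // 0.1, 0.25, ..., 120.0, 300.0
    }
  }
}
```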
Default metric dimension map
@@ -1,5 +1,5 @@
{
-"query/time" : { "dimensions" : ["dataSource", "type"], "type" : "timer", "conversionFactor": 1000.0, "help": "Seconds taken to complete a query."},
+"query/time" : { "dimensions" : ["dataSource", "type"], "type" : "timer", "conversionFactor": 1000.0, "histogramBuckets": [0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0], "help": "Seconds taken to complete a query."},
"query/bytes" : { "dimensions" : ["dataSource", "type"], "type" : "count", "help": "Number of bytes returned in query response."},
"query/node/time" : { "dimensions" : ["server"], "type" : "timer", "conversionFactor": 1000.0, "help": "Seconds taken to query individual historical/realtime processes."},
"query/node/bytes" : { "dimensions" : ["server"], "type" : "count", "help": "Number of bytes returned from querying individual historical/realtime processes."},
MetricsTest.java
@@ -41,6 +41,8 @@ public void testMetricsConfiguration()
Assert.assertEquals("host_name", dimensions[2]);
Assert.assertEquals("type", dimensions[3]);
Assert.assertEquals(1000.0, dimensionsAndCollector.getConversionFactor(), 0.0);
+double[] defaultHistogramBuckets = {0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0};
+Assert.assertArrayEquals(defaultHistogramBuckets, dimensionsAndCollector.getHistogramBuckets(), 0.0);
Assert.assertTrue(dimensionsAndCollector.getCollector() instanceof Histogram);

DimensionsAndCollector d = metrics.getByName("segment/loadQueue/count", "historical");
@@ -67,6 +69,8 @@ public void testMetricsConfigurationWithExtraLabels()
Assert.assertEquals("host_name", dimensions[3]);
Assert.assertEquals("type", dimensions[4]);
Assert.assertEquals(1000.0, dimensionsAndCollector.getConversionFactor(), 0.0);
+double[] defaultHistogramBuckets = {0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0, 30.0, 60.0, 120.0, 300.0};
+Assert.assertArrayEquals(defaultHistogramBuckets, dimensionsAndCollector.getHistogramBuckets(), 0.0);
Assert.assertTrue(dimensionsAndCollector.getCollector() instanceof Histogram);

DimensionsAndCollector d = metrics.getByName("segment/loadQueue/count", "historical");
@@ -106,8 +110,26 @@ public void testMetricsConfigurationWithNonExistentMetric()
@Test
public void testMetricsConfigurationWithUnSupportedType()
{
-Assert.assertThrows(ISE.class, () -> {
-new Metrics("test_5", "src/test/resources/defaultMetricsTest.json", true, true, null);
+ISE iseException = Assert.assertThrows(ISE.class, () -> {
+new Metrics("test_5", "src/test/resources/defaultInvalidMetricsTest.json", true, true, null);
});
+Assert.assertEquals("Failed to parse metric configuration", iseException.getMessage());
}
+
+@Test
+public void testMetricsConfigurationWithTimerHistogramBuckets()
+{
+Metrics metrics = new Metrics("test_6", "src/test/resources/defaultMetricsTest.json", true, true, null);
+DimensionsAndCollector dimensionsAndCollector = metrics.getByName("query/time", "historical");
+Assert.assertNotNull(dimensionsAndCollector);
+String[] dimensions = dimensionsAndCollector.getDimensions();
+Assert.assertEquals("dataSource", dimensions[0]);
+Assert.assertEquals("druid_service", dimensions[1]);
+Assert.assertEquals("host_name", dimensions[2]);
+Assert.assertEquals("type", dimensions[3]);
+Assert.assertEquals(1000.0, dimensionsAndCollector.getConversionFactor(), 0.0);
+double[] expectedHistogramBuckets = {10.0, 30.0, 60.0, 120.0, 200.0, 300.0};
+Assert.assertArrayEquals(expectedHistogramBuckets, dimensionsAndCollector.getHistogramBuckets(), 0.0);
+}
+
}
src/test/resources/defaultInvalidMetricsTest.json (new file)
@@ -0,0 +1,3 @@
+{
+"query/nonExistent" : { "dimensions" : ["dataSource"], "type" : "nonExistent", "help": "Non supported type."}
+}
src/test/resources/defaultMetricsTest.json
@@ -1,3 +1,3 @@
{
-"query/nonExistent" : { "dimensions" : ["dataSource"], "type" : "nonExistent", "help": "Non supported type."}
+"query/time" : { "dimensions" : ["dataSource", "type"], "type" : "timer", "conversionFactor": 1000.0, "histogramBuckets": [10.0, 30.0, 60.0, 120.0, 200.0, 300.0], "help": "Seconds taken to complete a query."}
}
1 change: 1 addition & 0 deletions website/.spelling
@@ -360,6 +360,7 @@ hashcode
hashtable
high-QPS
historicals
+histogramBuckets
hostname
hostnames
http
