feat: query sum metric without rate logic (#717)

Add the ability to query a sum metric and obtain the underlying values instead of the rate of change between those points.

Ref: HDX-1543
This commit is contained in:
Dan Hable 2025-03-27 14:09:58 -05:00 committed by GitHub
parent 81e4799b89
commit e002c2f9c6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 101 additions and 24 deletions

View file

@ -0,0 +1,5 @@
---
"@hyperdx/common-utils": minor
---
Support querying a sum metric as a value instead of a rate

View file

@ -24,12 +24,12 @@ Array [
exports[`renderChartConfig Query Metrics calculates min_rate/max_rate correctly for sum metrics: maxSum 1`] = `
Array [
Object {
"Value": 24,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"max(toFloat64OrNull(toString(Value)))": 24,
},
Object {
"Value": 134,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
"max(toFloat64OrNull(toString(Value)))": 134,
},
]
`;
@ -37,12 +37,12 @@ Array [
exports[`renderChartConfig Query Metrics calculates min_rate/max_rate correctly for sum metrics: minSum 1`] = `
Array [
Object {
"Value": 15,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"min(toFloat64OrNull(toString(Value)))": 15,
},
Object {
"Value": 52,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
"min(toFloat64OrNull(toString(Value)))": 52,
},
]
`;
@ -50,12 +50,12 @@ Array [
exports[`renderChartConfig Query Metrics handles counter resets correctly for sum metrics 1`] = `
Array [
Object {
"Value": 15,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"sum(toFloat64OrNull(toString(Value)))": 15,
},
Object {
"Value": 52,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
"sum(toFloat64OrNull(toString(Value)))": 52,
},
]
`;
@ -140,20 +140,33 @@ Array [
exports[`renderChartConfig Query Metrics single sum rate 1`] = `
Array [
Object {
"Value": 19,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"sum(toFloat64OrNull(toString(Value)))": 19,
},
Object {
"Value": 79,
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"sum(toFloat64OrNull(toString(Value)))": 79,
},
Object {
"Value": 5813,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
"sum(toFloat64OrNull(toString(Value)))": 5813,
},
Object {
"Value": 78754,
"__hdx_time_bucket": "2022-01-05T00:15:00Z",
"sum(toFloat64OrNull(toString(Value)))": 78754,
},
]
`;
exports[`renderChartConfig Query Metrics sum values as without rate computation 1`] = `
Array [
Object {
"Value": 950400,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
"Value": 1641600,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
]
`;

View file

@ -342,6 +342,25 @@ describe('renderChartConfig', () => {
IsMonotonic: true,
AggregationTemporality: 2, // Cumulative
}));
const podAgePoints = [
{ Value: 518400, TimeUnix: new Date(now - ms('1m')) },
{ Value: 604800, TimeUnix: new Date(now) },
{ Value: 691200, TimeUnix: new Date(now + ms('1m')) },
{ Value: 777600, TimeUnix: new Date(now + ms('2m')) },
{ Value: 864000, TimeUnix: new Date(now + ms('3m')) },
{ Value: 950400, TimeUnix: new Date(now + ms('4m')) },
{ Value: 1641600, TimeUnix: new Date(now + ms('12m')) },
].map(point => ({
MetricName: 'k8s.pod.uptime',
ServiceName: 'api',
ResourceAttributes: {
host: 'cluster-node-1',
ip: '127.0.0.1',
},
IsMonotonic: true,
AggregationTemporality: 2,
...point,
}));
const histPointsA = [
{
BucketCounts: [0, 0, 0],
@ -436,6 +455,7 @@ describe('renderChartConfig', () => {
...sumPointsC,
...sumPointsD,
...sumPointsE,
...podAgePoints,
]),
bulkInsertMetricsHistogram([
...histPointsA,
@ -586,6 +606,30 @@ describe('renderChartConfig', () => {
expect(await queryData(query)).toMatchSnapshot();
});
it('sum values as without rate computation', async () => {
const query = await renderChartConfig(
{
select: [
{
metricName: 'k8s.pod.uptime',
metricType: MetricsDataType.Sum,
valueExpression: 'Value',
},
],
from: metricSource.from,
where: 'ServiceName:api',
whereLanguage: 'lucene',
metricTables: TEST_METRIC_TABLES,
dateRange: [new Date(now), new Date(now + ms('20m'))],
granularity: '5 minutes',
timestampValueExpression: metricSource.timestampValueExpression,
connection: connection.id,
},
metadata,
);
expect(await queryData(query)).toMatchSnapshot();
});
it('handles counter resets correctly for sum metrics', async () => {
const query = await renderChartConfig(
{

View file

@ -79,15 +79,17 @@ exports[`renderChartConfig should generate sql for a single sum metric 1`] = `
IF(AggregationTemporality = 1,
SUM(Value) OVER (PARTITION BY AttributesHash ORDER BY AttributesHash, TimeUnix ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW),
deltaSum(Value) OVER (PARTITION BY AttributesHash ORDER BY AttributesHash, TimeUnix ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
) AS Value
) AS Rate,
IF(AggregationTemporality = 1, Rate, Value) AS Sum
FROM default.otel_metrics_sum
WHERE (TimeUnix >= toStartOfInterval(fromUnixTimestamp64Milli(1739318400000), INTERVAL 5 minute) - INTERVAL 5 minute AND TimeUnix <= toStartOfInterval(fromUnixTimestamp64Milli(1765670400000), INTERVAL 5 minute) + INTERVAL 5 minute) AND ((MetricName = 'db.client.connections.usage'))),Bucketed AS (
SELECT
toStartOfInterval(toDateTime(TimeUnix), INTERVAL 5 minute) AS \`__hdx_time_bucket2\`,
AttributesHash,
last_value(Source.Value) AS \`__hdx_value_high\`,
last_value(Source.Rate) AS \`__hdx_value_high\`,
any(\`__hdx_value_high\`) OVER(PARTITION BY AttributesHash ORDER BY \`__hdx_time_bucket2\` ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS \`__hdx_value_high_prev\`,
\`__hdx_value_high\` - \`__hdx_value_high_prev\` AS Value,
\`__hdx_value_high\` - \`__hdx_value_high_prev\` AS Rate,
last_value(Source.Sum) AS Sum,
any(ResourceAttributes) AS ResourceAttributes,
any(ResourceSchemaUrl) AS ResourceSchemaUrl,
any(ScopeName) AS ScopeName,
@ -108,6 +110,6 @@ exports[`renderChartConfig should generate sql for a single sum metric 1`] = `
GROUP BY AttributesHash, \`__hdx_time_bucket2\`
ORDER BY AttributesHash, \`__hdx_time_bucket2\`
) SELECT avg(
toFloat64OrNull(toString(Value))
),toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10"
toFloat64OrNull(toString(Rate))
) AS \\"Value\\",toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10"
`;

View file

@ -958,7 +958,7 @@ async function translateMetricChartConfig(
...chartConfig,
from: {
...from,
tableName: metricTables[MetricsDataType.Gauge],
tableName: metricTables[MetricsDataType.Sum],
},
filters: [
...(filters ?? []),
@ -988,7 +988,8 @@ async function translateMetricChartConfig(
IF(AggregationTemporality = 1,
SUM(Value) OVER (PARTITION BY AttributesHash ORDER BY AttributesHash, TimeUnix ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW),
deltaSum(Value) OVER (PARTITION BY AttributesHash ORDER BY AttributesHash, TimeUnix ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
) AS Value
) AS Rate,
IF(AggregationTemporality = 1, Rate, Value) AS Sum
FROM ${renderFrom({ from: { ...from, tableName: metricTables[MetricsDataType.Sum] } })}
WHERE ${where}`,
},
@ -998,9 +999,10 @@ async function translateMetricChartConfig(
SELECT
${timeExpr},
AttributesHash,
last_value(Source.Value) AS ${valueHighCol},
last_value(Source.Rate) AS ${valueHighCol},
any(${valueHighCol}) OVER(PARTITION BY AttributesHash ORDER BY \`${timeBucketCol}\` ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS ${valueHighPrevCol},
${valueHighCol} - ${valueHighPrevCol} AS Value,
${valueHighCol} - ${valueHighPrevCol} AS Rate,
last_value(Source.Sum) AS Sum,
any(ResourceAttributes) AS ResourceAttributes,
any(ResourceSchemaUrl) AS ResourceSchemaUrl,
any(ScopeName) AS ScopeName,
@ -1024,11 +1026,22 @@ async function translateMetricChartConfig(
},
],
select: [
{
..._select,
valueExpression: 'Value',
aggCondition: '', // clear up the condition since the where clause is already applied at the upstream CTE
},
// HDX-1543: If the chart config query asks for an aggregation, then use the computed rate value; otherwise
// use the underlying summed value. The alias field appears before the spread so user-defined aliases will
// take precedence over our generic value.
_select.aggFn
? {
alias: 'Value',
..._select,
valueExpression: 'Rate',
aggCondition: '',
}
: {
alias: 'Value',
..._select,
valueExpression: 'last_value(Sum)',
aggCondition: '',
},
],
from: {
databaseName: '',