Mirror of https://github.com/hyperdxio/hyperdx (synced 2026-04-21 13:37:15 +00:00)
feat: support count per sec/min/hr aggregation functions (#198)
parent 3a8cfb8bfb
commit f4360edf39
4 changed files with 171 additions and 12 deletions
.changeset/breezy-seahorses-swim.md (new file, +6)
@@ -0,0 +1,6 @@
+---
+'@hyperdx/api': patch
+'@hyperdx/app': patch
+---
+
+feat: support count per sec/min/hr aggregation functions
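The changeset above describes the feature; the diffs that follow wire it through the API tests, the AggFn enum, and the ClickHouse query builder. As a rough sketch of the semantics (a hypothetical helper, not code from this commit), a count-per-unit aggregation is just the raw count normalized by the width of the time bucket:

// Hypothetical illustration of count_per_min semantics: the raw event
// count in a time bucket, divided by the bucket width in minutes.
const countPerMin = (eventCount: number, bucketWidthMs: number): number =>
  eventCount / (bucketWidthMs / 60_000);

countPerMin(3, 5 * 60 * 1000); // => 0.6 events/min (3 events in a 5-minute bucket)

count_per_sec and count_per_hour normalize by seconds and hours in the same way.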
@@ -323,6 +323,80 @@ Array [
         "ts_bucket": 1641341100,
       },
     ]
   `);
+
+    const multiGroupBysData2 = (
+      await clickhouse.getMultiSeriesChart({
+        series: [
+          {
+            type: 'time',
+            table: 'logs',
+            aggFn: clickhouse.AggFn.CountPerMin,
+            field: 'awesomeNumber',
+            where: `runId:${runId}`,
+            groupBy: ['testGroup', 'testOtherGroup'],
+          },
+        ],
+        tableVersion: undefined,
+        teamId,
+        startTime: now,
+        endTime: now + ms('10m'),
+        granularity: '5 minute',
+        maxNumGroups: 20,
+        seriesReturnType: clickhouse.SeriesReturnType.Column,
+      })
+    ).data.map(d => {
+      return _.pick(d, [
+        'group',
+        'series_0.data',
+        'series_1.data',
+        'ts_bucket',
+      ]);
+    });
+    expect(multiGroupBysData2.length).toEqual(5);
+    expect(multiGroupBysData2).toMatchInlineSnapshot(`
+      Array [
+        Object {
+          "group": Array [
+            "group2",
+            "otherGroup1",
+          ],
+          "series_0.data": 0.6,
+          "ts_bucket": 1641340800,
+        },
+        Object {
+          "group": Array [
+            "group1",
+            "otherGroup1",
+          ],
+          "series_0.data": 0.4,
+          "ts_bucket": 1641340800,
+        },
+        Object {
+          "group": Array [
+            "group1",
+            "otherGroup2",
+          ],
+          "series_0.data": 0.2,
+          "ts_bucket": 1641340800,
+        },
+        Object {
+          "group": Array [
+            "group1",
+            "otherGroup2",
+          ],
+          "series_0.data": 0.4,
+          "ts_bucket": 1641341100,
+        },
+        Object {
+          "group": Array [
+            "group1",
+            "otherGroup3",
+          ],
+          "series_0.data": 0.2,
+          "ts_bucket": 1641341100,
+        },
+      ]
+    `);
+
     const ratioData = (
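A note on the snapshot values above: with aggFn CountPerMin and granularity '5 minute', each bucket's data is the raw event count divided by ms('5 minute') / ms('1 minute') = 5, so "series_0.data": 0.6 corresponds to 3 matching events in that bucket, 0.4 to 2, and 0.2 to 1.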
@@ -382,6 +456,51 @@ Array [
         "ts_bucket": 1641341100,
       },
     ]
   `);
+
+    const tableData = (
+      await clickhouse.getMultiSeriesChart({
+        series: [
+          {
+            type: 'table',
+            table: 'logs',
+            aggFn: clickhouse.AggFn.CountPerMin,
+            where: `runId:${runId}`,
+            groupBy: ['testGroup'],
+          },
+        ],
+        tableVersion: undefined,
+        teamId,
+        startTime: now,
+        endTime: now + ms('10m'),
+        granularity: undefined,
+        maxNumGroups: 20,
+        seriesReturnType: clickhouse.SeriesReturnType.Column,
+      })
+    ).data.map(d => {
+      return _.pick(d, ['group', 'series_0.data', 'ts_bucket', 'rank']);
+    });
+
+    expect(tableData.length).toEqual(2);
+    expect(tableData).toMatchInlineSnapshot(`
+      Array [
+        Object {
+          "group": Array [
+            "group1",
+          ],
+          "rank": "1",
+          "series_0.data": 0.6,
+          "ts_bucket": "0",
+        },
+        Object {
+          "group": Array [
+            "group2",
+          ],
+          "rank": "2",
+          "series_0.data": 0.3,
+          "ts_bucket": "0",
+        },
+      ]
+    `);
   });
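In this table variant granularity is undefined, so the per-minute rate is normalized over the whole query window, age('mi', now, now + ms('10m')) = 10 minutes: 0.6 corresponds to 6 matching events for group1 and 0.3 to 3 events for group2.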
@@ -479,8 +598,6 @@ Array [
       }),
     );

     mockLogsPropertyTypeMappingsModel({});

     mockSpyMetricPropertyTypeMappingsModel({
       runId: 'string',
       host: 'string',
@@ -846,8 +963,6 @@ Array [
       }),
     );

     mockLogsPropertyTypeMappingsModel({});

     mockSpyMetricPropertyTypeMappingsModel({
       runId: 'string',
       host: 'string',
@@ -61,6 +61,9 @@ export enum AggFn {
   AvgRate = 'avg_rate',
   Count = 'count',
   CountDistinct = 'count_distinct',
+  CountPerSec = 'count_per_sec',
+  CountPerMin = 'count_per_min',
+  CountPerHour = 'count_per_hour',
   Max = 'max',
   MaxRate = 'max_rate',
   Min = 'min',
@@ -1078,6 +1081,18 @@ const buildEventSeriesQuery = async ({
     throw new Error('Rate is not supported in logs chart');
   }

+  const isCountFn =
+    aggFn === AggFn.Count ||
+    aggFn === AggFn.CountPerSec ||
+    aggFn === AggFn.CountPerMin ||
+    aggFn === AggFn.CountPerHour;
+
+  if (field == null && !isCountFn) {
+    throw new Error(
+      'Field is required for all aggregation functions except Count',
+    );
+  }
+
   const tableName = getLogStreamTableName(tableVersion, teamId);
   const whereClause = await buildSearchQueryWhereCondition({
     endTime,
@@ -1086,18 +1101,11 @@ const buildEventSeriesQuery = async ({
     startTime,
   });

-  if (field == null && aggFn !== AggFn.Count) {
-    throw new Error(
-      'Field is required for all aggregation functions except Count',
-    );
-  }
-
   const selectField =
     field != null
       ? buildSearchColumnName(propertyTypeMappingsModel.get(field), field)
       : '';

-  const isCountFn = aggFn === AggFn.Count;
   const groupByColumnNames = groupBy.map(g => {
     const columnName = buildSearchColumnName(
       propertyTypeMappingsModel.get(g),
@@ -1130,8 +1138,35 @@ const buildEventSeriesQuery = async ({
   const label = SqlString.escape(`${aggFn}(${field})`);

   const selectClause = [
-    isCountFn
+    aggFn === AggFn.Count
       ? 'toFloat64(count()) as data'
+      : aggFn === AggFn.CountPerSec
+      ? granularity
+        ? SqlString.format('divide(count(), ?) as data', [
+            ms(granularity) / ms('1 second'),
+          ])
+        : SqlString.format(
+            "divide(count(), age('ss', toDateTime(?), toDateTime(?))) as data",
+            [startTime / 1000, endTime / 1000],
+          )
+      : aggFn === AggFn.CountPerMin
+      ? granularity
+        ? SqlString.format('divide(count(), ?) as data', [
+            ms(granularity) / ms('1 minute'),
+          ])
+        : SqlString.format(
+            "divide(count(), age('mi', toDateTime(?), toDateTime(?))) as data",
+            [startTime / 1000, endTime / 1000],
+          )
+      : aggFn === AggFn.CountPerHour
+      ? granularity
+        ? SqlString.format('divide(count(), ?) as data', [
+            ms(granularity) / ms('1 hour'),
+          ])
+        : SqlString.format(
+            "divide(count(), age('hh', toDateTime(?), toDateTime(?))) as data",
+            [startTime / 1000, endTime / 1000],
+          )
       : aggFn === AggFn.Sum
       ? `toFloat64(sum(${selectField})) as data`
       : aggFn === AggFn.Avg
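In the granular branches above, the divisor is the bucket width divided by the rate unit, computed with the ms package the file already uses; without a granularity, the fallback divides by the age() of the full query window instead. A quick illustration of the granular case (illustrative values, not part of the diff):

import ms from 'ms';
import SqlString from 'sqlstring';

// '5 minute' buckets with count_per_min: each bucket's count is divided by 5.
const granularity = '5 minute';
const divisor = ms(granularity) / ms('1 minute'); // 300000 / 60000 = 5
const sql = SqlString.format('divide(count(), ?) as data', [divisor]);
// sql === 'divide(count(), 5) as data'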
@@ -165,6 +165,9 @@ export type AggFn =
   | 'avg'
   | 'count_distinct'
   | 'count'
+  | 'count_per_sec'
+  | 'count_per_min'
+  | 'count_per_hour'
   | 'max_rate'
   | 'max'
   | 'min_rate'