feat: extract and ingest more metrics context (aggregation temporality, unit and monotonicity) (#136)

This commit is contained in:
Warren 2023-12-03 17:26:38 -08:00 committed by GitHub
parent 8c8c476daa
commit ff38d753d3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 48 additions and 4 deletions

View file

@ -0,0 +1,7 @@
---
'@hyperdx/api': minor
'@hyperdx/app': minor
---
feat: extract and ingest more metrics context (aggregation temporality, unit and
monotonicity)

View file

@ -709,8 +709,12 @@ source = '''
if err == null && structured.event == "metric" {
# TODO: do this at extract_token
.hdx_token = del(structured.fields.__HDX_API_KEY)
.dt = structured.fields.metric_type
filtered_keys = ["metric_type"]
.at = to_int(del(structured.fields.metric_aggregation_temporality)) ?? 0
.dt = del(structured.fields.metric_type)
.im = to_bool(del(structured.fields.metric_is_monotonic)) ?? null
.u = del(structured.fields.metric_unit)
filtered_keys = []
for_each(object(structured.fields) ?? {})-> |key, value| {
if is_integer(value) || is_float(value) {
filtered_keys = push(filtered_keys, key)

View file

@ -30,6 +30,22 @@ processors:
- key: __HDX_API_KEY
from_context: authorization
action: upsert
# TODO: use transform to attach __HDX_API_KEY attribute to spans/metrics/logs
transform:
error_mode: ignore
metric_statements:
- context: resource
statements:
# map metrics context to resource context (so splunk_hec will capture it)
- set(attributes["metric_aggregation_temporality"], "0")
- set(attributes["metric_unit"], "")
- set(attributes["metric_is_monotonic"], false)
- context: metric
statements:
- set(resource.attributes["metric_aggregation_temporality"],
aggregation_temporality)
- set(resource.attributes["metric_unit"], unit)
- set(resource.attributes["metric_is_monotonic"], is_monotonic)
batch:
memory_limiter:
# 80% of maximum memory up to 2G
@ -78,7 +94,7 @@ service:
exporters: [logzio/traces, logging]
metrics:
receivers: [otlp]
processors: [attributes/attachHdxKey, memory_limiter, batch]
processors: [attributes/attachHdxKey, transform, memory_limiter, batch]
exporters: [splunk_hec, logging]
logs:
receivers: [otlp, fluentforward]

View file

@ -633,8 +633,11 @@ const getMetricsTagsUncached = async (teamId: string) => {
const query = SqlString.format(
`
SELECT
format('{} - {}', name, data_type) as name,
any(is_delta) as is_delta,
any(is_monotonic) as is_monotonic,
any(unit) as unit,
data_type,
format('{} - {}', name, data_type) as name,
groupUniqArray(_string_attributes) AS tags
FROM ??
GROUP BY name, data_type

View file

@ -6,6 +6,11 @@ export type JSONBlob = Record<string, any>;
export type KeyPath = string[];
// How successive data points of a metric relate to each other.
// NOTE(review): numeric values appear to mirror the OpenTelemetry proto
// enum (DELTA = 1, CUMULATIVE = 2); 0 would be "unspecified" — confirm
// against the OTLP metrics data model before relying on other values.
export enum AggregationTemporality {
Delta = 1,
Cumulative = 2,
}
export enum LogType {
Log = 'log',
Metric = 'metric',
@ -67,8 +72,11 @@ export type LogStreamModel = KeyValuePairs &
// Row shape for a stored metric sample, including the aggregation context
// (temporality, monotonicity, unit) derived from the ingested event.
export type MetricModel = {
_string_attributes: Record<string, string>;
data_type: string;
// true when the metric's aggregation temporality is Delta (vs. Cumulative)
is_delta: boolean;
// monotonicity flag carried through from the source metric
is_monotonic: boolean;
name: string;
timestamp: number;
// measurement unit as reported by the source; may be an empty string
unit: string;
value: number;
};
@ -207,14 +215,17 @@ export type VectorSpan = {
};
// Wire format of a metric event as emitted by the Vector pipeline; the
// single-letter keys keep the payload compact. VectorMetricParser maps
// this shape onto MetricModel.
export type VectorMetric = {
at: number; // aggregation temporality (AggregationTemporality enum value; 0 when unknown)
authorization?: string;
b: JSONBlob; // tags
dt: string; // data type
hdx_platform: string;
hdx_token: string;
im: boolean; // is monotonic
n: string; // name
ts: number; // timestamp
tso: number; // observed timestamp
u: string; // unit
v: number; // value
};
@ -276,8 +287,11 @@ class VectorMetricParser extends ParsingInterface<VectorMetric> {
return {
_string_attributes: metric.b,
data_type: metric.dt,
is_delta: metric.at === AggregationTemporality.Delta,
is_monotonic: metric.im,
name: metric.n,
timestamp: metric.ts,
unit: metric.u,
value: metric.v,
};
}