migration: alert v2 model (#542)

This PR includes the following changes:
1. Rename `type` field to `thresholdType` (enum 'above' or 'below')
2. Introduce alert source enum ('saved_search' or 'tile')
3. Rename `dashboardId` field to `dashboard`
4. Deprecate unused `cron` field
5. Deprecate external alert translation layers
6. Deprecate `appType` flag
7. Copied over 'common' pkgs from app dir (mostly clickhouse query helpers + types) -> will become a shareable pkg between app and api
This commit is contained in:
Warren 2025-01-07 12:15:02 -08:00 committed by GitHub
parent ca9fb1893f
commit 8b6e09c20a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
37 changed files with 3551 additions and 901 deletions

View file

@ -56,7 +56,11 @@ services:
# ports:
# - 9000:9000
environment:
CLICKHOUSE_HOST: http://ch-server:8123
CLICKHOUSE_PASSWORD: api
CLICKHOUSE_USER: api
EXPRESS_SESSION_SECRET: 'hyperdx is cool 👋'
FRONTEND_URL: 'http://app:8080'
MONGO_URI: 'mongodb://db:29999/hyperdx-test'
NODE_ENV: ci
PORT: 9000

View file

@ -8,6 +8,7 @@
},
"dependencies": {
"@clickhouse/client": "^0.2.10",
"@clickhouse/client-common": "^1.9.1",
"@hyperdx/lucene": "^3.1.1",
"@hyperdx/node-opentelemetry": "^0.8.1",
"@opentelemetry/api": "^1.8.0",
@ -33,6 +34,7 @@
"mongoose": "^6.12.0",
"ms": "^2.1.3",
"node-schedule": "^2.1.1",
"node-sql-parser": "^5.3.5",
"object-hash": "^3.0.0",
"on-headers": "^1.0.2",
"passport": "^0.6.0",
@ -47,7 +49,7 @@
"sqlstring": "^2.3.3",
"uuid": "^8.3.2",
"winston": "^3.10.0",
"zod": "^3.22.3",
"zod": "^3.24.1",
"zod-express-middleware": "^1.4.0"
},
"devDependencies": {

View file

@ -34,7 +34,7 @@ const sess: session.SessionOptions & { cookie: session.CookieOptions } = {
};
app.set('trust proxy', 1);
if (config.FRONTEND_URL && !config.IS_CI) {
if (!config.IS_CI && config.FRONTEND_URL) {
const feUrl = new URL(config.FRONTEND_URL);
sess.cookie.domain = feUrl.hostname;
if (feUrl.protocol === 'https:') {

View file

@ -0,0 +1,9 @@
/** Supported visualization types for dashboard tiles and charts. */
export enum DisplayType {
  Line = 'line',
  StackedBar = 'stacked_bar',
  Table = 'table',
  Number = 'number',
  Search = 'search',
  Heatmap = 'heatmap',
  Markdown = 'markdown',
}

View file

@ -0,0 +1,606 @@
import {
BaseResultSet,
DataFormat,
isSuccessfulResponse,
ResponseJSON,
} from '@clickhouse/client-common';
import { SQLInterval } from '@/common/sqlTypes';
import { timeBucketByGranularity } from '@/common/utils';
import { hashCode } from '@/common/utils';
/** Path prefix the frontend uses to proxy queries to ClickHouse. */
export const CLICKHOUSE_HOST = '/api/clickhouse-proxy';

/** JavaScript-side type categories that ClickHouse column types map onto. */
export enum JSDataType {
  Array = 'array',
  Date = 'date',
  Map = 'map',
  Number = 'number',
  String = 'string',
  Bool = 'bool',
}

/**
 * Maps a ClickHouse column type name (e.g. 'DateTime64(3)', 'Nullable(Int64)',
 * 'LowCardinality(String)') to its JS type category.
 *
 * Wrapper types are unwrapped recursively: `Nullable(T)` and
 * `LowCardinality(T)` resolve to whatever `T` resolves to. This generalizes
 * the previous behavior, which only special-cased Nullable(Int/UInt/Float)
 * and returned null for e.g. Nullable(String).
 *
 * @param dataType - ClickHouse type name as reported in result metadata.
 * @returns The matching JSDataType, or null when the type is unrecognized.
 */
export const convertCHDataTypeToJSType = (
  dataType: string,
): JSDataType | null => {
  if (dataType.startsWith('Date')) {
    return JSDataType.Date;
  } else if (dataType.startsWith('Map')) {
    return JSDataType.Map;
  } else if (dataType.startsWith('Array')) {
    return JSDataType.Array;
  } else if (
    dataType.startsWith('Int') ||
    dataType.startsWith('UInt') ||
    dataType.startsWith('Float')
  ) {
    return JSDataType.Number;
  } else if (
    dataType.startsWith('String') ||
    dataType.startsWith('FixedString') ||
    dataType.startsWith('Enum') ||
    dataType.startsWith('UUID') ||
    dataType.startsWith('IPv4') ||
    dataType.startsWith('IPv6')
  ) {
    return JSDataType.String;
  } else if (dataType === 'Bool') {
    return JSDataType.Bool;
  } else if (dataType.startsWith('Nullable(')) {
    // Unwrap and resolve the inner type (Nullable types appear in chart
    // result metadata). Covers Nullable(Int/UInt/Float) exactly as before,
    // plus Nullable(String), Nullable(Date), Nullable(Bool), etc.
    return convertCHDataTypeToJSType(dataType.slice('Nullable('.length, -1));
  } else if (dataType.startsWith('LowCardinality(')) {
    return convertCHDataTypeToJSType(
      dataType.slice('LowCardinality('.length, -1),
    );
  }
  return null;
};
/**
 * Maps a ClickHouse type to a primitive JS type category. Dates are coerced
 * to Number (downstream code works with epoch values); container types
 * (Map/Array) have no primitive equivalent.
 *
 * @throws Error when the type resolves to a Map or Array.
 */
export const convertCHTypeToPrimitiveJSType = (dataType: string) => {
  const jsType = convertCHDataTypeToJSType(dataType);
  if (jsType === JSDataType.Map || jsType === JSDataType.Array) {
    // Fixed: the message previously said "Map type" even when the offending
    // type was an Array.
    throw new Error(`${jsType} type is not a primitive type`);
  } else if (jsType === JSDataType.Date) {
    return JSDataType.Number;
  }
  return jsType;
};
// Stable non-negative numeric hash of a value, used to derive parameter names.
const hash = (input: string | number) => Math.abs(hashCode(`${input}`));
// Deterministic placeholder name for a bound parameter; equal values always
// map to the same placeholder, so duplicates share one binding.
const paramHash = (str: string | number) => {
  return `HYPERDX_PARAM_${hash(str)}`;
};
// A SQL fragment plus the query-parameter bindings it references.
export type ChSql = {
  sql: string;
  params: Record<string, any>;
};
// Values accepted by the chSql tagged template (see chSql for how each
// variant is rendered and bound).
type ParamTypes =
  | ChSql
  | ChSql[]
  | { Identifier: string }
  | { String: string }
  | { Float32: number }
  | { Float64: number }
  | { Int32: number }
  | { Int64: number }
  | { UNSAFE_RAW_SQL: string }
  | string; // TODO: Deprecate raw string interpolation
/**
 * Tagged template for building parameterized ClickHouse SQL.
 *
 * Interpolated values may be:
 *  - a plain string: spliced into the SQL verbatim (legacy; see TODO above)
 *  - { UNSAFE_RAW_SQL }: spliced verbatim, explicitly opting out of binding
 *  - another ChSql (or an array of them): nested fragments are concatenated
 *    and their params merged
 *  - { Identifier | String | Float32 | Float64 | Int32 | Int64 }: bound as a
 *    typed server-side parameter via a `{HYPERDX_PARAM_<hash>:<Type>}`
 *    placeholder
 *
 * Returns the assembled SQL plus a param map keyed by placeholder name.
 * NOTE: the params reduce below mirrors the variant precedence of the sql
 * rendering above — keep the two chains in sync when editing.
 */
export const chSql = (
  strings: TemplateStringsArray,
  ...values: ParamTypes[]
): ChSql => {
  const sql = strings
    .map((str, i) => {
      // The final template chunk has no paired value, so `value` may be
      // undefined here (rendered as '').
      const value = values[i];
      // if (typeof value === 'string') {
      //   console.error('Unsafe string detected', value, 'in', strings, values);
      // }
      return (
        str +
        (value == null
          ? ''
          : typeof value === 'string'
          ? value // If it's just a string sql literal
          : 'UNSAFE_RAW_SQL' in value
          ? value.UNSAFE_RAW_SQL
          : Array.isArray(value)
          ? value.map(v => v.sql).join('')
          : 'sql' in value
          ? value.sql
          : 'Identifier' in value
          ? `{${paramHash(value.Identifier)}:Identifier}`
          : 'String' in value
          ? `{${paramHash(value.String)}:String}`
          : 'Float32' in value
          ? `{${paramHash(value.Float32)}:Float32}`
          : 'Float64' in value
          ? `{${paramHash(value.Float64)}:Float64}`
          : 'Int32' in value
          ? `{${paramHash(value.Int32)}:Int32}`
          : 'Int64' in value
          ? `{${paramHash(value.Int64)}:Int64}`
          : '')
      );
    })
    .join('');
  return {
    sql,
    // Collect every binding contributed by the interpolated values; raw
    // splices (strings, UNSAFE_RAW_SQL) contribute none.
    params: values.reduce((acc, value) => {
      return {
        ...acc,
        ...(value == null ||
        typeof value === 'string' ||
        'UNSAFE_RAW_SQL' in value
          ? {}
          : Array.isArray(value)
          ? value.reduce((acc, v) => {
              Object.assign(acc, v.params);
              return acc;
            }, {})
          : 'params' in value
          ? value.params
          : 'Identifier' in value
          ? { [paramHash(value.Identifier)]: value.Identifier }
          : 'String' in value
          ? { [paramHash(value.String)]: value.String }
          : 'Float32' in value
          ? { [paramHash(value.Float32)]: value.Float32 }
          : 'Float64' in value
          ? { [paramHash(value.Float64)]: value.Float64 }
          : 'Int32' in value
          ? { [paramHash(value.Int32)]: value.Int32 }
          : 'Int64' in value
          ? { [paramHash(value.Int64)]: value.Int64 }
          : {}),
      };
    }, {}),
  };
};
/**
 * Joins ChSql fragments (or arrays of fragments) with a separator, merging
 * all of their bound parameters into one ChSql.
 *
 * Empty arrays are skipped entirely, as is a lone fragment with empty SQL;
 * the separator is only inserted between non-initial pieces.
 */
export const concatChSql = (sep: string, ...args: (ChSql | ChSql[])[]) => {
  const combined: ChSql = { sql: '', params: {} };
  for (const piece of args) {
    if (Array.isArray(piece)) {
      if (piece.length === 0) {
        continue;
      }
      const joined = piece.map(part => part.sql).join(sep);
      combined.sql += combined.sql.length > 0 ? sep + joined : joined;
      for (const part of piece) {
        Object.assign(combined.params, part.params);
      }
    } else if (piece.sql.length > 0) {
      combined.sql += combined.sql.length > 0 ? sep + piece.sql : piece.sql;
      Object.assign(combined.params, piece.params);
    }
  }
  return combined;
};
/** True when a fragment (or every fragment in a list) has no SQL text. */
const isChSqlEmpty = (chSql: ChSql | ChSql[]) => {
  if (!Array.isArray(chSql)) {
    return chSql.sql.length === 0;
  }
  return !chSql.some(fragment => fragment.sql.length > 0);
};
/**
 * Wraps a fragment with the given left/right SQL text (e.g. parentheses),
 * or returns an empty fragment list when there is nothing to wrap.
 */
export const wrapChSqlIfNotEmpty = (
  sql: ChSql | ChSql[],
  left: string,
  right: string,
): ChSql | [] => (isChSqlEmpty(sql) ? [] : chSql`${left}${sql}${right}`);
/**
 * Error thrown when ClickHouse answers a query with a non-success HTTP
 * response; carries the (debug-rendered) query text for diagnostics.
 */
export class ClickHouseQueryError extends Error {
  constructor(
    message: string,
    // The SQL that triggered the failure, exposed for logging/UI.
    public query: string,
  ) {
    super(message);
    this.name = 'ClickHouseQueryError';
  }
}
/**
 * Strips function-call wrappers from a SQL expression to recover the bare
 * column reference, e.g. `toString(lower(col))` -> `col`.
 *
 * Each pass peels exactly one layer of calls (the pattern only matches
 * argument lists without nested parens), bounded by `maxIterations`.
 *
 * @param sql - SQL expression possibly wrapping a column in function calls.
 * @param maxIterations - Safety bound on unwrap passes (default 10).
 * @returns The trimmed column expression, or null when call syntax still
 *   remains after maxIterations passes (pathological/unresolvable input).
 */
export function extractColumnReference(
  sql: string,
  maxIterations = 10,
): string | null {
  // Hoisted: previously re-created on every loop test.
  const callPattern = /\w+\(([^()]*)\)/;
  let iterations = 0;
  // Peel one layer of function calls per pass, up to the iteration limit.
  while (callPattern.test(sql) && iterations < maxIterations) {
    sql = sql.replace(callPattern, '$1');
    iterations++;
  }
  // Fixed off-by-one: an input that resolved on exactly the
  // maxIterations-th pass used to be reported as unresolved (null).
  // Only fail when call syntax actually remains.
  return callPattern.test(sql) ? null : sql.trim();
}
// Thin fetch-based ClickHouse HTTP client (GET `/?query=...`). Used instead
// of @clickhouse/client's transport so requests can flow through the app's
// proxy with cookies/CORS controlled per call.
const client = {
  /**
   * Executes a query over the ClickHouse HTTP interface and returns a
   * streaming result set.
   *
   * Query text, output format and every bound parameter are passed as URL
   * search params (`param_<name>` per the ClickHouse HTTP protocol).
   * NOTE(review): values also end up in the URL — presumably acceptable for
   * the proxy setup, but large queries may hit URL length limits; confirm.
   */
  async query<T extends DataFormat>({
    query,
    format = 'JSON',
    query_params = {},
    abort_signal,
    clickhouse_settings,
    host,
    username,
    password,
    includeCredentials,
    includeCorsHeader,
    connectionId,
    queryId,
  }: {
    query: string;
    format?: string;
    abort_signal?: AbortSignal;
    query_params?: Record<string, any>;
    clickhouse_settings?: Record<string, any>;
    host?: string;
    username?: string;
    password?: string;
    includeCredentials: boolean;
    includeCorsHeader: boolean;
    connectionId?: string;
    queryId?: string;
  }): Promise<BaseResultSet<any, T>> {
    const searchParams = new URLSearchParams([
      ...(includeCorsHeader ? [['add_http_cors_header', '1']] : []),
      // Consumed by the proxy to select which connection to query.
      ...(connectionId ? [['hyperdx_connection_id', connectionId]] : []),
      ['query', query],
      ['default_format', format],
      ['date_time_output_format', 'iso'],
      ['wait_end_of_query', '0'],
      ['cancel_http_readonly_queries_on_client_close', '1'],
      ...(username ? [['user', username]] : []),
      ...(password ? [['password', password]] : []),
      ...(queryId ? [['query_id', queryId]] : []),
      ...Object.entries(query_params).map(([key, value]) => [
        `param_${key}`,
        value,
      ]),
      ...Object.entries(clickhouse_settings ?? {}).map(([key, value]) => [
        key,
        value,
      ]),
    ]);
    // Render a human-readable SQL string purely for logging; fall back to
    // the raw parameterized text if rendering fails.
    let debugSql = '';
    try {
      debugSql = parameterizedQueryToSql({ sql: query, params: query_params });
    } catch (e) {
      debugSql = query;
    }
    // NOTE(review): these logs fire on every query — looks like temporary
    // debugging; consider removing or gating before production.
    // eslint-disable-next-line no-console
    console.log('--------------------------------------------------------');
    // eslint-disable-next-line no-console
    console.log('Sending Query:', debugSql);
    // eslint-disable-next-line no-console
    console.log('--------------------------------------------------------');
    const res = await fetch(`${host}/?${searchParams.toString()}`, {
      ...(includeCredentials ? { credentials: 'include' } : {}),
      signal: abort_signal,
      method: 'GET',
    });
    // TODO: Send command to CH to cancel query on abort_signal
    // NOTE(review): the inner isSuccessfulResponse check is redundant with
    // !res.ok for standard statuses (res.ok already means 2xx) — confirm
    // whether it guards some proxy-specific status before simplifying.
    if (!res.ok) {
      if (!isSuccessfulResponse(res.status)) {
        const text = await res.text();
        throw new ClickHouseQueryError(`${text}`, debugSql);
      }
    }
    if (res.body == null) {
      // TODO: Handle empty responses better?
      throw new Error('Unexpected empty response from ClickHouse');
    }
    // @ts-ignore
    return new BaseResultSet(res.body, format, '');
  },
};
/**
 * Fires a trivial `SELECT 1` at the given ClickHouse host to verify the
 * host and credentials are reachable/valid.
 *
 * @returns true when the server answers exactly "1"; false on any failure
 *   (this function is intended never to throw).
 */
export const testLocalConnection = async ({
  host,
  username,
  password,
}: {
  host: string;
  username: string;
  password: string;
}): Promise<boolean> => {
  try {
    const result = await client.query({
      query: 'SELECT 1',
      format: 'TabSeparatedRaw',
      host: host,
      username: username,
      password: password,
      includeCredentials: false,
      includeCorsHeader: true,
    });
    // Await inside the try (instead of returning the bare promise) so a
    // rejection from text() is caught below and reported as `false`;
    // previously `return result.text().then(...)` let such rejections
    // escape the try/catch and reject the caller.
    const text = await result.text();
    return text.trim() === '1';
  } catch (e) {
    console.warn('Failed to test local connection', e);
    return false;
  }
};
/**
 * Sends a query through the app's ClickHouse proxy for the given connection.
 *
 * The IS_LOCAL_MODE branch is a hardcoded placeholder (always false here) —
 * presumably ported from the app's local-mode support; the dead branch is
 * kept pending the TODO below.
 */
export const sendQuery = async <T extends DataFormat>({
  query,
  format = 'JSON',
  query_params = {},
  abort_signal,
  clickhouse_settings,
  connectionId,
  queryId,
}: {
  query: string;
  format?: string;
  query_params?: Record<string, any>;
  abort_signal?: AbortSignal;
  clickhouse_settings?: Record<string, any>;
  connectionId: string;
  queryId?: string;
}) => {
  const IS_LOCAL_MODE = false;
  // TODO: decide what to do here
  let host, username, password;
  if (IS_LOCAL_MODE) {
    const localConnections: any = [];
    if (localConnections.length === 0) {
      throw new Error('No local connection found');
    }
    host = localConnections[0].host;
    username = localConnections[0].username;
    password = localConnections[0].password;
  }
  return client.query<T>({
    query,
    format,
    query_params,
    abort_signal,
    clickhouse_settings,
    queryId,
    // In proxy mode the connection is resolved server-side from the id;
    // in local mode credentials would be passed directly instead.
    connectionId: IS_LOCAL_MODE ? undefined : connectionId,
    host: IS_LOCAL_MODE ? host : CLICKHOUSE_HOST,
    username: IS_LOCAL_MODE ? username : undefined,
    password: IS_LOCAL_MODE ? password : undefined,
    includeCredentials: !IS_LOCAL_MODE,
    includeCorsHeader: IS_LOCAL_MODE,
  });
};
/**
 * Builds a safely-parameterized `db.table` expression using Identifier
 * bindings, so database/table names are never string-interpolated into SQL.
 */
export const tableExpr = ({
  database,
  table,
}: {
  database: string;
  table: string;
}) => {
  return chSql`${{ Identifier: database }}.${{ Identifier: table }}`;
};
/**
 * (Illustrative shape of a rendered chart query — NOTE(review): this sketch
 * appears to document chart-query rendering rather than the function below;
 * confirm whether it belongs with renderChartConfig.)
 * SELECT
 *   aggFnIf(fieldToColumn(field), where),
 *   timeBucketing(Granularity, timeConversion(fieldToColumn(field))),
 * FROM db.table
 * WHERE where
 * GROUP BY timeBucketing, fieldToColumn(groupBy)
 * ORDER BY orderBy
 */
/**
 * Inlines bound parameters into a parameterized query string for debugging
 * and display. No SQL escaping is performed — do not execute the result.
 */
export function parameterizedQueryToSql({
  sql,
  params,
}: {
  sql: string;
  params: Record<string, any>;
}) {
  return Object.entries(params).reduce((acc, [key, value]) => {
    // Use a replacer function so `$`-sequences in values (e.g. "$&") are
    // inserted literally instead of being interpreted as replacement
    // patterns, which previously corrupted the rendered SQL.
    return acc.replace(new RegExp(`{${key}:\\w+}`, 'g'), () => String(value));
  }, sql);
}
// Column metadata entry as returned in a ClickHouse JSON result's `meta`.
export type ColumnMetaType = { name: string; type: string };

/**
 * Filters result-set column metadata to the columns whose ClickHouse type
 * maps onto one of the given JS type categories.
 */
export function filterColumnMetaByType(
  meta: Array<ColumnMetaType>,
  types: JSDataType[],
): Array<ColumnMetaType> | undefined {
  return meta.filter(column =>
    types.includes(convertCHDataTypeToJSType(column.type) as JSDataType),
  );
}

/** Picks the first Date-typed column as the timestamp column, if any. */
export function inferTimestampColumn(
  // from: https://github.com/ClickHouse/clickhouse-js/blob/442392c83834f313a964f9e5bd7ff44474631755/packages/client-common/src/clickhouse_types.ts#L8C3-L8C47
  meta: Array<ColumnMetaType>,
) {
  return filterColumnMetaByType(meta, [JSDataType.Date])?.[0];
}

/** Numeric columns are treated as chartable series values. */
function inferValueColumns(meta: Array<{ name: string; type: string }>) {
  return filterColumnMetaByType(meta, [JSDataType.Number]);
}

/** String/Map/Array columns are treated as group-by dimensions. */
function inferGroupColumns(meta: Array<{ name: string; type: string }>) {
  return filterColumnMetaByType(meta, [
    JSDataType.String,
    JSDataType.Map,
    JSDataType.Array,
  ]);
}
// TODO: Move to ChartUtils
// Input: { ts, value1, value2, groupBy1, groupBy2 },
// Output: { ts, [value1Name, groupBy1, groupBy2]: value1, [...]: value2 }
/**
 * Pivots a ClickHouse time-series result into per-line chart rows.
 *
 * One output row per timestamp bucket; one key per (value column × group
 * values) combination. Optionally zero-fills buckets generated over the
 * requested date range so charts show gaps as 0 instead of missing points.
 *
 * @throws Error when the response has no meta, or no Date-typed column.
 */
export function formatResponseForTimeChart({
  res,
  dateRange,
  granularity,
  generateEmptyBuckets = true,
}: {
  dateRange: [Date, Date];
  granularity?: SQLInterval;
  res: ResponseJSON<Record<string, any>>;
  generateEmptyBuckets?: boolean;
}) {
  const meta = res.meta;
  const data = res.data;
  if (meta == null) {
    throw new Error('No meta data found in response');
  }
  // Column roles are inferred from the result metadata types.
  const timestampColumn = inferTimestampColumn(meta);
  const valueColumns = inferValueColumns(meta) ?? [];
  const groupColumns = inferGroupColumns(meta) ?? [];
  if (timestampColumn == null) {
    throw new Error(
      `No timestamp column found with meta: ${JSON.stringify(meta)}`,
    );
  }
  // Timestamp -> { tsCol, line1, line2, ...}
  // Keyed by epoch seconds so generated buckets can be matched exactly.
  const tsBucketMap: Map<number, Record<string, any>> = new Map();
  // Per-line display metadata, keyed by the composite line key.
  const lineDataMap: {
    [keyName: string]: {
      dataKey: string;
      displayName: string;
      maxValue: number;
      minValue: number;
      color: string | undefined;
    };
  } = {};
  for (const row of data) {
    const date = new Date(row[timestampColumn.name]);
    const ts = date.getTime() / 1000;
    for (const valueColumn of valueColumns) {
      const tsBucket = tsBucketMap.get(ts) ?? {};
      // Line key = value column name plus this row's group values.
      const keyName = [
        valueColumn.name,
        ...groupColumns.map(g => row[g.name]),
      ].join(' · ');
      // UInt64 are returned as strings, we'll convert to number
      // and accept a bit of floating point error
      const rawValue = row[valueColumn.name];
      const value =
        typeof rawValue === 'number' ? rawValue : Number.parseFloat(rawValue);
      tsBucketMap.set(ts, {
        ...tsBucket,
        [timestampColumn.name]: ts,
        [keyName]: value,
      });
      // TODO: Set name and color correctly
      // Track min/max per line (seeded with ±Infinity on first sight).
      lineDataMap[keyName] = {
        dataKey: keyName,
        displayName: keyName,
        color: undefined,
        maxValue: Math.max(
          lineDataMap[keyName]?.maxValue ?? Number.NEGATIVE_INFINITY,
          value,
        ),
        minValue: Math.min(
          lineDataMap[keyName]?.minValue ?? Number.POSITIVE_INFINITY,
          value,
        ),
      };
    }
  }
  // TODO: Custom sort and truncate top N lines
  // Lines ordered ascending by their peak value.
  const sortedLineDataMap = Object.values(lineDataMap).sort((a, b) => {
    return a.maxValue - b.maxValue;
  });
  if (generateEmptyBuckets && granularity != null) {
    // Zero fill TODO: Make this an option
    const generatedTsBuckets = timeBucketByGranularity(
      dateRange[0],
      dateRange[1],
      granularity,
    );
    // NOTE(review): assumes generated bucket timestamps align exactly with
    // the timestamps ClickHouse returns for the same granularity — confirm.
    generatedTsBuckets.forEach(date => {
      const ts = date.getTime() / 1000;
      const tsBucket = tsBucketMap.get(ts);
      if (tsBucket == null) {
        // Missing bucket: create it with every known line zeroed.
        const tsBucket: Record<string, any> = {
          [timestampColumn.name]: ts,
        };
        for (const line of sortedLineDataMap) {
          tsBucket[line.dataKey] = 0;
        }
        tsBucketMap.set(ts, tsBucket);
      } else {
        // Existing bucket: only fill in lines that have no value yet.
        for (const line of sortedLineDataMap) {
          if (tsBucket[line.dataKey] == null) {
            tsBucket[line.dataKey] = 0;
          }
        }
        tsBucketMap.set(ts, tsBucket);
      }
    });
  }
  // Sort results again by timestamp
  const graphResults: {
    [key: string]: number | undefined;
  }[] = Array.from(tsBucketMap.values()).sort(
    (a, b) => a[timestampColumn.name] - b[timestampColumn.name],
  );
  // TODO: Return line color and names
  return {
    // dateRange: [minDate, maxDate],
    graphResults,
    timestampColumn,
    groupKeys: sortedLineDataMap.map(l => l.dataKey),
    lineNames: sortedLineDataMap.map(l => l.displayName),
    lineColors: sortedLineDataMap.map(l => l.color),
  };
}
// Column description row as returned by `DESCRIBE <table>` (see
// Metadata.getColumns in metadata.ts, which casts its results to this type).
export type ColumnMeta = {
  codec_expression: string;
  comment: string;
  default_expression: string;
  // e.g. 'MATERIALIZED' or 'DEFAULT'; empty for plain columns.
  default_type: string;
  name: string;
  ttl_expression: string;
  type: string;
};

View file

@ -0,0 +1,245 @@
import { z } from 'zod';
import { DisplayType } from '@/common/DisplayType';
// --------------------------
// SQL TYPES
// --------------------------
// TODO: infer types from here and replace all types in sqlTypes.ts
// e.g. '5 minute', '1 hour' — ClickHouse INTERVAL-style strings.
export const SQLIntervalSchema = z
  .string()
  .regex(/^\d+ (second|minute|hour|day)$/);
export const SearchConditionSchema = z.string();
// Language a search condition is written in; omitted means unspecified.
export const SearchConditionLanguageSchema = z
  .enum(['sql', 'lucene'])
  .optional();
export const AggregateFunctionSchema = z.enum([
  'avg',
  'count',
  'count_distinct',
  'max',
  'min',
  'quantile',
  'sum',
]);
// ClickHouse combinator forms such as `sumIfState` / `avgIfMerge`.
export const AggregateFunctionWithCombinatorsSchema = z
  .string()
  .regex(/^(\w+)If(State|Merge)$/);
// A value expression with optional aggregation. Three variants:
// 1) known agg fn, 2) quantile (requires `level`), 3) free-form/raw.
export const RootValueExpressionSchema = z
  .object({
    aggFn: z.union([
      AggregateFunctionSchema,
      AggregateFunctionWithCombinatorsSchema,
    ]),
    aggCondition: SearchConditionSchema,
    aggConditionLanguage: SearchConditionLanguageSchema,
    valueExpression: z.string(),
  })
  .or(
    z.object({
      aggFn: z.literal('quantile'),
      level: z.number(),
      aggCondition: SearchConditionSchema,
      aggConditionLanguage: SearchConditionLanguageSchema,
      valueExpression: z.string(),
    }),
  )
  .or(
    z.object({
      aggFn: z.string().optional(),
      aggCondition: z.string().optional(),
      aggConditionLanguage: SearchConditionLanguageSchema,
      valueExpression: z.string(),
    }),
  );
// A select-list entry: a value expression with an optional output alias.
export const DerivedColumnSchema = z.intersection(
  RootValueExpressionSchema,
  z.object({
    alias: z.string().optional(),
  }),
);
// Either structured columns or a raw SQL select-list string.
export const SelectListSchema = z.array(DerivedColumnSchema).or(z.string());
export const SortSpecificationSchema = z.intersection(
  RootValueExpressionSchema,
  z.object({
    ordering: z.enum(['ASC', 'DESC']),
  }),
);
// Either structured sort specs or a raw SQL ORDER BY string.
export const SortSpecificationListSchema = z
  .array(SortSpecificationSchema)
  .or(z.string());
export const LimitSchema = z.object({
  limit: z.number().optional(),
  offset: z.number().optional(),
});
// Structured representation of a SELECT statement used by chart configs.
export const SelectSQLStatementSchema = z.object({
  select: SelectListSchema,
  from: z.object({
    databaseName: z.string(),
    tableName: z.string(),
  }),
  where: SearchConditionSchema,
  whereLanguage: SearchConditionLanguageSchema,
  groupBy: SelectListSchema.optional(),
  having: SearchConditionSchema.optional(),
  havingLanguage: SearchConditionLanguageSchema.optional(),
  orderBy: SortSpecificationListSchema.optional(),
  limit: LimitSchema.optional(),
});
// --------------------------
// SAVED SEARCH
// --------------------------
// A persisted search; `source` references a table source (SourceSchema) id.
export const SavedSearchSchema = z.object({
  id: z.string(),
  name: z.string(),
  select: z.string(),
  where: z.string(),
  whereLanguage: SearchConditionLanguageSchema,
  source: z.string(),
  tags: z.array(z.string()),
  orderBy: z.string().optional(),
});
export type SavedSearch = z.infer<typeof SavedSearchSchema>;
// --------------------------
// DASHBOARDS
// --------------------------
// Numbro-style display formatting options for numeric chart values.
export const NumberFormatSchema = z.object({
  output: z.enum(['currency', 'percent', 'byte', 'time', 'number']),
  mantissa: z.number().optional(),
  thousandSeparated: z.boolean().optional(),
  average: z.boolean().optional(),
  decimalBytes: z.boolean().optional(),
  factor: z.number().optional(),
  currencySymbol: z.string().optional(),
  unit: z.string().optional(),
});
// A structured comparison filter (left <op> right) applied via SQL AST.
export const SqlAstFilterSchema = z.object({
  type: z.literal('sql_ast'),
  operator: z.enum(['=', '<', '>', '!=', '<=', '>=']),
  left: z.string(),
  right: z.string(),
});
// Either a free-form condition (lucene/sql) or a structured AST filter.
export const FilterSchema = z.union([
  z.object({
    type: z.enum(['lucene', 'sql']),
    condition: z.string(),
  }),
  SqlAstFilterSchema,
]);
// Chart presentation options, combined below with the SELECT statement
// schema to form a full chart config.
export const _ChartConfigSchema = z.object({
  displayType: z.nativeEnum(DisplayType),
  numberFormat: NumberFormatSchema,
  timestampValueExpression: z.string(),
  implicitColumnExpression: z.string().optional(),
  granularity: z.string().optional(),
  markdown: z.string().optional(),
  filtersLogicalOperator: z.enum(['AND', 'OR']).optional(),
  filters: z.array(FilterSchema),
  connection: z.string(),
  fillNulls: z.number().optional(),
  selectGroupBy: z.boolean().optional(),
});
export const ChartConfigSchema = z.intersection(
  _ChartConfigSchema,
  SelectSQLStatementSchema,
);
// Persisted variant: connection/table/timestamp are resolved from the
// referenced `source` at render time, so they are omitted here.
export const SavedChartConfigSchema = z.intersection(
  z.intersection(
    z.object({
      name: z.string(),
      source: z.string(),
    }),
    _ChartConfigSchema.omit({
      connection: true,
      timestampValueExpression: true,
    }),
  ),
  SelectSQLStatementSchema.omit({
    from: true,
  }),
);
export type SavedChartConfig = z.infer<typeof SavedChartConfigSchema>;
// A chart placed on the dashboard grid (x/y position, w/h size).
export const TileSchema = z.object({
  id: z.string(),
  x: z.number(),
  y: z.number(),
  w: z.number(),
  h: z.number(),
  config: SavedChartConfigSchema,
});
export type Tile = z.infer<typeof TileSchema>;
export const DashboardSchema = z.object({
  id: z.string(),
  name: z.string(),
  tiles: z.array(TileSchema),
  tags: z.array(z.string()),
});
// Creation payload: same shape, id assigned server-side.
export const DashboardWithoutIdSchema = DashboardSchema.omit({ id: true });
// A ClickHouse connection; password optional (e.g. default user).
export const ConnectionSchema = z.object({
  id: z.string(),
  name: z.string(),
  host: z.string(),
  username: z.string(),
  password: z.string().optional(),
});
// --------------------------
// TABLE SOURCES
// --------------------------
// A queryable table source: binds a ClickHouse table (via a connection) to
// the expressions the UI needs (timestamp, body, trace/span ids, ...).
// Most expression fields are optional and only meaningful per `kind`.
export const SourceSchema = z.object({
  from: z.object({
    databaseName: z.string(),
    tableName: z.string(),
  }),
  timestampValueExpression: z.string(),
  connection: z.string(),
  // Common
  kind: z.enum(['log', 'trace']),
  id: z.string(),
  name: z.string(),
  displayedTimestampValueExpression: z.string().optional(),
  implicitColumnExpression: z.string().optional(),
  serviceNameExpression: z.string().optional(),
  bodyExpression: z.string().optional(),
  tableFilterExpression: z.string().optional(),
  eventAttributesExpression: z.string().optional(),
  resourceAttributesExpression: z.string().optional(),
  defaultTableSelectExpression: z.string().optional(),
  // Logs
  uniqueRowIdExpression: z.string().optional(),
  severityTextExpression: z.string().optional(),
  // Id of the correlated trace source, if configured.
  traceSourceId: z.string().optional(),
  // Traces & Logs
  traceIdExpression: z.string().optional(),
  spanIdExpression: z.string().optional(),
  // Traces
  durationExpression: z.string().optional(),
  durationPrecision: z.number().min(0).max(9).optional(),
  parentSpanIdExpression: z.string().optional(),
  spanNameExpression: z.string().optional(),
  spanKindExpression: z.string().optional(),
  statusCodeExpression: z.string().optional(),
  statusMessageExpression: z.string().optional(),
  // Id of the correlated log source, if configured.
  logSourceId: z.string().optional(),
});
export type TSource = z.infer<typeof SourceSchema>;

View file

@ -0,0 +1,436 @@
import {
ChSql,
chSql,
ColumnMeta,
convertCHDataTypeToJSType,
filterColumnMetaByType,
JSDataType,
sendQuery,
tableExpr,
} from '@/common/clickhouse';
import {
ChartConfigWithDateRange,
renderChartConfig,
} from './renderChartConfig';
// Cap on rows scanned by sampling queries (map key/value discovery, etc.),
// applied via the max_rows_to_read ClickHouse setting.
const DEFAULT_SAMPLE_SIZE = 1e6;
/**
 * Simple in-memory key/value cache used to memoize ClickHouse metadata
 * queries (table info, column lists, map keys/values).
 */
class MetadataCache {
  private cache = new Map<string, any>();

  // this should be getOrUpdate... or just query to follow react query
  /** Returns the cached value for `key`, or undefined when absent. */
  get<T>(key: string): T | undefined {
    return this.cache.get(key);
  }

  /**
   * Returns the cached value for `key`, running `query` (and caching its
   * result) on a miss.
   *
   * Uses Map#has for the presence check so a fetched null/undefined result
   * is cached too — the previous `!= null` check re-ran the query on every
   * call whenever it had resolved to null (e.g. a non-existent table).
   */
  async getOrFetch<T>(key: string, query: () => Promise<T>): Promise<T> {
    if (this.cache.has(key)) {
      return this.cache.get(key) as T;
    }
    const newValue = await query();
    this.cache.set(key, newValue);
    return newValue;
  }

  set<T>(key: string, value: T) {
    return this.cache.set(key, value);
  }

  // TODO: This needs to be async, and use tanstack query on frontend for cache
  // TODO: Implement locks for refreshing
  // TODO: Shard cache by time
}
/**
 * Row shape of `system.tables` for a single table, as returned by
 * ClickHouse's JSON format (note the numeric totals are typed as strings
 * here — large UInt64 values arrive as strings over JSON).
 */
export type TableMetadata = {
  database: string;
  name: string;
  uuid: string;
  engine: string;
  is_temporary: number;
  data_paths: string[];
  metadata_path: string;
  metadata_modification_time: string;
  metadata_version: number;
  create_table_query: string;
  engine_full: string;
  as_select: string;
  partition_key: string;
  sorting_key: string;
  primary_key: string;
  sampling_key: string;
  storage_policy: string;
  total_rows: string;
  total_bytes: string;
  total_bytes_uncompressed: string;
  parts: string;
  active_parts: string;
  total_marks: string;
  comment: string;
};
/**
 * Fetches and caches ClickHouse schema metadata (tables, columns, map
 * keys/values) for a given connection. All lookups are memoized in a
 * per-instance MetadataCache keyed by database/table/column.
 */
export class Metadata {
  private cache = new MetadataCache();

  /** Fetches (and caches) the table's `system.tables` row. */
  private static async queryTableMetadata({
    database,
    table,
    cache,
    connectionId,
  }: {
    database: string;
    table: string;
    cache: MetadataCache;
    connectionId: string;
  }) {
    return cache.getOrFetch(`${database}.${table}.metadata`, async () => {
      const sql = chSql`SELECT * FROM system.tables where database = ${{ String: database }} AND name = ${{ String: table }}`;
      const json = await sendQuery<'JSON'>({
        query: sql.sql,
        query_params: sql.params,
        connectionId,
      }).then(res => res.json<TableMetadata>());
      // NOTE(review): undefined when the table does not exist — callers
      // should handle a missing row.
      return json.data[0];
    });
  }

  /** Returns (and caches) the table's columns via `DESCRIBE`. */
  async getColumns({
    databaseName,
    tableName,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    connectionId: string;
  }) {
    return this.cache.getOrFetch<ColumnMeta[]>(
      `${databaseName}.${tableName}.columns`,
      async () => {
        const sql = chSql`DESCRIBE ${tableExpr({ database: databaseName, table: tableName })}`;
        const columns = await sendQuery<'JSON'>({
          query: sql.sql,
          query_params: sql.params,
          connectionId,
        })
          .then(res => res.json())
          .then(d => d.data);
        return columns as ColumnMeta[];
      },
    );
  }

  /**
   * Builds a lookup from a column's defining expression to its name, for
   * MATERIALIZED/DEFAULT columns — used to swap expressions for their
   * materialized column in queries.
   */
  async getMaterializedColumnsLookupTable({
    databaseName,
    tableName,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    connectionId: string;
  }) {
    const columns = await this.getColumns({
      databaseName,
      tableName,
      connectionId,
    });
    // Build up materalized fields lookup table
    return new Map(
      columns
        .filter(
          c =>
            c.default_type === 'MATERIALIZED' || c.default_type === 'DEFAULT',
        )
        .map(c => [c.default_expression, c.name]),
    );
  }

  /**
   * Finds a single column by name (optionally case-insensitively).
   * Returns undefined when no column matches.
   */
  async getColumn({
    databaseName,
    tableName,
    column,
    matchLowercase = false,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    column: string;
    matchLowercase?: boolean;
    connectionId: string;
  }): Promise<ColumnMeta | undefined> {
    const tableColumns = await this.getColumns({
      databaseName,
      tableName,
      connectionId,
    });
    return tableColumns.filter(c => {
      if (matchLowercase) {
        return c.name.toLowerCase() === column.toLowerCase();
      }
      return c.name === column;
    })[0];
  }

  /**
   * Returns (and caches) up to `maxKeys` distinct keys present in a
   * Map-typed column, sampling at most DEFAULT_SAMPLE_SIZE rows.
   *
   * @throws Error when the column does not exist on the table.
   */
  async getMapKeys({
    databaseName,
    tableName,
    column,
    maxKeys = 1000,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    column: string;
    maxKeys?: number;
    connectionId: string;
  }) {
    const cachedKeys = this.cache.get<string[]>(
      `${databaseName}.${tableName}.${column}.keys`,
    );
    if (cachedKeys != null) {
      return cachedKeys;
    }
    const colMeta = await this.getColumn({
      databaseName,
      tableName,
      column,
      connectionId,
    });
    if (colMeta == null) {
      throw new Error(
        `Column ${column} not found in ${databaseName}.${tableName}`,
      );
    }
    // LowCardinality map keys can be enumerated cheaply from the dictionary;
    // otherwise aggregate unique keys across sampled rows.
    let strategy: 'groupUniqArrayArray' | 'lowCardinalityKeys' =
      'groupUniqArrayArray';
    if (colMeta.type.startsWith('Map(LowCardinality(String)')) {
      strategy = 'lowCardinalityKeys';
    }
    let sql: ChSql;
    if (strategy === 'groupUniqArrayArray') {
      sql = chSql`SELECT groupUniqArrayArray(${{ Int32: maxKeys }})(${{
        Identifier: column,
      }}) as keysArr
      FROM ${tableExpr({ database: databaseName, table: tableName })}`;
    } else {
      sql = chSql`SELECT DISTINCT lowCardinalityKeys(arrayJoin(${{
        Identifier: column,
      }}.keys)) as key
      FROM ${tableExpr({ database: databaseName, table: tableName })}
      LIMIT ${{
        Int32: maxKeys,
      }}`;
    }
    return this.cache.getOrFetch<string[]>(
      `${databaseName}.${tableName}.${column}.keys`,
      async () => {
        const keys = await sendQuery<'JSON'>({
          query: sql.sql,
          query_params: sql.params,
          connectionId,
          clickhouse_settings: {
            // Sample instead of scanning the whole table.
            max_rows_to_read: DEFAULT_SAMPLE_SIZE,
            read_overflow_mode: 'break',
          },
        })
          .then(res => res.json<Record<string, unknown>>())
          .then(d => {
            // The two strategies return different shapes: a single row with
            // an array column vs. one key per row.
            let output: string[];
            if (strategy === 'groupUniqArrayArray') {
              output = d.data[0].keysArr as string[];
            } else {
              output = d.data.map(row => row.key) as string[];
            }
            // Drop empty-string keys.
            return output.filter(r => r);
          });
        return keys;
      },
    );
  }

  /**
   * Returns (and caches) up to `maxValues` distinct non-empty values for a
   * map entry (`column[key]`) or a plain column (when `key` is omitted),
   * sampling at most DEFAULT_SAMPLE_SIZE rows.
   */
  async getMapValues({
    databaseName,
    tableName,
    column,
    key,
    maxValues = 20,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    column: string;
    key?: string;
    maxValues?: number;
    connectionId: string;
  }) {
    const cachedValues = this.cache.get<string[]>(
      `${databaseName}.${tableName}.${column}.${key}.values`,
    );
    if (cachedValues != null) {
      return cachedValues;
    }
    const sql = key
      ? chSql`
    SELECT DISTINCT ${{
      Identifier: column,
    }}[${{ String: key }}] as value
    FROM ${tableExpr({ database: databaseName, table: tableName })}
    WHERE value != ''
    LIMIT ${{
      Int32: maxValues,
    }}
  `
      : chSql`
    SELECT DISTINCT ${{
      Identifier: column,
    }} as value
    FROM ${tableExpr({ database: databaseName, table: tableName })}
    WHERE value != ''
    LIMIT ${{
      Int32: maxValues,
    }}
  `;
    return this.cache.getOrFetch<string[]>(
      `${databaseName}.${tableName}.${column}.${key}.values`,
      async () => {
        const values = await sendQuery<'JSON'>({
          query: sql.sql,
          query_params: sql.params,
          connectionId,
          clickhouse_settings: {
            max_rows_to_read: DEFAULT_SAMPLE_SIZE,
            read_overflow_mode: 'break',
          },
        })
          .then(res => res.json<Record<string, unknown>>())
          .then(d => d.data.map(row => row.value as string));
        return values;
      },
    );
  }

  /**
   * Enumerates all addressable fields of a table: every column, plus every
   * discovered key of every Map-typed column (as [column, key] paths).
   */
  async getAllFields({
    databaseName,
    tableName,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    connectionId: string;
  }) {
    const fields: Field[] = [];
    const columns = await this.getColumns({
      databaseName,
      tableName,
      connectionId,
    });
    for (const c of columns) {
      fields.push({
        path: [c.name],
        type: c.type,
        jsType: convertCHDataTypeToJSType(c.type),
      });
    }
    const mapColumns = filterColumnMetaByType(columns, [JSDataType.Map]) ?? [];
    await Promise.all(
      mapColumns.map(async column => {
        const keys = await this.getMapKeys({
          databaseName,
          tableName,
          column: column.name,
          connectionId,
        });
        // Extract the map's value type, e.g. Map(String, Int64) -> Int64.
        const match = column.type.match(/Map\(.+,\s*(.+)\)/);
        const chType = match?.[1] ?? 'String'; // default to string ?
        for (const key of keys) {
          fields.push({
            path: [column.name, key],
            type: chType,
            jsType: convertCHDataTypeToJSType(chType),
          });
        }
      }),
    );
    return fields;
  }

  /** Returns the cached `system.tables` row for the given table. */
  async getTableMetadata({
    databaseName,
    tableName,
    connectionId,
  }: {
    databaseName: string;
    tableName: string;
    connectionId: string;
  }) {
    const tableMetadata = await Metadata.queryTableMetadata({
      cache: this.cache,
      database: databaseName,
      table: tableName,
      connectionId,
    });
    return tableMetadata;
  }

  /**
   * Samples distinct values for several key expressions at once, using the
   * chart config's source table/filters. Not cached (depends on the full
   * chart config). Returns [{ key, value: string[] }] per requested key.
   */
  async getKeyValues({
    chartConfig,
    keys,
    limit = 20,
  }: {
    chartConfig: ChartConfigWithDateRange;
    keys: string[];
    limit?: number;
  }) {
    const sql = await renderChartConfig({
      ...chartConfig,
      // One groupUniqArray(...) AS param<i> select entry per key.
      select: keys
        .map((k, i) => `groupUniqArray(${limit})(${k}) AS param${i}`)
        .join(', '),
    });
    const json = await sendQuery<'JSON'>({
      query: sql.sql,
      query_params: sql.params,
      connectionId: chartConfig.connection,
      clickhouse_settings: {
        max_rows_to_read: DEFAULT_SAMPLE_SIZE,
        read_overflow_mode: 'break',
      },
    }).then(res => res.json<any>());
    // Map param<i> result columns back to the originally requested keys.
    return Object.entries(json.data[0]).map(([key, value]) => ({
      key: keys[parseInt(key.replace('param', ''))],
      value: (value as string[])?.filter(Boolean), // remove nulls
    }));
  }
}
// An addressable field: a top-level column ([col]) or a map entry
// ([col, key]), with its ClickHouse type and derived JS type category.
export type Field = {
  path: string[];
  type: string;
  jsType: JSDataType | null;
};

// Shared singleton metadata cache/fetcher.
export const metadata = new Metadata();

View file

@ -0,0 +1,717 @@
import lucene from '@hyperdx/lucene';
import SqlString from 'sqlstring';
import { convertCHTypeToPrimitiveJSType } from '@/common/clickhouse';
import { Metadata } from '@/common/metadata';
/**
 * Rewrites tokens that confuse the lucene parser (escaped backslashes, URL
 * schemes, localhost ports, escaped colons) into placeholder tokens before
 * parsing. decodeSpecialTokens() performs the inverse mapping.
 *
 * All replacements are global regexes now — the previous plain-string /
 * non-global patterns only rewrote the FIRST occurrence, so queries with
 * multiple URLs or localhost references were parsed incorrectly.
 */
function encodeSpecialTokens(query: string): string {
  return query
    .replace(/\\\\/g, 'HDX_BACKSLASH_LITERAL')
    .replace(/http:\/\//g, 'http_COLON_//')
    .replace(/https:\/\//g, 'https_COLON_//')
    .replace(/localhost:(\d{1,5})/g, 'localhost_COLON_$1')
    .replace(/\\:/g, 'HDX_COLON');
}
/**
 * Inverse of encodeSpecialTokens(): restores URL schemes, localhost ports,
 * colons and backslashes, and unescapes embedded quotes.
 *
 * All replacements are global regexes now — the previous plain-string /
 * non-global patterns only restored the FIRST occurrence of each token.
 */
function decodeSpecialTokens(query: string): string {
  return query
    .replace(/\\"/g, '"')
    .replace(/HDX_BACKSLASH_LITERAL/g, '\\')
    .replace(/http_COLON_\/\//g, 'http://')
    .replace(/https_COLON_\/\//g, 'https://')
    .replace(/localhost_COLON_(\d{1,5})/g, 'localhost:$1')
    .replace(/HDX_COLON/g, ':');
}
/**
 * Parses a lucene query string into an AST, after encoding parser-hostile
 * tokens (URLs, escaped colons/backslashes) into placeholders.
 */
export function parse(query: string): lucene.AST {
  return lucene.parse(encodeSpecialTokens(query));
}

// Field name the lucene parser assigns to terms with no explicit `field:`.
const IMPLICIT_FIELD = '<implicit>';

/**
 * Translates parsed lucene AST nodes into a target representation (e.g.
 * SQL conditions or English descriptions). Term-producing methods are
 * async so implementations can look up column metadata as needed.
 */
interface Serializer {
  operator(op: lucene.Operator): string;
  eq(field: string, term: string, isNegatedField: boolean): Promise<string>;
  isNotNull(field: string, isNegatedField: boolean): Promise<string>;
  gte(field: string, term: string): Promise<string>;
  lte(field: string, term: string): Promise<string>;
  lt(field: string, term: string): Promise<string>;
  gt(field: string, term: string): Promise<string>;
  fieldSearch(
    field: string,
    term: string,
    isNegatedField: boolean,
    prefixWildcard: boolean,
    suffixWildcard: boolean,
  ): Promise<string>;
  range(
    field: string,
    start: string,
    end: string,
    isNegatedField: boolean,
  ): Promise<string>;
}
/**
 * Serializes a lucene AST into a plain-English description of the search,
 * used to explain to users how their query will be interpreted.
 */
class EnglishSerializer implements Serializer {
  /** Implicit (bare-term) searches refer to the whole 'event'; explicit fields are quoted. */
  private translateField(field: string) {
    if (field === IMPLICIT_FIELD) {
      return 'event';
    }
    return `'${field}'`;
  }

  operator(op: lucene.Operator) {
    switch (op) {
      case 'NOT':
      case 'AND NOT':
        return 'AND NOT';
      case 'OR NOT':
        return 'OR NOT';
      // @ts-ignore TODO: Types need to be fixed upstream
      case '&&':
      case '<implicit>':
      case 'AND':
        return 'AND';
      // @ts-ignore TODO: Types need to be fixed upstream
      case '||':
      case 'OR':
        return 'OR';
      default:
        throw new Error(`Unexpected operator. ${op}`);
    }
  }

  async eq(field: string, term: string, isNegatedField: boolean) {
    return `${this.translateField(field)} ${
      isNegatedField ? 'is not' : 'is'
    } ${term}`;
  }

  async isNotNull(field: string, isNegatedField: boolean) {
    return `${this.translateField(field)} ${
      isNegatedField ? 'is null' : 'is not null'
    }`;
  }

  async gte(field: string, term: string) {
    return `${this.translateField(field)} is greater than or equal to ${term}`;
  }

  async lte(field: string, term: string) {
    return `${this.translateField(field)} is less than or equal to ${term}`;
  }

  async lt(field: string, term: string) {
    return `${this.translateField(field)} is less than ${term}`;
  }

  async gt(field: string, term: string) {
    return `${this.translateField(field)} is greater than ${term}`;
  }

  async fieldSearch(
    field: string,
    term: string,
    isNegatedField: boolean,
    prefixWildcard: boolean,
    suffixWildcard: boolean,
  ) {
    if (field === IMPLICIT_FIELD) {
      // Wildcard placement determines which English verb describes the match.
      let verb: string;
      if (prefixWildcard && suffixWildcard) {
        verb = isNegatedField ? 'does not contain' : 'contains';
      } else if (prefixWildcard) {
        verb = isNegatedField ? 'does not end with' : 'ends with';
      } else if (suffixWildcard) {
        verb = isNegatedField ? 'does not start with' : 'starts with';
      } else {
        verb = isNegatedField ? 'does not have whole word' : 'has whole word';
      }
      return `${this.translateField(field)} ${verb} ${term}`;
    }
    return `${this.translateField(field)} ${
      isNegatedField ? 'does not contain' : 'contains'
    } ${term}`;
  }

  async range(
    field: string,
    start: string,
    end: string,
    isNegatedField: boolean,
  ) {
    // Fix: use translateField so implicit fields render as 'event' and
    // explicit fields are quoted, consistent with every other method.
    return `${this.translateField(field)} ${
      isNegatedField ? 'is not' : 'is'
    } between ${start} and ${end}`;
  }
}
/**
 * Base serializer that converts lucene AST nodes into ClickHouse SQL
 * WHERE-clause fragments. Subclasses resolve user-facing field names to
 * concrete column expressions via getColumnForField.
 */
export abstract class SQLSerializer implements Serializer {
  // Emitted when a field cannot be resolved; matches no rows.
  private readonly NOT_FOUND_QUERY = '(1 = 0)';

  /**
   * Resolve a user-facing field name to a SQL column expression.
   * `found: false` signals an unresolvable field; callers emit
   * NOT_FOUND_QUERY in that case.
   */
  abstract getColumnForField(field: string): Promise<{
    column?: string;
    propertyType?: 'string' | 'number' | 'bool';
    found: boolean;
  }>;

  operator(op: lucene.Operator) {
    switch (op) {
      case 'NOT':
      case 'AND NOT':
        return 'AND NOT';
      case 'OR NOT':
        return 'OR NOT';
      // @ts-ignore TODO: Types need to be fixed upstream
      case '&&':
      case '<implicit>':
      case 'AND':
        return 'AND';
      // @ts-ignore TODO: Types need to be fixed upstream
      case '||':
      case 'OR':
        return 'OR';
      default:
        throw new Error(`Unexpected operator. ${op}`);
    }
  }

  // Only for exact string matches
  async eq(field: string, term: string, isNegatedField: boolean) {
    const { column, found, propertyType } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    if (propertyType === 'bool') {
      // numeric and boolean fields must be equality matched
      const normTerm = `${term}`.trim().toLowerCase();
      return SqlString.format(`(?? ${isNegatedField ? '!' : ''}= ?)`, [
        column,
        normTerm === 'true' ? 1 : normTerm === 'false' ? 0 : parseInt(normTerm),
      ]);
    } else if (propertyType === 'number') {
      return SqlString.format(
        `(${column} ${isNegatedField ? '!' : ''}= CAST(?, 'Float64'))`,
        [term],
      );
    }
    return SqlString.format(`(${column} ${isNegatedField ? '!' : ''}= ?)`, [
      term,
    ]);
  }

  async isNotNull(field: string, isNegatedField: boolean) {
    const { found, column } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return `notEmpty(${column}) ${isNegatedField ? '!' : ''}= 1`;
  }

  async gte(field: string, term: string) {
    const { column, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return SqlString.format(`(${column} >= ?)`, [term]);
  }

  async lte(field: string, term: string) {
    const { column, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return SqlString.format(`(${column} <= ?)`, [term]);
  }

  async lt(field: string, term: string) {
    const { column, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return SqlString.format(`(${column} < ?)`, [term]);
  }

  async gt(field: string, term: string) {
    const { column, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return SqlString.format(`(${column} > ?)`, [term]);
  }

  // TODO: Not sure if SQL really needs this or if it'll coerce itself
  private attemptToParseNumber(term: string): string | number {
    const number = Number.parseFloat(term);
    if (Number.isNaN(number)) {
      return term;
    }
    return number;
  }

  // Ref: https://clickhouse.com/codebrowser/ClickHouse/src/Functions/HasTokenImpl.h.html#_ZN2DB12HasTokenImpl16isTokenSeparatorEDu
  // Split by anything that's ascii 0-128, that's not a letter or a number
  private tokenizeTerm(term: string): string[] {
    return term.split(/[ -/:-@[-`{-~\t\n\r]+/).filter(t => t.length > 0);
  }

  private termHasSeparators(term: string): boolean {
    return term.match(/[ -/:-@[-`{-~\t\n\r]+/) != null;
  }

  async fieldSearch(
    field: string,
    term: string,
    isNegatedField: boolean,
    prefixWildcard: boolean,
    suffixWildcard: boolean,
  ) {
    const isImplicitField = field === IMPLICIT_FIELD;
    const { column, propertyType, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    // If it's a string field, we will always try to match with ilike
    if (propertyType === 'bool') {
      // numeric and boolean fields must be equality matched
      const normTerm = `${term}`.trim().toLowerCase();
      return SqlString.format(`(?? ${isNegatedField ? '!' : ''}= ?)`, [
        column,
        normTerm === 'true' ? 1 : normTerm === 'false' ? 0 : parseInt(normTerm),
      ]);
    } else if (propertyType === 'number') {
      return SqlString.format(
        `(?? ${isNegatedField ? '!' : ''}= CAST(?, 'Float64'))`,
        [column, term],
      );
    }
    // If the query is empty, or is a empty quoted string ex: ""
    // we should match all
    if (term.length === 0) {
      return '(1=1)';
    }
    if (isImplicitField) {
      // For the _source column, we'll try to do whole word searches by default
      // to utilize the token bloom filter unless a prefix/suffix wildcard is specified
      if (prefixWildcard || suffixWildcard) {
        return SqlString.format(
          `(lower(??) ${isNegatedField ? 'NOT ' : ''}LIKE lower(?))`,
          [
            column,
            `${prefixWildcard ? '%' : ''}${term}${suffixWildcard ? '%' : ''}`,
          ],
        );
      } else {
        // We can't search multiple tokens with `hasToken`, so we need to split up the term into tokens
        const hasSeparators = this.termHasSeparators(term);
        if (hasSeparators) {
          const tokens = this.tokenizeTerm(term);
          return `(${isNegatedField ? 'NOT (' : ''}${[
            ...tokens.map(token =>
              SqlString.format(`hasTokenCaseInsensitive(??, ?)`, [
                column,
                token,
              ]),
            ),
            // If there are symbols in the term, we'll try to match the whole term as well (ex. Scott!)
            SqlString.format(`(lower(??) LIKE lower(?))`, [
              column,
              `%${term}%`,
            ]),
          ].join(' AND ')}${isNegatedField ? ')' : ''})`;
        } else {
          return SqlString.format(
            `(${isNegatedField ? 'NOT ' : ''}hasTokenCaseInsensitive(??, ?))`,
            [column, term],
          );
        }
      }
    } else {
      // Fix: the previous `shoudUseTokenBf` flag equaled `isImplicitField`,
      // which is always false in this branch — the LIKE arm was dead code.
      // Non-implicit string fields always use case-insensitive substring match.
      return SqlString.format(
        `(${column} ${isNegatedField ? 'NOT ' : ''}? ?)`,
        [SqlString.raw('ILIKE'), `%${term}%`],
      );
    }
  }

  async range(
    field: string,
    start: string,
    end: string,
    isNegatedField: boolean,
  ) {
    const { column, found } = await this.getColumnForField(field);
    if (!found) {
      return this.NOT_FOUND_QUERY;
    }
    return SqlString.format(
      `(${column} ${isNegatedField ? 'NOT ' : ''}BETWEEN ? AND ?)`,
      [this.attemptToParseNumber(start), this.attemptToParseNumber(end)],
    );
  }
}
// Identifies the table (and optional implicit-search column) that a
// custom-schema serializer resolves fields against.
export type CustomSchemaConfig = {
databaseName: string;
implicitColumnExpression?: string;
tableName: string;
connectionId: string;
};
/**
 * SQL serializer for user-defined schemas. Field names are resolved against
 * live ClickHouse table metadata, supporting exact column matches, Map
 * columns (`col.key` -> `col['key']`) and JSON-encoded String columns
 * (`col.a.b` -> JSONExtractString(col, 'a', 'b')).
 */
export class CustomSchemaSQLSerializerV2 extends SQLSerializer {
private metadata: Metadata;
private tableName: string;
private databaseName: string;
private implicitColumnExpression?: string;
private connectionId: string;
constructor({
metadata,
databaseName,
tableName,
connectionId,
implicitColumnExpression,
}: { metadata: Metadata } & CustomSchemaConfig) {
super();
this.metadata = metadata;
this.databaseName = databaseName;
this.tableName = tableName;
this.implicitColumnExpression = implicitColumnExpression;
this.connectionId = connectionId;
}
/**
 * Translate field from user ex. column.property.subproperty to SQL expression
 * Supports:
 * - Materialized Columns
 * - Map
 * - JSON Strings (via JSONExtract)
 * TODO:
 * - Nested Map
 * - JSONExtract for non-string types
 */
private async buildColumnExpressionFromField(field: string) {
// Fast path: the field is itself a column on the table.
const exactMatch = await this.metadata.getColumn({
databaseName: this.databaseName,
tableName: this.tableName,
column: field,
connectionId: this.connectionId,
});
if (exactMatch) {
return {
found: true,
columnType: exactMatch.type,
columnExpression: exactMatch.name,
};
}
// Otherwise treat the first dot-segment as the column and the rest as a
// path into a Map or JSON string.
const fieldPrefix = field.split('.')[0];
const prefixMatch = await this.metadata.getColumn({
databaseName: this.databaseName,
tableName: this.tableName,
column: fieldPrefix,
connectionId: this.connectionId,
});
if (prefixMatch) {
const fieldPostfix = field.split('.').slice(1).join('.');
if (prefixMatch.type.startsWith('Map')) {
// Extract the Map value type, ex. Map(String, Int64) -> Int64
const valueType = prefixMatch.type.match(/,\s+(\w+)\)$/)?.[1];
return {
found: true,
columnExpression: SqlString.format(`??[?]`, [
prefixMatch.name,
fieldPostfix,
]),
columnType: valueType ?? 'Unknown',
};
} else if (prefixMatch.type === 'String') {
// TODO: Support non-strings
const nestedPaths = fieldPostfix.split('.');
return {
found: true,
columnExpression: SqlString.format(
`JSONExtractString(??, ${Array(nestedPaths.length)
.fill('?')
.join(',')})`,
[prefixMatch.name, ...nestedPaths],
),
columnType: 'String',
};
}
// TODO: Support arrays and tuples
throw new Error('Unsupported column type for prefix match');
}
throw new Error(`Column not found: ${field}`);
}
// Resolves a field for the base-class serializer; bare-text searches use
// the configured implicit column and are treated as strings.
async getColumnForField(field: string) {
if (field === IMPLICIT_FIELD) {
if (!this.implicitColumnExpression) {
throw new Error(
'Can not search bare text without an implicit column set.',
);
}
return {
column: this.implicitColumnExpression,
propertyType: 'string' as const,
found: true,
};
}
const expression = await this.buildColumnExpressionFromField(field);
return {
column: expression.columnExpression,
propertyType:
convertCHTypeToPrimitiveJSType(expression.columnType) ?? undefined,
found: expression.found,
};
}
}
/**
 * Serialize a single lucene leaf node (NodeTerm or NodeRangedTerm).
 * Handles field/prefix negation, comparison prefixes (>=, <=, >, <),
 * existence checks (field:*), wildcard prefixes/suffixes, and inclusive
 * ranges ([a TO b]). NOTE: the prefix checks below are order-dependent
 * ('>=' must be tested before '>').
 */
async function nodeTerm(
node: lucene.Node,
serializer: Serializer,
): Promise<string> {
const field = node.field[0] === '-' ? node.field.slice(1) : node.field;
let isNegatedField = node.field[0] === '-';
const isImplicitField = node.field === IMPLICIT_FIELD;
// NodeTerm
if ((node as lucene.NodeTerm).term != null) {
const nodeTerm = node as lucene.NodeTerm;
let term = decodeSpecialTokens(nodeTerm.term);
// We should only negate the search for negated bare terms (ex. '-5')
// This means the field is implicit and the prefix is -
if (isImplicitField && nodeTerm.prefix === '-') {
isNegatedField = true;
}
// Otherwise, if we have a negated term for a field (ex. 'level:-5')
// we should not negate the search, and search for -5
if (!isImplicitField && nodeTerm.prefix === '-') {
term = nodeTerm.prefix + decodeSpecialTokens(nodeTerm.term);
}
// TODO: Decide if this is good behavior
// If the term is quoted, we should search for the exact term in a property (ex. foo:"bar")
// Implicit field searches should still use substring matching (ex. "foo bar")
if (nodeTerm.quoted && !isImplicitField) {
return serializer.eq(field, term, isNegatedField);
}
if (!nodeTerm.quoted && term === '*') {
return serializer.isNotNull(field, isNegatedField);
}
if (!nodeTerm.quoted && term.substring(0, 2) === '>=') {
if (isNegatedField) {
return serializer.lt(field, term.slice(2));
}
return serializer.gte(field, term.slice(2));
}
if (!nodeTerm.quoted && term.substring(0, 2) === '<=') {
if (isNegatedField) {
return serializer.gt(field, term.slice(2));
}
return serializer.lte(field, term.slice(2));
}
if (!nodeTerm.quoted && term[0] === '>') {
if (isNegatedField) {
return serializer.lte(field, term.slice(1));
}
return serializer.gt(field, term.slice(1));
}
if (!nodeTerm.quoted && term[0] === '<') {
if (isNegatedField) {
return serializer.gte(field, term.slice(1));
}
return serializer.lt(field, term.slice(1));
}
// Strip leading/trailing '*' and remember them as wildcard flags for
// fieldSearch (ex. '*foo*' -> contains, 'foo*' -> starts with).
let prefixWildcard = false;
let suffixWildcard = false;
if (!nodeTerm.quoted && term[0] === '*') {
prefixWildcard = true;
term = term.slice(1);
}
if (!nodeTerm.quoted && term[term.length - 1] === '*') {
suffixWildcard = true;
term = term.slice(0, -1);
}
return serializer.fieldSearch(
field,
term,
isNegatedField,
prefixWildcard,
suffixWildcard,
);
// TODO: Handle regex, similarity, boost, prefix
}
// NodeRangedTerm
if ((node as lucene.NodeRangedTerm).inclusive != null) {
const rangedTerm = node as lucene.NodeRangedTerm;
return serializer.range(
field,
rangedTerm.term_min,
rangedTerm.term_max,
isNegatedField,
);
}
throw new Error(`Unexpected Node type. ${node}`);
}
/**
 * Recursively serialize a lucene AST (or single node) into the
 * serializer's output language, preserving parenthesization and
 * boolean operators. Returns '' for a blank AST.
 */
async function serialize(
ast: lucene.AST | lucene.Node,
serializer: Serializer,
): Promise<string> {
// Node Scenarios:
// 1. NodeTerm: Single term ex. "foo:bar"
// 2. NodeRangedTerm: Two terms ex. "foo:[bar TO qux]"
if ((ast as lucene.NodeTerm).term != null) {
return await nodeTerm(ast as lucene.NodeTerm, serializer);
}
if ((ast as lucene.NodeRangedTerm).inclusive != null) {
// nodeTerm internally dispatches on `inclusive` for ranged terms.
return await nodeTerm(ast as lucene.NodeTerm, serializer);
}
// AST Scenarios:
// 1. BinaryAST: Two terms ex. "foo:bar AND baz:qux"
// 2. LeftOnlyAST: Single term ex. "foo:bar"
if ((ast as lucene.BinaryAST).right != null) {
const binaryAST = ast as lucene.BinaryAST;
const operator = serializer.operator(binaryAST.operator);
const parenthesized = binaryAST.parenthesized;
return `${parenthesized ? '(' : ''}${await serialize(
binaryAST.left,
serializer,
)} ${operator} ${await serialize(binaryAST.right, serializer)}${
parenthesized ? ')' : ''
}`;
}
if ((ast as lucene.LeftOnlyAST).left != null) {
const leftOnlyAST = ast as lucene.LeftOnlyAST;
const parenthesized = leftOnlyAST.parenthesized;
// start is used when ex. "NOT foo:bar"
return `${parenthesized ? '(' : ''}${
leftOnlyAST.start != undefined ? `${leftOnlyAST.start} ` : ''
}${await serialize(leftOnlyAST.left, serializer)}${
parenthesized ? ')' : ''
}`;
}
// Blank AST, means no text was parsed
return '';
}
// TODO: can just inline this within getSearchQuery
/** Serialize a parsed lucene AST into a SQL WHERE-clause body. */
export async function genWhereSQL(
  ast: lucene.AST,
  serializer: Serializer,
): Promise<string> {
  const whereSql = await serialize(ast, serializer);
  return whereSql;
}
/**
 * Accumulates SQL conditions and an optional lucene search string, then
 * joins everything with AND. The lucene query is serialized through the
 * configured SQLSerializer at build() time.
 */
export class SearchQueryBuilder {
  private readonly searchQ: string;
  private readonly conditions: string[];
  private serializer: SQLSerializer;

  constructor(searchQ: string, serializer: SQLSerializer) {
    this.searchQ = searchQ;
    this.conditions = [];
    this.serializer = serializer;
  }

  setSerializer(serializer: SQLSerializer) {
    this.serializer = serializer;
    return this;
  }

  getSerializer() {
    return this.serializer;
  }

  /** Serialize the lucene search string to SQL; '' when no search given. */
  private async genSearchQuery() {
    if (!this.searchQ) {
      return '';
    }
    const parsedQ = parse(this.searchQ);
    return genWhereSQL(parsedQ, this.serializer);
  }

  /** Append a condition (wrapped in parens); blank/whitespace-only is ignored. */
  and(condition: string) {
    const hasContent = condition ? condition.trim().length > 0 : false;
    if (hasContent) {
      this.conditions.push(`(${condition})`);
    }
    return this;
  }

  async build() {
    const searchQuery = await this.genSearchQuery();
    if (this.searchQ) {
      this.and(searchQuery);
    }
    return this.conditions.join(' AND ');
  }
}
export async function genEnglishExplanation(query: string): Promise<string> {
try {
const parsedQ = parse(query);
if (parsedQ) {
const serializer = new EnglishSerializer();
return await serialize(parsedQ, serializer);
}
} catch (e) {
console.warn('Parse failure', query, e);
}
return `Message containing ${query}`;
}

View file

@ -0,0 +1,782 @@
import isPlainObject from 'lodash/isPlainObject';
import * as SQLParser from 'node-sql-parser';
import {
ChSql,
chSql,
concatChSql,
wrapChSqlIfNotEmpty,
} from '@/common/clickhouse';
import { DisplayType } from '@/common/DisplayType';
import { Metadata, metadata } from '@/common/metadata';
import {
CustomSchemaSQLSerializerV2,
SearchQueryBuilder,
} from '@/common/queryParser';
import {
AggregateFunction,
AggregateFunctionWithCombinators,
SearchCondition,
SearchConditionLanguage,
SelectList,
SelectSQLStatement,
SortSpecificationList,
SQLInterval,
} from '@/common/sqlTypes';
import {
convertDateRangeToGranularityString,
getFirstTimestampValueExpression,
} from '@/common/utils';
// FIXME: SQLParser.ColumnRef is incomplete
// Extends the parser's ColumnRef with array-subscript info (ex. col['k']).
type ColumnRef = SQLParser.ColumnRef & {
array_index?: {
index: { type: string; value: string };
}[];
};
// Display formatting options for numeric chart values.
export type NumberFormat = {
output?: 'currency' | 'percent' | 'byte' | 'time' | 'number';
mantissa?: number;
thousandSeparated?: boolean;
average?: boolean;
decimalBytes?: boolean;
factor?: number;
currencySymbol?: string;
unit?: string;
};
// A filter expressed as a raw SQL comparison between two SQL expressions.
export type SqlAstFilter = {
type: 'sql_ast';
operator: '=' | '<' | '>' | '!=' | '<=' | '>=';
// SQL Expressions
left: string;
right: string;
};
// A chart filter: a lucene/SQL condition string, or a SQL AST comparison.
export type Filter =
| {
type: 'lucene' | 'sql';
condition: SearchCondition;
}
| SqlAstFilter;
// Used to actually query the data in a given chart
export type ChartConfig = {
displayType?: DisplayType;
numberFormat?: NumberFormat;
timestampValueExpression: string;
implicitColumnExpression?: string; // Where lucene will search if given bare terms
granularity?: SQLInterval | 'auto'; // 'auto' derives a bucket size from the date range
markdown?: string; // Markdown Content
filtersLogicalOperator?: 'AND' | 'OR'; // Default AND
filters?: Filter[]; // Additional filters to where clause
connection: string; // Connection ID
fillNulls?: number | false; // undefined = 0, false = no fill
selectGroupBy?: boolean; // Add groupBy elements to select statement (default behavior: true)
// TODO: Color support
} & SelectSQLStatement;
// Saved configuration, has a variable source ID that we pull at query time
export type SavedChartConfig = {
name: string;
source: string;
} & Omit<ChartConfig, 'timestampValueExpression' | 'from' | 'connection'>;
// Query time window; end is inclusive, start inclusivity is configurable.
type DateRange = {
dateRange: [Date, Date];
dateRangeStartInclusive?: boolean; // default true
};
export type ChartConfigWithDateRange = ChartConfig & DateRange;
// For non-time-based searches (ex. grab 1 row)
export type ChartConfigWithOptDateRange = Omit<
ChartConfig,
'timestampValueExpression'
> & {
timestampValueExpression?: string;
} & Partial<DateRange>;
// Stable alias for the generated time-bucket column, so consumers can
// reference it regardless of the underlying timestamp expression.
export const FIXED_TIME_BUCKET_EXPR_ALIAS = '__hdx_time_bucket';

/** True when the chart config carries a non-empty group-by list. */
export function isUsingGroupBy(
  chartConfig: ChartConfigWithOptDateRange,
): chartConfig is Omit<ChartConfigWithDateRange, 'groupBy'> & {
  groupBy: NonNullable<ChartConfigWithDateRange['groupBy']>;
} {
  const { groupBy } = chartConfig;
  if (groupBy == null) {
    return false;
  }
  return groupBy.length > 0;
}
/** True when the config can be time-bucketed: both a timestamp expression and a granularity are set. */
function isUsingGranularity(
  chartConfig: ChartConfigWithOptDateRange,
): chartConfig is Omit<
  Omit<Omit<ChartConfigWithDateRange, 'granularity'>, 'dateRange'>,
  'timestampValueExpression'
> & {
  granularity: NonNullable<ChartConfigWithDateRange['granularity']>;
  dateRange: NonNullable<ChartConfigWithDateRange['dateRange']>;
  timestampValueExpression: NonNullable<
    ChartConfigWithDateRange['timestampValueExpression']
  >;
} {
  const hasTimestamp = chartConfig.timestampValueExpression != null;
  const hasGranularity = chartConfig.granularity != null;
  return hasTimestamp && hasGranularity;
}
// Maps each comparison operator to its logical negation.
const INVERSE_OPERATOR_MAP = {
  '=': '!=',
  '>': '<=',
  '<': '>=',
  '!=': '=',
  '<=': '>',
  '>=': '<',
} as const;

/** Returns a copy of the filter with its comparison operator negated. */
export function inverseSqlAstFilter(filter: SqlAstFilter): SqlAstFilter {
  const inverted =
    INVERSE_OPERATOR_MAP[filter.operator as keyof typeof INVERSE_OPERATOR_MAP];
  return { ...filter, operator: inverted };
}
/** Type guard: a where expression that is present and not just whitespace. */
export function isNonEmptyWhereExpr(where?: string): where is string {
  if (where == null) {
    return false;
  }
  return where.trim() !== '';
}
/**
 * Rewrites a SQL expression so that sub-expressions with a matching
 * materialized column (ex. `m['k']` or `JSONExtractString(col, 'k')`)
 * are replaced by the materialized column name — presumably so ClickHouse
 * can use the materialized column/index (TODO confirm). Falls back to the
 * raw SQL unchanged on any parse failure.
 */
const fastifySQL = ({
materializedFields,
rawSQL,
}: {
materializedFields: Map<string, string>;
rawSQL: string;
}) => {
// Parse the SQL AST
try {
const parser = new SQLParser.Parser();
const ast = parser.astify(rawSQL, {
database: 'Postgresql',
}) as SQLParser.Select;
// traveral ast and replace the left node with the materialized field
// FIXME: type node (AST type is incomplete): https://github.com/taozhi8833998/node-sql-parser/blob/42ea0b1800c5d425acb8c5ca708a1cee731aada8/types.d.ts#L474
const traverse = (
node:
| SQLParser.Expr
| SQLParser.ExpressionValue
| SQLParser.ExprList
| SQLParser.Function
| null,
) => {
if (node == null) {
return;
}
// colExpr is the canonical string form of this node, used as the
// lookup key into materializedFields.
let colExpr;
switch (node.type) {
case 'column_ref': {
// FIXME: handle 'Value' type?
const _n = node as ColumnRef;
// @ts-ignore
if (typeof _n.column !== 'string') {
// Map subscript access, ex. col['k']
// @ts-ignore
colExpr = `${_n.column?.expr.value}['${_n.array_index?.[0]?.index.value}']`;
}
break;
}
case 'binary_expr': {
// Recurse into both sides of the expression.
const _n = node as SQLParser.Expr;
if (Array.isArray(_n.left)) {
for (const left of _n.left) {
traverse(left);
}
} else {
traverse(_n.left);
}
if (Array.isArray(_n.right)) {
for (const right of _n.right) {
traverse(right);
}
} else {
traverse(_n.right);
}
break;
}
case 'function': {
const _n = node as SQLParser.Function;
if (_n.args?.type === 'expr_list') {
if (Array.isArray(_n.args?.value)) {
for (const arg of _n.args.value) {
traverse(arg);
}
// ex: JSONExtractString(Body, 'message')
if (
_n.args?.value?.[0]?.type === 'column_ref' &&
_n.args?.value?.[1]?.type === 'single_quote_string'
) {
colExpr = `${_n.name?.name?.[0]?.value}(${(_n.args?.value?.[0] as any)?.column.expr.value}, '${_n.args?.value?.[1]?.value}')`;
}
}
// when _n.args?.value is Expr
else if (isPlainObject(_n.args?.value)) {
traverse(_n.args.value);
}
}
break;
}
default:
// ignore other types
break;
}
if (colExpr) {
const materializedField = materializedFields.get(colExpr);
if (materializedField) {
// Rewrite this node in place into a bare column_ref pointing at
// the materialized column.
const _n = node as ColumnRef;
// reset the node ref
for (const key in _n) {
// eslint-disable-next-line no-prototype-builtins
if (_n.hasOwnProperty(key)) {
// @ts-ignore
delete _n[key];
}
}
_n.type = 'column_ref';
// @ts-ignore
_n.table = null;
// @ts-ignore
_n.column = { expr: { type: 'default', value: materializedField } };
}
}
};
if (Array.isArray(ast.columns)) {
for (const col of ast.columns) {
traverse(col.expr);
}
}
traverse(ast.where);
return parser.sqlify(ast);
} catch (e) {
console.error('[renderWhereExpression]feat: Failed to parse SQL AST', e);
return rawSQL;
}
};
/**
 * Build the ChSql fragment for an aggregate function call, optionally
 * conditional (ClickHouse `-If` combinator when `where` is set) and with
 * a quantile level for `quantile`. Non-count expressions are coerced via
 * toFloat64OrNull, and the where clause gains an extra IS NOT NULL check
 * so null-coerced rows are excluded.
 */
const aggFnExpr = ({
fn,
expr,
quantileLevel,
where,
}: {
fn: AggregateFunction | AggregateFunctionWithCombinators;
expr?: string;
quantileLevel?: number;
where?: string;
}) => {
const isCount = fn.startsWith('count');
const isWhereUsed = isNonEmptyWhereExpr(where);
// Cast to float64 because the expr might not be a number
const unsafeExpr = { UNSAFE_RAW_SQL: `toFloat64OrNull(toString(${expr}))` };
const whereWithExtraNullCheck = `${where} AND ${unsafeExpr.UNSAFE_RAW_SQL} IS NOT NULL`;
// -Merge combinator: expr is already an aggregate state column.
if (fn.endsWith('Merge')) {
return chSql`${fn}(${{
UNSAFE_RAW_SQL: expr ?? '',
}})`;
}
// TODO: merge this chunk with the rest of logics
else if (fn.endsWith('State')) {
if (expr == null || isCount) {
return isWhereUsed
? chSql`${fn}(${{ UNSAFE_RAW_SQL: where }})`
: chSql`${fn}()`;
}
return chSql`${fn}(${unsafeExpr}${
isWhereUsed ? chSql`, ${{ UNSAFE_RAW_SQL: whereWithExtraNullCheck }}` : ''
})`;
}
if (fn === 'count') {
if (isWhereUsed) {
return chSql`${fn}If(${{ UNSAFE_RAW_SQL: where }})`;
}
return {
sql: `${fn}()`,
params: {},
};
}
if (expr != null) {
if (fn === 'count_distinct') {
return chSql`count${isWhereUsed ? 'If' : ''}(DISTINCT ${{
UNSAFE_RAW_SQL: expr,
}}${isWhereUsed ? chSql`, ${{ UNSAFE_RAW_SQL: where }}` : ''})`;
}
if (quantileLevel != null) {
return chSql`quantile${isWhereUsed ? 'If' : ''}(${{
// Using Float64 param leads to an added coersion, but we don't need to
// escape number values anyways
UNSAFE_RAW_SQL: Number.isFinite(quantileLevel)
? `${quantileLevel}`
: '0',
}})(${unsafeExpr}${
isWhereUsed
? chSql`, ${{ UNSAFE_RAW_SQL: whereWithExtraNullCheck }}`
: ''
})`;
}
// TODO: Verify fn is a safe/valid function
return chSql`${{ UNSAFE_RAW_SQL: fn }}${isWhereUsed ? 'If' : ''}(
${unsafeExpr}${isWhereUsed ? chSql`, ${{ UNSAFE_RAW_SQL: whereWithExtraNullCheck }}` : ''}
)`;
} else {
throw new Error(
'Column is required for all non-count aggregation functions',
);
}
};
/**
 * Render the SELECT list. A raw string passes through unchanged; a
 * structured list renders each entry via aggFnExpr (with its optional
 * aggCondition serialized to SQL), then rewrites the expression against
 * materialized columns via fastifySQL, and applies any alias.
 */
async function renderSelectList(
selectList: SelectList,
chartConfig: ChartConfigWithOptDateRange,
metadata: Metadata,
) {
if (typeof selectList === 'string') {
return chSql`${{ UNSAFE_RAW_SQL: selectList }}`;
}
const materializedFields = await metadata.getMaterializedColumnsLookupTable({
connectionId: chartConfig.connection,
databaseName: chartConfig.from.databaseName,
tableName: chartConfig.from.tableName,
});
return Promise.all(
selectList.map(async select => {
const whereClause = await renderWhereExpression({
condition: select.aggCondition ?? '',
from: chartConfig.from,
language: select.aggConditionLanguage ?? 'lucene',
implicitColumnExpression: chartConfig.implicitColumnExpression,
metadata,
connectionId: chartConfig.connection,
});
let expr: ChSql;
if (select.aggFn == null) {
expr = chSql`${{ UNSAFE_RAW_SQL: select.valueExpression }}`;
} else if (select.aggFn === 'quantile') {
expr = aggFnExpr({
fn: select.aggFn,
expr: select.valueExpression,
// @ts-ignore (TS doesn't know that we've already checked for quantile)
quantileLevel: select.level,
where: whereClause.sql,
});
} else {
expr = aggFnExpr({
fn: select.aggFn,
expr: select.valueExpression,
where: whereClause.sql,
});
}
// Wrap in a dummy SELECT so fastifySQL can parse it, then strip the
// wrapper back off.
const rawSQL = `SELECT ${expr.sql} FROM \`t\``;
// strip 'SELECT * FROM `t` WHERE ' from the sql
expr.sql = fastifySQL({ materializedFields, rawSQL })
.replace(/^SELECT\s+/i, '') // Remove 'SELECT ' from the start
.replace(/\s+FROM `t`$/i, ''); // Remove ' FROM t' from the end
return chSql`${expr}${
select.alias != null
? chSql` AS \`${{ UNSAFE_RAW_SQL: select.alias }}\``
: []
}`;
}),
);
}
/**
 * Render the ORDER BY list. A raw string passes through unchanged;
 * structured entries render as `<expr> ASC|DESC` (ASC unless explicitly
 * DESC).
 */
function renderSortSpecificationList(
  sortSpecificationList: SortSpecificationList,
) {
  if (typeof sortSpecificationList === 'string') {
    return chSql`${{ UNSAFE_RAW_SQL: sortSpecificationList }}`;
  }
  return sortSpecificationList.map(spec => {
    const direction = spec.ordering === 'DESC' ? 'DESC' : 'ASC';
    return chSql`${{ UNSAFE_RAW_SQL: spec.valueExpression }} ${direction}`;
  });
}
/**
 * Render a toStartOfInterval(...) time-bucket expression, aliased (by
 * default) to FIXED_TIME_BUCKET_EXPR_ALIAS. 'auto' granularity derives a
 * bucket size from the date range (targeting ~60 buckets). Only the first
 * comma-separated timestamp expression is used.
 */
function timeBucketExpr({
interval,
timestampValueExpression,
dateRange,
alias = FIXED_TIME_BUCKET_EXPR_ALIAS,
}: {
interval: SQLInterval | 'auto';
timestampValueExpression: string;
dateRange?: [Date, Date];
alias?: string;
}) {
const unsafeTimestampValueExpression = {
UNSAFE_RAW_SQL: getFirstTimestampValueExpression(timestampValueExpression),
};
const unsafeInterval = {
UNSAFE_RAW_SQL:
interval === 'auto' && Array.isArray(dateRange)
? convertDateRangeToGranularityString(dateRange, 60)
: interval,
};
return chSql`toStartOfInterval(toDateTime(${unsafeTimestampValueExpression}), INTERVAL ${unsafeInterval}) AS \`${{
UNSAFE_RAW_SQL: alias,
}}\``;
}
/**
 * Build the time-range predicate for the WHERE clause. The timestamp
 * expression may be a comma-separated list of columns; each gets its own
 * range check and the checks are ANDed together. Date-typed columns are
 * compared via toDate(), others via fromUnixTimestamp64Milli directly.
 * The end bound is always inclusive; the start bound is inclusive when
 * dateRangeStartInclusive is true.
 */
async function timeFilterExpr({
timestampValueExpression,
dateRange,
dateRangeStartInclusive,
databaseName,
tableName,
metadata,
connectionId,
}: {
timestampValueExpression: string;
dateRange: [Date, Date];
dateRangeStartInclusive: boolean;
metadata: Metadata;
connectionId: string;
databaseName: string;
tableName: string;
}) {
const valueExpressions = timestampValueExpression.split(',');
const startTime = dateRange[0].getTime();
const endTime = dateRange[1].getTime();
const whereExprs = await Promise.all(
valueExpressions.map(async expr => {
const col = expr.trim();
// Column type lookup decides Date vs DateTime comparison below.
const columnMeta = await metadata.getColumn({
databaseName,
tableName,
column: col,
connectionId,
});
const unsafeTimestampValueExpression = {
UNSAFE_RAW_SQL: col,
};
if (columnMeta == null) {
console.warn(
`Column ${col} not found in ${databaseName}.${tableName} while inferring type for time filter`,
);
}
// If it's a date type
if (columnMeta?.type === 'Date') {
return chSql`(${unsafeTimestampValueExpression} ${
dateRangeStartInclusive ? '>=' : '>'
} toDate(fromUnixTimestamp64Milli(${{
Int64: startTime,
}})) AND ${unsafeTimestampValueExpression} <= toDate(fromUnixTimestamp64Milli(${{
Int64: endTime,
}})))`;
} else {
return chSql`(${unsafeTimestampValueExpression} ${
dateRangeStartInclusive ? '>=' : '>'
} fromUnixTimestamp64Milli(${{
Int64: startTime,
}}) AND ${unsafeTimestampValueExpression} <= fromUnixTimestamp64Milli(${{
Int64: endTime,
}}))`;
}
}),
);
return concatChSql('AND', ...whereExprs);
}
/**
 * Render the full SELECT clause: the configured select list, plus the
 * group-by expressions (unless selectGroupBy is false), plus the
 * time-bucket expression when a granularity is in use.
 */
async function renderSelect(
chartConfig: ChartConfigWithOptDateRange,
metadata: Metadata,
): Promise<ChSql> {
/**
 * SELECT
 *   if granularity: toStartOfInterval,
 *   if groupBy: groupBy,
 *   select
 */
const isIncludingTimeBucket = isUsingGranularity(chartConfig);
const isIncludingGroupBy = isUsingGroupBy(chartConfig);
// TODO: clean up these await mess
return concatChSql(
',',
await renderSelectList(chartConfig.select, chartConfig, metadata),
isIncludingGroupBy && chartConfig.selectGroupBy !== false
? await renderSelectList(chartConfig.groupBy, chartConfig, metadata)
: [],
isIncludingTimeBucket
? timeBucketExpr({
interval: chartConfig.granularity,
timestampValueExpression: chartConfig.timestampValueExpression,
dateRange: chartConfig.dateRange,
})
: [],
);
}
/** Render the FROM clause as a fully-qualified `database`.`table` pair. */
function renderFrom({
  from,
}: {
  from: ChartConfigWithDateRange['from'];
}): ChSql {
  const database = { Identifier: from.databaseName };
  const table = { Identifier: from.tableName };
  return chSql`${database}.${table}`;
}
/**
 * Render a single search condition to a SQL fragment. Lucene conditions
 * are first compiled to SQL via CustomSchemaSQLSerializerV2; the result is
 * then rewritten against materialized columns via fastifySQL.
 */
async function renderWhereExpression({
condition,
language,
metadata,
from,
implicitColumnExpression,
connectionId,
}: {
condition: SearchCondition;
language: SearchConditionLanguage;
metadata: Metadata;
from: ChartConfigWithDateRange['from'];
implicitColumnExpression?: string;
connectionId: string;
}): Promise<ChSql> {
let _condition = condition;
if (language === 'lucene') {
const serializer = new CustomSchemaSQLSerializerV2({
metadata,
databaseName: from.databaseName,
tableName: from.tableName,
implicitColumnExpression,
connectionId: connectionId,
});
const builder = new SearchQueryBuilder(condition, serializer);
_condition = await builder.build();
}
const materializedFields = await metadata.getMaterializedColumnsLookupTable({
connectionId,
databaseName: from.databaseName,
tableName: from.tableName,
});
// Wrap in a dummy SELECT so fastifySQL can parse it as a statement.
const _sqlPrefix = 'SELECT * FROM `t` WHERE ';
const rawSQL = `${_sqlPrefix}${_condition}`;
// strip 'SELECT * FROM `t` WHERE ' from the sql
// NOTE(review): assumes fastifySQL's sqlify emits the prefix back
// byte-for-byte so .replace can strip it — TODO confirm for all dialect
// normalizations (it falls back to rawSQL on parse failure, which does
// round-trip exactly).
_condition = fastifySQL({ materializedFields, rawSQL }).replace(
_sqlPrefix,
'',
);
return chSql`${{ UNSAFE_RAW_SQL: _condition }}`;
}
/**
 * Render the complete WHERE clause. ANDs together, in order: the
 * time-range filter, the top-level where condition, the per-select
 * aggConditions (ORed, and only when every select has one — otherwise all
 * rows are scanned anyway), and the configured filters (joined with
 * filtersLogicalOperator, default AND).
 */
async function renderWhere(
chartConfig: ChartConfigWithOptDateRange,
metadata: Metadata,
): Promise<ChSql> {
let whereSearchCondition: ChSql | [] = [];
if (isNonEmptyWhereExpr(chartConfig.where)) {
whereSearchCondition = wrapChSqlIfNotEmpty(
await renderWhereExpression({
condition: chartConfig.where,
from: chartConfig.from,
language: chartConfig.whereLanguage ?? 'sql',
implicitColumnExpression: chartConfig.implicitColumnExpression,
metadata,
connectionId: chartConfig.connection,
}),
'(',
')',
);
}
let selectSearchConditions: ChSql[] = [];
if (
typeof chartConfig.select != 'string' &&
// Only if every select has an aggCondition, add to where clause
// otherwise we'll scan all rows anyways
chartConfig.select.every(select => isNonEmptyWhereExpr(select.aggCondition))
) {
selectSearchConditions = (
await Promise.all(
chartConfig.select.map(async select => {
if (isNonEmptyWhereExpr(select.aggCondition)) {
return await renderWhereExpression({
condition: select.aggCondition,
from: chartConfig.from,
language: select.aggConditionLanguage ?? 'sql',
implicitColumnExpression: chartConfig.implicitColumnExpression,
metadata,
connectionId: chartConfig.connection,
});
}
return null;
}),
)
).filter(v => v !== null) as ChSql[];
}
const filterConditions = await Promise.all(
(chartConfig.filters ?? []).map(async filter => {
if (filter.type === 'sql_ast') {
return wrapChSqlIfNotEmpty(
chSql`${{ UNSAFE_RAW_SQL: filter.left }} ${filter.operator} ${{ UNSAFE_RAW_SQL: filter.right }}`,
'(',
')',
);
} else if (filter.type === 'lucene' || filter.type === 'sql') {
return wrapChSqlIfNotEmpty(
await renderWhereExpression({
condition: filter.condition,
from: chartConfig.from,
language: filter.type,
implicitColumnExpression: chartConfig.implicitColumnExpression,
metadata,
connectionId: chartConfig.connection,
}),
'(',
')',
);
}
throw new Error(`Unknown filter type: ${filter.type}`);
}),
);
return concatChSql(
' AND ',
chartConfig.dateRange != null &&
chartConfig.timestampValueExpression != null
? await timeFilterExpr({
timestampValueExpression: chartConfig.timestampValueExpression,
dateRange: chartConfig.dateRange,
dateRangeStartInclusive: chartConfig.dateRangeStartInclusive ?? true,
metadata,
connectionId: chartConfig.connection,
databaseName: chartConfig.from.databaseName,
tableName: chartConfig.from.tableName,
})
: [],
whereSearchCondition,
// Add aggConditions to where clause to utilize index
wrapChSqlIfNotEmpty(concatChSql(' OR ', selectSearchConditions), '(', ')'),
wrapChSqlIfNotEmpty(
concatChSql(
chartConfig.filtersLogicalOperator === 'OR' ? ' OR ' : ' AND ',
...filterConditions,
),
'(',
')',
),
);
}
async function renderGroupBy(
chartConfig: ChartConfigWithOptDateRange,
metadata: Metadata,
): Promise<ChSql | undefined> {
return concatChSql(
',',
isUsingGroupBy(chartConfig)
? await renderSelectList(chartConfig.groupBy, chartConfig, metadata)
: [],
isUsingGranularity(chartConfig)
? timeBucketExpr({
interval: chartConfig.granularity,
timestampValueExpression: chartConfig.timestampValueExpression,
dateRange: chartConfig.dateRange,
})
: [],
);
}
/**
 * Builds the ORDER BY list. When a granularity is active the time-bucket
 * expression sorts first, followed by any explicit sort specification.
 * Returns undefined when there is nothing to order by.
 */
function renderOrderBy(
  chartConfig: ChartConfigWithOptDateRange,
): ChSql | undefined {
  const hasTimeBucket = isUsingGranularity(chartConfig);
  const hasExplicitOrder = chartConfig.orderBy != null;
  if (!hasTimeBucket && !hasExplicitOrder) {
    return undefined;
  }
  return concatChSql(
    ',',
    hasTimeBucket
      ? timeBucketExpr({
          interval: chartConfig.granularity,
          timestampValueExpression: chartConfig.timestampValueExpression,
          dateRange: chartConfig.dateRange,
        })
      : [],
    hasExplicitOrder ? renderSortSpecificationList(chartConfig.orderBy) : [],
  );
}
/**
 * Builds the LIMIT (and optional OFFSET) fragment. Returns undefined when
 * no limit is configured.
 */
function renderLimit(
  chartConfig: ChartConfigWithOptDateRange,
): ChSql | undefined {
  const limitSpec = chartConfig.limit;
  if (limitSpec?.limit == null) {
    return undefined;
  }
  const offsetClause =
    limitSpec.offset == null
      ? []
      : chSql` OFFSET ${{ Int32: limitSpec.offset }}`;
  return chSql`${{ Int32: limitSpec.limit }}${offsetClause}`;
}
/**
 * Renders a chart config into a complete ClickHouse SELECT statement by
 * composing the SELECT / FROM / WHERE / GROUP BY / ORDER BY / LIMIT
 * fragments rendered by the helpers above. A clause keyword is emitted only
 * when its fragment rendered non-empty.
 *
 * NOTE(review): `metadata` is not a parameter and is not declared in this
 * function — presumably a module-level instance imported elsewhere in this
 * file; confirm it is in scope.
 */
export async function renderChartConfig(
  chartConfig: ChartConfigWithOptDateRange,
): Promise<ChSql> {
  const select = await renderSelect(chartConfig, metadata);
  const from = renderFrom(chartConfig);
  const where = await renderWhere(chartConfig, metadata);
  const groupBy = await renderGroupBy(chartConfig, metadata);
  const orderBy = renderOrderBy(chartConfig);
  const limit = renderLimit(chartConfig);
  return chSql`SELECT ${select} FROM ${from} ${where?.sql ? chSql`WHERE ${where}` : ''} ${
    groupBy?.sql ? chSql`GROUP BY ${groupBy}` : ''
  } ${orderBy?.sql ? chSql`ORDER BY ${orderBy}` : ''} ${
    limit?.sql ? chSql`LIMIT ${limit}` : ''
  }`;
}
// EditForm -> translateToQueriedChartConfig -> QueriedChartConfig
// renderFn(QueriedChartConfig) -> sql
// query(sql) -> data
// formatter(data) -> displaySpecificDs
// displaySettings(QueriedChartConfig) -> displaySpecificDs
// chartComponent(displaySpecificDs) -> React.Node

View file

@ -0,0 +1,46 @@
// Derived from SQL grammar spec
// See: https://ronsavage.github.io/SQL/sql-2003-2.bnf.html#query%20specification
import { z } from 'zod';
import {
AggregateFunctionSchema,
AggregateFunctionWithCombinatorsSchema,
DerivedColumnSchema,
SearchConditionLanguageSchema,
SearchConditionSchema,
SelectListSchema,
SortSpecificationListSchema,
SQLIntervalSchema,
} from '@/common/commonTypes';
// Plain TS types inferred from the shared zod schemas in commonTypes, so
// query-building code can consume them without touching zod directly.
export type SQLInterval = z.infer<typeof SQLIntervalSchema>;
export type SearchCondition = z.infer<typeof SearchConditionSchema>;
export type SearchConditionLanguage = z.infer<
  typeof SearchConditionLanguageSchema
>;
export type AggregateFunction = z.infer<typeof AggregateFunctionSchema>;
export type AggregateFunctionWithCombinators = z.infer<
  typeof AggregateFunctionWithCombinatorsSchema
>;
export type DerivedColumn = z.infer<typeof DerivedColumnSchema>;
export type SelectList = z.infer<typeof SelectListSchema>;
export type SortSpecificationList = z.infer<typeof SortSpecificationListSchema>;
// LIMIT/OFFSET pair; both parts optional.
type Limit = { limit?: number; offset?: number };
// Structured representation of a SELECT statement, mirroring the SQL-2003
// query-specification grammar referenced at the top of this file.
export type SelectSQLStatement = {
  select: SelectList;
  from: { databaseName: string; tableName: string };
  where: SearchCondition;
  whereLanguage?: SearchConditionLanguage;
  groupBy?: SelectList;
  having?: SearchCondition;
  havingLanguage?: SearchConditionLanguage;
  orderBy?: SortSpecificationList;
  limit?: Limit;
};

View file

@ -0,0 +1,173 @@
// Port from ChartUtils + source.ts
import { add } from 'date-fns';
import type { SQLInterval } from '@/common/sqlTypes';
// If a user specifies a timestampValueExpression with multiple columns,
// this will return the first one. We'll want to refine this over time
/**
 * Returns the first comma-separated entry of a timestamp value expression,
 * trimmed of surrounding whitespace. When there is no comma the whole
 * (trimmed) expression is returned.
 */
export function getFirstTimestampValueExpression(valueExpression: string) {
  const commaIdx = valueExpression.indexOf(',');
  const head =
    commaIdx === -1 ? valueExpression : valueExpression.slice(0, commaIdx);
  return head.trim();
}
/**
 * Predefined time-bucket sizes for charting. Values are ClickHouse-style
 * interval strings ('<n> <unit>') and are consumed by the interval helpers
 * below (convertGranularityToSeconds, toStartOfInterval, etc.).
 */
export enum Granularity {
  FifteenSecond = '15 second',
  ThirtySecond = '30 second',
  OneMinute = '1 minute',
  FiveMinute = '5 minute',
  TenMinute = '10 minute',
  FifteenMinute = '15 minute',
  ThirtyMinute = '30 minute',
  OneHour = '1 hour',
  TwoHour = '2 hour',
  SixHour = '6 hour',
  TwelveHour = '12 hour',
  OneDay = '1 day',
  TwoDay = '2 day',
  SevenDay = '7 day',
  ThirtyDay = '30 day',
}
/**
 * Java-style 32-bit string hash: h = h * 31 + charCode for each UTF-16 code
 * unit, truncated to a signed 32-bit integer at every step. Returns 0 for
 * the empty string.
 */
export function hashCode(str: string) {
  let hash = 0;
  for (let idx = 0; idx < str.length; idx++) {
    // Math.imul(hash, 31) + code, reduced to int32, is congruent mod 2^32
    // to the classic (hash << 5) - hash + code formulation.
    hash = (Math.imul(hash, 31) + str.charCodeAt(idx)) | 0;
  }
  return hash;
}
/**
 * Picks the smallest predefined Granularity such that splitting the given
 * date range into buckets of that size produces at most `maxNumBuckets`
 * buckets. Ranges requiring buckets coarser than 30 days still return
 * ThirtyDay.
 */
export function convertDateRangeToGranularityString(
  dateRange: [Date, Date],
  maxNumBuckets: number,
): Granularity {
  const rangeSeconds = Math.floor(
    (dateRange[1].getTime() - dateRange[0].getTime()) / 1000,
  );
  const bucketSeconds = Math.ceil(rangeSeconds / maxNumBuckets);
  // Ascending (maxBucketSeconds, granularity) thresholds; first match wins.
  const thresholds: [number, Granularity][] = [
    [15, Granularity.FifteenSecond],
    [30, Granularity.ThirtySecond],
    [60, Granularity.OneMinute],
    [5 * 60, Granularity.FiveMinute],
    [10 * 60, Granularity.TenMinute],
    [15 * 60, Granularity.FifteenMinute],
    [30 * 60, Granularity.ThirtyMinute],
    [3600, Granularity.OneHour],
    [2 * 3600, Granularity.TwoHour],
    [6 * 3600, Granularity.SixHour],
    [12 * 3600, Granularity.TwelveHour],
    [24 * 3600, Granularity.OneDay],
    [2 * 24 * 3600, Granularity.TwoDay],
    [7 * 24 * 3600, Granularity.SevenDay],
  ];
  for (const [maxSeconds, granularity] of thresholds) {
    if (bucketSeconds <= maxSeconds) {
      return granularity;
    }
  }
  return Granularity.ThirtyDay;
}
/**
 * Converts a SQL interval string such as '15 minute' into its length in
 * seconds. Unrecognized units yield 0.
 */
export function convertGranularityToSeconds(granularity: SQLInterval): number {
  const [amountStr, unit] = granularity.split(' ');
  const amount = Number.parseInt(amountStr);
  const secondsPerUnit: Record<string, number> = {
    second: 1,
    minute: 60,
    hour: 60 * 60,
    day: 60 * 60 * 24,
  };
  const factor = secondsPerUnit[unit];
  return factor === undefined ? 0 : amount * factor;
}
// Note: roundToNearestMinutes is broken in date-fns currently
// additionally it doesn't support seconds or > 30min
// so we need to write our own :(
// see: https://github.com/date-fns/date-fns/pull/3267/files
/**
 * Floors `date` to the start of its containing interval bucket, in UTC —
 * e.g. with '15 minute', 12:07:45 becomes 12:00:00. Unknown units return
 * the date unchanged. Intended to mirror ClickHouse's rounding (see the
 * note on the 'day' case below).
 */
export function toStartOfInterval(date: Date, granularity: SQLInterval): Date {
  const [num, unit] = granularity.split(' ');
  const numInt = Number.parseInt(num);
  const roundFn = Math.floor;
  switch (unit) {
    case 'second':
      // Keep date through minutes; floor seconds to a multiple of numInt.
      return new Date(
        Date.UTC(
          date.getUTCFullYear(),
          date.getUTCMonth(),
          date.getUTCDate(),
          date.getUTCHours(),
          date.getUTCMinutes(),
          roundFn(date.getUTCSeconds() / numInt) * numInt,
        ),
      );
    case 'minute':
      // Floor minutes to a multiple of numInt; seconds are zeroed.
      return new Date(
        Date.UTC(
          date.getUTCFullYear(),
          date.getUTCMonth(),
          date.getUTCDate(),
          date.getUTCHours(),
          roundFn(date.getUTCMinutes() / numInt) * numInt,
        ),
      );
    case 'hour':
      // Floor hours to a multiple of numInt; minutes/seconds are zeroed.
      return new Date(
        Date.UTC(
          date.getUTCFullYear(),
          date.getUTCMonth(),
          date.getUTCDate(),
          roundFn(date.getUTCHours() / numInt) * numInt,
        ),
      );
    case 'day': {
      // Clickhouse uses the # of days since unix epoch to round dates
      // see: https://github.com/ClickHouse/ClickHouse/blob/master/src/Common/DateLUTImpl.h#L1059
      const daysSinceEpoch = date.getTime() / 1000 / 60 / 60 / 24;
      const daysSinceEpochRounded = roundFn(daysSinceEpoch / numInt) * numInt;
      return new Date(daysSinceEpochRounded * 1000 * 60 * 60 * 24);
    }
    default:
      return date;
  }
}
export function timeBucketByGranularity(
start: Date,
end: Date,
granularity: SQLInterval,
): Date[] {
const buckets: Date[] = [];
let current = toStartOfInterval(start, granularity);
const granularitySeconds = convertGranularityToSeconds(granularity);
while (current < end) {
buckets.push(current);
current = add(current, {
seconds: granularitySeconds,
});
}
return buckets;
}

View file

@ -1,16 +1,13 @@
import { getHours, getMinutes } from 'date-fns';
import { sign, verify } from 'jsonwebtoken';
import ms from 'ms';
import { z } from 'zod';
import * as clickhouse from '@/clickhouse';
import { SQLSerializer } from '@/clickhouse/searchQueryParser';
import type { ObjectId } from '@/models';
import Alert, {
AlertChannel,
AlertInterval,
AlertSource,
AlertType,
AlertThresholdType,
IAlert,
} from '@/models/alert';
import Dashboard, { IDashboard } from '@/models/dashboard';
@ -20,10 +17,10 @@ import logger from '@/utils/logger';
import { alertSchema } from '@/utils/zod';
export type AlertInput = {
source: AlertSource;
source?: AlertSource;
channel: AlertChannel;
interval: AlertInterval;
type: AlertType;
thresholdType: AlertThresholdType;
threshold: number;
// Message template
@ -46,60 +43,13 @@ export type AlertInput = {
};
};
const getCron = (interval: AlertInterval) => {
const now = new Date();
const nowMins = getMinutes(now);
const nowHours = getHours(now);
switch (interval) {
case '1m':
return '* * * * *';
case '5m':
return '*/5 * * * *';
case '15m':
return '*/15 * * * *';
case '30m':
return '*/30 * * * *';
case '1h':
return `${nowMins} * * * *`;
case '6h':
return `${nowMins} */6 * * *`;
case '12h':
return `${nowMins} */12 * * *`;
case '1d':
return `${nowMins} ${nowHours} * * *`;
}
};
export const validateGroupByProperty = async ({
groupBy,
logStreamTableVersion,
teamId,
}: {
groupBy: string;
logStreamTableVersion: number | undefined;
teamId: string;
}): Promise<boolean> => {
const nowInMs = Date.now();
const propertyTypeMappingsModel =
await clickhouse.buildLogsPropertyTypeMappingsModel(
logStreamTableVersion,
teamId,
nowInMs - ms('1d'),
nowInMs,
);
const serializer = new SQLSerializer(propertyTypeMappingsModel);
const { found } = await serializer.getColumnForField(groupBy);
return !!found;
};
const makeAlert = (alert: AlertInput) => {
const makeAlert = (alert: AlertInput): Partial<IAlert> => {
return {
channel: alert.channel,
interval: alert.interval,
source: alert.source,
threshold: alert.threshold,
type: alert.type,
thresholdType: alert.thresholdType,
// Message template
// If they're undefined/null, set it to null so we clear out the field
@ -109,13 +59,11 @@ const makeAlert = (alert: AlertInput) => {
message: alert.message == null ? null : alert.message,
// Log alerts
savedSearch: alert.savedSearchId,
savedSearch: alert.savedSearchId as unknown as ObjectId,
groupBy: alert.groupBy,
// Chart alerts
dashboardId: alert.dashboardId,
dashboard: alert.dashboardId as unknown as ObjectId,
tileId: alert.tileId,
cron: getCron(alert.interval),
timezone: 'UTC', // TODO: support different timezone
};
};
@ -123,13 +71,13 @@ export const createAlert = async (
teamId: ObjectId,
alertInput: z.infer<typeof alertSchema>,
) => {
if (alertInput.source === 'CHART') {
if (alertInput.source === AlertSource.TILE) {
if ((await Dashboard.findById(alertInput.dashboardId)) == null) {
throw new Error('Dashboard ID not found');
}
}
if (alertInput.source === 'LOG') {
if (alertInput.source === AlertSource.SAVED_SEARCH) {
if ((await SavedSearch.findById(alertInput.savedSearchId)) == null) {
throw new Error('Saved Search ID not found');
}
@ -177,11 +125,11 @@ export const getAlertById = async (
export const getAlertsEnhanced = async (teamId: ObjectId) => {
return Alert.find({ team: teamId }).populate<{
savedSearch: ISavedSearch;
dashboardId: IDashboard;
dashboard: IDashboard;
silenced?: IAlert['silenced'] & {
by: IUser;
};
}>(['savedSearch', 'dashboardId', 'silenced.by']);
}>(['savedSearch', 'dashboard', 'silenced.by']);
};
export const deleteAlert = async (id: string, teamId: ObjectId) => {

View file

@ -1,11 +1,11 @@
import { differenceBy, uniq } from 'lodash';
import { z } from 'zod';
import { DashboardWithoutIdSchema, Tile } from '@/common/commonTypes';
import type { ObjectId } from '@/models';
import Alert from '@/models/alert';
import Dashboard from '@/models/dashboard';
import { DashboardSchema, DashboardWithoutIdSchema } from '@/utils/commonTypes';
import { chartSchema, tagsSchema } from '@/utils/zod';
import { tagsSchema } from '@/utils/zod';
export async function getDashboards(teamId: ObjectId) {
const dashboards = await Dashboard.find({
@ -41,7 +41,7 @@ export async function deleteDashboardAndAlerts(
team: teamId,
});
if (dashboard) {
await Alert.deleteMany({ dashboardId: dashboard._id });
await Alert.deleteMany({ dashboard: dashboard._id });
}
}
@ -50,13 +50,11 @@ export async function updateDashboard(
teamId: ObjectId,
{
name,
charts,
query,
tiles,
tags,
}: {
name: string;
charts: z.infer<typeof chartSchema>[];
query: string;
tiles: Tile[];
tags: z.infer<typeof tagsSchema>;
},
) {
@ -67,8 +65,7 @@ export async function updateDashboard(
},
{
name,
charts,
query,
tiles,
tags: tags && uniq(tags),
},
{ new: true },
@ -114,7 +111,7 @@ export async function updateDashboardAndAlerts(
if (deletedTileIds?.length > 0) {
await Alert.deleteMany({
dashboardId: dashboardId,
dashboard: dashboardId,
tileId: { $in: deletedTileIds },
});
}

View file

@ -1,7 +1,7 @@
import { z } from 'zod';
import { SavedSearchSchema } from '@/common/commonTypes';
import { SavedSearch } from '@/models/savedSearch';
import { SavedSearchSchema } from '@/utils/commonTypes';
type SavedSearchWithoutId = Omit<z.infer<typeof SavedSearchSchema>, 'id'>;

View file

@ -1,6 +1,5 @@
import mongoose from 'mongoose';
import request from 'supertest';
import { z } from 'zod';
import * as clickhouse from '@/clickhouse';
import {
@ -15,13 +14,15 @@ import {
} from '@/utils/logParser';
import { redisClient } from '@/utils/redis';
import { SavedChartConfig, Tile } from './common/commonTypes';
import { DisplayType } from './common/DisplayType';
import * as config from './config';
import { AlertInput } from './controllers/alerts';
import { getTeam } from './controllers/team';
import { findUserByEmail } from './controllers/user';
import { mongooseConnection } from './models';
import { AlertInterval, AlertSource, AlertThresholdType } from './models/alert';
import Server from './server';
import { Tile } from './utils/commonTypes';
import { externalAlertSchema } from './utils/zod';
const MOCK_USER = {
email: 'fake@deploysentinel.com',
@ -77,8 +78,12 @@ class MockServer extends Server {
if (!config.IS_CI) {
throw new Error('ONLY execute this in CI env 😈 !!!');
}
await super.start();
await initCiEnvs();
try {
await super.start();
await initCiEnvs();
} catch (err) {
console.error(err);
}
}
stop() {
@ -101,18 +106,7 @@ class MockServer extends Server {
}
}
class MockAPIServer extends MockServer {
protected readonly appType = 'api';
}
export const getServer = (appType: 'api' = 'api') => {
switch (appType) {
case 'api':
return new MockAPIServer();
default:
throw new Error(`Invalid app type: ${appType}`);
}
};
export const getServer = () => new MockServer();
export const getAgent = (server: MockServer) =>
request.agent(server.getHttpServer());
@ -349,24 +343,32 @@ export const makeTile = (opts?: { id?: string }): Tile => ({
y: 1,
w: 1,
h: 1,
config: makeChart(),
config: makeChartConfig(),
});
export const makeChart = (opts?: { id?: string }) => ({
id: opts?.id ?? randomMongoId(),
export const makeChartConfig = (opts?: { id?: string }): SavedChartConfig => ({
name: 'Test Chart',
x: 1,
y: 1,
w: 1,
h: 1,
series: [
source: 'test-source',
displayType: DisplayType.Line,
select: [
{
type: 'time',
table: 'metrics',
aggFn: 'count',
aggCondition: '',
aggConditionLanguage: 'lucene',
valueExpression: '',
},
],
where: '',
whereLanguage: 'lucene',
granularity: 'auto',
implicitColumnExpression: 'Body',
numberFormat: {
output: 'number',
},
filters: [],
});
// TODO: DEPRECATED
export const makeExternalChart = (opts?: { id?: string }) => ({
name: 'Test Chart',
x: 1,
@ -382,50 +384,25 @@ export const makeExternalChart = (opts?: { id?: string }) => ({
],
});
export const makeAlert = ({
export const makeAlertInput = ({
dashboardId,
interval = '15m',
threshold = 8,
tileId,
}: {
dashboardId: string;
interval?: AlertInterval;
threshold?: number;
tileId: string;
}) => ({
}): Partial<AlertInput> => ({
channel: {
type: 'webhook',
webhookId: 'test-webhook-id',
},
interval: '15m',
threshold: 8,
type: 'presence',
source: 'CHART',
interval,
threshold,
thresholdType: AlertThresholdType.ABOVE,
source: AlertSource.TILE,
dashboardId,
tileId,
});
export const makeExternalAlert = ({
dashboardId,
chartId,
threshold = 8,
interval = '15m',
name,
message,
}: {
dashboardId: string;
chartId: string;
threshold?: number;
interval?: '15m' | '1m' | '5m' | '30m' | '1h' | '6h' | '12h' | '1d';
name?: string;
message?: string;
}): z.infer<typeof externalAlertSchema> => ({
channel: {
type: 'slack_webhook',
webhookId: '65ad876b6b08426ab4ba7830',
},
interval,
threshold,
threshold_type: 'above',
source: 'chart',
dashboardId,
chartId,
name,
message,
});

View file

@ -2,7 +2,10 @@ import mongoose, { Schema } from 'mongoose';
import type { ObjectId } from '.';
export type AlertType = 'presence' | 'absence';
export enum AlertThresholdType {
ABOVE = 'above',
BELOW = 'below',
}
export enum AlertState {
ALERT = 'ALERT',
@ -22,35 +25,40 @@ export type AlertInterval =
| '12h'
| '1d';
export type AlertChannel = {
type: 'webhook';
webhookId: string;
};
export type AlertChannel =
| {
type: 'webhook';
webhookId: string;
}
| {
type: null;
};
export type AlertSource = 'LOG' | 'CHART';
export enum AlertSource {
SAVED_SEARCH = 'saved_search',
TILE = 'tile',
}
export interface IAlert {
_id: ObjectId;
channel: AlertChannel;
cron: string;
interval: AlertInterval;
source?: AlertSource;
state: AlertState;
team: ObjectId;
threshold: number;
timezone: string;
type: AlertType;
thresholdType: AlertThresholdType;
// Message template
name?: string | null;
message?: string | null;
// Log alerts
// SavedSearch alerts
groupBy?: string;
savedSearch?: ObjectId;
// Chart alerts
dashboardId?: ObjectId;
// Tile alerts
dashboard?: ObjectId;
tileId?: string;
// Silenced
@ -65,26 +73,19 @@ export type AlertDocument = mongoose.HydratedDocument<IAlert>;
const AlertSchema = new Schema<IAlert>(
{
type: {
type: String,
required: true,
},
threshold: {
type: Number,
required: true,
},
thresholdType: {
type: String,
enum: AlertThresholdType,
required: false,
},
interval: {
type: String,
required: true,
},
timezone: {
type: String,
required: true,
},
cron: {
type: String,
required: true,
},
channel: Schema.Types.Mixed, // slack, email, etc
state: {
type: String,
@ -94,7 +95,7 @@ const AlertSchema = new Schema<IAlert>(
source: {
type: String,
required: false,
default: 'LOG',
default: AlertSource.SAVED_SEARCH,
},
team: {
type: mongoose.Schema.Types.ObjectId,
@ -123,7 +124,7 @@ const AlertSchema = new Schema<IAlert>(
},
// Chart alerts
dashboardId: {
dashboard: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Dashboard',
required: false,

View file

@ -1,7 +1,7 @@
import mongoose, { Schema } from 'mongoose';
import { z } from 'zod';
import { DashboardSchema } from '@/utils/commonTypes';
import { DashboardSchema } from '@/common/commonTypes';
import type { ObjectId } from '.';

View file

@ -2,12 +2,12 @@ import mongoose, { Schema } from 'mongoose';
import { v4 as uuidv4 } from 'uuid';
import { z } from 'zod';
import { SavedSearchSchema } from '@/utils/commonTypes';
import { SavedSearchSchema } from '@/common/commonTypes';
type ObjectId = mongoose.Types.ObjectId;
export interface ISavedSearch
extends Omit<z.infer<typeof SavedSearchSchema>, 'source' | 'id'> {
extends Omit<z.infer<typeof SavedSearchSchema>, 'source'> {
_id: ObjectId;
team: ObjectId;
source: ObjectId;

View file

@ -1,6 +1,6 @@
import mongoose, { Schema } from 'mongoose';
import { TSource } from '@/utils/commonTypes';
import { TSource } from '@/common/commonTypes';
type ObjectId = mongoose.Types.ObjectId;

View file

@ -1,7 +1,7 @@
import {
getLoggedInAgent,
getServer,
makeAlert,
makeAlertInput,
makeTile,
randomMongoId,
} from '@/fixtures';
@ -39,13 +39,13 @@ describe('alerts router', () => {
const alert = await agent
.post('/alerts')
.send(
makeAlert({
makeAlertInput({
dashboardId: dashboard.body.id,
tileId: dashboard.body.tiles[0].id,
}),
)
.expect(200);
expect(alert.body.data.dashboardId).toBe(dashboard.body.id);
expect(alert.body.data.dashboard).toBe(dashboard.body.id);
expect(alert.body.data.tileId).toBe(dashboard.body.tiles[0].id);
});
@ -58,7 +58,7 @@ describe('alerts router', () => {
const alert = await agent
.post('/alerts')
.send(
makeAlert({
makeAlertInput({
dashboardId: resp.body.id,
tileId: MOCK_TILES[0].id,
}),
@ -78,7 +78,7 @@ describe('alerts router', () => {
const alert = await agent
.post('/alerts')
.send(
makeAlert({
makeAlertInput({
dashboardId: dashboard.body.id,
tileId: MOCK_TILES[0].id,
}),
@ -88,6 +88,7 @@ describe('alerts router', () => {
.put(`/alerts/${alert.body.data._id}`)
.send({
...alert.body.data,
dashboardId: dashboard.body.id, // because alert.body.data stores 'dashboard' instead of 'dashboardId'
threshold: 10,
})
.expect(200);
@ -109,7 +110,7 @@ describe('alerts router', () => {
agent
.post('/alerts')
.send(
makeAlert({
makeAlertInput({
dashboardId: dashboard._id,
tileId: tile.id,
}),

View file

@ -1,7 +1,11 @@
import { getLoggedInAgent, getServer, makeAlert, makeTile } from '@/fixtures';
import {
getLoggedInAgent,
getServer,
makeAlertInput,
makeTile,
} from '@/fixtures';
const MOCK_DASHBOARD = {
id: '1',
name: 'Test Dashboard',
tiles: [makeTile(), makeTile(), makeTile(), makeTile(), makeTile()],
tags: ['test'],
@ -22,6 +26,54 @@ describe('dashboard router', () => {
await server.stop();
});
it('can create a dashboard', async () => {
const { agent } = await getLoggedInAgent(server);
const dashboard = await agent
.post('/dashboards')
.send(MOCK_DASHBOARD)
.expect(200);
expect(dashboard.body.name).toBe(MOCK_DASHBOARD.name);
expect(dashboard.body.tiles.length).toBe(MOCK_DASHBOARD.tiles.length);
expect(dashboard.body.tiles.map(tile => tile.id)).toEqual(
MOCK_DASHBOARD.tiles.map(tile => tile.id),
);
});
it('can update a dashboard', async () => {
const { agent } = await getLoggedInAgent(server);
const dashboard = await agent
.post('/dashboards')
.send(MOCK_DASHBOARD)
.expect(200);
const updatedDashboard = await agent
.patch(`/dashboards/${dashboard.body.id}`)
.send({
...dashboard.body,
name: 'Updated Dashboard',
tiles: dashboard.body.tiles.slice(1),
})
.expect(200);
expect(updatedDashboard.body.name).toBe('Updated Dashboard');
expect(updatedDashboard.body.tiles.length).toBe(
dashboard.body.tiles.length - 1,
);
expect(updatedDashboard.body.tiles.map(tile => tile.id)).toEqual(
dashboard.body.tiles.slice(1).map(tile => tile.id),
);
});
it('can delete a dashboard', async () => {
const { agent } = await getLoggedInAgent(server);
const dashboard = await agent
.post('/dashboards')
.send(MOCK_DASHBOARD)
.expect(200);
await agent.delete(`/dashboards/${dashboard.body.id}`).expect(204);
const dashboards = await agent.get('/dashboards').expect(200);
expect(dashboards.body.length).toBe(0);
});
it('deletes attached alerts when deleting tiles', async () => {
const { agent } = await getLoggedInAgent(server);
@ -35,7 +87,7 @@ describe('dashboard router', () => {
agent
.post('/alerts')
.send(
makeAlert({
makeAlertInput({
dashboardId: dashboard._id,
tileId: tile.id,
}),

View file

@ -1,4 +1,4 @@
import express, { NextFunction, Request, Response } from 'express';
import express from 'express';
import _ from 'lodash';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
@ -9,45 +9,12 @@ import {
getAlertById,
getAlertsEnhanced,
updateAlert,
validateGroupByProperty,
} from '@/controllers/alerts';
import { getTeam } from '@/controllers/team';
import AlertHistory from '@/models/alertHistory';
import { alertSchema, objectIdSchema } from '@/utils/zod';
const router = express.Router();
// Validate groupBy property
const validateGroupBy = async (
req: Request,
res: Response,
next: NextFunction,
) => {
const { groupBy, source } = req.body || {};
if (source === 'LOG' && groupBy) {
const teamId = req.user?.team;
if (teamId == null) {
return res.sendStatus(403);
}
const team = await getTeam(teamId);
if (team == null) {
return res.sendStatus(403);
}
// Validate groupBy property
const groupByValid = await validateGroupByProperty({
groupBy,
logStreamTableVersion: team.logStreamTableVersion,
teamId: teamId.toString(),
});
if (!groupByValid) {
return res.status(400).json({
error: 'Invalid groupBy property',
});
}
}
next();
};
router.get('/', async (req, res, next) => {
try {
const teamId = req.user?.team;
@ -82,17 +49,12 @@ router.get('/', async (req, res, next) => {
}
: undefined,
channel: _.pick(alert.channel, ['type']),
...(alert.dashboardId && {
...(alert.dashboard && {
dashboard: {
charts: alert.dashboardId.tiles
.filter(chart => chart.id === alert.tileId)
.map(chart => _.pick(chart, ['id', 'name'])),
..._.pick(alert.dashboardId, [
'_id',
'name',
'updatedAt',
'tags',
]),
tiles: alert.dashboard.tiles
.filter(tile => tile.id === alert.tileId)
.map(tile => _.pick(tile, ['id', 'name'])),
..._.pick(alert.dashboard, ['_id', 'name', 'updatedAt', 'tags']),
},
}),
...(alert.savedSearch && {
@ -108,8 +70,8 @@ router.get('/', async (req, res, next) => {
'_id',
'interval',
'threshold',
'thresholdType',
'state',
'type',
'source',
'tileId',
'createdAt',
@ -129,7 +91,6 @@ router.get('/', async (req, res, next) => {
router.post(
'/',
validateRequest({ body: alertSchema }),
validateGroupBy,
async (req, res, next) => {
const teamId = req.user?.team;
if (teamId == null) {
@ -154,7 +115,6 @@ router.put(
id: objectIdSchema,
}),
}),
validateGroupBy,
async (req, res, next) => {
try {
const teamId = req.user?.team;

View file

@ -1,6 +1,7 @@
import express from 'express';
import { validateRequest } from 'zod-express-middleware';
import { ConnectionSchema } from '@/common/commonTypes';
import {
createConnection,
deleteConnection,
@ -9,7 +10,6 @@ import {
updateConnection,
} from '@/controllers/connection';
import { getNonNullUserWithTeam } from '@/middleware/auth';
import { ConnectionSchema } from '@/utils/commonTypes';
const router = express.Router();

View file

@ -4,6 +4,10 @@ import _ from 'lodash';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
import {
DashboardSchema,
DashboardWithoutIdSchema,
} from '@/common/commonTypes';
import {
createDashboard,
deleteDashboardAndAlerts,
@ -13,8 +17,6 @@ import {
} from '@/controllers/dashboard';
import { getNonNullUserWithTeam } from '@/middleware/auth';
import Alert from '@/models/alert';
import Dashboard from '@/models/dashboard';
import { DashboardSchema, DashboardWithoutIdSchema } from '@/utils/commonTypes';
import { chartSchema, objectIdSchema, tagsSchema } from '@/utils/zod';
// create routes that will get and update dashboards
@ -28,9 +30,9 @@ router.get('/', async (req, res, next) => {
const alertsByDashboard = groupBy(
await Alert.find({
dashboardId: { $in: dashboards.map(d => d._id) },
dashboard: { $in: dashboards.map(d => d._id) },
}),
'dashboardId',
'dashboard',
);
res.json(

View file

@ -3,6 +3,7 @@ import _ from 'lodash';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
import { SavedSearchSchema } from '@/common/commonTypes';
import {
createSavedSearch,
deleteSavedSearch,
@ -11,7 +12,6 @@ import {
updateSavedSearch,
} from '@/controllers/savedSearch';
import { getNonNullUserWithTeam } from '@/middleware/auth';
import { SavedSearchSchema } from '@/utils/commonTypes';
import { objectIdSchema } from '@/utils/zod';
const router = express.Router();

View file

@ -2,6 +2,7 @@ import express from 'express';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
import { SourceSchema } from '@/common/commonTypes';
import {
createSource,
deleteSource,
@ -9,7 +10,6 @@ import {
updateSource,
} from '@/controllers/sources';
import { getNonNullUserWithTeam } from '@/middleware/auth';
import { SourceSchema } from '@/utils/commonTypes';
import { objectIdSchema } from '@/utils/zod';
const router = express.Router();

View file

@ -3,18 +3,18 @@ import _ from 'lodash';
import {
getLoggedInAgent,
getServer,
makeChart,
makeExternalAlert,
makeAlertInput,
makeChartConfig,
} from '@/fixtures';
const MOCK_DASHBOARD = {
name: 'Test Dashboard',
charts: [
makeChart({ id: 'aaaaaaa' }),
makeChart({ id: 'bbbbbbb' }),
makeChart({ id: 'ccccccc' }),
makeChart({ id: 'ddddddd' }),
makeChart({ id: 'eeeeeee' }),
makeChartConfig({ id: 'aaaaaaa' }),
makeChartConfig({ id: 'bbbbbbb' }),
makeChartConfig({ id: 'ccccccc' }),
makeChartConfig({ id: 'ddddddd' }),
makeChartConfig({ id: 'eeeeeee' }),
],
query: 'test query',
};
@ -48,9 +48,9 @@ describe.skip('/api/v1/alerts', () => {
.post('/api/v1/alerts')
.set('Authorization', `Bearer ${user?.accessKey}`)
.send(
makeExternalAlert({
makeAlertInput({
dashboardId: dashboard._id,
chartId: chart.id,
tileId: chart.id,
...(i % 2 == 0
? {
name: 'test {{hello}}',
@ -177,9 +177,9 @@ Array [
const updateAlert = await agent
.put(`/api/v1/alerts/${remainingAlert.id}`)
.send(
makeExternalAlert({
makeAlertInput({
dashboardId: remainingAlert.dashboardId,
chartId: remainingAlert.chartId,
tileId: remainingAlert.chartId,
threshold: 1000,
interval: '1h',
}),

View file

@ -3,7 +3,7 @@ import _ from 'lodash';
import {
getLoggedInAgent,
getServer,
makeExternalAlert,
makeAlertInput,
makeExternalChart,
} from '@/fixtures';
@ -229,9 +229,9 @@ Object {
.post('/api/v1/alerts')
.set('Authorization', `Bearer ${user?.accessKey}`)
.send(
makeExternalAlert({
makeAlertInput({
dashboardId: dashboard.id,
chartId: chart.id,
tileId: chart.id,
}),
)
.expect(200),

View file

@ -1,4 +1,4 @@
import express, { NextFunction, Request, Response } from 'express';
import express from 'express';
import _ from 'lodash';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
@ -9,50 +9,11 @@ import {
getAlertById,
getAlerts,
updateAlert,
validateGroupByProperty,
} from '@/controllers/alerts';
import { getTeam } from '@/controllers/team';
import {
externalAlertSchema,
objectIdSchema,
translateAlertDocumentToExternalAlert,
translateExternalAlertToInternalAlert,
} from '@/utils/zod';
import { alertSchema, objectIdSchema } from '@/utils/zod';
const router = express.Router();
// TODO: Dedup with private API router
// Validate groupBy property
const validateGroupBy = async (
req: Request,
res: Response,
next: NextFunction,
) => {
const { groupBy, source } = req.body || {};
if (source === 'LOG' && groupBy) {
const teamId = req.user?.team;
if (teamId == null) {
return res.sendStatus(403);
}
const team = await getTeam(teamId);
if (team == null) {
return res.sendStatus(403);
}
// Validate groupBy property
const groupByValid = await validateGroupByProperty({
groupBy,
logStreamTableVersion: team.logStreamTableVersion,
teamId: teamId.toString(),
});
if (!groupByValid) {
return res.status(400).json({
error: 'Invalid groupBy property',
});
}
}
next();
};
router.get(
'/:id',
validateRequest({
@ -74,7 +35,7 @@ router.get(
}
return res.json({
data: translateAlertDocumentToExternalAlert(alert),
data: alert,
});
} catch (e) {
next(e);
@ -92,49 +53,37 @@ router.get('/', async (req, res, next) => {
const alerts = await getAlerts(teamId);
return res.json({
data: alerts.map(alert => {
return translateAlertDocumentToExternalAlert(alert);
}),
data: alerts,
});
} catch (e) {
next(e);
}
});
router.post(
'/',
validateRequest({ body: externalAlertSchema }),
validateGroupBy,
async (req, res, next) => {
const teamId = req.user?.team;
if (teamId == null) {
return res.sendStatus(403);
}
try {
const alertInput = req.body;
router.post('/', async (req, res, next) => {
const teamId = req.user?.team;
if (teamId == null) {
return res.sendStatus(403);
}
try {
const alertInput = req.body;
const internalAlert = translateExternalAlertToInternalAlert(alertInput);
return res.json({
data: translateAlertDocumentToExternalAlert(
await createAlert(teamId, internalAlert),
),
});
} catch (e) {
next(e);
}
},
);
return res.json({
data: await createAlert(teamId, alertInput),
});
} catch (e) {
next(e);
}
});
router.put(
'/:id',
validateRequest({
body: externalAlertSchema,
body: alertSchema,
params: z.object({
id: objectIdSchema,
}),
}),
validateGroupBy,
async (req, res, next) => {
try {
const teamId = req.user?.team;
@ -145,15 +94,14 @@ router.put(
const { id } = req.params;
const alertInput = req.body;
const internalAlert = translateExternalAlertToInternalAlert(alertInput);
const alert = await updateAlert(id, teamId, internalAlert);
const alert = await updateAlert(id, teamId, alertInput);
if (alert == null) {
return res.sendStatus(404);
}
res.json({
data: translateAlertDocumentToExternalAlert(alert),
data: alert,
});
} catch (e) {
next(e);

View file

@ -4,6 +4,7 @@ import { ObjectId } from 'mongodb';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
import { TileSchema } from '@/common/commonTypes';
import {
deleteDashboardAndAlerts,
updateDashboard,
@ -130,8 +131,7 @@ router.put(
}),
body: z.object({
name: z.string().max(1024),
charts: z.array(externalChartSchemaWithId),
query: z.string().max(2048),
tiles: z.array(TileSchema),
tags: tagsSchema,
}),
}),
@ -146,16 +146,11 @@ router.put(
return res.sendStatus(400);
}
const { name, charts, query, tags } = req.body ?? {};
const internalCharts = charts.map(chart => {
return translateExternalChartToInternalChart(chart);
});
const { name, tiles, tags } = req.body ?? {};
const updatedDashboard = await updateDashboard(dashboardId, teamId, {
name,
charts: internalCharts,
query,
tiles,
tags,
});

View file

@ -2,19 +2,19 @@ import http from 'http';
import gracefulShutdown from 'http-graceful-shutdown';
import { serializeError } from 'serialize-error';
import apiServer from './api-app';
import * as config from './config';
import { connectDB, mongooseConnection } from './models';
import logger from './utils/logger';
import redisClient from './utils/redis';
import app from '@/api-app';
import * as config from '@/config';
import { connectDB, mongooseConnection } from '@/models';
import logger from '@/utils/logger';
import redisClient from '@/utils/redis';
export default class Server {
protected shouldHandleGracefulShutdown = true;
protected httpServer!: http.Server;
private async createServer() {
return http.createServer(apiServer);
private createServer() {
return http.createServer(app);
}
protected async shutdown(signal?: string) {
@ -45,7 +45,7 @@ export default class Server {
}
async start() {
this.httpServer = await this.createServer();
this.httpServer = this.createServer();
this.httpServer.keepAliveTimeout = 61000; // Ensure all inactive connections are terminated by the ALB, by setting this a few seconds higher than the ALB idle timeout
this.httpServer.headersTimeout = 62000; // Ensure the headersTimeout is set higher than the keepAliveTimeout due to this nodejs regression bug: https://github.com/nodejs/node/issues/27363

View file

@ -2,25 +2,23 @@
import ms from 'ms';
import { createAlert } from '@/controllers/alerts';
import { createTeam } from '@/controllers/team';
import {
buildMetricSeries,
generateBuildTeamEventFn,
getServer,
makeTile,
mockLogsPropertyTypeMappingsModel,
mockSpyMetricPropertyTypeMappingsModel,
} from '@/fixtures';
import { LogType } from '@/utils/logParser';
import * as clickhouse from '../../clickhouse';
import { createAlert } from '../../controllers/alerts';
import { createTeam } from '../../controllers/team';
import AlertHistory from '../../models/alertHistory';
import Dashboard from '../../models/dashboard';
import LogView from '../../models/logView';
import Webhook from '../../models/webhook';
import * as slack from '../../utils/slack';
import * as checkAlert from '../checkAlerts';
import { AlertSource, AlertThresholdType } from '@/models/alert';
import AlertHistory from '@/models/alertHistory';
import Dashboard from '@/models/dashboard';
import LogView from '@/models/logView';
import Webhook from '@/models/webhook';
import {
AlertMessageTemplateDefaultView,
buildAlertMessageTemplateHdxLink,
buildAlertMessageTemplateTitle,
buildLogSearchLink,
@ -32,8 +30,23 @@ import {
renderAlertTemplate,
roundDownToXMinutes,
translateExternalActionsToInternal,
} from '../checkAlerts';
} from '@/tasks/checkAlerts';
import { LogType } from '@/utils/logParser';
import * as slack from '@/utils/slack';
const MOCK_DASHBOARD = {
name: 'Test Dashboard',
tiles: [makeTile(), makeTile()],
tags: ['test'],
};
const MOCK_SOURCE = {};
const MOCK_SAVED_SEARCH: any = {
id: 'fake-saved-search-id',
};
// TODO: fix tests
describe.skip('checkAlerts', () => {
afterAll(async () => {
await clickhouse.client.close();
@ -67,21 +80,16 @@ describe.skip('checkAlerts', () => {
buildLogSearchLink({
startTime: new Date('2023-03-17T22:13:03.103Z'),
endTime: new Date('2023-03-17T22:13:59.103Z'),
logViewId: '123',
savedSearch: MOCK_SAVED_SEARCH,
}),
).toBe(
'http://localhost:9090/search/123?from=1679091183103&to=1679091239103',
);
).toMatchInlineSnapshot('');
expect(
buildLogSearchLink({
startTime: new Date('2023-03-17T22:13:03.103Z'),
endTime: new Date('2023-03-17T22:13:59.103Z'),
logViewId: '123',
q: '🐱 foo:"bar"',
savedSearch: MOCK_SAVED_SEARCH,
}),
).toBe(
'http://localhost:9090/search/123?from=1679091183103&to=1679091239103&q=%F0%9F%90%B1+foo%3A%22bar%22',
);
).toMatchInlineSnapshot('');
});
it('doesExceedThreshold', () => {
@ -145,50 +153,58 @@ describe.skip('checkAlerts', () => {
});
describe('Alert Templates', () => {
const defaultSearchView: any = {
const defaultSearchView: AlertMessageTemplateDefaultView = {
alert: {
threshold_type: 'above',
thresholdType: AlertThresholdType.ABOVE,
threshold: 1,
source: 'search',
groupBy: 'span_name',
source: AlertSource.SAVED_SEARCH,
channel: {
type: 'webhook',
webhookId: 'fake-webhook-id',
},
interval: '1m',
},
savedSearch: {
id: 'id-123',
query: 'level:error',
_id: 'fake-saved-search-id' as any,
team: 'team-123' as any,
id: 'fake-saved-search-id',
name: 'My Search',
select: 'Body',
where: 'Body: "error"',
whereLanguage: 'lucene',
orderBy: 'timestamp',
source: 'fake-source-id' as any,
tags: ['test'],
},
team: {
id: 'team-123',
logStreamTableVersion: 1,
},
attributes: {},
granularity: '1m',
group: 'http',
startTime: new Date('2023-03-17T22:13:03.103Z'),
endTime: new Date('2023-03-17T22:13:59.103Z'),
value: 10,
};
const defaultChartView: any = {
const defaultChartView: AlertMessageTemplateDefaultView = {
alert: {
threshold_type: 'below',
threshold: 10,
source: 'chart',
groupBy: 'span_name',
thresholdType: AlertThresholdType.ABOVE,
threshold: 1,
source: AlertSource.TILE,
channel: {
type: 'webhook',
webhookId: 'fake-webhook-id',
},
interval: '1m',
},
dashboard: {
id: 'id-123',
name: 'My Dashboard',
charts: [
{
name: 'My Chart',
},
],
},
team: {
id: 'team-123',
logStreamTableVersion: 1,
tiles: [makeTile()],
team: 'team-123' as any,
tags: ['test'],
},
startTime: new Date('2023-03-17T22:13:03.103Z'),
endTime: new Date('2023-03-17T22:13:59.103Z'),
attributes: {},
granularity: '5 minute',
value: 5,
};
@ -209,11 +225,15 @@ describe.skip('checkAlerts', () => {
});
it('buildAlertMessageTemplateHdxLink', () => {
expect(buildAlertMessageTemplateHdxLink(defaultSearchView)).toBe(
'http://localhost:9090/search/id-123?from=1679091183103&to=1679091239103&q=level%3Aerror+span_name%3A%22http%22',
expect(
buildAlertMessageTemplateHdxLink(defaultSearchView),
).toMatchInlineSnapshot(
`"http://app:8080/search/fake-saved-search-id?from=1679091183103&to=1679091239103"`,
);
expect(buildAlertMessageTemplateHdxLink(defaultChartView)).toBe(
'http://localhost:9090/dashboards/id-123?from=1679089083103&granularity=5+minute&to=1679093339103',
expect(
buildAlertMessageTemplateHdxLink(defaultChartView),
).toMatchInlineSnapshot(
`"http://app:8080/dashboards/id-123?from=1679089083103&granularity=5+minute&to=1679093339103"`,
);
});
@ -222,23 +242,25 @@ describe.skip('checkAlerts', () => {
buildAlertMessageTemplateTitle({
view: defaultSearchView,
}),
).toBe('Alert for "My Search" - 10 lines found');
).toMatchInlineSnapshot(`"Alert for \\"My Search\\" - 10 lines found"`);
expect(
buildAlertMessageTemplateTitle({
view: defaultChartView,
}),
).toBe('Alert for "My Chart" in "My Dashboard" - 5 falls below 10');
).toMatchInlineSnapshot(
`"Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 5 exceeds 1"`,
);
});
it('getDefaultExternalAction', () => {
expect(
getDefaultExternalAction({
channel: {
type: 'slack_webhook',
type: 'webhook',
webhookId: '123',
},
} as any),
).toBe('@slack_webhook-123');
).toBe('@webhook-123');
expect(
getDefaultExternalAction({
channel: {
@ -251,20 +273,20 @@ describe.skip('checkAlerts', () => {
it('translateExternalActionsToInternal', () => {
// normal
expect(
translateExternalActionsToInternal('@slack_webhook-123'),
translateExternalActionsToInternal('@webhook-123'),
).toMatchInlineSnapshot(
`"{{__hdx_notify_channel__ channel=\\"slack_webhook\\" id=\\"123\\"}}"`,
`"{{__hdx_notify_channel__ channel=\\"webhook\\" id=\\"123\\"}}"`,
);
// with multiple breaks
expect(
translateExternalActionsToInternal(`
@slack_webhook-123
@webhook-123
`),
).toMatchInlineSnapshot(`
"
{{__hdx_notify_channel__ channel=\\"slack_webhook\\" id=\\"123\\"}}
{{__hdx_notify_channel__ channel=\\"webhook\\" id=\\"123\\"}}
"
`);
@ -306,20 +328,6 @@ describe.skip('checkAlerts', () => {
it('renderAlertTemplate - with existing channel', async () => {
jest.spyOn(slack, 'postMessageToWebhook').mockResolvedValue(null as any);
jest.spyOn(clickhouse, 'getLogBatch').mockResolvedValueOnce({
data: [
{
timestamp: '2023-11-16T22:10:00.000Z',
severity_text: 'error',
body: 'Oh no! Something went wrong!',
},
{
timestamp: '2023-11-16T22:15:00.000Z',
severity_text: 'info',
body: 'All good!',
},
],
} as any);
const team = await createTeam({ name: 'My Team' });
const webhook = await new Webhook({
@ -330,13 +338,13 @@ describe.skip('checkAlerts', () => {
}).save();
await renderAlertTemplate({
template: 'Custom body @slack_webhook-My_Web', // partial name should work
template: 'Custom body @webhook-My_Web', // partial name should work
view: {
...defaultSearchView,
alert: {
...defaultSearchView.alert,
channel: {
type: 'slack_webhook',
type: 'webhook',
webhookId: webhook._id.toString(),
},
},
@ -344,7 +352,6 @@ describe.skip('checkAlerts', () => {
title: 'Alert for "My Search" - 10 lines found',
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
@ -356,20 +363,6 @@ describe.skip('checkAlerts', () => {
jest
.spyOn(slack, 'postMessageToWebhook')
.mockResolvedValueOnce(null as any);
jest.spyOn(clickhouse, 'getLogBatch').mockResolvedValueOnce({
data: [
{
timestamp: '2023-11-16T22:10:00.000Z',
severity_text: 'error',
body: 'Oh no! Something went wrong!',
},
{
timestamp: '2023-11-16T22:15:00.000Z',
severity_text: 'info',
body: 'All good!',
},
],
} as any);
const team = await createTeam({ name: 'My Team' });
await new Webhook({
@ -380,7 +373,7 @@ describe.skip('checkAlerts', () => {
}).save();
await renderAlertTemplate({
template: 'Custom body @slack_webhook-My_Web', // partial name should work
template: 'Custom body @webhook-My_Web', // partial name should work
view: {
...defaultSearchView,
alert: {
@ -393,7 +386,6 @@ describe.skip('checkAlerts', () => {
title: 'Alert for "My Search" - 10 lines found',
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
@ -406,13 +398,12 @@ describe.skip('checkAlerts', () => {
{
text: {
text: [
'*<http://localhost:9090/search/id-123?from=1679091183103&to=1679091239103&q=level%3Aerror+span_name%3A%22http%22 | Alert for "My Search" - 10 lines found>*',
'*<http://app:8080/search/fake-saved-search-id?from=1679091183103&to=1679091239103 | Alert for "My Search" - 10 lines found>*',
'Group: "http"',
'10 lines found, expected less than 1 lines',
'Custom body ',
'```',
'Nov 16 22:10:00Z [error] Oh no! Something went wrong!',
'Nov 16 22:15:00Z [info] All good!',
'',
'```',
].join('\n'),
type: 'mrkdwn',
@ -428,20 +419,6 @@ describe.skip('checkAlerts', () => {
jest
.spyOn(slack, 'postMessageToWebhook')
.mockResolvedValueOnce(null as any);
jest.spyOn(clickhouse, 'getLogBatch').mockResolvedValueOnce({
data: [
{
timestamp: '2023-11-16T22:10:00.000Z',
severity_text: 'error',
body: 'Oh no! Something went wrong!',
},
{
timestamp: '2023-11-16T22:15:00.000Z',
severity_text: 'info',
body: 'All good!',
},
],
} as any);
const team = await createTeam({ name: 'My Team' });
await new Webhook({
@ -452,7 +429,7 @@ describe.skip('checkAlerts', () => {
}).save();
await renderAlertTemplate({
template: 'Custom body @slack_webhook-{{attributes.webhookName}}', // partial name should work
template: 'Custom body @webhook-{{attributes.webhookName}}', // partial name should work
view: {
...defaultSearchView,
alert: {
@ -468,7 +445,6 @@ describe.skip('checkAlerts', () => {
title: 'Alert for "My Search" - 10 lines found',
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
@ -481,13 +457,12 @@ describe.skip('checkAlerts', () => {
{
text: {
text: [
'*<http://localhost:9090/search/id-123?from=1679091183103&to=1679091239103&q=level%3Aerror+span_name%3A%22http%22 | Alert for "My Search" - 10 lines found>*',
'*<http://app:8080/search/fake-saved-search-id?from=1679091183103&to=1679091239103 | Alert for "My Search" - 10 lines found>*',
'Group: "http"',
'10 lines found, expected less than 1 lines',
'Custom body ',
'```',
'Nov 16 22:10:00Z [error] Oh no! Something went wrong!',
'Nov 16 22:15:00Z [info] All good!',
'',
'```',
].join('\n'),
type: 'mrkdwn',
@ -501,20 +476,6 @@ describe.skip('checkAlerts', () => {
it('renderAlertTemplate - #is_match with single action', async () => {
jest.spyOn(slack, 'postMessageToWebhook').mockResolvedValue(null as any);
jest.spyOn(clickhouse, 'getLogBatch').mockResolvedValueOnce({
data: [
{
timestamp: '2023-11-16T22:10:00.000Z',
severity_text: 'error',
body: 'Oh no! Something went wrong!',
},
{
timestamp: '2023-11-16T22:15:00.000Z',
severity_text: 'info',
body: 'All good!',
},
],
} as any);
const team = await createTeam({ name: 'My Team' });
await new Webhook({
@ -535,10 +496,10 @@ describe.skip('checkAlerts', () => {
{{#is_match "attributes.k8s.pod.name" "otel-collector-123"}}
Runbook URL: {{attributes.runbook.url}}
hi i matched
@slack_webhook-My_Web
@webhook-My_Web
{{/is_match}}
@slack_webhook-Another_Webhook
@webhook-Another_Webhook
`, // partial name should work
view: {
...defaultSearchView,
@ -562,14 +523,13 @@ describe.skip('checkAlerts', () => {
title: 'Alert for "My Search" - 10 lines found',
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
// @slack_webhook should not be called
// @webhook should not be called
await renderAlertTemplate({
template:
'{{#is_match "attributes.host" "web"}} @slack_webhook-My_Web {{/is_match}}', // partial name should work
'{{#is_match "attributes.host" "web"}} @webhook-My_Web {{/is_match}}', // partial name should work
view: {
...defaultSearchView,
alert: {
@ -585,7 +545,6 @@ describe.skip('checkAlerts', () => {
title: 'Alert for "My Search" - 10 lines found',
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
@ -598,7 +557,7 @@ describe.skip('checkAlerts', () => {
{
text: {
text: [
'*<http://localhost:9090/search/id-123?from=1679091183103&to=1679091239103&q=level%3Aerror+span_name%3A%22http%22 | Alert for "My Search" - 10 lines found>*',
'*<http://app:8080/search/fake-saved-search-id?from=1679091183103&to=1679091239103 | Alert for "My Search" - 10 lines found>*',
'Group: "http"',
'10 lines found, expected less than 1 lines',
'',
@ -608,8 +567,7 @@ describe.skip('checkAlerts', () => {
'',
'',
'```',
'Nov 16 22:10:00Z [error] Oh no! Something went wrong!',
'Nov 16 22:15:00Z [info] All good!',
'',
'```',
].join('\n'),
type: 'mrkdwn',
@ -627,7 +585,7 @@ describe.skip('checkAlerts', () => {
{
text: {
text: [
'*<http://localhost:9090/search/id-123?from=1679091183103&to=1679091239103&q=level%3Aerror+span_name%3A%22http%22 | Alert for "My Search" - 10 lines found>*',
'*<http://app:8080/search/fake-saved-search-id?from=1679091183103&to=1679091239103 | Alert for "My Search" - 10 lines found>*',
'Group: "http"',
'10 lines found, expected less than 1 lines',
'',
@ -637,8 +595,7 @@ describe.skip('checkAlerts', () => {
'',
'',
'```',
'Nov 16 22:10:00Z [error] Oh no! Something went wrong!',
'Nov 16 22:15:00Z [info] All good!',
'',
'```',
].join('\n'),
type: 'mrkdwn',
@ -667,7 +624,7 @@ describe.skip('checkAlerts', () => {
await server.stop();
});
it('LOG alert - slack webhook', async () => {
it('SAVED_SEARCH alert - slack webhook', async () => {
jest
.spyOn(slack, 'postMessageToWebhook')
.mockResolvedValueOnce(null as any);
@ -688,16 +645,6 @@ describe.skip('checkAlerts', () => {
rows: 0,
data: [],
} as any);
jest.spyOn(clickhouse, 'getLogBatch').mockResolvedValueOnce({
rows: 1,
data: [
{
timestamp: '2023-11-16T22:10:00.000Z',
severity_text: 'error',
body: 'Oh no! Something went wrong!',
},
],
} as any);
const team = await createTeam({ name: 'My Team' });
const logView = await new LogView({
@ -765,7 +712,6 @@ describe.skip('checkAlerts', () => {
groupBy: alert.groupBy,
q: logView.query,
startTime: new Date('2023-11-16T22:05:00.000Z'),
tableVersion: team.logStreamTableVersion,
teamId: logView.team._id.toString(),
windowSizeInMins: 5,
});
@ -1302,7 +1248,6 @@ describe.skip('checkAlerts', () => {
groupBy: alert.groupBy,
q: logView.query,
startTime: new Date('2023-11-16T22:05:00.000Z'),
tableVersion: team.logStreamTableVersion,
teamId: logView.team._id.toString(),
windowSizeInMins: 5,
});

View file

@ -1,4 +1,3 @@
// @ts-nocheck TODO: Fix When Restoring Alerts
// --------------------------------------------------------
// -------------- EXECUTE EVERY MINUTE --------------------
// --------------------------------------------------------
@ -12,61 +11,65 @@ import ms from 'ms';
import PromisedHandlebars from 'promised-handlebars';
import { serializeError } from 'serialize-error';
import { URLSearchParams } from 'url';
import { z } from 'zod';
import * as clickhouse from '@/clickhouse';
import { Tile } from '@/common/commonTypes';
import { DisplayType } from '@/common/DisplayType';
import {
ChartConfigWithOptDateRange,
FIXED_TIME_BUCKET_EXPR_ALIAS,
} from '@/common/renderChartConfig';
import { renderChartConfig } from '@/common/renderChartConfig';
import * as config from '@/config';
import { AlertInput } from '@/controllers/alerts';
import { ObjectId } from '@/models';
import Alert, { AlertDocument, AlertState } from '@/models/alert';
import Alert, {
AlertDocument,
AlertSource,
AlertState,
AlertThresholdType,
IAlert,
} from '@/models/alert';
import AlertHistory, { IAlertHistory } from '@/models/alertHistory';
import Dashboard, { IDashboard } from '@/models/dashboard';
import LogView from '@/models/logView';
import { ISavedSearch } from '@/models/savedSearch';
import { ISource, Source } from '@/models/source';
import { ITeam } from '@/models/team';
import Webhook, { IWebhook } from '@/models/webhook';
import { convertMsToGranularityString, truncateString } from '@/utils/common';
import { translateDashboardDocumentToExternalDashboard } from '@/utils/externalApi';
import logger from '@/utils/logger';
import * as slack from '@/utils/slack';
import {
externalAlertSchema,
translateAlertDocumentToExternalAlert,
} from '@/utils/zod';
type EnhancedDashboard = Omit<IDashboard, 'team'> & { team: ITeam };
const MAX_MESSAGE_LENGTH = 500;
const NOTIFY_FN_NAME = '__hdx_notify_channel__';
const IS_MATCH_FN_NAME = 'is_match';
const getLogViewEnhanced = async (logViewId: ObjectId) => {
const logView = await LogView.findById(logViewId).populate<{
team: ITeam;
}>('team');
if (!logView) {
throw new Error(`LogView ${logViewId} not found `);
}
return logView;
type EnhancedSavedSearch = Omit<ISavedSearch, 'source'> & {
source: ISource;
};
const getAlerts = () =>
Alert.find({}).populate<{
team: ITeam;
savedSearch?: EnhancedSavedSearch;
dashboard?: IDashboard;
}>(['team', 'savedSearch', 'savedSearch.source', 'dashboard']);
type EnhancedAlert = Awaited<ReturnType<typeof getAlerts>>[0];
export const buildLogSearchLink = ({
endTime,
logViewId,
q,
savedSearch,
startTime,
}: {
endTime: Date;
logViewId: string;
q?: string;
savedSearch: EnhancedSavedSearch;
startTime: Date;
}) => {
const url = new URL(`${config.FRONTEND_URL}/search/${logViewId}`);
const url = new URL(`${config.FRONTEND_URL}/search/${savedSearch.id}`);
const queryParams = new URLSearchParams({
from: startTime.getTime().toString(),
to: endTime.getTime().toString(),
});
if (q) {
queryParams.append('q', q);
}
url.search = queryParams.toString();
return url.toString();
};
@ -143,22 +146,14 @@ export const expandToNestedObject = (
// ----------------- Alert Message Template -------------------
// ------------------------------------------------------------
// should match the external alert schema
type AlertMessageTemplateDefaultView = {
// FIXME: do we want to include groupBy in the external alert schema?
alert: z.infer<typeof externalAlertSchema> & { groupBy?: string };
export type AlertMessageTemplateDefaultView = {
alert: AlertInput;
attributes: ReturnType<typeof expandToNestedObject>;
dashboard: ReturnType<
typeof translateDashboardDocumentToExternalDashboard
> | null;
dashboard?: IDashboard | null;
endTime: Date;
granularity: string;
group?: string;
// TODO: use a translation function ?
savedSearch: {
id: string;
name: string;
query: string;
} | null;
savedSearch?: EnhancedSavedSearch | null;
startTime: Date;
value: number;
};
@ -180,7 +175,7 @@ export const notifyChannel = async ({
};
}) => {
switch (channel) {
case 'slack_webhook': {
case 'webhook': {
const webhook = await Webhook.findOne({
team: team.id,
...(mongoose.isValidObjectId(id)
@ -312,26 +307,21 @@ export const buildAlertMessageTemplateHdxLink = ({
dashboard,
endTime,
granularity,
group,
savedSearch,
startTime,
}: AlertMessageTemplateDefaultView) => {
if (alert.source === 'search') {
if (alert.source === AlertSource.SAVED_SEARCH) {
if (savedSearch == null) {
throw new Error('Source is LOG but logView is null');
throw new Error(`Source is ${alert.source} but savedSearch is null`);
}
const searchQuery = alert.groupBy
? `${savedSearch.query} ${alert.groupBy}:"${group}"`
: savedSearch.query;
return buildLogSearchLink({
endTime,
logViewId: savedSearch.id,
q: searchQuery,
savedSearch,
startTime,
});
} else if (alert.source === 'chart') {
} else if (alert.source === AlertSource.TILE) {
if (dashboard == null) {
throw new Error('Source is CHART but dashboard is null');
throw new Error(`Source is ${alert.source} but dashboard is null`);
}
return buildChartLink({
dashboardId: dashboard.id,
@ -352,31 +342,31 @@ export const buildAlertMessageTemplateTitle = ({
}) => {
const { alert, dashboard, savedSearch, value } = view;
const handlebars = Handlebars.create();
if (alert.source === 'search') {
if (alert.source === AlertSource.SAVED_SEARCH) {
if (savedSearch == null) {
throw new Error('Source is LOG but logView is null');
throw new Error(`Source is ${alert.source} but savedSearch is null`);
}
// TODO: using template engine to render the title
return template
? handlebars.compile(template)(view)
: `Alert for "${savedSearch.name}" - ${value} lines found`;
} else if (alert.source === 'chart') {
} else if (alert.source === AlertSource.TILE) {
if (dashboard == null) {
throw new Error('Source is CHART but dashboard is null');
throw new Error(`Source is ${alert.source} but dashboard is null`);
}
const chart = dashboard.charts[0];
const tile = dashboard.tiles[0];
return template
? handlebars.compile(template)(view)
: `Alert for "${chart.name}" in "${dashboard.name}" - ${value} ${
: `Alert for "${tile.config.name}" in "${dashboard.name}" - ${value} ${
doesExceedThreshold(
alert.threshold_type === 'above',
alert.thresholdType === AlertThresholdType.ABOVE,
alert.threshold,
value,
)
? alert.threshold_type === 'above'
? alert.thresholdType === AlertThresholdType.ABOVE
? 'exceeds'
: 'falls below'
: alert.threshold_type === 'above'
: alert.thresholdType === AlertThresholdType.ABOVE
? 'falls below'
: 'exceeds'
} ${alert.threshold}`;
@ -388,10 +378,7 @@ export const buildAlertMessageTemplateTitle = ({
export const getDefaultExternalAction = (
alert: AlertMessageTemplateDefaultView['alert'],
) => {
if (
alert.channel.type === 'slack_webhook' &&
alert.channel.webhookId != null
) {
if (alert.channel.type === 'webhook' && alert.channel.webhookId != null) {
return `@${alert.channel.type}-${alert.channel.webhookId}`;
}
return null;
@ -421,7 +408,6 @@ export const renderAlertTemplate = async ({
view: AlertMessageTemplateDefaultView;
team: {
id: string;
logStreamTableVersion?: ITeam['logStreamTableVersion'];
};
}) => {
const { alert, dashboard, endTime, group, savedSearch, startTime, value } =
@ -461,7 +447,7 @@ export const renderAlertTemplate = async ({
NOTIFY_FN_NAME,
async (options: { hash: Record<string, string> }) => {
const { channel, id } = options.hash;
if (channel !== 'slack_webhook') {
if (channel !== 'webhook') {
throw new Error(`Unsupported channel type: ${channel}`);
}
// render id template
@ -487,24 +473,23 @@ export const renderAlertTemplate = async ({
// TODO: support advanced routing with template engine
// users should be able to use '@' syntax to trigger alerts
if (alert.source === 'search') {
if (alert.source === AlertSource.SAVED_SEARCH) {
if (savedSearch == null) {
throw new Error('Source is LOG but logView is null');
throw new Error(`Source is ${alert.source} but savedSearch is null`);
}
const searchQuery = alert.groupBy
? `${savedSearch.query} ${alert.groupBy}:"${group}"`
: savedSearch.query;
// TODO: show group + total count for group-by alerts
const results = await clickhouse.getLogBatch({
endTime: endTime.getTime(),
limit: 5,
offset: 0,
order: clickhouse.SortOrder.Desc,
q: searchQuery,
startTime: startTime.getTime(),
tableVersion: team.logStreamTableVersion,
teamId: team.id,
});
const results: any = { data: [] };
// IMPLEMENT ME: fetching sample logs using renderChartConfig
// await clickhouse.getLogBatch({
// endTime: endTime.getTime(),
// limit: 5,
// offset: 0,
// order: clickhouse.SortOrder.Desc,
// q: searchQuery,
// startTime: startTime.getTime(),
// tableVersion: team.logStreamTableVersion,
// teamId: team.id,
// });
const truncatedResults = truncateString(
results.data
.map(row => {
@ -522,27 +507,29 @@ export const renderAlertTemplate = async ({
);
rawTemplateBody = `${group ? `Group: "${group}"` : ''}
${value} lines found, expected ${
alert.threshold_type === 'above' ? 'less than' : 'greater than'
alert.thresholdType === AlertThresholdType.ABOVE
? 'less than'
: 'greater than'
} ${alert.threshold} lines
${targetTemplate}
\`\`\`
${truncatedResults}
\`\`\``;
} else if (alert.source === 'chart') {
} else if (alert.source === AlertSource.TILE) {
if (dashboard == null) {
throw new Error('Source is CHART but dashboard is null');
throw new Error(`Source is ${alert.source} but dashboard is null`);
}
rawTemplateBody = `${group ? `Group: "${group}"` : ''}
${value} ${
doesExceedThreshold(
alert.threshold_type === 'above',
alert.thresholdType === AlertThresholdType.ABOVE,
alert.threshold,
value,
)
? alert.threshold_type === 'above'
? alert.thresholdType === AlertThresholdType.ABOVE
? 'exceeds'
: 'falls below'
: alert.threshold_type === 'above'
: alert.thresholdType === AlertThresholdType.ABOVE
? 'falls below'
: 'exceeds'
} ${alert.threshold}
@ -563,25 +550,21 @@ ${targetTemplate}`;
const fireChannelEvent = async ({
alert,
attributes,
dashboard,
endTime,
group,
logView,
startTime,
totalCount,
windowSizeInMins,
}: {
alert: AlertDocument;
alert: EnhancedAlert;
attributes: Record<string, string>; // TODO: support other types than string
dashboard: EnhancedDashboard | null;
endTime: Date;
group?: string;
logView: Awaited<ReturnType<typeof getLogViewEnhanced>> | null;
startTime: Date;
totalCount: number;
windowSizeInMins: number;
}) => {
const team = logView?.team ?? dashboard?.team;
const team = alert.team;
if (team == null) {
throw new Error('Team not found');
}
@ -598,30 +581,25 @@ const fireChannelEvent = async ({
const attributesNested = expandToNestedObject(attributes);
const templateView: AlertMessageTemplateDefaultView = {
alert: {
...translateAlertDocumentToExternalAlert(alert),
channel: alert.channel,
dashboardId: alert.dashboard?.id,
groupBy: alert.groupBy,
interval: alert.interval,
message: alert.message,
name: alert.name,
savedSearchId: alert.savedSearch?.id,
silenced: alert.silenced,
source: alert.source,
threshold: alert.threshold,
thresholdType: alert.thresholdType,
tileId: alert.tileId,
},
attributes: attributesNested,
dashboard: dashboard
? translateDashboardDocumentToExternalDashboard({
_id: dashboard._id,
name: dashboard.name,
query: dashboard.query,
team: team._id,
charts: dashboard.charts,
tags: dashboard.tags,
})
: null,
dashboard: alert.dashboard,
endTime,
granularity: `${windowSizeInMins} minute`,
group,
savedSearch: logView
? {
id: logView._id.toString(),
name: logView.name,
query: logView.query,
}
: null,
savedSearch: alert.savedSearch,
startTime,
value: totalCount,
};
@ -635,7 +613,6 @@ const fireChannelEvent = async ({
view: templateView,
team: {
id: team._id.toString(),
logStreamTableVersion: team.logStreamTableVersion,
},
});
};
@ -644,7 +621,7 @@ export const roundDownTo = (roundTo: number) => (x: Date) =>
new Date(Math.floor(x.getTime() / roundTo) * roundTo);
export const roundDownToXMinutes = (x: number) => roundDownTo(1000 * 60 * x);
export const processAlert = async (now: Date, alert: AlertDocument) => {
export const processAlert = async (now: Date, alert: EnhancedAlert) => {
try {
const previous: IAlertHistory | undefined = (
await AlertHistory.find({ alert: alert._id })
@ -674,114 +651,115 @@ export const processAlert = async (now: Date, alert: AlertDocument) => {
const checkEndTime = nowInMinsRoundDown;
// Logs Source
let checksData:
| Awaited<ReturnType<typeof clickhouse.checkAlert>>
| Awaited<ReturnType<typeof clickhouse.getMultiSeriesChartLegacyFormat>>
| null = null;
let logView: Awaited<ReturnType<typeof getLogViewEnhanced>> | null = null;
let targetDashboard: EnhancedDashboard | null = null;
if (alert.source === 'LOG' && alert.logView) {
logView = await getLogViewEnhanced(alert.logView);
// TODO: use getLogsChart instead so we can deprecate checkAlert
checksData = await clickhouse.checkAlert({
endTime: checkEndTime,
groupBy: alert.groupBy,
q: logView.query,
startTime: checkStartTime,
tableVersion: logView.team.logStreamTableVersion,
teamId: logView.team._id.toString(),
windowSizeInMins,
});
const checksData: {
data: {
__hdx_time_bucket: string;
[key: string]: any;
}[];
rows: number;
} | null = {
data: [],
rows: 0,
};
let chartConfig: ChartConfigWithOptDateRange;
if (alert.source === AlertSource.SAVED_SEARCH && alert.savedSearch) {
chartConfig = {
select: alert.savedSearch.select,
connection: alert.savedSearch.source.connection.toString(),
where: alert.savedSearch.where,
from: alert.savedSearch.source.from,
orderBy: alert.savedSearch.orderBy,
dateRange: [checkStartTime, checkEndTime],
whereLanguage: alert.savedSearch.whereLanguage,
granularity: `${windowSizeInMins} minute`,
};
logger.info({
message: 'Received alert metric [LOG source]',
message: `Received alert metric [${alert.source} source]`,
alert,
logView,
savedSearch: alert.savedSearch,
checksData,
checkStartTime,
checkEndTime,
});
}
// Chart Source
else if (alert.source === 'CHART' && alert.dashboardId && alert.chartId) {
const dashboard = await Dashboard.findOne(
{
_id: alert.dashboardId,
'charts.id': alert.chartId,
},
{
name: 1,
charts: {
$elemMatch: {
id: alert.chartId,
},
},
},
).populate<{
team: ITeam;
}>('team');
else if (
alert.source === AlertSource.TILE &&
alert.dashboard &&
alert.tileId
) {
// filter tiles
alert.dashboard.tiles = alert.dashboard.tiles.filter(
tile => tile.id === alert.tileId,
);
if (
dashboard &&
Array.isArray(dashboard.charts) &&
dashboard.charts.length === 1
alert.dashboard &&
Array.isArray(alert.dashboard.tiles) &&
alert.dashboard.tiles.length === 1
) {
const chart = dashboard.charts[0];
// Doesn't work for metric alerts yet
const MAX_NUM_GROUPS = 20;
// TODO: assuming that the chart has only 1 series for now
const firstSeries = chart.series[0];
if (firstSeries.type === 'time' && firstSeries.table === 'logs') {
targetDashboard = dashboard;
const startTimeMs = fns.getTime(checkStartTime);
const endTimeMs = fns.getTime(checkEndTime);
checksData = await clickhouse.getMultiSeriesChartLegacyFormat({
series: chart.series,
endTime: endTimeMs,
granularity: `${windowSizeInMins} minute`,
maxNumGroups: MAX_NUM_GROUPS,
startTime: startTimeMs,
tableVersion: dashboard.team.logStreamTableVersion,
teamId: dashboard.team._id.toString(),
seriesReturnType: chart.seriesReturnType,
const firstTile = alert.dashboard.tiles[0];
if (firstTile.config.displayType === DisplayType.Line) {
// fetch source data
const _source = await Source.findOne({
_id: firstTile.config.source,
});
} else if (
firstSeries.type === 'time' &&
firstSeries.table === 'metrics' &&
firstSeries.field
) {
targetDashboard = dashboard;
const startTimeMs = fns.getTime(checkStartTime);
const endTimeMs = fns.getTime(checkEndTime);
checksData = await clickhouse.getMultiSeriesChartLegacyFormat({
series: chart.series.map(series => {
if ('field' in series && series.field != null) {
const [metricName, rawMetricDataType] =
series.field.split(' - ');
const metricDataType = z
.nativeEnum(clickhouse.MetricsDataType)
.parse(rawMetricDataType);
return {
...series,
metricDataType,
field: metricName,
};
}
return series;
}),
endTime: endTimeMs,
if (!_source) {
throw new Error('Source not found');
}
// TODO: FIXED TYPE
// @ts-ignore
chartConfig = {
...firstTile.config,
connection: _source.connection.toString(),
from: _source.from,
dateRange: [checkStartTime, checkEndTime],
granularity: `${windowSizeInMins} minute`,
maxNumGroups: MAX_NUM_GROUPS,
startTime: startTimeMs,
tableVersion: dashboard.team.logStreamTableVersion,
teamId: dashboard.team._id.toString(),
seriesReturnType: chart.seriesReturnType,
});
};
}
// else if (
// firstTile.type === 'time' &&
// firstTile.table === 'metrics' &&
// firstTile.field
// ) {
// targetDashboard = dashboard;
// const startTimeMs = fns.getTime(checkStartTime);
// const endTimeMs = fns.getTime(checkEndTime);
// *****************************************************
// IMPLEMENT ME: implement query using renderChartConfig
// *****************************************************
// checksData = await clickhouse.getMultiSeriesChartLegacyFormat({
// series: chart.series.map(series => {
// if ('field' in series && series.field != null) {
// const [metricName, rawMetricDataType] =
// series.field.split(' - ');
// const metricDataType = z
// .nativeEnum(clickhouse.MetricsDataType)
// .parse(rawMetricDataType);
// return {
// ...series,
// metricDataType,
// field: metricName,
// };
// }
// return series;
// }),
// endTime: endTimeMs,
// granularity: `${windowSizeInMins} minute`,
// maxNumGroups: MAX_NUM_GROUPS,
// startTime: startTimeMs,
// tableVersion: dashboard.team.logStreamTableVersion,
// teamId: dashboard.team._id.toString(),
// seriesReturnType: chart.seriesReturnType,
// });
// }
}
logger.info({
message: 'Received alert metric [CHART source]',
message: `Received alert metric [${alert.source} source]`,
alert,
checksData,
checkStartTime,
@ -814,10 +792,10 @@ export const processAlert = async (now: Date, alert: AlertDocument) => {
const totalCount = isString(checkData.data)
? parseInt(checkData.data)
: checkData.data;
const bucketStart = new Date(checkData.ts_bucket * 1000);
const bucketStart = new Date(checkData[FIXED_TIME_BUCKET_EXPR_ALIAS]);
if (
doesExceedThreshold(
alert.type === 'presence',
alert.thresholdType === AlertThresholdType.ABOVE,
alert.threshold,
totalCount,
)
@ -834,12 +812,10 @@ export const processAlert = async (now: Date, alert: AlertDocument) => {
await fireChannelEvent({
alert,
attributes: checkData.attributes,
dashboard: targetDashboard,
endTime: fns.addMinutes(bucketStart, windowSizeInMins),
group: Array.isArray(checkData.group)
? checkData.group.join(', ')
: checkData.group,
logView,
startTime: bucketStart,
totalCount,
windowSizeInMins,
@ -876,7 +852,7 @@ export const processAlert = async (now: Date, alert: AlertDocument) => {
export default async () => {
const now = new Date();
const alerts = await Alert.find({});
const alerts = await getAlerts();
logger.info(`Going to process ${alerts.length} alerts`);
await Promise.all(alerts.map(alert => processAlert(now, alert)));
};

View file

@ -1,99 +0,0 @@
import { z } from 'zod';
// --------------------------
// SAVED SEARCH
// --------------------------
export const SavedSearchSchema = z.object({
id: z.string(),
name: z.string(),
select: z.string(),
where: z.string(),
whereLanguage: z.string().optional(),
source: z.string(),
tags: z.array(z.string()),
orderBy: z.string().optional(),
});
export type SavedSearch = z.infer<typeof SavedSearchSchema>;
// --------------------------
// DASHBOARDS
// --------------------------
// TODO: Define this
export const SavedChartConfigSchema = z.any();
export const TileSchema = z.object({
id: z.string(),
x: z.number(),
y: z.number(),
w: z.number(),
h: z.number(),
config: SavedChartConfigSchema,
});
export type Tile = z.infer<typeof TileSchema>;
export const DashboardSchema = z.object({
id: z.string(),
name: z.string(),
tiles: z.array(TileSchema),
tags: z.array(z.string()),
});
export const DashboardWithoutIdSchema = DashboardSchema.omit({ id: true });
export const ConnectionSchema = z.object({
id: z.string(),
name: z.string(),
host: z.string(),
username: z.string(),
password: z.string().optional(),
});
// --------------------------
// TABLE SOURCES
// --------------------------
export const SourceSchema = z.object({
from: z.object({
databaseName: z.string(),
tableName: z.string(),
}),
timestampValueExpression: z.string(),
connection: z.string(),
// Common
kind: z.enum(['log', 'trace']),
id: z.string(),
name: z.string(),
displayedTimestampValueExpression: z.string().optional(),
implicitColumnExpression: z.string().optional(),
serviceNameExpression: z.string().optional(),
bodyExpression: z.string().optional(),
tableFilterExpression: z.string().optional(),
eventAttributesExpression: z.string().optional(),
resourceAttributesExpression: z.string().optional(),
defaultTableSelectExpression: z.string().optional(),
// Logs
uniqueRowIdExpression: z.string().optional(),
severityTextExpression: z.string().optional(),
traceSourceId: z.string().optional(),
// Traces & Logs
traceIdExpression: z.string().optional(),
spanIdExpression: z.string().optional(),
// Traces
durationExpression: z.string().optional(),
durationPrecision: z.number().min(0).max(9).optional(),
parentSpanIdExpression: z.string().optional(),
spanNameExpression: z.string().optional(),
spanKindExpression: z.string().optional(),
statusCodeExpression: z.string().optional(),
statusMessageExpression: z.string().optional(),
logSourceId: z.string().optional(),
});
export type TSource = z.infer<typeof SourceSchema>;

View file

@ -2,7 +2,7 @@ import { Types } from 'mongoose';
import { z } from 'zod';
import { AggFn, MetricsDataType } from '@/clickhouse';
import { AlertDocument } from '@/models/alert';
import { AlertSource, AlertThresholdType } from '@/models/alert';
export const objectIdSchema = z.string().refine(val => {
return Types.ObjectId.isValid(val);
@ -201,14 +201,14 @@ export const zChannel = z.object({
webhookId: z.string().min(1),
});
export const zLogAlert = z.object({
source: z.literal('LOG'),
export const zSavedSearchAlert = z.object({
source: z.literal(AlertSource.SAVED_SEARCH),
groupBy: z.string().optional(),
savedSearchId: z.string().min(1),
});
export const zChartAlert = z.object({
source: z.literal('CHART'),
export const zTileAlert = z.object({
source: z.literal(AlertSource.TILE),
tileId: z.string().min(1),
dashboardId: z.string().min(1),
});
@ -218,101 +218,9 @@ export const alertSchema = z
channel: zChannel,
interval: z.enum(['1m', '5m', '15m', '30m', '1h', '6h', '12h', '1d']),
threshold: z.number().min(0),
type: z.enum(['presence', 'absence']),
source: z.enum(['LOG', 'CHART']).default('LOG'),
thresholdType: z.nativeEnum(AlertThresholdType),
source: z.nativeEnum(AlertSource).default(AlertSource.SAVED_SEARCH),
name: z.string().min(1).max(512).nullish(),
message: z.string().min(1).max(4096).nullish(),
})
.and(zLogAlert.or(zChartAlert));
// ==============================
// External API Alerts
// ==============================
export const externalSlackWebhookAlertChannel = z.object({
type: z.literal('slack_webhook'),
webhookId: objectIdSchema,
});
export const externalSearchAlertSchema = z.object({
source: z.literal('search'),
groupBy: z.string().optional(),
savedSearchId: objectIdSchema,
});
export const externalChartAlertSchema = z.object({
source: z.literal('chart'),
chartId: z.string().min(1),
dashboardId: objectIdSchema,
});
export const externalAlertSchema = z
.object({
channel: externalSlackWebhookAlertChannel,
interval: z.enum(['1m', '5m', '15m', '30m', '1h', '6h', '12h', '1d']),
threshold: z.number().min(0),
threshold_type: z.enum(['above', 'below']),
source: z.enum(['search', 'chart']).default('search'),
name: z.string().min(1).max(512).nullish(),
message: z.string().min(1).max(4096).nullish(),
})
.and(externalSearchAlertSchema.or(externalChartAlertSchema));
export const externalAlertSchemaWithId = externalAlertSchema.and(
z.object({
id: objectIdSchema,
}),
);
// TODO: move this to utils file since its not zod instance
export const translateExternalAlertToInternalAlert = (
alertInput: z.infer<typeof externalAlertSchema>,
): z.infer<typeof alertSchema> => {
return {
interval: alertInput.interval,
threshold: alertInput.threshold,
type: alertInput.threshold_type === 'above' ? 'presence' : 'absence',
channel: {
...alertInput.channel,
type: 'webhook',
},
name: alertInput.name,
message: alertInput.message,
...(alertInput.source === 'search' && alertInput.savedSearchId
? { source: 'LOG', savedSearchId: alertInput.savedSearchId }
: alertInput.source === 'chart' && alertInput.dashboardId
? {
source: 'CHART',
dashboardId: alertInput.dashboardId,
tileId: alertInput.chartId,
}
: ({} as never)),
};
};
// TODO: move this to utils file since its not zod instance
export const translateAlertDocumentToExternalAlert = (
alertDoc: AlertDocument,
): z.infer<typeof externalAlertSchemaWithId> => {
return {
id: alertDoc._id.toString(),
interval: alertDoc.interval,
threshold: alertDoc.threshold,
threshold_type: alertDoc.type === 'absence' ? 'below' : 'above',
channel: {
...alertDoc.channel,
type: 'slack_webhook',
},
name: alertDoc.name,
message: alertDoc.message,
...(alertDoc.source === 'LOG' && alertDoc.savedSearch
? { source: 'search', savedSearchId: alertDoc.savedSearch.toString() }
: alertDoc.source === 'CHART' && alertDoc.dashboardId
? {
source: 'chart',
dashboardId: alertDoc.dashboardId.toString(),
chartId: alertDoc.tileId as string,
}
: ({} as never)),
};
};
.and(zSavedSearchAlert.or(zTileAlert));

View file

@ -97,7 +97,7 @@
"uplot": "^1.6.30",
"uplot-react": "^1.2.2",
"use-query-params": "^2.1.2",
"zod": "^3.22.3"
"zod": "^3.24.1"
},
"devDependencies": {
"@chromatic-com/storybook": "^1.5.0",

View file

@ -3133,6 +3133,13 @@ __metadata:
languageName: node
linkType: hard
"@clickhouse/client-common@npm:^1.9.1":
version: 1.9.1
resolution: "@clickhouse/client-common@npm:1.9.1"
checksum: 10c0/d4df33c16e8ead60c8d656418c81f4cacd16f63c26d462bb265fa4892c0e0f40c5ddf447724befea36ffed569dae685049a9fb6f6210e3362bcbe632e15dfc70
languageName: node
linkType: hard
"@clickhouse/client-web@npm:^1.7.0":
version: 1.7.0
resolution: "@clickhouse/client-web@npm:1.7.0"
@ -4016,6 +4023,7 @@ __metadata:
resolution: "@hyperdx/api@workspace:packages/api"
dependencies:
"@clickhouse/client": "npm:^0.2.10"
"@clickhouse/client-common": "npm:^1.9.1"
"@hyperdx/lucene": "npm:^3.1.1"
"@hyperdx/node-opentelemetry": "npm:^0.8.1"
"@opentelemetry/api": "npm:^1.8.0"
@ -4063,6 +4071,7 @@ __metadata:
mongoose: "npm:^6.12.0"
ms: "npm:^2.1.3"
node-schedule: "npm:^2.1.1"
node-sql-parser: "npm:^5.3.5"
nodemon: "npm:^2.0.20"
object-hash: "npm:^3.0.0"
on-headers: "npm:^1.0.2"
@ -4085,7 +4094,7 @@ __metadata:
typescript: "npm:^4.9.5"
uuid: "npm:^8.3.2"
winston: "npm:^3.10.0"
zod: "npm:^3.22.3"
zod: "npm:^3.24.1"
zod-express-middleware: "npm:^1.4.0"
languageName: unknown
linkType: soft
@ -4214,7 +4223,7 @@ __metadata:
uplot: "npm:^1.6.30"
uplot-react: "npm:^1.2.2"
use-query-params: "npm:^2.1.2"
zod: "npm:^3.22.3"
zod: "npm:^3.24.1"
languageName: unknown
linkType: soft
@ -21000,6 +21009,16 @@ __metadata:
languageName: node
linkType: hard
"node-sql-parser@npm:^5.3.5":
version: 5.3.5
resolution: "node-sql-parser@npm:5.3.5"
dependencies:
"@types/pegjs": "npm:^0.10.0"
big-integer: "npm:^1.6.48"
checksum: 10c0/221c0e5d582adf9e87a4357cc437f6f66e925eaefa889f05b8e93375274b0246edbca11a673e7d44a29888ca0ec6005b5614ac08361b0e9171d11c66ce17e91a
languageName: node
linkType: hard
"nodemon@npm:^2.0.20":
version: 2.0.20
resolution: "nodemon@npm:2.0.20"
@ -27912,13 +27931,6 @@ __metadata:
languageName: node
linkType: hard
"zod@npm:^3.22.3":
version: 3.22.3
resolution: "zod@npm:3.22.3"
checksum: 10c0/cb4b24aed7dec98552eb9042e88cbd645455bf2830e5704174d2da96f554dabad4630e3b4f6623e1b6562b9eaa43535a37b7f2011f29b8d8e9eabe1ddf3b656b
languageName: node
linkType: hard
"zod@npm:^3.22.4":
version: 3.23.8
resolution: "zod@npm:3.23.8"
@ -27926,6 +27938,13 @@ __metadata:
languageName: node
linkType: hard
"zod@npm:^3.24.1":
version: 3.24.1
resolution: "zod@npm:3.24.1"
checksum: 10c0/0223d21dbaa15d8928fe0da3b54696391d8e3e1e2d0283a1a070b5980a1dbba945ce631c2d1eccc088fdbad0f2dfa40155590bf83732d3ac4fcca2cc9237591b
languageName: node
linkType: hard
"zwitch@npm:^2.0.0":
version: 2.0.4
resolution: "zwitch@npm:2.0.4"