chore: improve integration test infrastructure (#2126)

## Summary

Currently it's hard to debug integration tests because of all the extra logging, warnings, and open-handle issues (I wasn't sure whether an open handle was something I introduced or pre-existing). This PR fixes that.

- Bump Jest from v28 to v30 in api and common-utils packages, syncing with app's existing setup
- Replace legacy `preset: 'ts-jest'` with modern `createJsWithTsPreset()` spread pattern across all packages
- Fix open handles that required `--forceExit` and `--detectOpenHandles` workarounds
- Suppress noisy console and logger output during test runs via targeted mocks

## Changes

### Jest/ts-jest upgrade
- Bump `jest` 28 → 30, `@types/jest` 28 → 29 in api and common-utils
- Adopt `createJsWithTsPreset()` config pattern (matching app); sketched after this list
- Add `isolatedModules: true` to common-utils tsconfig to fix ts-jest warning with Node16 module kind
- Update snapshot files and inline snapshots for Jest 30 format changes
- Replace removed `toThrowError()` with `toThrow()`
- Fix team.test.ts inline snapshot that depended on dynamic Mongo ObjectIds
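
The shared config shape after the migration looks roughly like this (a sketch of the pattern; the per-package diffs below show the exact setup-file lists):

```ts
// Sketch: createJsWithTsPreset() produces the transform config that the
// legacy `preset: 'ts-jest'` entry used to provide, and it gets spread in.
const { createJsWithTsPreset } = require('ts-jest');

const tsJestTransformCfg = createJsWithTsPreset();

/** @type {import("jest").Config} **/
module.exports = {
  ...tsJestTransformCfg,
  testEnvironment: 'node',
  setupFiles: ['dotenv-expand/config'],
  setupFilesAfterEnv: ['<rootDir>/../jest.setup.ts'],
  verbose: true,
};
```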

### Open handle fixes
- Add `close()` method to `BaseClickhouseClient` in common-utils
- Call `mongoose.disconnect()` in `closeDB()` (api fixtures)
- Add `closeTestFixtureClickHouseClient()` and call it in `MockServer.stop()`
- Fix common-utils integration tests to close both `hdxClient` and raw `client` in `afterAll` (teardown sketch after this list)
- Disable `usageStats()` interval in CI to prevent leaked timers
- Remove `--forceExit` from common-utils CI/dev scripts and api dev script
- Remove `--detectOpenHandles` from all dev scripts
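
With these handles closed, the integration suites exit cleanly on their own. A minimal sketch of the common-utils teardown (names as in the diffs below):

```ts
// hdxClient is the ClickhouseClient wrapper (which now exposes close());
// client is the raw @clickhouse/client instance used for fixture setup.
afterAll(async () => {
  await hdxClient.close();
  await client.close();
});
```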

### Log noise suppression
- Use `jest.spyOn` for console methods instead of global console object override
- Add per-file `console.warn`/`console.error` suppression in test files that exercise error paths
- Mock pino logger module in api jest.setup.ts to suppress expected operational logs (validation errors, MCP tool errors, etc.); a sketch of this mock follows the list
- Use pino logger instead of `console.error` in Express error handler (`middleware/error.ts`)
- Add console suppression to app setupTests.tsx
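
The logger mock itself is not reproduced in the diffs below; a minimal sketch of what jest.setup.ts does, assuming the logger is the default export of `@/utils/logger` (as imported by the error handler) and exposes the usual pino methods:

```ts
// Hypothetical sketch: stub out the pino logger so expected operational logs
// (validation errors, MCP tool errors, etc.) don't clutter test output.
jest.mock('@/utils/logger', () => ({
  __esModule: true,
  default: {
    debug: jest.fn(),
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  },
}));
```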

## Testing
- `make ci-lint` — passes
- `make ci-unit` — 2177 tests pass, zero console noise
- `make ci-int` — 642 tests pass (606 api + 36 common-utils), zero log noise

@ -12,5 +12,7 @@ MONGO_URI=mongodb://localhost:${HDX_CI_MONGO_PORT:-39999}/hyperdx-test
NODE_ENV=test
PORT=${HDX_CI_API_PORT:-19000}
OPAMP_PORT=${HDX_CI_OPAMP_PORT:-14320}
# Default to only logging warnings/errors. Adjust if you need more verbosity
HYPERDX_LOG_LEVEL=warn
# Default to only logging errors. Adjust if you need more verbosity.
# Note: the logger module is mocked in jest.setup.ts to suppress expected
# operational noise (validation errors, MCP tool errors, etc.) during tests.
HYPERDX_LOG_LEVEL=error


@ -1,8 +1,12 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
const { createJsWithTsPreset } = require('ts-jest');
const tsJestTransformCfg = createJsWithTsPreset();
/** @type {import("jest").Config} **/
module.exports = {
...tsJestTransformCfg,
setupFilesAfterEnv: ['<rootDir>/../jest.setup.ts'],
setupFiles: ['dotenv-expand/config'],
preset: 'ts-jest',
testEnvironment: 'node',
verbose: true,
rootDir: './src',


@ -1,12 +1,11 @@
// @eslint-disable @typescript-eslint/no-var-requires
jest.retryTimes(1, { logErrorsBeforeRetry: true });
global.console = {
...console,
// Turn off noisy console logs in tests
debug: jest.fn(),
info: jest.fn(),
};
// Suppress noisy console output during test runs.
// - debug/info: ClickHouse query logging, server startup messages
// - warn: expected column-not-found warnings from renderChartConfig on CTE tables
jest.spyOn(console, 'debug').mockImplementation(() => {});
jest.spyOn(console, 'info').mockImplementation(() => {});
jest.spyOn(console, 'warn').mockImplementation(() => {});
// Mock alert notification functions to prevent HTTP calls during tests
jest.mock('@/utils/slack', () => ({


@ -58,7 +58,7 @@
"@types/cors": "^2.8.14",
"@types/express": "^4.17.13",
"@types/express-session": "^1.17.7",
"@types/jest": "^28.1.1",
"@types/jest": "^29.5.14",
"@types/lodash": "^4.14.198",
"@types/minimist": "^1.2.2",
"@types/ms": "^0.7.31",
@ -66,7 +66,7 @@
"@types/supertest": "^2.0.12",
"@types/swagger-jsdoc": "^6",
"@types/uuid": "^8.3.4",
"jest": "^28.1.3",
"jest": "^30.2.0",
"migrate-mongo": "^11.0.0",
"nodemon": "^2.0.20",
"pino-pretty": "^13.1.1",
@ -90,7 +90,7 @@
"lint:fix": "npx eslint . --ext .ts --fix",
"ci:lint": "yarn lint && yarn tsc --noEmit && yarn lint:openapi",
"ci:int": "DOTENV_CONFIG_PATH=.env.test DOTENV_CONFIG_OVERRIDE=true jest --runInBand --ci --forceExit --coverage",
"dev:int": "DOTENV_CONFIG_PATH=.env.test DOTENV_CONFIG_OVERRIDE=true jest --runInBand --forceExit --coverage",
"dev:int": "DOTENV_CONFIG_PATH=.env.test DOTENV_CONFIG_OVERRIDE=true jest --runInBand --coverage",
"dev:migrate-db-create": "ts-node node_modules/.bin/migrate-mongo create -f migrate-mongo-config.ts",
"dev:migrate-db": "ts-node node_modules/.bin/migrate-mongo up -f migrate-mongo-config.ts",
"dev:migrate-ch-create": "migrate create -ext sql -dir ./migrations/ch -seq",


@ -80,7 +80,7 @@ app.use(defaultCors);
// ---------------------------------------------------------------------
// ----------------------- Background Jobs -----------------------------
// ---------------------------------------------------------------------
if (config.USAGE_STATS_ENABLED) {
if (config.USAGE_STATS_ENABLED && !config.IS_CI) {
usageStats();
}
// ---------------------------------------------------------------------


@ -1,33 +1,33 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`renderChartConfig K8s Semantic Convention Migrations with metricNameSql should handle gauge metric with metricNameSql and groupBy 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 45,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 50,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 55,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:03:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 60,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:04:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 65,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"arrayElement(ResourceAttributes, 'k8s.pod.name')": "test-pod",
"avg(toFloat64OrDefault(toString(LastValue)))": 70,
@ -36,16 +36,16 @@ Array [
`;
exports[`renderChartConfig K8s Semantic Convention Migrations with metricNameSql should handle metrics without metricNameSql (backward compatibility) 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 45,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 50,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 55,
},
@ -53,28 +53,28 @@ Array [
`;
exports[`renderChartConfig K8s Semantic Convention Migrations with metricNameSql should query k8s.pod.cpu.utilization gauge metric using metricNameSql to handle both old and new conventions 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 45,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 50,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 55,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:03:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 60,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:04:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 65,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 70,
},
@ -82,12 +82,12 @@ Array [
`;
exports[`renderChartConfig Query Events - Logs simple select + group by query logs 1`] = `
Array [
Object {
[
{
"ServiceName": "app",
"count": "1",
},
Object {
{
"ServiceName": "api",
"count": "1",
},
@ -95,31 +95,31 @@ Array [
`;
exports[`renderChartConfig Query Events - Logs simple select + where query logs 1`] = `
Array [
Object {
[
{
"Body": "Oh no! Something went wrong!",
},
]
`;
exports[`renderChartConfig Query Metrics - Gauge single avg gauge with group-by 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"arrayElement(ResourceAttributes, 'host')": "host2",
"avg(toFloat64OrDefault(toString(LastValue)))": 4,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"arrayElement(ResourceAttributes, 'host')": "host1",
"avg(toFloat64OrDefault(toString(LastValue)))": 6.25,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"arrayElement(ResourceAttributes, 'host')": "host2",
"avg(toFloat64OrDefault(toString(LastValue)))": 4,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"arrayElement(ResourceAttributes, 'host')": "host1",
"avg(toFloat64OrDefault(toString(LastValue)))": 80,
@ -128,12 +128,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single avg gauge with where 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 6.25,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 80,
},
@ -141,12 +141,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single max gauge with delta 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"max(toFloat64OrDefault(toString(LastValue)))": 5,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"max(toFloat64OrDefault(toString(LastValue)))": -1.6666666666666667,
},
@ -154,23 +154,23 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single max gauge with delta and group by 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"arrayElement(ResourceAttributes, 'host')": "host2",
"max(toFloat64OrDefault(toString(LastValue)))": 5,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"arrayElement(ResourceAttributes, 'host')": "host1",
"max(toFloat64OrDefault(toString(LastValue)))": -72.91666666666667,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"arrayElement(ResourceAttributes, 'host')": "host2",
"max(toFloat64OrDefault(toString(LastValue)))": -1.6666666666666667,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"arrayElement(ResourceAttributes, 'host')": "host1",
"max(toFloat64OrDefault(toString(LastValue)))": -33.333333333333336,
@ -179,12 +179,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single max/avg/sum gauge 1`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 5.125,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"avg(toFloat64OrDefault(toString(LastValue)))": 42,
},
@ -192,12 +192,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single max/avg/sum gauge 2`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"max(toFloat64OrDefault(toString(LastValue)))": 6.25,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"max(toFloat64OrDefault(toString(LastValue)))": 80,
},
@ -205,12 +205,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Gauge single max/avg/sum gauge 3`] = `
Array [
Object {
[
{
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"sum(toFloat64OrDefault(toString(LastValue)))": 10.25,
},
Object {
{
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
"sum(toFloat64OrDefault(toString(LastValue)))": 84,
},
@ -218,32 +218,32 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram should bucket correctly when grouping by a single attribute 1`] = `
Array [
Object {
[
{
"Value": 3.5714285714285716,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-b",
],
},
Object {
{
"Value": 8.382352941176471,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-a",
],
},
Object {
{
"Value": 3.5,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-b",
],
},
Object {
{
"Value": 4.95,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-a",
],
},
@ -251,99 +251,99 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram should bucket correctly when grouping by multiple attributes 1`] = `
Array [
Object {
[
{
"Value": 2.916666666666667,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-b",
"service-2",
],
},
Object {
{
"Value": 4.852941176470588,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-a",
"service-2",
],
},
Object {
{
"Value": 8.75,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-a",
"service-1",
],
},
Object {
{
"Value": 3.1578947368421053,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-b",
"service-1",
],
},
Object {
{
"Value": 58.33333333333333,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-a",
"service-3",
],
},
Object {
{
"Value": 6.25,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
"group": Array [
"group": [
"host-b",
"service-3",
],
},
Object {
{
"Value": 3.4090909090909087,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-b",
"service-1",
],
},
Object {
{
"Value": 7.916666666666667,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-a",
"service-1",
],
},
Object {
{
"Value": 3.25,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-b",
"service-3",
],
},
Object {
{
"Value": 4.25,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-a",
"service-3",
],
},
Object {
{
"Value": 4.75,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-a",
"service-2",
],
},
Object {
{
"Value": 3.888888888888889,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
"group": Array [
"group": [
"host-b",
"service-2",
],
@ -352,12 +352,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram should bucket correctly when no grouping is defined 1`] = `
Array [
Object {
[
{
"Value": 5.241935483870968,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 4.40625,
"__hdx_time_bucket": "2022-01-05T00:02:00Z",
},
@ -365,8 +365,8 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram two_timestamps_bounded histogram (p25) 1`] = `
Array [
Object {
[
{
"Value": 7.5,
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
},
@ -374,8 +374,8 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram two_timestamps_bounded histogram (p50) 1`] = `
Array [
Object {
[
{
"Value": 20,
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
},
@ -383,8 +383,8 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram two_timestamps_bounded histogram (p90) 1`] = `
Array [
Object {
[
{
"Value": 30,
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
},
@ -392,8 +392,8 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Histogram two_timestamps_lower_bound_inf histogram (p50) 1`] = `
Array [
Object {
[
{
"Value": 0.5,
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
},
@ -401,12 +401,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Sum calculates min_rate/max_rate correctly for sum metrics: maxSum 1`] = `
Array [
Object {
[
{
"Value": 24,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 134,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
@ -414,12 +414,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Sum calculates min_rate/max_rate correctly for sum metrics: minSum 1`] = `
Array [
Object {
[
{
"Value": 15,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 52,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
@ -427,12 +427,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Sum handles counter resets correctly for sum metrics 1`] = `
Array [
Object {
[
{
"Value": 15,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 52,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
@ -440,20 +440,20 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Sum single sum rate 1`] = `
Array [
Object {
[
{
"Value": 19,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 79,
"__hdx_time_bucket": "2022-01-05T00:05:00Z",
},
Object {
{
"Value": 5813,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
Object {
{
"Value": 78754,
"__hdx_time_bucket": "2022-01-05T00:15:00Z",
},
@ -461,12 +461,12 @@ Array [
`;
exports[`renderChartConfig Query Metrics - Sum sum values as without rate computation 1`] = `
Array [
Object {
[
{
"Value": 950400,
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
},
Object {
{
"Value": 1641600,
"__hdx_time_bucket": "2022-01-05T00:10:00Z",
},
@ -474,19 +474,19 @@ Array [
`;
exports[`renderChartConfig Query settings handles the the query settings 1`] = `
Array [
Object {
[
{
"Body": "Oh no! Something went wrong!",
},
Object {
{
"Body": "This is a test message.",
},
]
`;
exports[`renderChartConfig aggFn numeric agg functions should handle numeric values as strings 1`] = `
Array [
Object {
[
{
"AVG(toFloat64OrDefault(toString(strVal)))": 0.5,
"MAX(toFloat64OrDefault(toString(strVal)))": 3,
"MIN(toFloat64OrDefault(toString(strVal)))": -1.1,
@ -497,8 +497,8 @@ Array [
`;
exports[`renderChartConfig aggFn numeric agg functions should use default values for other types 1`] = `
Array [
Object {
[
{
"AVG(toFloat64OrDefault(toString(strVal)))": 0,
"MAX(toFloat64OrDefault(toString(strVal)))": 0,
"MIN(toFloat64OrDefault(toString(strVal)))": 0,


@ -63,6 +63,13 @@ export const getTestFixtureClickHouseClient = async () => {
return clickhouseClient;
};
export const closeTestFixtureClickHouseClient = async () => {
if (clickhouseClient) {
await clickhouseClient.close();
clickhouseClient = null;
}
};
const healthCheck = async () => {
const client = await getTestFixtureClickHouseClient();
const result = await client.ping();
@ -132,6 +139,7 @@ export const closeDB = async () => {
throw new Error('ONLY execute this in CI env 😈 !!!');
}
await mongooseConnection.dropDatabase();
await mongoose.disconnect();
};
export const clearDBCollections = async () => {
@ -175,8 +183,8 @@ class MockServer extends Server {
}
}
stop() {
return new Promise<void>((resolve, reject) => {
async stop() {
await new Promise<void>((resolve, reject) => {
this.appServer.close(err => {
if (err) {
reject(err);
@ -187,13 +195,12 @@ class MockServer extends Server {
reject(err);
return;
}
super
.shutdown()
.then(() => resolve())
.catch(err => reject(err));
resolve();
});
});
});
await closeTestFixtureClickHouseClient();
await super.shutdown();
}
clearDBs() {


@ -3,6 +3,7 @@ import type { NextFunction, Request, Response } from 'express';
import { IS_PROD } from '@/config';
import { BaseError, isOperationalError, StatusCode } from '@/utils/errors';
import logger from '@/utils/logger';
// WARNING: need to keep the 4th arg for express to identify it as an error-handling middleware function
export const appErrorHandler = (
@ -11,7 +12,11 @@ export const appErrorHandler = (
res: Response,
next: NextFunction,
) => {
console.error(err);
if (isOperationalError(err)) {
logger.warn({ err }, err.message);
} else {
logger.error({ err }, err.message);
}
const userFacingErrorMessage = isOperationalError(err)
? err.name || err.message


@ -31,11 +31,11 @@ describe('team router', () => {
expect(_.omit(resp.body, ['_id', 'id', 'apiKey', 'createdAt']))
.toMatchInlineSnapshot(`
Object {
"allowedAuthMethods": Array [],
"name": "fake@deploysentinel.com's Team",
}
`);
{
"allowedAuthMethods": [],
"name": "fake@deploysentinel.com's Team",
}
`);
});
it('GET /team/tags - no tags', async () => {
@ -43,7 +43,7 @@ Object {
const resp = await agent.get('/team/tags').expect(200);
expect(resp.body.data).toMatchInlineSnapshot(`Array []`);
expect(resp.body.data).toMatchInlineSnapshot(`[]`);
});
it('GET /team/tags', async () => {
@ -98,29 +98,27 @@ Object {
});
const resp = await agent.get('/team/members').expect(200);
expect(resp.body.data).toMatchInlineSnapshot(`
Array [
Object {
"_id": "${resp.body.data[0]._id}",
"email": "fake@deploysentinel.com",
"hasPasswordAuth": true,
"isCurrentUser": true,
"name": "fake@deploysentinel.com",
},
Object {
"_id": "${user1._id}",
"email": "user1@example.com",
"hasPasswordAuth": true,
"isCurrentUser": false,
},
Object {
"_id": "${user2._id}",
"email": "user2@example.com",
"hasPasswordAuth": true,
"isCurrentUser": false,
},
]
`);
expect(resp.body.data.map(({ _id, ...rest }: any) => rest))
.toMatchInlineSnapshot(`
[
{
"email": "fake@deploysentinel.com",
"hasPasswordAuth": true,
"isCurrentUser": true,
"name": "fake@deploysentinel.com",
},
{
"email": "user1@example.com",
"hasPasswordAuth": true,
"isCurrentUser": false,
},
{
"email": "user2@example.com",
"hasPasswordAuth": true,
"isCurrentUser": false,
},
]
`);
});
it('POST /team/invitation', async () => {
@ -236,17 +234,17 @@ Array [
name: i.name,
})),
).toMatchInlineSnapshot(`
Array [
Object {
"email": "user1@example.com",
"name": "User 1",
},
Object {
"email": "user2@example.com",
"name": "User 2",
},
]
`);
[
{
"email": "user1@example.com",
"name": "User 1",
},
{
"email": "user2@example.com",
"name": "User 2",
},
]
`);
});
it('DELETE /team/member/:userId removes a user', async () => {


@ -109,15 +109,15 @@ describe('util', () => {
});
it('should handle keys with empty segments', () => {
expect(() => unflattenObject({ 'foo..bar': 'baz' })).toThrowError();
expect(() => unflattenObject({ 'foo..bar': 'baz' })).toThrow();
});
it('should handle keys starting with separator', () => {
expect(() => unflattenObject({ '.foo.bar': 'baz' })).toThrowError();
expect(() => unflattenObject({ '.foo.bar': 'baz' })).toThrow();
});
it('should handle keys ending with separator', () => {
expect(() => unflattenObject({ 'foo.bar.': 'baz' })).toThrowError();
expect(() => unflattenObject({ 'foo.bar.': 'baz' })).toThrow();
});
it('should handle complex custom separator', () => {


@ -488,15 +488,13 @@ describe('checkAlerts', () => {
buildAlertMessageTemplateTitle({
view: defaultSearchView,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"My Search\\" - 10 lines found"`,
);
).toMatchInlineSnapshot(`"🚨 Alert for "My Search" - 10 lines found"`);
expect(
buildAlertMessageTemplateTitle({
view: defaultChartView,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 5 exceeds 1"`,
`"🚨 Alert for "Test Chart" in "My Dashboard" - 5 exceeds 1"`,
);
});
@ -507,16 +505,14 @@ describe('checkAlerts', () => {
view: defaultSearchView,
state: AlertState.ALERT,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"My Search\\" - 10 lines found"`,
);
).toMatchInlineSnapshot(`"🚨 Alert for "My Search" - 10 lines found"`);
expect(
buildAlertMessageTemplateTitle({
view: defaultChartView,
state: AlertState.ALERT,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 5 exceeds 1"`,
`"🚨 Alert for "Test Chart" in "My Dashboard" - 5 exceeds 1"`,
);
// Test OK state (should have ✅ emoji)
@ -525,16 +521,14 @@ describe('checkAlerts', () => {
view: defaultSearchView,
state: AlertState.OK,
}),
).toMatchInlineSnapshot(
`"✅ Alert for \\"My Search\\" - 10 lines found"`,
);
).toMatchInlineSnapshot(`"✅ Alert for "My Search" - 10 lines found"`);
expect(
buildAlertMessageTemplateTitle({
view: defaultChartView,
state: AlertState.OK,
}),
).toMatchInlineSnapshot(
`"✅ Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 5 exceeds 1"`,
`"✅ Alert for "Test Chart" in "My Dashboard" - 5 exceeds 1"`,
);
});
@ -554,7 +548,7 @@ describe('checkAlerts', () => {
view: decimalChartView,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 1111.1 exceeds 1.5"`,
`"🚨 Alert for "Test Chart" in "My Dashboard" - 1111.1 exceeds 1.5"`,
);
// Test with multiple decimal places
@ -572,7 +566,7 @@ describe('checkAlerts', () => {
view: multiDecimalChartView,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 1.1235 exceeds 0.1234"`,
`"🚨 Alert for "Test Chart" in "My Dashboard" - 1.1235 exceeds 0.1234"`,
);
// Test with integer value and decimal threshold
@ -590,7 +584,7 @@ describe('checkAlerts', () => {
view: integerValueView,
}),
).toMatchInlineSnapshot(
`"🚨 Alert for \\"Test Chart\\" in \\"My Dashboard\\" - 10.00 exceeds 0.12"`,
`"🚨 Alert for "Test Chart" in "My Dashboard" - 10.00 exceeds 0.12"`,
);
});
@ -631,7 +625,7 @@ describe('checkAlerts', () => {
expect(
translateExternalActionsToInternal('@webhook-123'),
).toMatchInlineSnapshot(
`"{{__hdx_notify_channel__ channel=\\"webhook\\" id=\\"123\\"}}"`,
`"{{__hdx_notify_channel__ channel="webhook" id="123"}}"`,
);
// with multiple breaks
@ -641,44 +635,44 @@ describe('checkAlerts', () => {
@webhook-123
`),
).toMatchInlineSnapshot(`
"
{{__hdx_notify_channel__ channel=\\"webhook\\" id=\\"123\\"}}
"
`);
"
{{__hdx_notify_channel__ channel="webhook" id="123"}}
"
`);
// with body string
expect(
translateExternalActionsToInternal('blabla @action-id'),
).toMatchInlineSnapshot(
`"blabla {{__hdx_notify_channel__ channel=\\"action\\" id=\\"id\\"}}"`,
`"blabla {{__hdx_notify_channel__ channel="action" id="id"}}"`,
);
// multiple actions
expect(
translateExternalActionsToInternal('blabla @action-id @action2-id2'),
).toMatchInlineSnapshot(
`"blabla {{__hdx_notify_channel__ channel=\\"action\\" id=\\"id\\"}} {{__hdx_notify_channel__ channel=\\"action2\\" id=\\"id2\\"}}"`,
`"blabla {{__hdx_notify_channel__ channel="action" id="id"}} {{__hdx_notify_channel__ channel="action2" id="id2"}}"`,
);
// id with special characters
expect(
translateExternalActionsToInternal('send @email-mike@hyperdx.io'),
).toMatchInlineSnapshot(
`"send {{__hdx_notify_channel__ channel=\\"email\\" id=\\"mike@hyperdx.io\\"}}"`,
`"send {{__hdx_notify_channel__ channel="email" id="mike@hyperdx.io"}}"`,
);
// id with multiple dashes
expect(
translateExternalActionsToInternal('@action-id-with-multiple-dashes'),
).toMatchInlineSnapshot(
`"{{__hdx_notify_channel__ channel=\\"action\\" id=\\"id-with-multiple-dashes\\"}}"`,
`"{{__hdx_notify_channel__ channel="action" id="id-with-multiple-dashes"}}"`,
);
// custom template id
expect(
translateExternalActionsToInternal('@action-{{action_id}}'),
).toMatchInlineSnapshot(
`"{{__hdx_notify_channel__ channel=\\"action\\" id=\\"{{action_id}}\\"}}"`,
`"{{__hdx_notify_channel__ channel="action" id="{{action_id}}"}}"`,
);
});


@ -1,41 +1,41 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`logParser mapObjectToKeyValuePairs 1`] = `
Object {
"bool.names": Array [
{
"bool.names": [
"foo2",
"good.burrito.is",
],
"bool.values": Array [
"bool.values": [
0,
1,
],
"number.names": Array [
"number.names": [
"foo1",
],
"number.values": Array [
"number.values": [
123,
],
"string.names": Array [
"string.names": [
"foo",
"nested.foo",
"array1",
"array2",
],
"string.values": Array [
"string.values": [
"123",
"bar",
"[456]",
"[\\"foo1\\",{\\"foo2\\":\\"bar2\\"},[{\\"foo3\\":\\"bar3\\"}]]",
"["foo1",{"foo2":"bar2"},[{"foo3":"bar3"}]]",
],
}
`;
exports[`logParser mapObjectToKeyValuePairs 2`] = `
Object {
"bool.names": Array [],
"bool.values": Array [],
"number.names": Array [
{
"bool.names": [],
"bool.values": [],
"number.names": [
"foo0",
"foo1",
"foo2",
@ -1061,7 +1061,7 @@ Object {
"foo1022",
"foo1023",
],
"number.values": Array [
"number.values": [
0,
1,
2,
@ -2087,7 +2087,7 @@ Object {
1022,
1023,
],
"string.names": Array [],
"string.values": Array [],
"string.names": [],
"string.values": [],
}
`;


@ -18,7 +18,7 @@
"lint:styles": "stylelint **/*/*.{css,scss}",
"ci:lint": "yarn lint && yarn tsc --noEmit && yarn lint:styles --quiet",
"ci:unit": "jest --ci --coverage",
"dev:unit": "jest --watchAll --detectOpenHandles",
"dev:unit": "jest --watchAll",
"test:e2e": "node scripts/run-e2e.js",
"test:e2e:ci": "../../scripts/test-e2e-ci.sh",
"storybook": "storybook dev -p 6006",


@ -1,6 +1,10 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
const { createJsWithTsPreset } = require('ts-jest');
const tsJestTransformCfg = createJsWithTsPreset();
/** @type {import("jest").Config} **/
module.exports = {
preset: 'ts-jest',
...tsJestTransformCfg,
testEnvironment: 'node',
setupFilesAfterEnv: ['<rootDir>/../jest.setup.ts'],
verbose: true,


@ -1,7 +1,11 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
const { createJsWithTsPreset } = require('ts-jest');
const tsJestTransformCfg = createJsWithTsPreset();
/** @type {import("jest").Config} **/
module.exports = {
...tsJestTransformCfg,
setupFiles: ['dotenv-expand/config'],
preset: 'ts-jest',
testEnvironment: 'node',
setupFilesAfterEnv: ['<rootDir>/../jest.setup.ts'],
verbose: true,


@ -1,6 +1,4 @@
global.console = {
...console,
// Turn off noisy console logs in tests
debug: jest.fn(),
info: jest.fn(),
};
// Suppress noisy console.debug output (e.g. ClickHouse query logging)
// during test runs. Warnings and errors still appear.
jest.spyOn(console, 'debug').mockImplementation(() => {});
jest.spyOn(console, 'info').mockImplementation(() => {});


@ -26,12 +26,12 @@
},
"devDependencies": {
"@types/hyperdx__lucene": "npm:@types/lucene@*",
"@types/jest": "^28.1.1",
"@types/jest": "^29.5.14",
"@types/lodash": "^4.14.198",
"@types/object-hash": "^2.2.1",
"@types/sqlstring": "^2.3.0",
"dotenv": "^17.2.3",
"jest": "^28.1.1",
"jest": "^30.2.0",
"nodemon": "^2.0.20",
"ts-jest": "^29.4.5",
"tsup": "^8.4.0",
@ -45,9 +45,9 @@
"lint": "npx eslint --quiet . --ext .ts",
"lint:fix": "npx eslint . --ext .ts --fix",
"ci:lint": "yarn lint && yarn tsc --noEmit",
"ci:unit": "jest --runInBand --ci --forceExit --coverage",
"dev:unit": "jest --watchAll --runInBand --detectOpenHandles",
"ci:int": "DOTENV_CONFIG_PATH=.env.test jest --config jest.int.config.js --runInBand --ci --forceExit --coverage",
"dev:int": "DOTENV_CONFIG_PATH=.env.test jest --config jest.int.config.js --watchAll --runInBand --detectOpenHandles"
"ci:unit": "jest --runInBand --ci --coverage",
"dev:unit": "jest --watchAll --runInBand",
"ci:int": "DOTENV_CONFIG_PATH=.env.test jest --config jest.int.config.js --runInBand --ci --coverage",
"dev:int": "DOTENV_CONFIG_PATH=.env.test jest --config jest.int.config.js --watchAll --runInBand"
}
}


@ -1,4 +1,4 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`renderChartConfig Aggregate Merge Functions should generate SQL for an aggregate merge function 1`] = `"SELECT avgMerge(Duration),severity FROM default.logs WHERE (timestamp >= fromUnixTimestamp64Milli(1739318400000) AND timestamp <= fromUnixTimestamp64Milli(1739491200000)) GROUP BY severity SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"`;
@ -57,10 +57,10 @@ exports[`renderChartConfig histogram metric queries count should generate a coun
SELECT
\`__hdx_time_bucket\`,
group,
sum(delta) AS \\"Value\\"
sum(delta) AS "Value"
FROM source
GROUP BY group, \`__hdx_time_bucket\`
) SELECT \`__hdx_time_bucket\`, group, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, group, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig histogram metric queries count should generate a count query without grouping but time bucketing 1`] = `
@ -88,10 +88,10 @@ exports[`renderChartConfig histogram metric queries count should generate a coun
SELECT
\`__hdx_time_bucket\`,
sum(delta) AS \\"Value\\"
sum(delta) AS "Value"
FROM source
GROUP BY \`__hdx_time_bucket\`
) SELECT \`__hdx_time_bucket\`, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig histogram metric queries count should generate a count query without grouping or time bucketing 1`] = `
@ -119,10 +119,10 @@ exports[`renderChartConfig histogram metric queries count should generate a coun
SELECT
\`__hdx_time_bucket\`,
sum(delta) AS \\"Value\\"
sum(delta) AS "Value"
FROM source
GROUP BY \`__hdx_time_bucket\`
) SELECT \`__hdx_time_bucket\`, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig histogram metric queries quantile should generate a query with grouping and time bucketing 1`] = `
@ -203,10 +203,10 @@ exports[`renderChartConfig histogram metric queries quantile should generate a q
WHEN upper_bound = inf THEN point[upper_idx - 1].2
WHEN lower_bound = inf THEN point[1].2
ELSE lower_bound + (upper_bound - lower_bound) * ((rank - lower_count) / (upper_count - lower_count))
END AS \\"Value\\"
END AS "Value"
FROM points
WHERE length(point) > 1 AND total > 0
) SELECT \`__hdx_time_bucket\`, group, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, group, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig histogram metric queries quantile should generate a query without grouping but time bucketing 1`] = `
@ -287,10 +287,10 @@ exports[`renderChartConfig histogram metric queries quantile should generate a q
WHEN upper_bound = inf THEN point[upper_idx - 1].2
WHEN lower_bound = inf THEN point[1].2
ELSE lower_bound + (upper_bound - lower_bound) * ((rank - lower_count) / (upper_count - lower_count))
END AS \\"Value\\"
END AS "Value"
FROM points
WHERE length(point) > 1 AND total > 0
) SELECT \`__hdx_time_bucket\`, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig histogram metric queries quantile should generate a query without grouping or time bucketing 1`] = `
@ -371,10 +371,10 @@ exports[`renderChartConfig histogram metric queries quantile should generate a q
WHEN upper_bound = inf THEN point[upper_idx - 1].2
WHEN lower_bound = inf THEN point[1].2
ELSE lower_bound + (upper_bound - lower_bound) * ((rank - lower_count) / (upper_count - lower_count))
END AS \\"Value\\"
END AS "Value"
FROM points
WHERE length(point) > 1 AND total > 0
) SELECT \`__hdx_time_bucket\`, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig k8s semantic convention migrations should generate SQL with metricNameSql for container.cpu.utilization histogram metric 1`] = `
@ -455,10 +455,10 @@ exports[`renderChartConfig k8s semantic convention migrations should generate SQ
WHEN upper_bound = inf THEN point[upper_idx - 1].2
WHEN lower_bound = inf THEN point[1].2
ELSE lower_bound + (upper_bound - lower_bound) * ((rank - lower_count) / (upper_count - lower_count))
END AS \\"Value\\"
END AS "Value"
FROM points
WHERE length(point) > 1 AND total > 0
) SELECT \`__hdx_time_bucket\`, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig k8s semantic convention migrations should generate SQL with metricNameSql for histogram metric with groupBy 1`] = `
@ -539,10 +539,10 @@ exports[`renderChartConfig k8s semantic convention migrations should generate SQ
WHEN upper_bound = inf THEN point[upper_idx - 1].2
WHEN lower_bound = inf THEN point[1].2
ELSE lower_bound + (upper_bound - lower_bound) * ((rank - lower_count) / (upper_count - lower_count))
END AS \\"Value\\"
END AS "Value"
FROM points
WHERE length(point) > 1 AND total > 0
) SELECT \`__hdx_time_bucket\`, group, \\"Value\\" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) SELECT \`__hdx_time_bucket\`, group, "Value" FROM metrics WHERE (\`__hdx_time_bucket\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket\` <= fromUnixTimestamp64Milli(1765670400000)) LIMIT 10 SETTINGS short_circuit_function_evaluation = 'force_enable', optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig k8s semantic convention migrations should generate SQL with metricNameSql for k8s.node.cpu.utilization sum metric 1`] = `
@ -588,7 +588,7 @@ exports[`renderChartConfig k8s semantic convention migrations should generate SQ
ORDER BY AttributesHash, \`__hdx_time_bucket2\`
) SELECT max(
toFloat64OrDefault(toString(Rate))
) AS \\"Value\\",toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10 SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) AS "Value",toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10 SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig k8s semantic convention migrations should generate SQL with metricNameSql for k8s.pod.cpu.utilization gauge metric 1`] = `
@ -660,9 +660,9 @@ exports[`renderChartConfig k8s semantic convention migrations should handle metr
exports[`renderChartConfig sample-weighted aggregations should handle complex sampleWeightExpression like SpanAttributes map access 1`] = `"SELECT sum(greatest(toUInt64OrZero(toString(SpanAttributes['SampleRate'])), 1)) FROM default.otel_traces WHERE (Timestamp >= fromUnixTimestamp64Milli(1739318400000) AND Timestamp <= fromUnixTimestamp64Milli(1739491200000)) SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"`;
exports[`renderChartConfig sample-weighted aggregations should handle mixed weighted and passthrough aggregations 1`] = `
"SELECT sum(greatest(toUInt64OrZero(toString(SampleRate)), 1)) AS \\"weighted_count\\",sumIf(toFloat64OrDefault(toString(Duration)) * greatest(toUInt64OrZero(toString(SampleRate)), 1), toFloat64OrDefault(toString(Duration)) IS NOT NULL) / nullIf(sumIf(greatest(toUInt64OrZero(toString(SampleRate)), 1), toFloat64OrDefault(toString(Duration)) IS NOT NULL), 0) AS \\"weighted_avg\\",min(
"SELECT sum(greatest(toUInt64OrZero(toString(SampleRate)), 1)) AS "weighted_count",sumIf(toFloat64OrDefault(toString(Duration)) * greatest(toUInt64OrZero(toString(SampleRate)), 1), toFloat64OrDefault(toString(Duration)) IS NOT NULL) / nullIf(sumIf(greatest(toUInt64OrZero(toString(SampleRate)), 1), toFloat64OrDefault(toString(Duration)) IS NOT NULL), 0) AS "weighted_avg",min(
toFloat64OrDefault(toString(Duration))
) AS \\"min_duration\\",count(DISTINCT TraceId) AS \\"unique_traces\\" FROM default.otel_traces WHERE (Timestamp >= fromUnixTimestamp64Milli(1739318400000) AND Timestamp <= fromUnixTimestamp64Milli(1739491200000)) SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) AS "min_duration",count(DISTINCT TraceId) AS "unique_traces" FROM default.otel_traces WHERE (Timestamp >= fromUnixTimestamp64Milli(1739318400000) AND Timestamp <= fromUnixTimestamp64Milli(1739491200000)) SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig sample-weighted aggregations should leave count_distinct unchanged with sampleWeightExpression 1`] = `"SELECT count(DISTINCT TraceId) FROM default.otel_traces WHERE (Timestamp >= fromUnixTimestamp64Milli(1739318400000) AND Timestamp <= fromUnixTimestamp64Milli(1739491200000)) SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"`;
@ -798,7 +798,7 @@ exports[`renderChartConfig should generate sql for a single sum metric 1`] = `
ORDER BY AttributesHash, \`__hdx_time_bucket2\`
) SELECT avg(
toFloat64OrDefault(toString(Rate))
) AS \\"Value\\",toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10 SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
) AS "Value",toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` FROM Bucketed WHERE (\`__hdx_time_bucket2\` >= fromUnixTimestamp64Milli(1739318400000) AND \`__hdx_time_bucket2\` <= fromUnixTimestamp64Milli(1765670400000)) GROUP BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` ORDER BY toStartOfInterval(toDateTime(\`__hdx_time_bucket2\`), INTERVAL 5 minute) AS \`__hdx_time_bucket\` LIMIT 10 SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"
`;
exports[`renderChartConfig should not generate invalid SQL when primary key wraps toStartOfInterval 1`] = `"SELECT timestamp, cluster_id, service_id FROM default.http_request_logs WHERE (timestamp >= fromUnixTimestamp64Milli(1739319154000) AND timestamp <= fromUnixTimestamp64Milli(1739491954000)) LIMIT 200 OFFSET 0 SETTINGS optimize_read_in_order = 0, cast_keep_nullable = 1, additional_result_filter = 'x != 2', count_distinct_implementation = 'uniqCombined64', async_insert_busy_timeout_min_ms = 20000"`;


@ -394,6 +394,13 @@ describe('processClickhouseSettings - optimization settings', () => {
};
beforeEach(() => {
// Suppress expected console noise from permission check fallbacks
// and ClickHouse query debug logging. These must be re-applied each
// test because afterEach calls restoreAllMocks.
jest.spyOn(console, 'debug').mockImplementation(() => {});
jest.spyOn(console, 'info').mockImplementation(() => {});
jest.spyOn(console, 'warn').mockImplementation(() => {});
jest.spyOn(console, 'error').mockImplementation(() => {});
const setup = createClient();
client = setup.client;
mockQueryMethod = setup.mockQueryMethod;


@ -41,6 +41,11 @@ describe('Metadata Integration Tests', () => {
});
});
afterAll(async () => {
await hdxClient.close();
await client.close();
});
describe('getKeyValues', () => {
let metadata: Metadata;
const chartConfig: ChartConfigWithDateRange = {
@ -89,8 +94,6 @@ describe('Metadata Integration Tests', () => {
await client.command({
query: 'DROP TABLE IF EXISTS default.test_table',
});
await client.close();
});
describe.each([true, false])('with disableRowLimit=%s', disableRowLimit => {


@ -35,6 +35,16 @@ const source: TSource = {
],
};
// Suppress expected console.warn/error noise from permission checks,
// distributed table fallbacks, and column parsing edge cases
beforeAll(() => {
jest.spyOn(console, 'warn').mockImplementation(() => {});
jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
describe('MetadataCache', () => {
let metadataCache: MetadataCache;


@ -7,6 +7,16 @@ import {
SearchQueryBuilder,
} from '@/queryParser';
// Suppress expected console.error/warn noise from mocked setting fetches
// and parse failures in edge-case tests
beforeAll(() => {
jest.spyOn(console, 'warn').mockImplementation(() => {});
jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
describe('CustomSchemaSQLSerializerV2 - json', () => {
const metadata = getMetadata(
new ClickhouseClient({ host: 'http://localhost:8123' }),


@ -16,6 +16,15 @@ import {
describe('renderChartConfig', () => {
let mockMetadata: jest.Mocked<Metadata>;
// Suppress expected console.warn noise from missing columns / optimization fallbacks
beforeAll(() => {
jest.spyOn(console, 'warn').mockImplementation(() => {});
jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
beforeEach(() => {
const columns = [
{ name: 'timestamp', type: 'DateTime' },


@ -144,6 +144,7 @@ describe('sample-weighted aggregations (integration)', () => {
await client.command({
query: `DROP TABLE IF EXISTS ${DB}.${MAIN_TABLE}`,
});
await hdxClient.close();
await client.close();
});


@ -33,6 +33,16 @@ import {
} from '../core/utils';
describe('utils', () => {
// Suppress expected console.error noise from invalid text index types,
// unknown tokenizers, and distributed table parsing edge cases
beforeAll(() => {
jest.spyOn(console, 'warn').mockImplementation(() => {});
jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
describe('formatDate', () => {
it('12h utc', () => {
const date = new Date('2021-01-01T12:00:00Z');


@ -5,6 +5,14 @@ import {
} from '..';
describe('extractColumnReferencesFromKey', () => {
// Suppress expected console.error from parse failures in edge-case tests
beforeAll(() => {
jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
it('should extract column references from simple column names', () => {
expect(extractColumnReferencesFromKey('col1, col2, col3')).toEqual([
'col1',


@ -482,6 +482,10 @@ export abstract class BaseClickhouseClient {
return this.client;
}
async close(): Promise<void> {
await this.client?.close();
}
protected logDebugQuery(
query: string,
query_params: Record<string, any> = {},


@ -7,6 +7,7 @@
"@/*": ["./*"]
},
"declaration": true,
"isolatedModules": true,
"outDir": "dist"
},
"include": ["src"],

yarn.lock (1177): file diff suppressed because it is too large