Chart alerts: Add support for chart alerts in alerts API (#104)

Add support for chart alerts in alerts API
This commit is contained in:
Shorpo 2023-11-15 20:59:32 -07:00 committed by GitHub
parent 2fcd167540
commit 0824ae76d2
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
26 changed files with 434 additions and 282 deletions

View file

@ -0,0 +1,6 @@
---
'@hyperdx/api': minor
'@hyperdx/app': minor
---
API: Add support for chart alerts

View file

@ -563,7 +563,7 @@ export const buildMetricsPropertyTypeMappingsModel = async (
// TODO: move this to PropertyTypeMappingsModel
export const doesLogsPropertyExist = (
property: string,
property: string | undefined,
model: LogsPropertyTypeMappingsModel,
) => {
if (!property) {
@ -1119,6 +1119,12 @@ export const getSessions = async ({
.map(props => buildCustomColumn(props[0], props[1]))
.map(column => SqlString.raw(column));
const componentField = buildSearchColumnName('string', 'component');
const sessionIdField = buildSearchColumnName('string', 'rum_session_id');
if (!componentField || !sessionIdField) {
throw new Error('component or sessionId is null');
}
const sessionsWithSearchQuery = SqlString.format(
`SELECT
MAX(timestamp) AS maxTimestamp,
@ -1139,8 +1145,8 @@ export const getSessions = async ({
ORDER BY maxTimestamp DESC
LIMIT ?, ?`,
[
SqlString.raw(buildSearchColumnName('string', 'component')),
SqlString.raw(buildSearchColumnName('string', 'rum_session_id')),
SqlString.raw(componentField),
SqlString.raw(sessionIdField),
columns,
tableName,
buildTeamLogStreamWhereCondition(tableVersion, teamId),
@ -1639,8 +1645,6 @@ export const getLogBatchGroupedByBody = async ({
span.end();
},
);
return result;
};
export const getLogBatch = async ({
@ -1711,6 +1715,7 @@ export const getLogBatch = async ({
span.end();
});
// @ts-ignore
return result;
};
@ -1769,6 +1774,7 @@ export const getRrwebEvents = async ({
span.end();
});
// @ts-ignore
return resultSet.stream();
};
@ -1839,5 +1845,6 @@ export const getLogStream = async ({
}
});
// @ts-ignore
return resultSet.stream();
};

View file

@ -164,7 +164,7 @@ export class SQLSerializer implements Serializer {
let propertyType = this.propertyTypeMapModel.get(field);
// TODO: Deal with ambiguous fields
let column: string = field;
let column: string | null = field;
// refresh cache if property not found
if (propertyType == null && !this.alreadyRefrehPropertyTypeMapModel) {
this.alreadyRefrehPropertyTypeMapModel = true;

View file

@ -0,0 +1,103 @@
import { getHours, getMinutes } from 'date-fns';
import Alert, {
IAlert,
AlertChannel,
AlertInterval,
AlertType,
AlertSource,
} from '../models/alert';
import * as clickhouse from '../clickhouse';
import { SQLSerializer } from '../clickhouse/searchQueryParser';
import ms from 'ms';
// Validated payload for creating or updating an alert. The `source` field
// decides which of the optional id fields below are meaningful.
// NOTE(review): this could be modeled as a discriminated union on `source`
// (mirroring the zod schemas in the alerts router) so invalid field
// combinations are unrepresentable — confirm callers before tightening.
export type AlertInput = {
  source: AlertSource;
  channel: AlertChannel;
  interval: AlertInterval;
  type: AlertType;
  threshold: number;
  // Log alerts
  groupBy?: string;
  logViewId?: string;
  // Chart alerts
  dashboardId?: string;
  chartId?: string;
};
// Derives a cron expression from an alert check interval. Sub-hourly
// intervals map to fixed cron steps; hourly-and-longer intervals are
// staggered using the current minute/hour so alerts created at different
// times don't all fire simultaneously.
// NOTE(review): the stagger uses server-local minutes/hours while the alert
// is stored with timezone 'UTC' — confirm this mix is intentional.
const getCron = (interval: AlertInterval) => {
  const createdAt = new Date();
  const minuteMark = createdAt.getMinutes();
  const hourMark = createdAt.getHours();
  switch (interval) {
    case '1m':
      return '* * * * *';
    case '5m':
      return '*/5 * * * *';
    case '15m':
      return '*/15 * * * *';
    case '30m':
      return '*/30 * * * *';
    case '1h':
      return `${minuteMark} * * * *`;
    case '6h':
      return `${minuteMark} */6 * * *`;
    case '12h':
      return `${minuteMark} */12 * * *`;
    case '1d':
      return `${minuteMark} ${hourMark} * * *`;
  }
};
/**
 * Checks that a log-alert `groupBy` field resolves to a known column in the
 * team's log property type mappings (built from the trailing 24h of data).
 *
 * @returns true when the serializer can resolve the field, false otherwise.
 */
export const validateGroupByProperty = async ({
  groupBy,
  logStreamTableVersion,
  teamId,
}: {
  groupBy: string;
  logStreamTableVersion: number | undefined;
  teamId: string;
}): Promise<boolean> => {
  const now = Date.now();
  // Only the last day of mappings is consulted when resolving the field.
  const mappingsModel = await clickhouse.buildLogsPropertyTypeMappingsModel(
    logStreamTableVersion,
    teamId,
    now - ms('1d'),
    now,
  );
  const serializer = new SQLSerializer(mappingsModel);
  const { found } = await serializer.getColumnForField(groupBy);
  return Boolean(found);
};
/**
 * Translates a validated AlertInput payload into the shape persisted by the
 * Alert model.
 *
 * Optional fields are normalized from `undefined` to `null` so that
 * `Alert.findByIdAndUpdate` actually clears them: Mongoose skips `undefined`
 * values in an update document, which would otherwise leave stale
 * `groupBy`/`logView`/`dashboardId`/`chartId` values behind when an alert is
 * switched between LOG and CHART sources (the previous router implementation
 * explicitly wrote `groupBy ?? null` for the same reason).
 */
const makeAlert = (alert: AlertInput) => {
  return {
    source: alert.source,
    channel: alert.channel,
    interval: alert.interval,
    type: alert.type,
    threshold: alert.threshold,
    // Log alerts
    logView: alert.logViewId ?? null,
    groupBy: alert.groupBy ?? null,
    // Chart alerts
    dashboardId: alert.dashboardId ?? null,
    chartId: alert.chartId ?? null,
    // Schedule is derived once, at create/update time
    cron: getCron(alert.interval),
    timezone: 'UTC', // TODO: support different timezone
  };
};
/**
 * Persists a new alert document built from the validated input.
 */
export const createAlert = async (alertInput: AlertInput) => {
  const doc = new Alert(makeAlert(alertInput));
  return doc.save();
};
/**
 * Applies the validated input to an existing alert and returns the
 * post-update document (`returnDocument: 'after'`).
 */
export const updateAlert = async (id: string, alertInput: AlertInput) => {
  const update = makeAlert(alertInput);
  return Alert.findByIdAndUpdate(id, update, { returnDocument: 'after' });
};

View file

@ -3,9 +3,15 @@ import request from 'supertest';
import * as config from './config';
import Server from './server';
import { createTeam } from './controllers/team';
import { createTeam, getTeam } from './controllers/team';
import { findUserByEmail } from './controllers/user';
import { mongooseConnection } from './models';
// Credentials shared by all fixture logins; the account is registered fresh
// by getLoggedInAgent for each test run.
const MOCK_USER = {
  email: 'fake@deploysentinel.com',
  password: 'TacoCat!2#4X',
};
export const connectDB = async () => {
if (!config.IS_CI) {
throw new Error('ONLY execute this in CI env 😈 !!!');
@ -63,3 +69,34 @@ export const getServer = () => new MockServer();
export const getAgent = (server: MockServer) =>
request.agent(server.getHttpServer());
/**
 * Registers MOCK_USER against the given server and returns a supertest agent
 * holding an authenticated session, plus the created user and team documents.
 *
 * The first register attempt intentionally uses a mismatched
 * `confirmPassword` to exercise the validation path before the real signup.
 *
 * @throws Error if the user or team cannot be found after registration.
 */
export const getLoggedInAgent = async (server: MockServer) => {
  const agent = getAgent(server);

  // Mismatched confirmation must be rejected...
  await agent
    .post('/register/password')
    .send({ ...MOCK_USER, confirmPassword: 'wrong-password' })
    .expect(400);
  // ...and a matching confirmation creates the account.
  await agent
    .post('/register/password')
    .send({ ...MOCK_USER, confirmPassword: MOCK_USER.password })
    .expect(200);

  // Resolve the user BEFORE dereferencing user.team so getTeam is never
  // called with an undefined id (previous version did `user?.team as any`
  // and only null-checked afterwards).
  const user = await findUserByEmail(MOCK_USER.email);
  if (user == null) {
    throw new Error('user not found');
  }
  const team = await getTeam(user.team as any);
  if (team == null) {
    throw new Error('team not found');
  }

  // NOTE(review): this save() writes no new fields; presumably kept to run
  // model save hooks — confirm it is still required.
  await user.save();

  // login app
  await agent.post('/login/password').send(MOCK_USER).expect(302);

  return {
    agent,
    team,
    user,
  };
};

View file

@ -18,7 +18,7 @@ process.on('uncaughtException', (err: Error) => {
}
});
process.on('unhandledRejection', (err: Error) => {
process.on('unhandledRejection', (err: any) => {
// TODO: do we want to throw here ?
logger.error(serializeError(err));
});

View file

@ -27,10 +27,7 @@ export type AlertChannel = {
webhookId: string;
};
export enum AlertSource {
LOG = 'LOG',
CHART = 'CHART',
}
export type AlertSource = 'LOG' | 'CHART';
export interface IAlert {
_id: ObjectId;
@ -83,9 +80,8 @@ const AlertSchema = new Schema<IAlert>(
},
source: {
type: String,
enum: AlertSource,
required: false,
default: AlertSource.LOG,
default: 'LOG',
},
// Log alerts

View file

@ -34,9 +34,9 @@ const bulkInsert = async (
);
break;
default: {
const rrwebEvents = [];
const logs = [];
for (const log of data) {
const rrwebEvents: VectorLog[] = [];
const logs: VectorLog[] = [];
for (const log of data as VectorLog[]) {
if (log.hdx_platform === 'rrweb') {
rrwebEvents.push(log);
} else {
@ -47,14 +47,12 @@ const bulkInsert = async (
bulkInsertTeamLogStream(
team.logStreamTableVersion,
team._id.toString(),
vectorLogParser.parse(logs as VectorLog[]),
vectorLogParser.parse(logs),
),
];
if (rrwebEvents.length > 0) {
promises.push(
bulkInsertRrwebEvents(
vectorRrwebParser.parse(rrwebEvents as VectorLog[]),
),
bulkInsertRrwebEvents(vectorRrwebParser.parse(rrwebEvents)),
);
}
await Promise.all(promises);

View file

@ -0,0 +1,118 @@
import {
closeDB,
clearDBCollections,
getLoggedInAgent,
getServer,
} from '../../../fixtures';
// Short pseudo-random id (lowercase base-36 digits), good enough for
// uniquely tagging fixture charts within a test run.
const randomId = () => Math.random().toString(36).slice(7);
// Builds a minimal 1x1 dashboard chart fixture with a unique id and a single
// time series over the metrics table.
const makeChart = () => {
  const series = [
    {
      type: 'time',
      table: 'metrics',
    },
  ];
  return {
    id: randomId(),
    name: 'Test Chart',
    x: 1,
    y: 1,
    w: 1,
    h: 1,
    series,
  };
};
// Builds a CHART-source alert payload targeting the given dashboard/chart
// pair, using a fixed webhook channel and 15m/presence/8 settings.
const makeAlert = ({
  dashboardId,
  chartId,
}: {
  dashboardId: string;
  chartId: string;
}) => {
  const channel = {
    type: 'webhook',
    webhookId: 'test-webhook-id',
  };
  return {
    channel,
    interval: '15m',
    threshold: 8,
    type: 'presence',
    source: 'CHART',
    dashboardId,
    chartId,
  };
};
// Dashboard fixture with five distinct charts so chart-deletion tests have
// several attached alerts to verify against.
const MOCK_DASHBOARD = {
  name: 'Test Dashboard',
  charts: [makeChart(), makeChart(), makeChart(), makeChart(), makeChart()],
  query: 'test query',
};
// Integration test: removing a chart from a dashboard must also delete the
// alerts attached to that chart.
describe('dashboard router', () => {
  const server = getServer();

  it('deletes attached alerts when deleting charts', async () => {
    const { agent } = await getLoggedInAgent(server);

    await agent.post('/dashboards').send(MOCK_DASHBOARD).expect(200);
    const initialDashboards = await agent.get('/dashboards').expect(200);

    // Create alerts for all charts
    const dashboard = initialDashboards.body.data[0];
    await Promise.all(
      dashboard.charts.map(chart =>
        agent
          .post('/alerts')
          .send(
            makeAlert({
              dashboardId: dashboard._id,
              chartId: chart.id,
            }),
          )
          .expect(200),
      ),
    );

    const dashboards = await agent.get(`/dashboards`).expect(200);

    // Make sure all alerts are attached to the dashboard charts
    // (compare sorted chart-id lists on both sides)
    const allCharts = dashboard.charts.map(chart => chart.id).sort();
    const chartsWithAlerts = dashboards.body.data[0].alerts
      .map(alert => alert.chartId)
      .sort();
    expect(allCharts).toEqual(chartsWithAlerts);

    // Delete the first chart
    await agent
      .put(`/dashboards/${dashboard._id}`)
      .send({
        ...dashboard,
        charts: dashboard.charts.slice(1),
      })
      .expect(200);

    const dashboardPostDelete = (await agent.get(`/dashboards`).expect(200))
      .body.data[0];

    // Make sure all alerts are attached to the dashboard charts
    // (the alert for the removed chart should have been deleted as well)
    const allChartsPostDelete = dashboardPostDelete.charts
      .map(chart => chart.id)
      .sort();
    const chartsWithAlertsPostDelete = dashboardPostDelete.alerts
      .map(alert => alert.chartId)
      .sort();
    expect(allChartsPostDelete).toEqual(chartsWithAlertsPostDelete);
  });

  // Lifecycle hooks are declared after the test body; Jest registers them
  // for the whole describe block regardless of ordering.
  beforeAll(async () => {
    await server.start();
  });

  afterEach(async () => {
    await clearDBCollections();
  });

  afterAll(async () => {
    await server.closeHttpServer();
    await closeDB();
  });
});

View file

@ -3,51 +3,13 @@ import _ from 'lodash';
import {
clearDBCollections,
closeDB,
getAgent,
getLoggedInAgent,
getServer,
} from '../../../fixtures';
import { getTeam } from '../../../controllers/team';
import { findUserByEmail } from '../../../controllers/user';
const MOCK_USER = {
email: 'fake@deploysentinel.com',
password: 'TacoCat!2#4X',
};
describe('team router', () => {
const server = getServer();
const login = async () => {
const agent = getAgent(server);
await agent
.post('/register/password')
.send({ ...MOCK_USER, confirmPassword: 'wrong-password' })
.expect(400);
await agent
.post('/register/password')
.send({ ...MOCK_USER, confirmPassword: MOCK_USER.password })
.expect(200);
const user = await findUserByEmail(MOCK_USER.email);
const team = await getTeam(user?.team as any);
if (team === null || user === null) {
throw Error('team or user not found');
}
await user.save();
// login app
await agent.post('/login/password').send(MOCK_USER).expect(302);
return {
agent,
team,
user,
};
};
beforeAll(async () => {
await server.start();
});
@ -62,7 +24,7 @@ describe('team router', () => {
});
it('GET /team', async () => {
const { agent } = await login();
const { agent } = await getLoggedInAgent(server);
const resp = await agent.get('/team').expect(200);

View file

@ -1,210 +1,111 @@
import express from 'express';
import ms from 'ms';
import { getHours, getMinutes } from 'date-fns';
import { z } from 'zod';
import { validateRequest } from 'zod-express-middleware';
import Alert, {
AlertChannel,
AlertInterval,
AlertType,
AlertSource,
} from '../../models/alert';
import * as clickhouse from '../../clickhouse';
import { SQLSerializer } from '../../clickhouse/searchQueryParser';
import Alert from '../../models/alert';
import { getTeam } from '../../controllers/team';
import { isUserAuthenticated } from '../../middleware/auth';
import {
createAlert,
updateAlert,
validateGroupByProperty,
} from '../../controllers/alerts';
const router = express.Router();
const getCron = (interval: AlertInterval) => {
const now = new Date();
const nowMins = getMinutes(now);
const nowHours = getHours(now);
// Input validation
// Notification channel payload; only webhook channels are accepted here.
const zChannel = z.object({
  type: z.literal('webhook'),
  webhookId: z.string().min(1),
});
switch (interval) {
case '1m':
return '* * * * *';
case '5m':
return '*/5 * * * *';
case '15m':
return '*/15 * * * *';
case '30m':
return '*/30 * * * *';
case '1h':
return `${nowMins} * * * *`;
case '6h':
return `${nowMins} */6 * * *`;
case '12h':
return `${nowMins} */12 * * *`;
case '1d':
return `${nowMins} ${nowHours} * * *`;
}
};
// Log-sourced alert variant: tied to a saved log view, optionally grouped.
// NOTE(review): `message` is accepted here but not mapped by the alerts
// controller's makeAlert — confirm whether it is still used.
const zLogAlert = z.object({
  source: z.literal('LOG'),
  groupBy: z.string().optional(),
  logViewId: z.string().min(1),
  message: z.string().optional(),
});
const createAlert = async ({
channel,
groupBy,
interval,
logViewId,
threshold,
type,
}: {
channel: AlertChannel;
groupBy?: string;
interval: AlertInterval;
logViewId: string;
threshold: number;
type: AlertType;
}) => {
return new Alert({
channel,
cron: getCron(interval),
groupBy,
interval,
source: AlertSource.LOG,
logView: logViewId,
threshold,
timezone: 'UTC', // TODO: support different timezone
type,
}).save();
};
// Chart-sourced alert variant: tied to a specific chart on a dashboard.
const zChartAlert = z.object({
  source: z.literal('CHART'),
  chartId: z.string().min(1),
  dashboardId: z.string().min(1),
});
// create an update alert function based off of the above create alert function
const updateAlert = async ({
channel,
groupBy,
id,
interval,
logViewId,
threshold,
type,
}: {
channel: AlertChannel;
groupBy?: string;
id: string;
interval: AlertInterval;
logViewId: string;
threshold: number;
type: AlertType;
}) => {
return Alert.findByIdAndUpdate(
id,
{
channel,
cron: getCron(interval),
groupBy: groupBy ?? null,
interval,
source: AlertSource.LOG,
logView: logViewId,
threshold,
timezone: 'UTC', // TODO: support different timezone
type,
},
{
returnDocument: 'after',
},
);
};
// Full alert payload: common fields intersected with the source-specific
// variant (LOG or CHART), discriminated by `source`.
// NOTE(review): `.and(zLogAlert.or(zChartAlert))` could be expressed as
// z.discriminatedUnion('source', ...) for clearer validation errors —
// confirm the installed zod version supports it before changing.
const zAlert = z
  .object({
    channel: zChannel,
    interval: z.enum(['1m', '5m', '15m', '30m', '1h', '6h', '12h', '1d']),
    threshold: z.number().min(1),
    type: z.enum(['presence', 'absence']),
    source: z.enum(['LOG', 'CHART']).default('LOG'),
  })
  .and(zLogAlert.or(zChartAlert));
router.post('/', isUserAuthenticated, async (req, res, next) => {
try {
const zAlertInput = zAlert;
// Validate groupBy property
const validateGroupBy = async (req, res, next) => {
const { groupBy, source } = req.body || {};
if (source === 'LOG' && groupBy) {
const teamId = req.user?.team;
const { channel, groupBy, interval, logViewId, threshold, type } = req.body;
if (teamId == null) {
return res.sendStatus(403);
}
if (!channel || !threshold || !interval || !type) {
return res.sendStatus(400);
}
if (!['slack', 'email', 'pagerduty', 'webhook'].includes(channel.type)) {
return res.sendStatus(400);
}
const team = await getTeam(teamId);
if (team == null) {
return res.sendStatus(403);
}
// validate groupBy property
if (groupBy) {
const nowInMs = Date.now();
const propertyTypeMappingsModel =
await clickhouse.buildLogsPropertyTypeMappingsModel(
team.logStreamTableVersion,
teamId.toString(),
nowInMs - ms('1d'),
nowInMs,
);
const serializer = new SQLSerializer(propertyTypeMappingsModel);
const { found } = await serializer.getColumnForField(groupBy);
if (!found) {
return res.sendStatus(400);
}
}
res.json({
data: await createAlert({
channel,
groupBy,
interval,
logViewId,
threshold,
type,
}),
// Validate groupBy property
const groupByValid = await validateGroupByProperty({
groupBy,
logStreamTableVersion: team.logStreamTableVersion,
teamId: teamId.toString(),
});
} catch (e) {
next(e);
if (!groupByValid) {
return res.status(400).json({
error: 'Invalid groupBy property',
});
}
}
});
next();
};
router.put('/:id', isUserAuthenticated, async (req, res, next) => {
try {
const teamId = req.user?.team;
const { id: alertId } = req.params;
const { channel, interval, logViewId, threshold, type, groupBy } = req.body;
if (teamId == null) {
return res.sendStatus(403);
}
if (!channel || !threshold || !interval || !type || !alertId) {
return res.sendStatus(400);
// Routes
router.post(
'/',
isUserAuthenticated,
validateRequest({ body: zAlertInput }),
validateGroupBy,
async (req, res, next) => {
try {
const alertInput = req.body;
return res.json({
data: await createAlert(alertInput),
});
} catch (e) {
next(e);
}
},
);
const team = await getTeam(teamId);
if (team == null) {
return res.sendStatus(403);
router.put(
'/:id',
isUserAuthenticated,
validateRequest({ body: zAlertInput }),
validateGroupBy,
async (req, res, next) => {
try {
const { id } = req.params;
const alertInput = req.body;
res.json({
data: await updateAlert(id, alertInput),
});
} catch (e) {
next(e);
}
// validate groupBy property
if (groupBy) {
const nowInMs = Date.now();
const propertyTypeMappingsModel =
await clickhouse.buildLogsPropertyTypeMappingsModel(
team.logStreamTableVersion,
teamId.toString(),
nowInMs - ms('1d'),
nowInMs,
);
const serializer = new SQLSerializer(propertyTypeMappingsModel);
const { found } = await serializer.getColumnForField(groupBy);
if (!found) {
return res.sendStatus(400);
}
}
res.json({
data: await updateAlert({
channel,
groupBy,
id: alertId,
interval,
logViewId,
threshold,
type,
}),
});
} catch (e) {
next(e);
}
});
},
);
router.delete('/:id', isUserAuthenticated, async (req, res, next) => {
try {

View file

@ -5,7 +5,7 @@ import Alert from '../../models/alert';
import { isUserAuthenticated } from '../../middleware/auth';
import { validateRequest } from 'zod-express-middleware';
import { z } from 'zod';
import { groupBy } from 'lodash';
import { groupBy, differenceBy } from 'lodash';
// create routes that will get and update dashboards
const router = express.Router();
@ -129,6 +129,7 @@ router.put(
const { name, charts, query } = req.body ?? {};
// Update dashboard from name and charts
const oldDashboard = await Dashboard.findById(dashboardId);
const updatedDashboard = await Dashboard.findByIdAndUpdate(
dashboardId,
{
@ -139,6 +140,20 @@ router.put(
{ new: true },
);
// Delete related alerts
const deletedChartIds = differenceBy(
oldDashboard?.charts || [],
updatedDashboard?.charts || [],
'id',
).map(c => c.id);
if (deletedChartIds?.length > 0) {
await Alert.deleteMany({
dashboardId: dashboardId,
chartId: { $in: deletedChartIds },
});
}
res.json({
data: updatedDashboard,
});

View file

@ -60,8 +60,8 @@ router.get('/', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
@ -109,10 +109,12 @@ router.get('/patterns', isUserAuthenticated, async (req, res, next) => {
teamId: teamId.toString(),
});
// @ts-expect-error
if (logs.data.length === 0) {
return res.json({ data: [] });
}
// @ts-expect-error
// use the 1st id as the representative id
const lines = logs.data.map(log => [log.ids[0], log.body]);
// TODO: separate patterns by service
@ -134,6 +136,7 @@ router.get('/patterns', isUserAuthenticated, async (req, res, next) => {
trends: Record<string, number>;
}
> = {};
// @ts-expect-error
for (const log of logs.data) {
const patternId = logsPatternsData.result[log.ids[0]];
if (patternId) {
@ -200,8 +203,8 @@ router.get('/patterns', isUserAuthenticated, async (req, res, next) => {
});
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
@ -276,8 +279,8 @@ router.get('/stream', isUserAuthenticated, async (req, res, next) => {
}
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
// WARNING: no need to call next(e) here, as the stream will be closed
logger.error({
message: 'Error streaming logs',
@ -321,8 +324,8 @@ router.get(
});
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
},
@ -359,8 +362,8 @@ router.get('/chart/histogram', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
});
@ -433,11 +436,14 @@ router.get(
await clickhouse.getLogsChart({
aggFn,
endTime: endTimeNum,
// @ts-expect-error
field,
granularity,
// @ts-expect-error
groupBy,
maxNumGroups: MAX_NUM_GROUPS,
propertyTypeMappingsModel,
// @ts-expect-error
q,
sortOrder,
startTime: startTimeNum,
@ -447,8 +453,8 @@ router.get(
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
},
@ -483,8 +489,8 @@ router.get('/histogram', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
@ -517,8 +523,8 @@ router.get('/:id', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}

View file

@ -17,8 +17,8 @@ router.get('/tags', isUserAuthenticated, async (req, res, next) => {
res.json(await clickhouse.getMetricsTags(teamId.toString()));
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
});
@ -59,8 +59,8 @@ router.post('/chart', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
});

View file

@ -41,8 +41,8 @@ router.get('/', isUserAuthenticated, async (req, res, next) => {
);
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
next(e);
}
@ -96,8 +96,8 @@ router.get('/:sessionId/rrweb', isUserAuthenticated, async (req, res, next) => {
});
} catch (e) {
const span = opentelemetry.trace.getActiveSpan();
span.recordException(e as Error);
span.setStatus({ code: SpanStatusCode.ERROR });
span?.recordException(e as Error);
span?.setStatus({ code: SpanStatusCode.ERROR });
// WARNING: no need to call next(e) here, as the stream will be closed
logger.error({
message: 'Error while streaming rrweb events',

View file

@ -9,7 +9,7 @@ import redisClient from './utils/redis';
import { connectDB, mongooseConnection } from './models';
export default class Server {
protected httpServer: http.Server;
protected httpServer!: http.Server;
private async createServer() {
switch (config.APP_TYPE) {

View file

@ -161,7 +161,7 @@ const fireChannelEvent = async ({
_id: alert.channel.webhookId,
});
// ONLY SUPPORTS SLACK WEBHOOKS FOR NOW
if (webhook.service === 'slack') {
if (webhook?.service === 'slack') {
await slack.postMessageToWebhook(
webhook.url,
buildEventSlackMessage({
@ -200,7 +200,7 @@ export const roundDownToXMinutes = (x: number) => roundDownTo(1000 * 60 * x);
const processAlert = async (now: Date, alert: IAlert) => {
try {
if (alert.source === AlertSource.CHART || !alert.logView) {
if (alert.source === 'CHART' || !alert.logView) {
logger.info({
message: `[Not implemented] Skipping Chart alert processing`,
alert,

View file

@ -77,7 +77,7 @@ process.on('uncaughtException', (err: Error) => {
process.exit(1);
});
process.on('unhandledRejection', (err: Error) => {
process.on('unhandledRejection', (err: any) => {
console.log(err);
logger.error(serializeError(err));
process.exit(1);

View file

@ -15,7 +15,7 @@ export default async () => {
const nowInMs = Date.now();
const teams = await Team.find({});
let c = 0;
const promises = [];
const promises: Promise<any>[] = [];
for (const team of teams) {
if (c >= MAX_PROCESS_TEAMS) {
logger.info(`${LOG_PREFIX} Processed ${c} teams, exiting...`);

View file

@ -13,8 +13,8 @@ describe('logParser', () => {
},
},
});
const keys = [];
const values = [];
const keys: any[] = [];
const values: any[] = [];
for (const [key, value] of jsonIt) {
keys.push(key);
values.push(value);

View file

@ -7,8 +7,8 @@ describe('validators', () => {
});
it('should return false if password is invalid', () => {
expect(validators.validatePassword(null)).toBe(false);
expect(validators.validatePassword(undefined)).toBe(false);
expect(validators.validatePassword(null!)).toBe(false);
expect(validators.validatePassword(undefined!)).toBe(false);
expect(validators.validatePassword('')).toBe(false);
expect(validators.validatePassword('1234567')).toBe(false);
expect(validators.validatePassword('a'.repeat(65))).toBe(false);

View file

@ -1,6 +1,6 @@
export const useTry = <T>(fn: () => T): [null | Error | unknown, null | T] => {
let output = null;
let error = null;
let output: T | null = null;
let error: any = null;
try {
output = fn();
return [error, output];

View file

@ -225,7 +225,7 @@ abstract class ParsingInterface<T> {
): LogStreamModel | MetricModel | RrwebEventModel;
parse(logs: T[], ...args: any[]) {
const parsedLogs = [];
const parsedLogs: any[] = [];
for (const log of logs) {
try {
parsedLogs.push(this._parse(log, ...args));

View file

@ -18,7 +18,6 @@
"skipLibCheck": false,
"sourceMap": true,
"strict": true,
"strictNullChecks": false,
"target": "ES2022"
},
"include": ["src"]

View file

@ -114,7 +114,7 @@ function AlertForm({
id="threshold"
size="sm"
defaultValue={1}
{...register('threshold')}
{...register('threshold', { valueAsNumber: true })}
/>
<div className="me-2 mb-2">lines appear within</div>
<div className="me-2 mb-2">
@ -332,6 +332,7 @@ export default function CreateLogAlertModal({
if (savedSearchId != null) {
saveAlert.mutate(
{
source: 'LOG',
type,
threshold,
interval,
@ -388,6 +389,7 @@ export default function CreateLogAlertModal({
updateAlert.mutate(
{
id: selectedAlertId,
source: 'LOG',
type,
threshold,
interval,

View file

@ -370,6 +370,7 @@ const api = {
logViewId: string;
threshold: number;
type: string;
source: 'LOG' | 'CHART';
}
>(`alerts`, async alert =>
server('alerts', {
@ -390,6 +391,7 @@ const api = {
logViewId: string;
threshold: number;
type: string;
source: 'LOG' | 'CHART';
}
>(`alerts`, async alert =>
server(`alerts/${alert.id}`, {