mirror of
https://github.com/hyperdxio/hyperdx
synced 2026-04-21 13:37:15 +00:00
fix: better source validation and refine required source fields (#1895)
## Summary Large refactor changing the TSource type to a true discriminated union. This means that the expected fields for `kind: 'log'` will differ from those for `'trace'`, `'session'`, and `'metrics'`. This replaces the laissez-faire source type that currently exists, and required extensive changes across the api and app packages. Also includes a nice addition to `useSource` - you can now specify a `kind` field, which will properly infer the type of the returned source. This also makes use of discriminators in mongoose. This changes the way that we create and update sources somewhat. Notable changes to sources have also been made, namely making `timestampValueExpression` required on sources. Care has been taken to avoid requiring a migration. ### How to test locally or on Vercel 1. `yarn dev` 2. Play around with the app, especially around source creation, source edits, and loading existing sources from a previous version. ### References - Linear Issue: References HDX-3352 - Related PRs: Ref: HDX-3352
This commit is contained in:
parent
a36b350df8
commit
ce8506478d
89 changed files with 1885 additions and 802 deletions
7
.changeset/six-ways-sell.md
Normal file
7
.changeset/six-ways-sell.md
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
---
|
||||
"@hyperdx/common-utils": patch
|
||||
"@hyperdx/api": patch
|
||||
"@hyperdx/app": patch
|
||||
---
|
||||
|
||||
fix: change sources to discriminated union
|
||||
|
|
@ -1,21 +1,31 @@
|
|||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, SourceSchema } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import { ISource, Source } from '@/models/source';
|
||||
import {
|
||||
ISourceInput,
|
||||
LogSource,
|
||||
MetricSource,
|
||||
SessionSource,
|
||||
Source,
|
||||
TraceSource,
|
||||
} from '@/models/source';
|
||||
|
||||
/**
|
||||
* Clean up metricTables property when changing source type away from Metric.
|
||||
* This prevents metric-specific configuration from persisting when switching
|
||||
* to Log, Trace, or Session sources.
|
||||
*/
|
||||
function cleanSourceData(source: Omit<ISource, 'id'>): Omit<ISource, 'id'> {
|
||||
// Only clean metricTables if the source is not a Metric type
|
||||
if (source.kind !== SourceKind.Metric) {
|
||||
// explicitly setting to null for mongoose to clear column
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
||||
source.metricTables = null as any;
|
||||
// Returns the discriminator model for the given source kind.
|
||||
// Updates must go through the correct discriminator model so Mongoose
|
||||
// recognises kind-specific fields (e.g. metricTables on MetricSource).
|
||||
function getModelForKind(kind: SourceKind) {
|
||||
switch (kind) {
|
||||
case SourceKind.Log:
|
||||
return LogSource;
|
||||
case SourceKind.Trace:
|
||||
return TraceSource;
|
||||
case SourceKind.Session:
|
||||
return SessionSource;
|
||||
case SourceKind.Metric:
|
||||
return MetricSource;
|
||||
default:
|
||||
kind satisfies never;
|
||||
throw new Error(`${kind} is not a valid SourceKind`);
|
||||
}
|
||||
|
||||
return source;
|
||||
}
|
||||
|
||||
export function getSources(team: string) {
|
||||
|
|
@ -26,19 +36,56 @@ export function getSource(team: string, sourceId: string) {
|
|||
return Source.findOne({ _id: sourceId, team });
|
||||
}
|
||||
|
||||
export function createSource(team: string, source: Omit<ISource, 'id'>) {
|
||||
return Source.create({ ...source, team });
|
||||
type DistributiveOmit<T, K extends PropertyKey> = T extends T
|
||||
? Omit<T, K>
|
||||
: never;
|
||||
|
||||
export function createSource(
|
||||
team: string,
|
||||
source: DistributiveOmit<ISourceInput, 'id'>,
|
||||
) {
|
||||
// @ts-expect-error The create method has incompatible type signatures but is actually safe
|
||||
return getModelForKind(source.kind)?.create({ ...source, team });
|
||||
}
|
||||
|
||||
export function updateSource(
|
||||
export async function updateSource(
|
||||
team: string,
|
||||
sourceId: string,
|
||||
source: Omit<ISource, 'id'>,
|
||||
source: DistributiveOmit<ISourceInput, 'id'>,
|
||||
) {
|
||||
const cleanedSource = cleanSourceData(source);
|
||||
return Source.findOneAndUpdate({ _id: sourceId, team }, cleanedSource, {
|
||||
new: true,
|
||||
});
|
||||
const existing = await Source.findOne({ _id: sourceId, team });
|
||||
if (!existing) return null;
|
||||
|
||||
// Same kind: simple update through the discriminator model
|
||||
if (existing.kind === source.kind) {
|
||||
// @ts-expect-error The findOneAndUpdate method has incompatible type signatures but is actually safe
|
||||
return getModelForKind(source.kind)?.findOneAndUpdate(
|
||||
{ _id: sourceId, team },
|
||||
source,
|
||||
{ new: true },
|
||||
);
|
||||
}
|
||||
|
||||
// Kind changed: validate through Zod before writing since the raw
|
||||
// collection bypass skips Mongoose's discriminator validation.
|
||||
const parseResult = SourceSchema.safeParse(source);
|
||||
if (!parseResult.success) {
|
||||
throw new Error(
|
||||
`Invalid source data: ${parseResult.error.errors.map(e => e.message).join(', ')}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Use replaceOne on the raw collection to swap the entire document
|
||||
// in place (including the discriminator key). This is a single atomic
|
||||
// write — the document is never absent from the collection.
|
||||
const replacement = {
|
||||
...parseResult.data,
|
||||
_id: existing._id,
|
||||
team: existing.team,
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
await Source.collection.replaceOne({ _id: existing._id }, replacement);
|
||||
return getModelForKind(replacement.kind)?.hydrate(replacement);
|
||||
}
|
||||
|
||||
export function deleteSource(team: string, sourceId: string) {
|
||||
|
|
|
|||
|
|
@ -1,17 +1,41 @@
|
|||
import {
|
||||
BaseSourceSchema,
|
||||
LogSourceSchema,
|
||||
MetricsDataType,
|
||||
MetricSourceSchema,
|
||||
QuerySettings,
|
||||
SessionSourceSchema,
|
||||
SourceKind,
|
||||
TSource,
|
||||
TraceSourceSchema,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import mongoose, { Schema } from 'mongoose';
|
||||
import z from 'zod';
|
||||
|
||||
type ObjectId = mongoose.Types.ObjectId;
|
||||
import { objectIdSchema } from '@/utils/zod';
|
||||
|
||||
export interface ISource extends Omit<TSource, 'connection'> {
|
||||
team: ObjectId;
|
||||
connection: ObjectId | string;
|
||||
}
|
||||
// ISource is a discriminated union (inherits from TSource) with team added
|
||||
// and connection widened to ObjectId | string for Mongoose.
|
||||
// Omit and & distribute over the union, preserving the discriminated structure.
|
||||
export const ISourceSchema = z.discriminatedUnion('kind', [
|
||||
LogSourceSchema.omit({ connection: true }).extend({
|
||||
team: objectIdSchema,
|
||||
connection: objectIdSchema.or(z.string()),
|
||||
}),
|
||||
TraceSourceSchema.omit({ connection: true }).extend({
|
||||
team: objectIdSchema,
|
||||
connection: objectIdSchema.or(z.string()),
|
||||
}),
|
||||
SessionSourceSchema.omit({ connection: true }).extend({
|
||||
team: objectIdSchema,
|
||||
connection: objectIdSchema.or(z.string()),
|
||||
}),
|
||||
MetricSourceSchema.omit({ connection: true }).extend({
|
||||
team: objectIdSchema,
|
||||
connection: objectIdSchema.or(z.string()),
|
||||
}),
|
||||
]);
|
||||
export type ISource = z.infer<typeof ISourceSchema>;
|
||||
export type ISourceInput = z.input<typeof ISourceSchema>;
|
||||
|
||||
export type SourceDocument = mongoose.HydratedDocument<ISource>;
|
||||
|
||||
|
|
@ -36,91 +60,164 @@ const QuerySetting = new Schema<QuerySettings[number]>(
|
|||
{ _id: false },
|
||||
);
|
||||
|
||||
export const Source = mongoose.model<ISource>(
|
||||
'Source',
|
||||
new Schema<ISource>(
|
||||
{
|
||||
kind: {
|
||||
type: String,
|
||||
enum: Object.values(SourceKind),
|
||||
required: true,
|
||||
},
|
||||
team: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
required: true,
|
||||
ref: 'Team',
|
||||
},
|
||||
from: {
|
||||
databaseName: String,
|
||||
tableName: String,
|
||||
},
|
||||
timestampValueExpression: String,
|
||||
connection: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
required: true,
|
||||
ref: 'Connection',
|
||||
},
|
||||
// --------------------------
|
||||
// Base schema (common fields shared by all source kinds)
|
||||
// --------------------------
|
||||
|
||||
name: String,
|
||||
orderByExpression: String,
|
||||
displayedTimestampValueExpression: String,
|
||||
implicitColumnExpression: String,
|
||||
serviceNameExpression: String,
|
||||
bodyExpression: String,
|
||||
tableFilterExpression: String,
|
||||
eventAttributesExpression: String,
|
||||
resourceAttributesExpression: String,
|
||||
defaultTableSelectExpression: String,
|
||||
uniqueRowIdExpression: String,
|
||||
severityTextExpression: String,
|
||||
traceIdExpression: String,
|
||||
spanIdExpression: String,
|
||||
traceSourceId: String,
|
||||
sessionSourceId: String,
|
||||
metricSourceId: String,
|
||||
type MongooseSourceBase = Omit<
|
||||
z.infer<typeof BaseSourceSchema>,
|
||||
'connection'
|
||||
> & {
|
||||
team: mongoose.Types.ObjectId;
|
||||
connection: mongoose.Types.ObjectId;
|
||||
};
|
||||
|
||||
durationExpression: String,
|
||||
durationPrecision: Number,
|
||||
parentSpanIdExpression: String,
|
||||
spanNameExpression: String,
|
||||
|
||||
logSourceId: String,
|
||||
spanKindExpression: String,
|
||||
statusCodeExpression: String,
|
||||
statusMessageExpression: String,
|
||||
spanEventsValueExpression: String,
|
||||
highlightedTraceAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
highlightedRowAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
materializedViews: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
|
||||
metricTables: {
|
||||
type: {
|
||||
[MetricsDataType.Gauge]: String,
|
||||
[MetricsDataType.Histogram]: String,
|
||||
[MetricsDataType.Sum]: String,
|
||||
[MetricsDataType.Summary]: String,
|
||||
[MetricsDataType.ExponentialHistogram]: String,
|
||||
},
|
||||
default: undefined,
|
||||
},
|
||||
|
||||
querySettings: {
|
||||
type: [QuerySetting],
|
||||
validate: {
|
||||
validator: maxLength(10),
|
||||
message: '{PATH} exceeds the limit of 10',
|
||||
},
|
||||
const sourceBaseSchema = new Schema<MongooseSourceBase>(
|
||||
{
|
||||
team: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
required: true,
|
||||
ref: 'Team',
|
||||
},
|
||||
connection: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
required: true,
|
||||
ref: 'Connection',
|
||||
},
|
||||
name: String,
|
||||
from: {
|
||||
databaseName: String,
|
||||
tableName: String,
|
||||
},
|
||||
timestampValueExpression: String,
|
||||
querySettings: {
|
||||
type: [QuerySetting],
|
||||
validate: {
|
||||
validator: maxLength(10),
|
||||
message: '{PATH} exceeds the limit of 10',
|
||||
},
|
||||
},
|
||||
{
|
||||
toJSON: { virtuals: true },
|
||||
timestamps: true,
|
||||
},
|
||||
),
|
||||
},
|
||||
{
|
||||
discriminatorKey: 'kind',
|
||||
toJSON: { virtuals: true },
|
||||
timestamps: true,
|
||||
},
|
||||
);
|
||||
|
||||
// Model is typed with the base schema type internally. Consumers use ISource
|
||||
// (the discriminated union) via the exported type and discriminator models.
|
||||
const SourceModel = mongoose.model<MongooseSourceBase>(
|
||||
'Source',
|
||||
sourceBaseSchema,
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
||||
export const Source = SourceModel as unknown as mongoose.Model<ISource>;
|
||||
|
||||
// --------------------------
|
||||
// Log discriminator
|
||||
// --------------------------
|
||||
type ILogSource = Extract<ISource, { kind: SourceKind.Log }>;
|
||||
export const LogSource = Source.discriminator<ILogSource>(
|
||||
SourceKind.Log,
|
||||
new Schema<ILogSource>({
|
||||
defaultTableSelectExpression: String,
|
||||
serviceNameExpression: String,
|
||||
severityTextExpression: String,
|
||||
bodyExpression: String,
|
||||
eventAttributesExpression: String,
|
||||
resourceAttributesExpression: String,
|
||||
displayedTimestampValueExpression: String,
|
||||
metricSourceId: String,
|
||||
traceSourceId: String,
|
||||
traceIdExpression: String,
|
||||
spanIdExpression: String,
|
||||
implicitColumnExpression: String,
|
||||
uniqueRowIdExpression: String,
|
||||
tableFilterExpression: String,
|
||||
highlightedTraceAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
highlightedRowAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
materializedViews: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
orderByExpression: String,
|
||||
}),
|
||||
);
|
||||
|
||||
// --------------------------
|
||||
// Trace discriminator
|
||||
// --------------------------
|
||||
type ITraceSource = Extract<ISource, { kind: SourceKind.Trace }>;
|
||||
export const TraceSource = Source.discriminator<ITraceSource>(
|
||||
SourceKind.Trace,
|
||||
new Schema<ITraceSource>({
|
||||
defaultTableSelectExpression: String,
|
||||
durationExpression: String,
|
||||
durationPrecision: Number,
|
||||
traceIdExpression: String,
|
||||
spanIdExpression: String,
|
||||
parentSpanIdExpression: String,
|
||||
spanNameExpression: String,
|
||||
spanKindExpression: String,
|
||||
logSourceId: String,
|
||||
sessionSourceId: String,
|
||||
metricSourceId: String,
|
||||
statusCodeExpression: String,
|
||||
statusMessageExpression: String,
|
||||
serviceNameExpression: String,
|
||||
resourceAttributesExpression: String,
|
||||
eventAttributesExpression: String,
|
||||
spanEventsValueExpression: String,
|
||||
implicitColumnExpression: String,
|
||||
displayedTimestampValueExpression: String,
|
||||
highlightedTraceAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
highlightedRowAttributeExpressions: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
materializedViews: {
|
||||
type: mongoose.Schema.Types.Array,
|
||||
},
|
||||
orderByExpression: String,
|
||||
}),
|
||||
);
|
||||
|
||||
// --------------------------
|
||||
// Session discriminator
|
||||
// --------------------------
|
||||
type ISessionSource = Extract<ISource, { kind: SourceKind.Session }>;
|
||||
export const SessionSource = Source.discriminator<ISessionSource>(
|
||||
SourceKind.Session,
|
||||
new Schema<Extract<ISource, { kind: SourceKind.Session }>>({
|
||||
traceSourceId: String,
|
||||
resourceAttributesExpression: String,
|
||||
}),
|
||||
);
|
||||
|
||||
// --------------------------
|
||||
// Metric discriminator
|
||||
// --------------------------
|
||||
type IMetricSource = Extract<ISource, { kind: SourceKind.Metric }>;
|
||||
export const MetricSource = Source.discriminator<IMetricSource>(
|
||||
SourceKind.Metric,
|
||||
new Schema<Extract<ISource, { kind: SourceKind.Metric }>>({
|
||||
metricTables: {
|
||||
type: {
|
||||
[MetricsDataType.Gauge]: String,
|
||||
[MetricsDataType.Histogram]: String,
|
||||
[MetricsDataType.Sum]: String,
|
||||
[MetricsDataType.Summary]: String,
|
||||
[MetricsDataType.ExponentialHistogram]: String,
|
||||
},
|
||||
default: undefined,
|
||||
},
|
||||
resourceAttributesExpression: String,
|
||||
serviceNameExpression: String,
|
||||
logSourceId: String,
|
||||
}),
|
||||
);
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import {
|
|||
MetricsDataType,
|
||||
PresetDashboard,
|
||||
SourceKind,
|
||||
TSourceUnion,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { omit } from 'lodash';
|
||||
import mongoose, { Types } from 'mongoose';
|
||||
|
|
@ -418,7 +418,7 @@ describe('dashboard router', () => {
|
|||
});
|
||||
|
||||
describe('preset dashboards', () => {
|
||||
const MOCK_SOURCE: Omit<Extract<TSourceUnion, { kind: 'log' }>, 'id'> = {
|
||||
const MOCK_SOURCE: Omit<Extract<TSource, { kind: 'log' }>, 'id'> = {
|
||||
kind: SourceKind.Log,
|
||||
name: 'Test Source',
|
||||
connection: new Types.ObjectId().toString(),
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import { SourceKind, TSourceUnion } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Types } from 'mongoose';
|
||||
|
||||
import { getLoggedInAgent, getServer } from '@/fixtures';
|
||||
import { Source } from '@/models/source';
|
||||
|
||||
const MOCK_SOURCE: Omit<Extract<TSourceUnion, { kind: 'log' }>, 'id'> = {
|
||||
const MOCK_SOURCE: Omit<Extract<TSource, { kind: 'log' }>, 'id'> = {
|
||||
kind: SourceKind.Log,
|
||||
name: 'Test Source',
|
||||
connection: new Types.ObjectId().toString(),
|
||||
|
|
@ -273,7 +273,7 @@ describe('sources router', () => {
|
|||
|
||||
// Verify the metric source has metricTables
|
||||
const createdSource = await Source.findById(metricSource._id).lean();
|
||||
expect(createdSource?.metricTables).toBeDefined();
|
||||
expect(createdSource).toHaveProperty('metricTables');
|
||||
|
||||
// Update the source to a trace source
|
||||
const traceSource = {
|
||||
|
|
@ -303,9 +303,13 @@ describe('sources router', () => {
|
|||
|
||||
// Verify the trace source does NOT have metricTables property
|
||||
const updatedSource = await Source.findById(metricSource._id).lean();
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Trace);
|
||||
expect(updatedSource?.metricTables).toBeNull();
|
||||
expect(updatedSource?.durationExpression).toBe('Duration');
|
||||
if (updatedSource?.kind !== SourceKind.Trace) {
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Trace);
|
||||
throw new Error('Source did not update to trace');
|
||||
}
|
||||
expect(updatedSource.kind).toBe(SourceKind.Trace);
|
||||
expect(updatedSource).not.toHaveProperty('metricTables');
|
||||
expect(updatedSource.durationExpression).toBe('Duration');
|
||||
});
|
||||
|
||||
it('PUT /:id - preserves metricTables when source remains Metric, removes when changed to another type', async () => {
|
||||
|
|
@ -352,8 +356,11 @@ describe('sources router', () => {
|
|||
let updatedSource = await Source.findById(metricSource._id).lean();
|
||||
|
||||
// Verify the metric source still has metricTables with updated values
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Metric);
|
||||
expect(updatedSource?.metricTables).toMatchObject({
|
||||
if (updatedSource?.kind !== SourceKind.Metric) {
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Metric);
|
||||
throw new Error('Source is not a metric');
|
||||
}
|
||||
expect(updatedSource.metricTables).toMatchObject({
|
||||
gauge: 'otel_metrics_gauge_v2',
|
||||
sum: 'otel_metrics_sum_v2',
|
||||
});
|
||||
|
|
@ -378,9 +385,12 @@ describe('sources router', () => {
|
|||
updatedSource = await Source.findById(metricSource._id).lean();
|
||||
|
||||
// Verify the source is now a Log and metricTables is removed
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Log);
|
||||
expect(updatedSource?.metricTables).toBeNull();
|
||||
expect(updatedSource?.severityTextExpression).toBe('SeverityText');
|
||||
if (updatedSource?.kind !== SourceKind.Log) {
|
||||
expect(updatedSource?.kind).toBe(SourceKind.Log);
|
||||
throw new Error('Source did not update to log');
|
||||
}
|
||||
expect(updatedSource).not.toHaveProperty('metricTables');
|
||||
expect(updatedSource.severityTextExpression).toBe('SeverityText');
|
||||
});
|
||||
|
||||
it('DELETE /:id - deletes a source', async () => {
|
||||
|
|
@ -407,4 +417,272 @@ describe('sources router', () => {
|
|||
// This will succeed even if the ID doesn't exist, consistent with the implementation
|
||||
await agent.delete(`/sources/${nonExistentId}`).expect(200);
|
||||
});
|
||||
|
||||
describe('backward compatibility with legacy flat-model documents', () => {
|
||||
// These tests insert documents directly into MongoDB (bypassing Mongoose
|
||||
// validation) to simulate documents created by the old flat Source model,
|
||||
// which stored ALL fields from all source kinds in a single schema.
|
||||
|
||||
it('reads a legacy Session source without timestampValueExpression', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
// Old flat model allowed Session sources without timestampValueExpression
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Session,
|
||||
name: 'Legacy Session',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
traceSourceId: new Types.ObjectId().toString(),
|
||||
// timestampValueExpression intentionally omitted
|
||||
});
|
||||
|
||||
const response = await agent.get('/sources').expect(200);
|
||||
|
||||
expect(response.body).toHaveLength(1);
|
||||
expect(response.body[0].kind).toBe(SourceKind.Session);
|
||||
expect(response.body[0].name).toBe('Legacy Session');
|
||||
// timestampValueExpression should be absent or undefined in response
|
||||
});
|
||||
|
||||
it('reads a legacy Trace source without defaultTableSelectExpression', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
// Old flat model allowed Trace sources without defaultTableSelectExpression
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Legacy Trace',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_traces' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
// defaultTableSelectExpression intentionally omitted
|
||||
});
|
||||
|
||||
const response = await agent.get('/sources').expect(200);
|
||||
|
||||
expect(response.body).toHaveLength(1);
|
||||
expect(response.body[0].kind).toBe(SourceKind.Trace);
|
||||
expect(response.body[0].durationExpression).toBe('Duration');
|
||||
});
|
||||
|
||||
it('reads a legacy Trace source with logSourceId: null', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Trace with null logSourceId',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_traces' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
logSourceId: null, // Old schema allowed .nullable()
|
||||
});
|
||||
|
||||
const response = await agent.get('/sources').expect(200);
|
||||
|
||||
expect(response.body).toHaveLength(1);
|
||||
expect(response.body[0].kind).toBe(SourceKind.Trace);
|
||||
// logSourceId: null should be readable (Mongoose doesn't reject it)
|
||||
expect(response.body[0].logSourceId).toBeNull();
|
||||
});
|
||||
|
||||
it('cross-kind fields from legacy flat-model documents are NOT stripped on internal API read', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
// Old flat model stored ALL fields regardless of kind.
|
||||
// NOTE: Mongoose discriminators do NOT strip unknown/cross-kind fields
|
||||
// from toJSON() output. The discriminator only controls validation on
|
||||
// write — unknown fields stored in MongoDB are still returned on read.
|
||||
// This means the internal API response shape may differ from the external
|
||||
// API (which runs SourceSchema.safeParse() to strip extra fields).
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Log,
|
||||
name: 'Flat Model Log',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_logs' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Body',
|
||||
bodyExpression: 'Body',
|
||||
// These fields belong to other kinds but were stored in old flat model
|
||||
metricTables: { gauge: 'otel_metrics_gauge' },
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
sessionSourceId: 'some-session-id',
|
||||
});
|
||||
|
||||
const response = await agent.get('/sources').expect(200);
|
||||
|
||||
expect(response.body).toHaveLength(1);
|
||||
expect(response.body[0].kind).toBe(SourceKind.Log);
|
||||
expect(response.body[0].bodyExpression).toBe('Body');
|
||||
// Cross-kind fields are still present in the internal API response —
|
||||
// discriminator toJSON does NOT strip them from existing documents.
|
||||
expect(response.body[0]).toHaveProperty('metricTables');
|
||||
expect(response.body[0]).toHaveProperty('durationExpression');
|
||||
});
|
||||
|
||||
it('fails to update a legacy Session source without providing timestampValueExpression', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
const result = await Source.collection.insertOne({
|
||||
kind: SourceKind.Session,
|
||||
name: 'Legacy Session',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
traceSourceId: 'some-trace-source-id',
|
||||
});
|
||||
|
||||
// PUT validation (SourceSchema) requires timestampValueExpression
|
||||
await agent
|
||||
.put(`/sources/${result.insertedId}`)
|
||||
.send({
|
||||
kind: SourceKind.Session,
|
||||
id: result.insertedId.toString(),
|
||||
name: 'Updated Session',
|
||||
connection: new Types.ObjectId().toString(),
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
traceSourceId: 'some-trace-source-id',
|
||||
// timestampValueExpression intentionally omitted
|
||||
})
|
||||
.expect(400);
|
||||
});
|
||||
|
||||
it('successfully updates a legacy Session source when timestampValueExpression is provided', async () => {
|
||||
const { agent, team } = await getLoggedInAgent(server);
|
||||
|
||||
const connectionId = new Types.ObjectId();
|
||||
const result = await Source.collection.insertOne({
|
||||
kind: SourceKind.Session,
|
||||
name: 'Legacy Session',
|
||||
team: team._id,
|
||||
connection: connectionId,
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
traceSourceId: 'some-trace-source-id',
|
||||
});
|
||||
|
||||
await agent
|
||||
.put(`/sources/${result.insertedId}`)
|
||||
.send({
|
||||
kind: SourceKind.Session,
|
||||
id: result.insertedId.toString(),
|
||||
name: 'Updated Session',
|
||||
connection: connectionId.toString(),
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
traceSourceId: 'some-trace-source-id',
|
||||
timestampValueExpression: 'TimestampTime',
|
||||
})
|
||||
.expect(200);
|
||||
|
||||
const updated = await Source.findById(result.insertedId);
|
||||
expect(updated?.name).toBe('Updated Session');
|
||||
expect(updated?.timestampValueExpression).toBe('TimestampTime');
|
||||
});
|
||||
|
||||
it('cross-kind fields persist in both raw MongoDB and discriminator toJSON', async () => {
|
||||
const { team } = await getLoggedInAgent(server);
|
||||
|
||||
// Insert a flat-model doc with cross-kind fields
|
||||
const result = await Source.collection.insertOne({
|
||||
kind: SourceKind.Log,
|
||||
name: 'Flat Log',
|
||||
team: team._id,
|
||||
connection: new Types.ObjectId(),
|
||||
from: { databaseName: 'default', tableName: 'otel_logs' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Body',
|
||||
metricTables: { gauge: 'otel_metrics_gauge' },
|
||||
});
|
||||
|
||||
// Raw query shows fields as stored
|
||||
const rawDoc = await Source.collection.findOne({
|
||||
_id: result.insertedId,
|
||||
});
|
||||
expect(rawDoc).toHaveProperty('metricTables');
|
||||
|
||||
// NOTE: Mongoose discriminator toJSON does NOT strip cross-kind fields.
|
||||
// Unknown fields stored in MongoDB are still included in toJSON() output.
|
||||
const hydrated = await Source.findById(result.insertedId);
|
||||
// @ts-expect-error toJSON has differing type signatures depending on the source, but it's fine at runtime
|
||||
const json = hydrated?.toJSON({ getters: true });
|
||||
expect(json).toHaveProperty('metricTables');
|
||||
});
|
||||
|
||||
it('Source.find() returns correctly typed discriminators for all kinds', async () => {
|
||||
const { team } = await getLoggedInAgent(server);
|
||||
const connectionId = new Types.ObjectId();
|
||||
|
||||
await Source.collection.insertMany([
|
||||
{
|
||||
kind: SourceKind.Log,
|
||||
name: 'Log',
|
||||
team: team._id,
|
||||
connection: connectionId,
|
||||
from: { databaseName: 'default', tableName: 'otel_logs' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Body',
|
||||
},
|
||||
{
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Trace',
|
||||
team: team._id,
|
||||
connection: connectionId,
|
||||
from: { databaseName: 'default', tableName: 'otel_traces' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
},
|
||||
{
|
||||
kind: SourceKind.Session,
|
||||
name: 'Session',
|
||||
team: team._id,
|
||||
connection: connectionId,
|
||||
from: { databaseName: 'default', tableName: 'otel_sessions' },
|
||||
timestampValueExpression: 'TimestampTime',
|
||||
traceSourceId: 'some-id',
|
||||
},
|
||||
{
|
||||
kind: SourceKind.Metric,
|
||||
name: 'Metric',
|
||||
team: team._id,
|
||||
connection: connectionId,
|
||||
from: { databaseName: 'default', tableName: '' },
|
||||
timestampValueExpression: 'TimeUnix',
|
||||
resourceAttributesExpression: 'ResourceAttributes',
|
||||
metricTables: { gauge: 'otel_metrics_gauge' },
|
||||
},
|
||||
]);
|
||||
|
||||
const sources = await Source.find({ team: team._id }).sort({ name: 1 });
|
||||
expect(sources).toHaveLength(4);
|
||||
|
||||
expect(sources[0].kind).toBe(SourceKind.Log);
|
||||
expect(sources[1].kind).toBe(SourceKind.Metric);
|
||||
expect(sources[2].kind).toBe(SourceKind.Session);
|
||||
expect(sources[3].kind).toBe(SourceKind.Trace);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -60,16 +60,28 @@ Here are some guidelines:
|
|||
|
||||
The user is looking to do a query on their data source named: ${source.name} of type ${source.kind}.
|
||||
|
||||
The ${source.kind === SourceKind.Log ? 'log level' : 'span status code'} is stored in ${source.severityTextExpression}.
|
||||
You can identify services via ${source.serviceNameExpression}
|
||||
${
|
||||
source.kind === SourceKind.Log
|
||||
? `The log level is stored in ${source.severityTextExpression}.`
|
||||
: source.kind === SourceKind.Trace
|
||||
? `The span status code is stored in ${source.statusCodeExpression}.`
|
||||
: ''
|
||||
}
|
||||
${'serviceNameExpression' in source ? `You can identify services via ${source.serviceNameExpression}` : ''}
|
||||
${
|
||||
source.kind === SourceKind.Trace
|
||||
? `Duration of spans can be queried via ${source.durationExpression} which is expressed in 10^-${source.durationPrecision} seconds of precision.
|
||||
Span names under ${source.spanNameExpression} and span kinds under ${source.spanKindExpression}`
|
||||
: `The log body can be queried via ${source.bodyExpression}`
|
||||
: 'bodyExpression' in source
|
||||
? `The log body can be queried via ${source.bodyExpression}`
|
||||
: ''
|
||||
}
|
||||
${
|
||||
source.kind === SourceKind.Trace || source.kind === SourceKind.Log
|
||||
? `Various log/span-specific attributes as a Map can be found under ${source.eventAttributesExpression} while resource attributes that follow the OpenTelemetry semantic convention can be found under ${source.resourceAttributesExpression}
|
||||
You must use the full field name ex. "column['key']" or "column.key" as it appears.`
|
||||
: ''
|
||||
}
|
||||
Various log/span-specific attributes as a Map can be found under ${source.eventAttributesExpression} while resource attributes that follow the OpenTelemetry semantic convention can be found under ${source.resourceAttributesExpression}
|
||||
You must use the full field name ex. "column['key']" or "column.key" as it appears.
|
||||
|
||||
The following is a list of properties and example values that exist in the source:
|
||||
${JSON.stringify(keyValues)}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import {
|
||||
SourceSchema,
|
||||
sourceSchemaWithout,
|
||||
SourceSchemaNoId,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import express from 'express';
|
||||
import { z } from 'zod';
|
||||
|
|
@ -23,14 +23,17 @@ router.get('/', async (req, res, next) => {
|
|||
|
||||
const sources = await getSources(teamId.toString());
|
||||
|
||||
return res.json(sources.map(s => s.toJSON({ getters: true })));
|
||||
return res.json(
|
||||
sources.map(
|
||||
// @ts-expect-error source.toJSON has incompatible type signatures but is actually a safe operation
|
||||
source => source.toJSON({ getters: true }),
|
||||
),
|
||||
);
|
||||
} catch (e) {
|
||||
next(e);
|
||||
}
|
||||
});
|
||||
|
||||
const SourceSchemaNoId = sourceSchemaWithout({ id: true });
|
||||
|
||||
router.post(
|
||||
'/',
|
||||
validateRequest({
|
||||
|
|
@ -40,11 +43,10 @@ router.post(
|
|||
try {
|
||||
const { teamId } = getNonNullUserWithTeam(req);
|
||||
|
||||
// TODO: HDX-1768 Eliminate type assertion
|
||||
const source = await createSource(teamId.toString(), {
|
||||
...req.body,
|
||||
team: teamId,
|
||||
} as any);
|
||||
team: teamId.toJSON(),
|
||||
});
|
||||
|
||||
res.json(source);
|
||||
} catch (e) {
|
||||
|
|
@ -65,11 +67,10 @@ router.put(
|
|||
try {
|
||||
const { teamId } = getNonNullUserWithTeam(req);
|
||||
|
||||
// TODO: HDX-1768 Eliminate type assertion
|
||||
const source = await updateSource(teamId.toString(), req.params.id, {
|
||||
...req.body,
|
||||
team: teamId,
|
||||
} as any);
|
||||
team: teamId.toJSON(),
|
||||
});
|
||||
|
||||
if (!source) {
|
||||
res.status(404).send('Source not found');
|
||||
|
|
|
|||
|
|
@ -13,7 +13,14 @@ import {
|
|||
getServer,
|
||||
} from '../../../fixtures';
|
||||
import Connection, { IConnection } from '../../../models/connection';
|
||||
import { Source } from '../../../models/source';
|
||||
import {
|
||||
ISource,
|
||||
LogSource,
|
||||
MetricSource,
|
||||
SessionSource,
|
||||
Source,
|
||||
TraceSource,
|
||||
} from '../../../models/source';
|
||||
import { mapGranularityToExternalFormat } from '../v2/sources';
|
||||
|
||||
describe('External API v2 Sources', () => {
|
||||
|
|
@ -73,7 +80,7 @@ describe('External API v2 Sources', () => {
|
|||
});
|
||||
|
||||
it('should return a single log source', async () => {
|
||||
const logSource = await Source.create({
|
||||
const logSource = await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: team._id,
|
||||
name: 'Test Log Source',
|
||||
|
|
@ -108,7 +115,7 @@ describe('External API v2 Sources', () => {
|
|||
});
|
||||
|
||||
it('should return a single trace source', async () => {
|
||||
const traceSource = await Source.create({
|
||||
const traceSource = await TraceSource.create({
|
||||
kind: SourceKind.Trace,
|
||||
team: team._id,
|
||||
name: 'Test Trace Source',
|
||||
|
|
@ -117,6 +124,7 @@ describe('External API v2 Sources', () => {
|
|||
tableName: 'otel_traces',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
|
|
@ -188,6 +196,7 @@ describe('External API v2 Sources', () => {
|
|||
kind: SourceKind.Trace,
|
||||
connection: connection._id.toString(),
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
|
|
@ -247,7 +256,7 @@ describe('External API v2 Sources', () => {
|
|||
});
|
||||
|
||||
it('should return a single metric source', async () => {
|
||||
const metricSource = await Source.create({
|
||||
const metricSource = await MetricSource.create({
|
||||
kind: SourceKind.Metric,
|
||||
team: team._id,
|
||||
name: 'Test Metric Source',
|
||||
|
|
@ -289,7 +298,7 @@ describe('External API v2 Sources', () => {
|
|||
});
|
||||
|
||||
it('should return a single session source', async () => {
|
||||
const traceSource = await Source.create({
|
||||
const traceSource = await TraceSource.create({
|
||||
kind: SourceKind.Trace,
|
||||
team: team._id,
|
||||
name: 'Trace Source for Session',
|
||||
|
|
@ -298,6 +307,7 @@ describe('External API v2 Sources', () => {
|
|||
tableName: 'otel_traces',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
|
|
@ -308,17 +318,18 @@ describe('External API v2 Sources', () => {
|
|||
connection: connection._id,
|
||||
});
|
||||
|
||||
const sessionSource = await Source.create({
|
||||
const sessionSource = await SessionSource.create({
|
||||
kind: SourceKind.Session,
|
||||
team: team._id,
|
||||
team: team._id.toString(),
|
||||
name: 'Test Session Source',
|
||||
from: {
|
||||
databaseName: DEFAULT_DATABASE,
|
||||
tableName: 'rrweb_events',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
traceSourceId: traceSource._id.toString(),
|
||||
connection: connection._id,
|
||||
});
|
||||
connection: connection._id.toString(),
|
||||
} satisfies Omit<Extract<ISource, { kind: SourceKind.Session }>, 'id'>);
|
||||
|
||||
const response = await authRequest('get', BASE_URL).expect(200);
|
||||
|
||||
|
|
@ -338,11 +349,12 @@ describe('External API v2 Sources', () => {
|
|||
},
|
||||
traceSourceId: traceSource._id.toString(),
|
||||
querySettings: [],
|
||||
timestampValueExpression: 'Timestamp',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return multiple sources of different kinds', async () => {
|
||||
const logSource = await Source.create({
|
||||
const logSource = await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: team._id,
|
||||
name: 'Logs',
|
||||
|
|
@ -355,7 +367,7 @@ describe('External API v2 Sources', () => {
|
|||
connection: connection._id,
|
||||
});
|
||||
|
||||
const traceSource = await Source.create({
|
||||
const traceSource = await TraceSource.create({
|
||||
kind: SourceKind.Trace,
|
||||
team: team._id,
|
||||
name: 'Traces',
|
||||
|
|
@ -364,6 +376,7 @@ describe('External API v2 Sources', () => {
|
|||
tableName: 'otel_traces',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
|
|
@ -374,7 +387,7 @@ describe('External API v2 Sources', () => {
|
|||
connection: connection._id,
|
||||
});
|
||||
|
||||
const metricSource = await Source.create({
|
||||
const metricSource = await MetricSource.create({
|
||||
kind: SourceKind.Metric,
|
||||
team: team._id,
|
||||
name: 'Metrics',
|
||||
|
|
@ -407,7 +420,7 @@ describe('External API v2 Sources', () => {
|
|||
|
||||
it("should only return sources for the authenticated user's team", async () => {
|
||||
// Create a source for the current team
|
||||
const currentTeamSource = await Source.create({
|
||||
const currentTeamSource = await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: team._id,
|
||||
name: 'Current Team Source',
|
||||
|
|
@ -430,7 +443,7 @@ describe('External API v2 Sources', () => {
|
|||
password: config.CLICKHOUSE_PASSWORD,
|
||||
});
|
||||
|
||||
await Source.create({
|
||||
await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: otherTeamId,
|
||||
name: 'Other Team Source',
|
||||
|
|
@ -452,7 +465,7 @@ describe('External API v2 Sources', () => {
|
|||
});
|
||||
|
||||
it('should format sources according to SourceSchema', async () => {
|
||||
await Source.create({
|
||||
await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: team._id,
|
||||
name: 'Test Source',
|
||||
|
|
@ -483,7 +496,7 @@ describe('External API v2 Sources', () => {
|
|||
|
||||
it('should filter out sources that fail schema validation', async () => {
|
||||
// Create a valid source
|
||||
const validSource = await Source.create({
|
||||
const validSource = await LogSource.create({
|
||||
kind: SourceKind.Log,
|
||||
team: team._id,
|
||||
name: 'Valid Source',
|
||||
|
|
@ -517,6 +530,92 @@ describe('External API v2 Sources', () => {
|
|||
expect(response.body.data[0].id).toBe(validSource._id.toString());
|
||||
});
|
||||
});
|
||||
|
||||
describe('backward compatibility with legacy flat-model documents', () => {
|
||||
const BASE_URL = '/api/v2/sources';
|
||||
|
||||
it('returns legacy Session source without timestampValueExpression using default TimestampTime', async () => {
|
||||
// Legacy Session sources were created before timestampValueExpression was
|
||||
// required. applyLegacyDefaults() backfills 'TimestampTime' before
|
||||
// SourceSchema.safeParse(), so these sources still appear in the response.
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Session,
|
||||
name: 'Legacy Session',
|
||||
team: team._id,
|
||||
connection: connection._id,
|
||||
from: { databaseName: DEFAULT_DATABASE, tableName: 'otel_sessions' },
|
||||
traceSourceId: 'some-trace-source-id',
|
||||
// timestampValueExpression intentionally omitted
|
||||
});
|
||||
|
||||
const response = await authRequest('get', BASE_URL).expect(200);
|
||||
|
||||
expect(response.body.data).toHaveLength(1);
|
||||
expect(response.body.data[0].kind).toBe(SourceKind.Session);
|
||||
// Default is applied at read time, not persisted to the database
|
||||
expect(response.body.data[0].timestampValueExpression).toBe(
|
||||
'TimestampTime',
|
||||
);
|
||||
});
|
||||
|
||||
it('returns Trace source with logSourceId: null (Zod optional accepts null)', async () => {
|
||||
// Old schema had logSourceId: z.string().optional().nullable()
|
||||
// New schema removed .nullable() — however, Zod's optional() in
|
||||
// discriminatedUnion context still accepts null values (they pass
|
||||
// safeParse). This means logSourceId: null is NOT a breaking change.
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Trace with null logSourceId',
|
||||
team: team._id,
|
||||
connection: connection._id,
|
||||
from: { databaseName: DEFAULT_DATABASE, tableName: 'otel_traces' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 3,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
logSourceId: null,
|
||||
});
|
||||
|
||||
const response = await authRequest('get', BASE_URL).expect(200);
|
||||
|
||||
// Source IS returned — logSourceId: null passes Zod safeParse
|
||||
expect(response.body.data).toHaveLength(1);
|
||||
expect(response.body.data[0].kind).toBe(SourceKind.Trace);
|
||||
expect(response.body.data[0].logSourceId).toBeNull();
|
||||
});
|
||||
|
||||
it('strips cross-kind fields from legacy flat-model Log source via SourceSchema.safeParse', async () => {
|
||||
// The external API runs SourceSchema.safeParse() which DOES strip
|
||||
// unknown/cross-kind fields (unlike Mongoose toJSON which keeps them).
|
||||
// This is the key difference between internal and external APIs.
|
||||
await Source.collection.insertOne({
|
||||
kind: SourceKind.Log,
|
||||
name: 'Flat Model Log',
|
||||
team: team._id,
|
||||
connection: connection._id,
|
||||
from: { databaseName: DEFAULT_DATABASE, tableName: DEFAULT_LOGS_TABLE },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Body',
|
||||
// Cross-kind fields from old flat model
|
||||
metricTables: { gauge: 'otel_metrics_gauge' },
|
||||
durationExpression: 'Duration',
|
||||
});
|
||||
|
||||
const response = await authRequest('get', BASE_URL).expect(200);
|
||||
|
||||
expect(response.body.data).toHaveLength(1);
|
||||
expect(response.body.data[0].kind).toBe(SourceKind.Log);
|
||||
expect(response.body.data[0].defaultTableSelectExpression).toBe('Body');
|
||||
// Cross-kind fields ARE stripped by SourceSchema.safeParse in the external API
|
||||
expect(response.body.data[0]).not.toHaveProperty('metricTables');
|
||||
expect(response.body.data[0]).not.toHaveProperty('durationExpression');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('External API v2 Sources Mapping', () => {
|
||||
|
|
|
|||
|
|
@ -4,8 +4,8 @@ import { Granularity } from '@hyperdx/common-utils/dist/core/utils';
|
|||
import {
|
||||
ChartConfigWithOptDateRange,
|
||||
DisplayType,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import opentelemetry, { SpanStatusCode } from '@opentelemetry/api';
|
||||
import express from 'express';
|
||||
import _ from 'lodash';
|
||||
|
|
@ -260,7 +260,7 @@ const buildChartConfigFromRequest = async (
|
|||
databaseName: source.from.databaseName,
|
||||
tableName: !isMetricSource ? source.from.tableName : '',
|
||||
},
|
||||
...(isMetricSource && {
|
||||
...(source.kind === SourceKind.Metric && {
|
||||
metricTables: source.metricTables,
|
||||
}),
|
||||
select: [
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import {
|
||||
SourceKind,
|
||||
SourceSchema,
|
||||
type TSourceUnion,
|
||||
type TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import express from 'express';
|
||||
|
||||
|
|
@ -27,7 +28,7 @@ export function mapGranularityToExternalFormat(granularity: string): string {
|
|||
}
|
||||
}
|
||||
|
||||
function mapSourceToExternalSource(source: TSourceUnion): TSourceUnion {
|
||||
function mapSourceToExternalSource(source: TSource): TSource {
|
||||
if (!('materializedViews' in source)) return source;
|
||||
if (!Array.isArray(source.materializedViews)) return source;
|
||||
|
||||
|
|
@ -42,12 +43,41 @@ function mapSourceToExternalSource(source: TSourceUnion): TSourceUnion {
|
|||
};
|
||||
}
|
||||
|
||||
function applyLegacyDefaults(
|
||||
parsed: Record<string, unknown>,
|
||||
): Record<string, unknown> {
|
||||
// Legacy Session sources were created before timestampValueExpression was
|
||||
// required. The old code defaulted it to 'TimestampTime' at query time.
|
||||
if (parsed.kind === SourceKind.Session && !parsed.timestampValueExpression) {
|
||||
return { ...parsed, timestampValueExpression: 'TimestampTime' };
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function formatExternalSource(source: SourceDocument) {
|
||||
// Convert to JSON so that any ObjectIds are converted to strings
|
||||
const json = JSON.stringify(source.toJSON({ getters: true }));
|
||||
const json = JSON.stringify(
|
||||
(() => {
|
||||
switch (source.kind) {
|
||||
case SourceKind.Log:
|
||||
return source.toJSON({ getters: true });
|
||||
case SourceKind.Trace:
|
||||
return source.toJSON({ getters: true });
|
||||
case SourceKind.Metric:
|
||||
return source.toJSON({ getters: true });
|
||||
case SourceKind.Session:
|
||||
return source.toJSON({ getters: true });
|
||||
default:
|
||||
source satisfies never;
|
||||
return {};
|
||||
}
|
||||
})(),
|
||||
);
|
||||
|
||||
// Parse using the SourceSchema to strip out any fields not defined in the schema
|
||||
const parseResult = SourceSchema.safeParse(JSON.parse(json));
|
||||
const parseResult = SourceSchema.safeParse(
|
||||
applyLegacyDefaults(JSON.parse(json)),
|
||||
);
|
||||
if (parseResult.success) {
|
||||
return mapSourceToExternalSource(parseResult.data);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { ClickhouseClient } from '@hyperdx/common-utils/dist/clickhouse/node';
|
||||
import {
|
||||
AlertState,
|
||||
SourceKind,
|
||||
Tile,
|
||||
WebhookService,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
|
@ -293,16 +294,17 @@ describe('checkAlerts', () => {
|
|||
interval: '1m',
|
||||
},
|
||||
source: {
|
||||
id: 'fake-source-id' as any,
|
||||
kind: 'log' as any,
|
||||
team: 'team-123' as any,
|
||||
id: 'fake-source-id',
|
||||
kind: SourceKind.Log,
|
||||
team: 'team-123',
|
||||
from: {
|
||||
databaseName: 'default',
|
||||
tableName: 'otel_logs',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
connection: 'connection-123' as any,
|
||||
connection: 'connection-123',
|
||||
name: 'Logs',
|
||||
defaultTableSelectExpression: 'Timestamp, Body',
|
||||
},
|
||||
savedSearch: {
|
||||
_id: 'fake-saved-search-id' as any,
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import {
|
|||
import {
|
||||
BuilderChartConfigWithOptDateRange,
|
||||
DisplayType,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import * as fns from 'date-fns';
|
||||
import { chunk, isString } from 'lodash';
|
||||
|
|
@ -91,7 +92,10 @@ export async function computeAliasWithClauses(
|
|||
metadata: Metadata,
|
||||
): Promise<BuilderChartConfigWithOptDateRange['with']> {
|
||||
const resolvedSelect =
|
||||
savedSearch.select || source.defaultTableSelectExpression || '';
|
||||
savedSearch.select ||
|
||||
((source.kind === SourceKind.Log || source.kind === SourceKind.Trace) &&
|
||||
source.defaultTableSelectExpression) ||
|
||||
'';
|
||||
const config: BuilderChartConfigWithOptDateRange = {
|
||||
connection: '',
|
||||
displayType: DisplayType.Search,
|
||||
|
|
@ -99,7 +103,10 @@ export async function computeAliasWithClauses(
|
|||
select: resolvedSelect,
|
||||
where: savedSearch.where,
|
||||
whereLanguage: savedSearch.whereLanguage,
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
implicitColumnExpression:
|
||||
source.kind === SourceKind.Log || source.kind === SourceKind.Trace
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
timestampValueExpression: source.timestampValueExpression,
|
||||
};
|
||||
const query = await renderChartConfig(config, metadata, source.querySettings);
|
||||
|
|
@ -442,7 +449,10 @@ const getChartConfigFromAlert = (
|
|||
whereLanguage: savedSearch.whereLanguage,
|
||||
filters: savedSearch.filters?.map(f => ({ ...f })),
|
||||
groupBy: alert.groupBy,
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
implicitColumnExpression:
|
||||
source.kind === SourceKind.Log || source.kind === SourceKind.Trace
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
timestampValueExpression: source.timestampValueExpression,
|
||||
};
|
||||
} else if (details.taskType === AlertTaskType.TILE) {
|
||||
|
|
@ -457,6 +467,15 @@ const getChartConfigFromAlert = (
|
|||
tile.config.displayType === DisplayType.StackedBar ||
|
||||
tile.config.displayType === DisplayType.Number
|
||||
) {
|
||||
// Tile alerts can use Log, Trace, or Metric sources.
|
||||
// implicitColumnExpression exists on Log and Trace sources;
|
||||
// metricTables exists on Metric sources.
|
||||
const implicitColumnExpression =
|
||||
source.kind === SourceKind.Log || source.kind === SourceKind.Trace
|
||||
? source.implicitColumnExpression
|
||||
: undefined;
|
||||
const metricTables =
|
||||
source.kind === SourceKind.Metric ? source.metricTables : undefined;
|
||||
return {
|
||||
connection,
|
||||
dateRange,
|
||||
|
|
@ -466,8 +485,8 @@ const getChartConfigFromAlert = (
|
|||
from: source.from,
|
||||
granularity: `${windowSizeInMins} minute`,
|
||||
groupBy: tile.config.groupBy,
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
metricTables: source.metricTables,
|
||||
implicitColumnExpression,
|
||||
metricTables,
|
||||
select: tile.config.select,
|
||||
timestampValueExpression: source.timestampValueExpression,
|
||||
where: tile.config.where,
|
||||
|
|
@ -671,14 +690,19 @@ export const processAlert = async (
|
|||
}
|
||||
}
|
||||
|
||||
// Optimize chart config with materialized views, if available
|
||||
const optimizedChartConfig = source?.materializedViews?.length
|
||||
// Optimize chart config with materialized views, if available.
|
||||
// materializedViews exists on Log and Trace sources.
|
||||
const mvSource =
|
||||
source.kind === SourceKind.Log || source.kind === SourceKind.Trace
|
||||
? source
|
||||
: undefined;
|
||||
const optimizedChartConfig = mvSource?.materializedViews?.length
|
||||
? await tryOptimizeConfigWithMaterializedView(
|
||||
chartConfig,
|
||||
metadata,
|
||||
clickhouseClient,
|
||||
undefined,
|
||||
source,
|
||||
mvSource,
|
||||
)
|
||||
: chartConfig;
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import {
|
|||
AlertChannelType,
|
||||
ChartConfigWithOptDateRange,
|
||||
DisplayType,
|
||||
SourceKind,
|
||||
WebhookService,
|
||||
zAlertChannelType,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
|
@ -579,6 +580,11 @@ ${targetTemplate}`;
|
|||
if (source == null) {
|
||||
throw new Error(`Source ID is ${alert.source} but source is null`);
|
||||
}
|
||||
if (source.kind !== SourceKind.Log && source.kind !== SourceKind.Trace) {
|
||||
throw new Error(
|
||||
`Expecting SourceKind 'trace' or 'log', got ${source.kind}`,
|
||||
);
|
||||
}
|
||||
// TODO: show group + total count for group-by alerts
|
||||
// fetch sample logs
|
||||
const resolvedSelect =
|
||||
|
|
|
|||
|
|
@ -1,6 +1,10 @@
|
|||
import { ResponseJSON } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { ClickhouseClient } from '@hyperdx/common-utils/dist/clickhouse/node';
|
||||
import { MetricsDataType, SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
MetricsDataType,
|
||||
SourceKind,
|
||||
TMetricSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import * as HyperDX from '@hyperdx/node-opentelemetry';
|
||||
import ms from 'ms';
|
||||
import os from 'os';
|
||||
|
|
@ -30,8 +34,10 @@ const logger = pino({
|
|||
function extractTableNames(source: SourceDocument): string[] {
|
||||
const tables: string[] = [];
|
||||
if (source.kind === SourceKind.Metric) {
|
||||
// Cast to TMetricSource to access metricTables after kind narrowing
|
||||
const metricSource = source;
|
||||
for (const key of Object.values(MetricsDataType)) {
|
||||
const metricTable = source.metricTables?.[key];
|
||||
const metricTable = metricSource.metricTables?.[key];
|
||||
if (!metricTable) continue;
|
||||
tables.push(metricTable);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ import {
|
|||
MetricsDataType as MetricsDataTypeV2,
|
||||
SourceKind,
|
||||
SQLInterval,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { SegmentedControl } from '@mantine/core';
|
||||
|
|
@ -630,14 +631,14 @@ function firstGroupColumnIsLogLevel(
|
|||
source: TSource | undefined,
|
||||
groupColumns: ColumnMetaType[],
|
||||
) {
|
||||
return (
|
||||
source &&
|
||||
groupColumns.length === 1 &&
|
||||
groupColumns[0].name ===
|
||||
(source.kind === SourceKind.Log
|
||||
? source.severityTextExpression
|
||||
: source.statusCodeExpression)
|
||||
);
|
||||
if (!source || groupColumns.length !== 1) return false;
|
||||
if (source.kind === SourceKind.Log) {
|
||||
return groupColumns[0].name === source.severityTextExpression;
|
||||
}
|
||||
if (source.kind === SourceKind.Trace) {
|
||||
return groupColumns[0].name === source.statusCodeExpression;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function addResponseToFormattedData({
|
||||
|
|
@ -905,7 +906,7 @@ export const mapV1AggFnToV2 = (aggFn?: AggFn): AggFnV2 | undefined => {
|
|||
};
|
||||
|
||||
export const convertV1GroupByToV2 = (
|
||||
metricSource: TSource,
|
||||
metricSource: TMetricSource,
|
||||
groupBy: string[],
|
||||
): string => {
|
||||
return groupBy
|
||||
|
|
@ -932,7 +933,7 @@ export const convertV1ChartConfigToV2 = (
|
|||
},
|
||||
source: {
|
||||
log?: TSource;
|
||||
metric?: TSource;
|
||||
metric?: TMetricSource;
|
||||
trace?: TSource;
|
||||
},
|
||||
): BuilderChartConfigWithDateRange => {
|
||||
|
|
@ -1020,12 +1021,18 @@ export function buildEventsSearchUrl({
|
|||
}
|
||||
|
||||
const isMetricChart = isMetricChartConfig(config);
|
||||
if (isMetricChart && source?.logSourceId == null) {
|
||||
notifications.show({
|
||||
color: 'yellow',
|
||||
message: 'No log source is associated with the selected metric source.',
|
||||
});
|
||||
return null;
|
||||
if (isMetricChart) {
|
||||
const logSourceId =
|
||||
source.kind === SourceKind.Metric || source.kind === SourceKind.Trace
|
||||
? source.logSourceId
|
||||
: undefined;
|
||||
if (logSourceId == null) {
|
||||
notifications.show({
|
||||
color: 'yellow',
|
||||
message: 'No log source is associated with the selected metric source.',
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
let where = config.where;
|
||||
|
|
@ -1089,7 +1096,10 @@ export function buildEventsSearchUrl({
|
|||
params.where = '';
|
||||
params.whereLanguage = 'lucene';
|
||||
params.filters = JSON.stringify([]);
|
||||
params.source = source?.logSourceId ?? '';
|
||||
params.source =
|
||||
(source.kind === SourceKind.Metric || source.kind === SourceKind.Trace
|
||||
? source.logSourceId
|
||||
: undefined) ?? '';
|
||||
}
|
||||
|
||||
// Include the select parameter if provided to preserve custom columns
|
||||
|
|
|
|||
|
|
@ -31,11 +31,13 @@ import {
|
|||
DashboardFilter,
|
||||
DisplayType,
|
||||
Filter,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
SourceKind,
|
||||
SQLInterval,
|
||||
TSourceUnion,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
ActionIcon,
|
||||
|
|
@ -253,7 +255,10 @@ const Tile = forwardRef(
|
|||
databaseName: source.from?.databaseName || 'default',
|
||||
tableName: tableName || '',
|
||||
},
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
implicitColumnExpression:
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
filters,
|
||||
metricTables: isMetricSource ? source.metricTables : undefined,
|
||||
});
|
||||
|
|
@ -528,7 +533,10 @@ const Tile = forwardRef(
|
|||
dateRange,
|
||||
select:
|
||||
queriedConfig.select ||
|
||||
source?.defaultTableSelectExpression ||
|
||||
(source?.kind === SourceKind.Log ||
|
||||
source?.kind === SourceKind.Trace
|
||||
? source.defaultTableSelectExpression
|
||||
: '') ||
|
||||
'',
|
||||
groupBy: undefined,
|
||||
granularity: undefined,
|
||||
|
|
@ -1420,7 +1428,7 @@ function DBDashboardPage({ presetConfig }: { presetConfig?: Dashboard }) {
|
|||
convertToDashboardTemplate(
|
||||
dashboard,
|
||||
// TODO: fix this type issue
|
||||
sources as TSourceUnion[],
|
||||
sources,
|
||||
connections,
|
||||
),
|
||||
dashboard?.name,
|
||||
|
|
|
|||
|
|
@ -36,6 +36,8 @@ import {
|
|||
ChartConfigWithDateRange,
|
||||
DisplayType,
|
||||
Filter,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
|
@ -663,6 +665,7 @@ function useSearchedConfigToChartConfig(
|
|||
) {
|
||||
const { data: sourceObj, isLoading } = useSource({
|
||||
id: source,
|
||||
kinds: [SourceKind.Log, SourceKind.Trace],
|
||||
});
|
||||
const defaultOrderBy = useDefaultOrderBy(source);
|
||||
|
||||
|
|
@ -673,11 +676,10 @@ function useSearchedConfigToChartConfig(
|
|||
select:
|
||||
select ||
|
||||
defaultSearchConfig?.select ||
|
||||
sourceObj.defaultTableSelectExpression ||
|
||||
'',
|
||||
sourceObj.defaultTableSelectExpression,
|
||||
from: sourceObj.from,
|
||||
source: sourceObj.id,
|
||||
...(sourceObj.tableFilterExpression != null
|
||||
...(isLogSource(sourceObj) && sourceObj.tableFilterExpression != null
|
||||
? {
|
||||
filters: [
|
||||
{
|
||||
|
|
@ -785,7 +787,10 @@ function optimizeDefaultOrderBy(
|
|||
}
|
||||
|
||||
export function useDefaultOrderBy(sourceID: string | undefined | null) {
|
||||
const { data: source } = useSource({ id: sourceID });
|
||||
const { data: source } = useSource({
|
||||
id: sourceID,
|
||||
kinds: [SourceKind.Log, SourceKind.Trace],
|
||||
});
|
||||
const { data: tableMetadata } = useTableMetadata(tcFromSource(source));
|
||||
|
||||
// When source changes, make sure select and orderby fields are set to default
|
||||
|
|
@ -796,7 +801,7 @@ export function useDefaultOrderBy(sourceID: string | undefined | null) {
|
|||
if (trimmedOrderBy) return trimmedOrderBy;
|
||||
return optimizeDefaultOrderBy(
|
||||
source?.timestampValueExpression ?? '',
|
||||
source?.displayedTimestampValueExpression,
|
||||
source.displayedTimestampValueExpression,
|
||||
tableMetadata?.sorting_key,
|
||||
);
|
||||
}, [source, tableMetadata]);
|
||||
|
|
@ -835,6 +840,7 @@ function DBSearchPage() {
|
|||
);
|
||||
const { data: searchedSource } = useSource({
|
||||
id: searchedConfig.source,
|
||||
kinds: [SourceKind.Log, SourceKind.Trace],
|
||||
});
|
||||
|
||||
const [analysisMode, setAnalysisMode] = useQueryState(
|
||||
|
|
@ -918,7 +924,11 @@ function DBSearchPage() {
|
|||
}
|
||||
return {
|
||||
select:
|
||||
_savedSearch?.select ?? searchedSource?.defaultTableSelectExpression,
|
||||
_savedSearch?.select ??
|
||||
(searchedSource?.kind === SourceKind.Log ||
|
||||
searchedSource?.kind === SourceKind.Trace
|
||||
? searchedSource.defaultTableSelectExpression
|
||||
: undefined),
|
||||
where: _savedSearch?.where ?? '',
|
||||
whereLanguage: _savedSearch?.whereLanguage ?? 'lucene',
|
||||
source: _savedSearch?.source,
|
||||
|
|
@ -1807,7 +1817,11 @@ function DBSearchPage() {
|
|||
setAnalysisMode={setAnalysisMode}
|
||||
chartConfig={filtersChartConfig}
|
||||
sourceId={inputSourceObj?.id}
|
||||
showDelta={!!searchedSource?.durationExpression}
|
||||
showDelta={
|
||||
!!(searchedSource?.kind === SourceKind.Trace
|
||||
? searchedSource.durationExpression
|
||||
: undefined)
|
||||
}
|
||||
{...searchFilters}
|
||||
/>
|
||||
</ErrorBoundary>
|
||||
|
|
@ -1865,18 +1879,20 @@ function DBSearchPage() {
|
|||
</Box>
|
||||
</Flex>
|
||||
)}
|
||||
{analysisMode === 'delta' && searchedSource != null && (
|
||||
<DBSearchHeatmapChart
|
||||
chartConfig={{
|
||||
...chartConfig,
|
||||
dateRange: searchedTimeRange,
|
||||
with: aliasWith,
|
||||
}}
|
||||
isReady={isReady}
|
||||
source={searchedSource}
|
||||
onAddFilter={searchFilters.setFilterValue}
|
||||
/>
|
||||
)}
|
||||
{analysisMode === 'delta' &&
|
||||
searchedSource != null &&
|
||||
isTraceSource(searchedSource) && (
|
||||
<DBSearchHeatmapChart
|
||||
chartConfig={{
|
||||
...chartConfig,
|
||||
dateRange: searchedTimeRange,
|
||||
with: aliasWith,
|
||||
}}
|
||||
isReady={isReady}
|
||||
source={searchedSource}
|
||||
onAddFilter={searchFilters.setFilterValue}
|
||||
/>
|
||||
)}
|
||||
{analysisMode === 'results' && (
|
||||
<Flex direction="column" mih="0" miw={0}>
|
||||
{chartConfig && histogramTimeChartConfig && (
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { useEffect, useState } from 'react';
|
|||
import dynamic from 'next/dynamic';
|
||||
import { parseAsInteger, useQueryState } from 'nuqs';
|
||||
import { useForm, useWatch } from 'react-hook-form';
|
||||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
|
|
@ -70,12 +70,13 @@ function DBServiceMapPage() {
|
|||
});
|
||||
|
||||
const defaultSource = sources?.find(
|
||||
source => source.kind === SourceKind.Trace,
|
||||
(source): source is TTraceSource => source.kind === SourceKind.Trace,
|
||||
);
|
||||
const source =
|
||||
sourceId && sources
|
||||
? (sources.find(
|
||||
source => source.id === sourceId && source.kind === SourceKind.Trace,
|
||||
(source): source is TTraceSource =>
|
||||
source.id === sourceId && source.kind === SourceKind.Trace,
|
||||
) ?? defaultSource)
|
||||
: defaultSource;
|
||||
|
||||
|
|
|
|||
|
|
@ -118,8 +118,8 @@ const DashboardFilterEditForm = ({
|
|||
: undefined;
|
||||
|
||||
const sourceIsMetric = source?.kind === SourceKind.Metric;
|
||||
const metricTypes = Object.values(MetricsDataType).filter(
|
||||
type => source?.metricTables?.[type],
|
||||
const metricTypes = Object.values(MetricsDataType).filter(type =>
|
||||
source?.kind === SourceKind.Metric ? source.metricTables?.[type] : false,
|
||||
);
|
||||
|
||||
const [modalContentRef, setModalContentRef] = useState<HTMLElement | null>(
|
||||
|
|
|
|||
|
|
@ -7,13 +7,19 @@ import sub from 'date-fns/sub';
|
|||
import { useQueryState } from 'nuqs';
|
||||
import { useForm, useWatch } from 'react-hook-form';
|
||||
import { convertDateRangeToGranularityString } from '@hyperdx/common-utils/dist/core/utils';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
isLogSource,
|
||||
isMetricSource,
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
ActionIcon,
|
||||
Alert,
|
||||
Badge,
|
||||
Box,
|
||||
Button,
|
||||
Card,
|
||||
Flex,
|
||||
Grid,
|
||||
|
|
@ -56,7 +62,7 @@ import { withAppNav } from './layout';
|
|||
import NamespaceDetailsSidePanel from './NamespaceDetailsSidePanel';
|
||||
import NodeDetailsSidePanel from './NodeDetailsSidePanel';
|
||||
import PodDetailsSidePanel from './PodDetailsSidePanel';
|
||||
import { useSources } from './source';
|
||||
import { useSource, useSources } from './source';
|
||||
import { parseTimeQuery, useTimeQuery } from './timeQuery';
|
||||
import { KubePhase } from './types';
|
||||
import { formatNumber, formatUptime } from './utils';
|
||||
|
|
@ -143,7 +149,7 @@ export const InfraPodsStatusTable = ({
|
|||
where,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
where: string;
|
||||
}) => {
|
||||
const [phaseFilter, setPhaseFilter] = React.useState('running');
|
||||
|
|
@ -532,7 +538,7 @@ const NodesTable = ({
|
|||
where,
|
||||
dateRange,
|
||||
}: {
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
where: string;
|
||||
dateRange: [Date, Date];
|
||||
}) => {
|
||||
|
|
@ -736,7 +742,7 @@ const NamespacesTable = ({
|
|||
where,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
where: string;
|
||||
}) => {
|
||||
const groupBy = ['k8s.namespace.name'];
|
||||
|
|
@ -965,8 +971,12 @@ export const resolveSourceIds = (
|
|||
|
||||
// Find a default metric source that matches the existing log source
|
||||
if (_logSourceId && !_metricSourceId) {
|
||||
const { connection, metricSourceId: correlatedMetricSourceId } =
|
||||
findSource(sources, { id: _logSourceId }) ?? {};
|
||||
const foundSource = findSource(sources, { id: _logSourceId });
|
||||
const connection = foundSource?.connection;
|
||||
const correlatedMetricSourceId =
|
||||
foundSource && isLogSource(foundSource)
|
||||
? foundSource.metricSourceId
|
||||
: undefined;
|
||||
const metricSourceId =
|
||||
(correlatedMetricSourceId &&
|
||||
findSource(sources, { id: correlatedMetricSourceId })?.id) ??
|
||||
|
|
@ -977,8 +987,12 @@ export const resolveSourceIds = (
|
|||
|
||||
// Find a default log source that matches the existing metric source
|
||||
if (!_logSourceId && _metricSourceId) {
|
||||
const { connection, logSourceId: correlatedLogSourceId } =
|
||||
findSource(sources, { id: _metricSourceId }) ?? {};
|
||||
const foundSource = findSource(sources, { id: _metricSourceId });
|
||||
const connection = foundSource?.connection;
|
||||
const correlatedLogSourceId =
|
||||
foundSource && isMetricSource(foundSource)
|
||||
? foundSource.logSourceId
|
||||
: undefined;
|
||||
const logSourceId =
|
||||
(correlatedLogSourceId &&
|
||||
findSource(sources, { id: correlatedLogSourceId })?.id) ??
|
||||
|
|
@ -989,10 +1003,10 @@ export const resolveSourceIds = (
|
|||
|
||||
// Find any two correlated log and metric sources
|
||||
const logSourceWithMetricSource = sources.find(
|
||||
s =>
|
||||
(s): s is TLogSource =>
|
||||
s.kind === SourceKind.Log &&
|
||||
s.metricSourceId &&
|
||||
findSource(sources, { id: s.metricSourceId }),
|
||||
!!s.metricSourceId &&
|
||||
!!findSource(sources, { id: s.metricSourceId }),
|
||||
);
|
||||
|
||||
if (logSourceWithMetricSource) {
|
||||
|
|
@ -1036,8 +1050,14 @@ function KubernetesDashboardPage() {
|
|||
[_logSourceId, _metricSourceId, sources],
|
||||
);
|
||||
|
||||
const logSource = sources?.find(s => s.id === logSourceId);
|
||||
const metricSource = sources?.find(s => s.id === metricSourceId);
|
||||
const { data: logSource } = useSource({
|
||||
id: logSourceId,
|
||||
kinds: [SourceKind.Log],
|
||||
});
|
||||
const { data: metricSource } = useSource({
|
||||
id: metricSourceId,
|
||||
kinds: [SourceKind.Metric],
|
||||
});
|
||||
|
||||
const { control } = useForm({
|
||||
values: {
|
||||
|
|
@ -1077,8 +1097,12 @@ function KubernetesDashboardPage() {
|
|||
// Default to the log source's correlated metric source
|
||||
if (watchedLogSourceId && sources) {
|
||||
const logSource = findSource(sources, { id: watchedLogSourceId });
|
||||
const correlatedMetricSource = logSource?.metricSourceId
|
||||
? findSource(sources, { id: logSource.metricSourceId })
|
||||
const logSourceMetricSourceId =
|
||||
logSource && isLogSource(logSource)
|
||||
? logSource.metricSourceId
|
||||
: undefined;
|
||||
const correlatedMetricSource = logSourceMetricSourceId
|
||||
? findSource(sources, { id: logSourceMetricSourceId })
|
||||
: undefined;
|
||||
if (
|
||||
correlatedMetricSource &&
|
||||
|
|
@ -1119,8 +1143,12 @@ function KubernetesDashboardPage() {
|
|||
// Default to the metric source's correlated log source
|
||||
if (watchedMetricSourceId && sources) {
|
||||
const metricSource = findSource(sources, { id: watchedMetricSourceId });
|
||||
const correlatedLogSource = metricSource?.logSourceId
|
||||
? findSource(sources, { id: metricSource.logSourceId })
|
||||
const metricSourceLogSourceId =
|
||||
metricSource && isMetricSource(metricSource)
|
||||
? metricSource.logSourceId
|
||||
: undefined;
|
||||
const correlatedLogSource = metricSourceLogSourceId
|
||||
? findSource(sources, { id: metricSourceLogSourceId })
|
||||
: undefined;
|
||||
if (
|
||||
correlatedLogSource &&
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import * as React from 'react';
|
|||
import { StringParam, useQueryParam, withDefault } from 'use-query-params';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { convertDateRangeToGranularityString } from '@hyperdx/common-utils/dist/core/utils';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TLogSource, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Badge,
|
||||
Card,
|
||||
|
|
@ -55,7 +55,7 @@ const NamespaceDetails = ({
|
|||
}: {
|
||||
name: string;
|
||||
dateRange: [Date, Date];
|
||||
metricSource?: TSource;
|
||||
metricSource?: TMetricSource;
|
||||
}) => {
|
||||
const where = `${metricSource?.resourceAttributesExpression}.k8s.namespace.name:"${name}"`;
|
||||
const groupBy = ['k8s.namespace.name'];
|
||||
|
|
@ -138,7 +138,7 @@ function NamespaceLogs({
|
|||
where,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
where: string;
|
||||
}) {
|
||||
const [resultType, setResultType] = React.useState<'all' | 'error'>('all');
|
||||
|
|
@ -226,8 +226,8 @@ export default function NamespaceDetailsSidePanel({
|
|||
metricSource,
|
||||
logSource,
|
||||
}: {
|
||||
metricSource: TSource;
|
||||
logSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
logSource: TLogSource;
|
||||
}) {
|
||||
const [namespaceName, setNamespaceName] = useQueryParam(
|
||||
'namespaceName',
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import * as React from 'react';
|
|||
import { StringParam, useQueryParam, withDefault } from 'use-query-params';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { convertDateRangeToGranularityString } from '@hyperdx/common-utils/dist/core/utils';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TLogSource, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Badge,
|
||||
Card,
|
||||
|
|
@ -56,7 +56,7 @@ const NodeDetails = ({
|
|||
}: {
|
||||
name: string;
|
||||
dateRange: [Date, Date];
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
}) => {
|
||||
const where = `${metricSource.resourceAttributesExpression}.k8s.node.name:"${name}"`;
|
||||
const groupBy = ['k8s.node.name'];
|
||||
|
|
@ -151,7 +151,7 @@ function NodeLogs({
|
|||
where,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
where: string;
|
||||
}) {
|
||||
const [resultType, setResultType] = React.useState<'all' | 'error'>('all');
|
||||
|
|
@ -239,8 +239,8 @@ export default function NodeDetailsSidePanel({
|
|||
metricSource,
|
||||
logSource,
|
||||
}: {
|
||||
metricSource: TSource;
|
||||
logSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
logSource: TLogSource;
|
||||
}) {
|
||||
const [nodeName, setNodeName] = useQueryParam(
|
||||
'nodeName',
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import * as React from 'react';
|
|||
import { StringParam, useQueryParam, withDefault } from 'use-query-params';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { convertDateRangeToGranularityString } from '@hyperdx/common-utils/dist/core/utils';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TLogSource, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Box,
|
||||
Card,
|
||||
|
|
@ -56,7 +56,7 @@ const PodDetails = ({
|
|||
podName,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
podName: string;
|
||||
}) => {
|
||||
const { data: logsData } = useV2LogBatch<{
|
||||
|
|
@ -134,7 +134,7 @@ function PodLogs({
|
|||
onRowClick,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
where: string;
|
||||
rowId: string | null;
|
||||
onRowClick: (rowWhere: RowWhereResult) => void;
|
||||
|
|
@ -220,8 +220,8 @@ export default function PodDetailsSidePanel({
|
|||
logSource,
|
||||
metricSource,
|
||||
}: {
|
||||
logSource: TSource;
|
||||
metricSource: TSource;
|
||||
logSource: TLogSource;
|
||||
metricSource: TMetricSource;
|
||||
}) {
|
||||
const [podName, setPodName] = useQueryParam(
|
||||
'podName',
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import dynamic from 'next/dynamic';
|
||||
import { pick } from 'lodash';
|
||||
import {
|
||||
parseAsString,
|
||||
parseAsStringEnum,
|
||||
|
|
@ -11,15 +10,31 @@ import { UseControllerProps, useForm, useWatch } from 'react-hook-form';
|
|||
import SqlString from 'sqlstring';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { convertDateRangeToGranularityString } from '@hyperdx/common-utils/dist/core/utils';
|
||||
import type { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
CteChartConfig,
|
||||
DisplayType,
|
||||
Filter,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
PresetDashboard,
|
||||
SourceKind,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
// Extract common chart config fields from a source.
|
||||
// This avoids union type issues with lodash `pick` on discriminated unions.
|
||||
function pickSourceConfigFields(source: TSource) {
|
||||
return {
|
||||
timestampValueExpression: source.timestampValueExpression,
|
||||
connection: source.connection,
|
||||
from: source.from,
|
||||
...(isLogSource(source) || isTraceSource(source)
|
||||
? { implicitColumnExpression: source.implicitColumnExpression }
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
import {
|
||||
ActionIcon,
|
||||
Box,
|
||||
|
|
@ -146,7 +161,10 @@ function ServiceSelectControlled({
|
|||
dateRange: [Date, Date];
|
||||
onCreate?: () => void;
|
||||
} & UseControllerProps<any>) {
|
||||
const { data: source } = useSource({ id: sourceId });
|
||||
const { data: source } = useSource({
|
||||
id: sourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
const queriedConfig = {
|
||||
|
|
@ -213,7 +231,7 @@ export function EndpointLatencyChart({
|
|||
appliedConfig = {},
|
||||
extraFilters = [],
|
||||
}: {
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
appliedConfig?: AppliedConfig;
|
||||
extraFilters?: Filter[];
|
||||
|
|
@ -260,12 +278,7 @@ export function EndpointLatencyChart({
|
|||
]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -318,12 +331,7 @@ export function EndpointLatencyChart({
|
|||
toolbarSuffix={[displaySwitcher]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -358,7 +366,10 @@ function HttpTab({
|
|||
searchedTimeRange: [Date, Date];
|
||||
appliedConfig: AppliedConfig;
|
||||
}) {
|
||||
const { data: source } = useSource({ id: appliedConfig.source });
|
||||
const { data: source } = useSource({
|
||||
id: appliedConfig.source,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
const [reqChartType, setReqChartType] = useQueryState(
|
||||
|
|
@ -385,12 +396,7 @@ function HttpTab({
|
|||
if (reqChartType === 'overall') {
|
||||
return {
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -421,7 +427,10 @@ function HttpTab({
|
|||
}
|
||||
return {
|
||||
timestampValueExpression: 'series_time_bucket',
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
implicitColumnExpression:
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
connection: source.connection,
|
||||
source: source.id,
|
||||
with: [
|
||||
|
|
@ -429,7 +438,10 @@ function HttpTab({
|
|||
name: 'error_series',
|
||||
chartConfig: {
|
||||
timestampValueExpression: source?.timestampValueExpression || '',
|
||||
implicitColumnExpression: source?.implicitColumnExpression || '',
|
||||
implicitColumnExpression:
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source?.implicitColumnExpression || ''
|
||||
: '',
|
||||
connection: source?.connection ?? '',
|
||||
from: source?.from ?? {
|
||||
databaseName: '',
|
||||
|
|
@ -595,12 +607,7 @@ function HttpTab({
|
|||
sourceId={source.id}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -639,12 +646,7 @@ function HttpTab({
|
|||
]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -724,7 +726,7 @@ function HttpTab({
|
|||
</ChartBox>
|
||||
</Grid.Col>
|
||||
<Grid.Col span={6}>
|
||||
{source && (
|
||||
{source && isTraceSource(source) && (
|
||||
<EndpointLatencyChart
|
||||
appliedConfig={appliedConfig}
|
||||
dateRange={searchedTimeRange}
|
||||
|
|
@ -773,12 +775,7 @@ function HttpTab({
|
|||
]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -870,7 +867,10 @@ function DatabaseTab({
|
|||
searchedTimeRange: [Date, Date];
|
||||
appliedConfig: AppliedConfig;
|
||||
}) {
|
||||
const { data: source } = useSource({ id: appliedConfig.source });
|
||||
const { data: source } = useSource({
|
||||
id: appliedConfig.source,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
const [chartType, setChartType] = useState<'table' | 'list'>('list');
|
||||
|
|
@ -891,12 +891,7 @@ function DatabaseTab({
|
|||
name: 'queries_by_total_time',
|
||||
isSubquery: true,
|
||||
chartConfig: {
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -1014,12 +1009,7 @@ function DatabaseTab({
|
|||
name: 'queries_by_total_count',
|
||||
isSubquery: true,
|
||||
chartConfig: {
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -1193,12 +1183,7 @@ function DatabaseTab({
|
|||
]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) ||
|
||||
|
|
@ -1279,12 +1264,7 @@ function DatabaseTab({
|
|||
]}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) ||
|
||||
|
|
@ -1366,7 +1346,10 @@ function ErrorsTab({
|
|||
searchedTimeRange: [Date, Date];
|
||||
appliedConfig: AppliedConfig;
|
||||
}) {
|
||||
const { data: source } = useSource({ id: appliedConfig.source });
|
||||
const { data: source } = useSource({
|
||||
id: appliedConfig.source,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
return (
|
||||
|
|
@ -1379,12 +1362,7 @@ function ErrorsTab({
|
|||
sourceId={source.id}
|
||||
config={{
|
||||
source: source.id,
|
||||
...pick(source, [
|
||||
'timestampValueExpression',
|
||||
'implicitColumnExpression',
|
||||
'connection',
|
||||
'from',
|
||||
]),
|
||||
...pickSourceConfigFields(source),
|
||||
where: appliedConfig.where || '',
|
||||
whereLanguage:
|
||||
(appliedConfig.whereLanguage ?? getStoredLanguage()) || 'sql',
|
||||
|
|
@ -1403,7 +1381,10 @@ function ErrorsTab({
|
|||
},
|
||||
...getScopedFilters({ appliedConfig, expressions }),
|
||||
],
|
||||
groupBy: source.serviceNameExpression || expressions.service,
|
||||
groupBy:
|
||||
(isLogSource(source) || isTraceSource(source)
|
||||
? source.serviceNameExpression
|
||||
: undefined) || expressions.service,
|
||||
dateRange: searchedTimeRange,
|
||||
}}
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -5,7 +5,8 @@ import {
|
|||
DateRange,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
TSource,
|
||||
TSessionSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { ActionIcon, Button, Drawer } from '@mantine/core';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
|
|
@ -31,8 +32,8 @@ export default function SessionSidePanel({
|
|||
generateChartUrl,
|
||||
zIndex = 100,
|
||||
}: {
|
||||
traceSource: TSource;
|
||||
sessionSource: TSource;
|
||||
traceSource: TTraceSource;
|
||||
sessionSource: TSessionSource;
|
||||
sessionId: string;
|
||||
session: Session;
|
||||
dateRange: DateRange['dateRange'];
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ import {
|
|||
DateRange,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
TSource,
|
||||
TSessionSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
ActionIcon,
|
||||
|
|
@ -55,7 +56,7 @@ function useSessionChartConfigs({
|
|||
end,
|
||||
tab,
|
||||
}: {
|
||||
traceSource: TSource;
|
||||
traceSource: TTraceSource;
|
||||
rumSessionId: string;
|
||||
where: string;
|
||||
whereLanguage?: SearchConditionLanguage;
|
||||
|
|
@ -244,8 +245,8 @@ export default function SessionSubpanel({
|
|||
whereLanguage = 'lucene',
|
||||
onLanguageChange,
|
||||
}: {
|
||||
traceSource: TSource;
|
||||
sessionSource: TSource;
|
||||
traceSource: TTraceSource;
|
||||
sessionSource: TSessionSource;
|
||||
session: { serviceName: string };
|
||||
generateSearchUrl?: (query?: string, timeRange?: [Date, Date]) => string;
|
||||
generateChartUrl?: (config: {
|
||||
|
|
|
|||
|
|
@ -249,10 +249,12 @@ export default function SessionsPage() {
|
|||
const sourceId = useWatch({ control, name: 'source' });
|
||||
const { data: sessionSource, isPending: isSessionSourceLoading } = useSource({
|
||||
id: sourceId,
|
||||
kinds: [SourceKind.Session],
|
||||
});
|
||||
|
||||
const { data: traceTrace } = useSource({
|
||||
id: sessionSource?.traceSourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
|
||||
// Get all sources and select the first session type source by default
|
||||
|
|
@ -376,7 +378,7 @@ export default function SessionsPage() {
|
|||
|
||||
const { data: tableData, isLoading: isSessionsLoading } = useSessions({
|
||||
dateRange: searchedTimeRange,
|
||||
sessionSource: sessionSource,
|
||||
sessionSource,
|
||||
traceSource: traceTrace,
|
||||
// TODO: if selectedSession is not null, we should filter by that session id
|
||||
where: appliedConfig.where as SearchCondition,
|
||||
|
|
@ -472,16 +474,6 @@ export default function SessionsPage() {
|
|||
</Group>
|
||||
) : (
|
||||
<>
|
||||
{sessionSource && sessionSource.kind !== SourceKind.Session && (
|
||||
<Alert
|
||||
icon={<IconInfoCircleFilled size={16} />}
|
||||
color="gray"
|
||||
py="xs"
|
||||
mt="md"
|
||||
>
|
||||
Please select a valid session source
|
||||
</Alert>
|
||||
)}
|
||||
{!sessions.length ? (
|
||||
<SessionSetupInstructions />
|
||||
) : (
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { renderHook } from '@testing-library/react';
|
||||
|
||||
import * as sourceModule from '@/source';
|
||||
|
|
@ -159,6 +160,7 @@ describe('useDefaultOrderBy', () => {
|
|||
for (const testCase of testCases) {
|
||||
it(`${testCase.sortingKey}`, () => {
|
||||
const mockSource = {
|
||||
kind: SourceKind.Log,
|
||||
timestampValueExpression:
|
||||
testCase.timestampValueExpression || 'Timestamp',
|
||||
displayedTimestampValueExpression:
|
||||
|
|
@ -226,6 +228,7 @@ describe('useDefaultOrderBy', () => {
|
|||
|
||||
it('should return orderByExpression when set on the source', () => {
|
||||
const mockSource = {
|
||||
kind: SourceKind.Log,
|
||||
timestampValueExpression: 'Timestamp',
|
||||
orderByExpression: 'Timestamp ASC',
|
||||
};
|
||||
|
|
@ -253,6 +256,7 @@ describe('useDefaultOrderBy', () => {
|
|||
|
||||
it('should fall back to optimized order when orderByExpression is empty', () => {
|
||||
const mockSource = {
|
||||
kind: SourceKind.Log,
|
||||
timestampValueExpression: 'Timestamp',
|
||||
orderByExpression: '',
|
||||
};
|
||||
|
|
@ -280,6 +284,7 @@ describe('useDefaultOrderBy', () => {
|
|||
|
||||
it('should fall back to optimized order when orderByExpression is undefined', () => {
|
||||
const mockSource = {
|
||||
kind: SourceKind.Log,
|
||||
timestampValueExpression: 'Timestamp',
|
||||
};
|
||||
|
||||
|
|
@ -306,6 +311,7 @@ describe('useDefaultOrderBy', () => {
|
|||
|
||||
it('should handle complex Timestamp expressions', () => {
|
||||
const mockSource = {
|
||||
kind: SourceKind.Log,
|
||||
timestampValueExpression: 'toDateTime(timestamp_ms / 1000)',
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import type { ColumnMeta } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import type { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import type { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { renderHook } from '@testing-library/react';
|
||||
|
||||
|
|
@ -15,7 +15,7 @@ function removeAllWhitespace(str: string) {
|
|||
}
|
||||
|
||||
describe('Service Dashboard', () => {
|
||||
const mockSource: TSource = {
|
||||
const mockSource: TTraceSource = {
|
||||
id: 'test-source',
|
||||
name: 'Test Source',
|
||||
kind: SourceKind.Trace,
|
||||
|
|
@ -25,13 +25,16 @@ describe('Service Dashboard', () => {
|
|||
},
|
||||
connection: 'test-connection',
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Timestamp',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
serviceNameExpression: 'ServiceName',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
severityTextExpression: 'StatusCode',
|
||||
statusCodeExpression: 'StatusCode',
|
||||
};
|
||||
|
||||
describe('getExpressions', () => {
|
||||
|
|
|
|||
|
|
@ -1,11 +1,11 @@
|
|||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import { getEventBody } from '../source';
|
||||
|
||||
describe('getEventBody', () => {
|
||||
// Added to prevent regression back to HDX-3361
|
||||
it('returns spanNameExpression for trace kind source when both bodyExpression and spanNameExpression are present', () => {
|
||||
const source: TSource = {
|
||||
const source = {
|
||||
kind: SourceKind.Trace,
|
||||
from: {
|
||||
databaseName: 'default',
|
||||
|
|
@ -15,9 +15,14 @@ describe('getEventBody', () => {
|
|||
connection: 'test-connection',
|
||||
name: 'Traces',
|
||||
id: 'test-source-id',
|
||||
bodyExpression: 'Body',
|
||||
spanNameExpression: 'SpanName',
|
||||
};
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanKindExpression: 'SpanKind',
|
||||
} as TTraceSource;
|
||||
|
||||
const result = getEventBody(source);
|
||||
|
||||
|
|
|
|||
|
|
@ -3,10 +3,12 @@ import { aliasMapToWithClauses } from '@hyperdx/common-utils/dist/core/utils';
|
|||
import {
|
||||
AlertInterval,
|
||||
Filter,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Paper } from '@mantine/core';
|
||||
|
||||
import { DBTimeChart } from '@/components/DBTimeChart';
|
||||
|
|
@ -41,7 +43,9 @@ export const AlertPreviewChart = ({
|
|||
const resolvedSelect =
|
||||
(select && select.trim().length > 0
|
||||
? select
|
||||
: source.defaultTableSelectExpression) ?? '';
|
||||
: isLogSource(source) || isTraceSource(source)
|
||||
? source.defaultTableSelectExpression
|
||||
: undefined) ?? '';
|
||||
|
||||
const { data: aliasMap } = useAliasMapFromChartConfig({
|
||||
select: resolvedSelect,
|
||||
|
|
@ -66,7 +70,10 @@ export const AlertPreviewChart = ({
|
|||
dateRange: intervalToDateRange(interval),
|
||||
granularity: intervalToGranularity(interval),
|
||||
filters: filters || undefined,
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
implicitColumnExpression:
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
groupBy,
|
||||
with: aliasWith,
|
||||
select: [
|
||||
|
|
|
|||
|
|
@ -6,7 +6,12 @@ import {
|
|||
} from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { MACRO_SUGGESTIONS } from '@hyperdx/common-utils/dist/macros';
|
||||
import { QUERY_PARAMS_BY_DISPLAY_TYPE } from '@hyperdx/common-utils/dist/rawSqlParams';
|
||||
import { DisplayType, SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
DisplayType,
|
||||
isLogSource,
|
||||
isMetricSource,
|
||||
isTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Box, Button, Group, Stack, Text, Tooltip } from '@mantine/core';
|
||||
import { IconHelpCircle } from '@tabler/icons-react';
|
||||
|
||||
|
|
@ -100,11 +105,14 @@ export default function RawSqlChartEditor({
|
|||
.flatMap(source => {
|
||||
const tables: TableConnection[] = getAllMetricTables(source);
|
||||
|
||||
if (source.kind !== SourceKind.Metric) {
|
||||
if (!isMetricSource(source)) {
|
||||
tables.push(tcFromSource(source));
|
||||
}
|
||||
|
||||
if (source.materializedViews) {
|
||||
if (
|
||||
(isLogSource(source) || isTraceSource(source)) &&
|
||||
source.materializedViews
|
||||
) {
|
||||
tables.push(
|
||||
...source.materializedViews.map(mv => ({
|
||||
databaseName: mv.databaseName,
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import type {
|
|||
BuilderChartConfig,
|
||||
BuilderSavedChartConfig,
|
||||
RawSqlSavedChartConfig,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
|
|
@ -38,14 +39,14 @@ const logSource: TSource = {
|
|||
implicitColumnExpression: 'Body',
|
||||
};
|
||||
|
||||
const metricSource: TSource = {
|
||||
const metricSource: TMetricSource = {
|
||||
id: 'source-metric',
|
||||
name: 'Metric Source',
|
||||
kind: SourceKind.Metric,
|
||||
connection: 'conn-1',
|
||||
from: { databaseName: 'db', tableName: '' },
|
||||
timestampValueExpression: 'TimeUnix',
|
||||
metricTables: { gauge: 'gauge_table' } as TSource['metricTables'],
|
||||
metricTables: { gauge: 'gauge_table' } as TMetricSource['metricTables'],
|
||||
resourceAttributesExpression: 'ResourceAttributes',
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,9 @@ import {
|
|||
BuilderSavedChartConfig,
|
||||
ChartConfigWithDateRange,
|
||||
DisplayType,
|
||||
isLogSource,
|
||||
isMetricSource,
|
||||
isTraceSource,
|
||||
RawSqlChartConfig,
|
||||
RawSqlSavedChartConfig,
|
||||
SavedChartConfig,
|
||||
|
|
@ -135,11 +138,16 @@ export function convertFormStateToChartConfig(
|
|||
timestampValueExpression: source.timestampValueExpression,
|
||||
dateRange,
|
||||
connection: source.connection,
|
||||
implicitColumnExpression: source.implicitColumnExpression,
|
||||
metricTables: source.metricTables,
|
||||
implicitColumnExpression:
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.implicitColumnExpression
|
||||
: undefined,
|
||||
metricTables: isMetricSource(source) ? source.metricTables : undefined,
|
||||
where: form.where ?? '',
|
||||
select: isSelectEmpty
|
||||
? source.defaultTableSelectExpression || ''
|
||||
? ((isLogSource(source) || isTraceSource(source)) &&
|
||||
source.defaultTableSelectExpression) ||
|
||||
''
|
||||
: mergedSelect,
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@ import { useForm, useWatch } from 'react-hook-form';
|
|||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Badge, Flex, Group, SegmentedControl } from '@mantine/core';
|
||||
|
|
@ -228,7 +230,10 @@ export default function ContextSubpanel({
|
|||
connection: source.connection,
|
||||
from: source.from,
|
||||
timestampValueExpression: source.timestampValueExpression,
|
||||
select: source.defaultTableSelectExpression || '',
|
||||
select:
|
||||
((isLogSource(source) || isTraceSource(source)) &&
|
||||
source.defaultTableSelectExpression) ||
|
||||
'',
|
||||
limit: { limit: 200 },
|
||||
orderBy: `${source.timestampValueExpression} DESC`,
|
||||
where: whereClause,
|
||||
|
|
@ -249,10 +254,7 @@ export default function ContextSubpanel({
|
|||
originalLanguage,
|
||||
newDateRange,
|
||||
contextBy,
|
||||
source.connection,
|
||||
source.defaultTableSelectExpression,
|
||||
source.from,
|
||||
source.timestampValueExpression,
|
||||
source,
|
||||
]);
|
||||
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -238,12 +238,15 @@ function ChartSeriesEditorComponent({
|
|||
});
|
||||
const groupBy = useWatch({ control, name: 'groupBy' });
|
||||
|
||||
const metricTableSource =
|
||||
tableSource?.kind === SourceKind.Metric ? tableSource : undefined;
|
||||
|
||||
const { data: attributeSuggestions, isLoading: isLoadingAttributes } =
|
||||
useFetchMetricResourceAttrs({
|
||||
databaseName,
|
||||
metricType,
|
||||
metricName,
|
||||
tableSource,
|
||||
tableSource: metricTableSource,
|
||||
isSql: aggConditionLanguage === 'sql',
|
||||
});
|
||||
|
||||
|
|
@ -256,7 +259,7 @@ function ChartSeriesEditorComponent({
|
|||
databaseName,
|
||||
metricType,
|
||||
metricName,
|
||||
tableSource,
|
||||
tableSource: metricTableSource,
|
||||
});
|
||||
|
||||
const handleAddToWhere = useCallback(
|
||||
|
|
@ -1020,7 +1023,11 @@ export default function EditTimeChartForm({
|
|||
connection: tableSource.connection,
|
||||
from: tableSource.from,
|
||||
limit: { limit: 200 },
|
||||
select: tableSource?.defaultTableSelectExpression || '',
|
||||
select:
|
||||
((tableSource?.kind === SourceKind.Log ||
|
||||
tableSource?.kind === SourceKind.Trace) &&
|
||||
tableSource.defaultTableSelectExpression) ||
|
||||
'',
|
||||
filters: seriesToFilters(queriedConfig.select),
|
||||
filtersLogicalOperator: 'OR' as const,
|
||||
groupBy: undefined,
|
||||
|
|
@ -1364,10 +1371,17 @@ export default function EditTimeChartForm({
|
|||
control={control}
|
||||
name="select"
|
||||
placeholder={
|
||||
tableSource?.defaultTableSelectExpression ||
|
||||
((tableSource?.kind === SourceKind.Log ||
|
||||
tableSource?.kind === SourceKind.Trace) &&
|
||||
tableSource.defaultTableSelectExpression) ||
|
||||
'SELECT Columns'
|
||||
}
|
||||
defaultValue={tableSource?.defaultTableSelectExpression}
|
||||
defaultValue={
|
||||
tableSource?.kind === SourceKind.Log ||
|
||||
tableSource?.kind === SourceKind.Trace
|
||||
? tableSource.defaultTableSelectExpression
|
||||
: undefined
|
||||
}
|
||||
onSubmit={onSubmit}
|
||||
label="SELECT"
|
||||
/>
|
||||
|
|
@ -1649,7 +1663,10 @@ export default function EditTimeChartForm({
|
|||
typeof queriedConfig.select === 'string' &&
|
||||
queriedConfig.select
|
||||
? queriedConfig.select
|
||||
: tableSource?.defaultTableSelectExpression || '',
|
||||
: ((tableSource?.kind === SourceKind.Log ||
|
||||
tableSource?.kind === SourceKind.Trace) &&
|
||||
tableSource.defaultTableSelectExpression) ||
|
||||
'',
|
||||
groupBy: undefined,
|
||||
having: undefined,
|
||||
granularity: undefined,
|
||||
|
|
|
|||
|
|
@ -4,7 +4,13 @@ import {
|
|||
convertDateRangeToGranularityString,
|
||||
Granularity,
|
||||
} from '@hyperdx/common-utils/dist/core/utils';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SourceKind,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Box,
|
||||
Card,
|
||||
|
|
@ -35,7 +41,7 @@ const InfraSubpanelGroup = ({
|
|||
where,
|
||||
}: {
|
||||
fieldPrefix: string;
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
timestamp: any;
|
||||
title: string;
|
||||
where: string;
|
||||
|
|
@ -205,7 +211,14 @@ export default ({
|
|||
rowId: string | undefined | null;
|
||||
source: TSource;
|
||||
}) => {
|
||||
const { data: metricSource } = useSource({ id: source.metricSourceId });
|
||||
const metricSourceId =
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.metricSourceId
|
||||
: undefined;
|
||||
const { data: metricSource } = useSource({
|
||||
id: metricSourceId,
|
||||
kinds: [SourceKind.Metric],
|
||||
});
|
||||
|
||||
const podUid = rowData?.__hdx_resource_attributes['k8s.pod.uid'];
|
||||
const nodeName = rowData?.__hdx_resource_attributes['k8s.node.name'];
|
||||
|
|
@ -225,7 +238,7 @@ export default ({
|
|||
metricSource={metricSource}
|
||||
/>
|
||||
)}
|
||||
{source && (
|
||||
{source && source.kind === SourceKind.Log && (
|
||||
<Card p="md" mt="xl">
|
||||
<Card.Section p="md" py="xs">
|
||||
Pod Timeline
|
||||
|
|
|
|||
|
|
@ -1,7 +1,12 @@
|
|||
import { useMemo } from 'react';
|
||||
import { flatten } from 'flat';
|
||||
import type { ResponseJSON } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Box } from '@mantine/core';
|
||||
|
||||
import { useQueriedChartConfig } from '@/hooks/useChartConfig';
|
||||
|
|
@ -35,11 +40,20 @@ export function useRowData({
|
|||
}) {
|
||||
const eventBodyExpr = getEventBody(source);
|
||||
|
||||
const searchedTraceIdExpr = source.traceIdExpression;
|
||||
const searchedSpanIdExpr = source.spanIdExpression;
|
||||
const searchedTraceIdExpr =
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.traceIdExpression
|
||||
: undefined;
|
||||
const searchedSpanIdExpr =
|
||||
isLogSource(source) || isTraceSource(source)
|
||||
? source.spanIdExpression
|
||||
: undefined;
|
||||
|
||||
const severityTextExpr =
|
||||
source.severityTextExpression || source.statusCodeExpression;
|
||||
const severityTextExpr = isLogSource(source)
|
||||
? source.severityTextExpression
|
||||
: isTraceSource(source)
|
||||
? source.statusCodeExpression
|
||||
: undefined;
|
||||
|
||||
const selectHighlightedRowAttributes =
|
||||
source.kind === SourceKind.Trace || source.kind === SourceKind.Log
|
||||
|
|
@ -91,7 +105,8 @@ export function useRowData({
|
|||
},
|
||||
]
|
||||
: []),
|
||||
...(source.serviceNameExpression
|
||||
...((isLogSource(source) || isTraceSource(source)) &&
|
||||
source.serviceNameExpression
|
||||
? [
|
||||
{
|
||||
valueExpression: source.serviceNameExpression,
|
||||
|
|
@ -107,7 +122,8 @@ export function useRowData({
|
|||
},
|
||||
]
|
||||
: []),
|
||||
...(source.eventAttributesExpression
|
||||
...((isLogSource(source) || isTraceSource(source)) &&
|
||||
source.eventAttributesExpression
|
||||
? [
|
||||
{
|
||||
valueExpression: source.eventAttributesExpression,
|
||||
|
|
|
|||
|
|
@ -53,7 +53,10 @@ export function RowOverviewPanel({
|
|||
|
||||
const jsonColumns = getJSONColumnNames(data?.meta);
|
||||
|
||||
const eventAttributesExpr = source.eventAttributesExpression;
|
||||
const eventAttributesExpr =
|
||||
source.kind === SourceKind.Log || source.kind === SourceKind.Trace
|
||||
? source.eventAttributesExpression
|
||||
: undefined;
|
||||
|
||||
const firstRow = useMemo(() => {
|
||||
const firstRow = { ...(data?.data?.[0] ?? {}) };
|
||||
|
|
|
|||
|
|
@ -12,7 +12,15 @@ import { isString } from 'lodash';
|
|||
import { parseAsStringEnum, useQueryState } from 'nuqs';
|
||||
import { ErrorBoundary } from 'react-error-boundary';
|
||||
import { useHotkeys } from 'react-hotkeys-hook';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
isLogSource,
|
||||
isSessionSource,
|
||||
isTraceSource,
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { BuilderChartConfigWithDateRange } from '@hyperdx/common-utils/dist/types';
|
||||
import { Box, Drawer, Flex, Stack } from '@mantine/core';
|
||||
|
||||
|
|
@ -66,7 +74,7 @@ export type RowSidePanelContextProps = {
|
|||
dbSqlRowTableConfig?: BuilderChartConfigWithDateRange;
|
||||
isChildModalOpen?: boolean;
|
||||
setChildModalOpen?: (open: boolean) => void;
|
||||
source?: TSource;
|
||||
source?: TLogSource | TTraceSource;
|
||||
};
|
||||
|
||||
export const RowSidePanelContext = createContext<RowSidePanelContextProps>({});
|
||||
|
|
@ -145,14 +153,21 @@ const DBRowSidePanel = ({
|
|||
);
|
||||
|
||||
const hasOverviewPanel = useMemo(() => {
|
||||
if (
|
||||
source.resourceAttributesExpression ||
|
||||
source.eventAttributesExpression
|
||||
if (isLogSource(source) || isTraceSource(source)) {
|
||||
if (
|
||||
source.resourceAttributesExpression ||
|
||||
source.eventAttributesExpression
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
} else if (
|
||||
source.kind === SourceKind.Metric &&
|
||||
source.resourceAttributesExpression
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}, [source.eventAttributesExpression, source.resourceAttributesExpression]);
|
||||
}, [source]);
|
||||
|
||||
const defaultTab =
|
||||
source.kind === 'trace'
|
||||
|
|
@ -195,8 +210,9 @@ const DBRowSidePanel = ({
|
|||
normalizedRow?.['__hdx_severity_text'];
|
||||
|
||||
const highlightedAttributeValues = useMemo(() => {
|
||||
const attributeExpressions: TSource['highlightedRowAttributeExpressions'] =
|
||||
[];
|
||||
const attributeExpressions: NonNullable<
|
||||
(TLogSource | TTraceSource)['highlightedRowAttributeExpressions']
|
||||
> = [];
|
||||
if (
|
||||
(source.kind === SourceKind.Trace || source.kind === SourceKind.Log) &&
|
||||
source.highlightedRowAttributeExpressions
|
||||
|
|
@ -206,7 +222,10 @@ const DBRowSidePanel = ({
|
|||
|
||||
// Add service name expression to all sources, to maintain compatibility with
|
||||
// the behavior prior to the addition of highlightedRowAttributeExpressions
|
||||
if (source.serviceNameExpression) {
|
||||
if (
|
||||
(isLogSource(source) || isTraceSource(source)) &&
|
||||
source.serviceNameExpression
|
||||
) {
|
||||
attributeExpressions.push({
|
||||
sqlExpression: source.serviceNameExpression,
|
||||
});
|
||||
|
|
@ -240,15 +259,19 @@ const DBRowSidePanel = ({
|
|||
const focusDate = timestampDate;
|
||||
const traceId: string | undefined = normalizedRow?.['__hdx_trace_id'];
|
||||
|
||||
const childSourceId =
|
||||
source.kind === 'log'
|
||||
? source.traceSourceId
|
||||
: source.kind === 'trace'
|
||||
? source.logSourceId
|
||||
: undefined;
|
||||
const childSourceId = isLogSource(source)
|
||||
? source.traceSourceId
|
||||
: isTraceSource(source)
|
||||
? source.logSourceId
|
||||
: undefined;
|
||||
|
||||
const traceSourceId =
|
||||
source.kind === 'trace' ? source.id : source.traceSourceId;
|
||||
const traceSourceId = isTraceSource(source)
|
||||
? source.id
|
||||
: isLogSource(source)
|
||||
? source.traceSourceId
|
||||
: isSessionSource(source)
|
||||
? source.traceSourceId
|
||||
: undefined;
|
||||
|
||||
const enableServiceMap = traceId && traceSourceId;
|
||||
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ import { splitAndTrimWithBracket } from '@hyperdx/common-utils/dist/core/utils';
|
|||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
SelectList,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
|
|
@ -1669,7 +1670,10 @@ function DBSqlRowTableComponent({
|
|||
config,
|
||||
samples: 10_000,
|
||||
bodyValueExpression: patternColumn ?? '',
|
||||
severityTextExpression: source?.severityTextExpression ?? '',
|
||||
severityTextExpression:
|
||||
(source?.kind === SourceKind.Log
|
||||
? source.severityTextExpression
|
||||
: undefined) ?? '',
|
||||
totalCount: undefined,
|
||||
enabled: denoiseResults,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1120,29 +1120,33 @@ const DBSearchPageFiltersComponent = ({
|
|||
[filterState],
|
||||
);
|
||||
|
||||
const parentSpanIdExpr =
|
||||
source?.kind === SourceKind.Trace
|
||||
? source.parentSpanIdExpression
|
||||
: undefined;
|
||||
|
||||
const setRootSpansOnly = useCallback(
|
||||
(rootSpansOnly: boolean) => {
|
||||
if (!source?.parentSpanIdExpression) return;
|
||||
if (!parentSpanIdExpr) return;
|
||||
|
||||
if (rootSpansOnly) {
|
||||
if (columns?.some(col => col.name === IS_ROOT_SPAN_COLUMN_NAME)) {
|
||||
setFilterValue(IS_ROOT_SPAN_COLUMN_NAME, true, 'only');
|
||||
} else {
|
||||
setFilterValue(source.parentSpanIdExpression, '', 'only');
|
||||
setFilterValue(parentSpanIdExpr, '', 'only');
|
||||
}
|
||||
} else {
|
||||
clearFilter(source.parentSpanIdExpression);
|
||||
clearFilter(parentSpanIdExpr);
|
||||
clearFilter(IS_ROOT_SPAN_COLUMN_NAME);
|
||||
}
|
||||
},
|
||||
[setFilterValue, clearFilter, source, columns],
|
||||
[setFilterValue, clearFilter, parentSpanIdExpr, columns],
|
||||
);
|
||||
|
||||
const isRootSpansOnly = useMemo(() => {
|
||||
if (!source?.parentSpanIdExpression || source.kind !== SourceKind.Trace)
|
||||
return false;
|
||||
if (!parentSpanIdExpr || source?.kind !== SourceKind.Trace) return false;
|
||||
|
||||
const parentSpanIdFilter = filterState?.[source?.parentSpanIdExpression];
|
||||
const parentSpanIdFilter = filterState?.[parentSpanIdExpr];
|
||||
const isRootSpanFilter = filterState?.[IS_ROOT_SPAN_COLUMN_NAME];
|
||||
return (
|
||||
(parentSpanIdFilter?.included.size === 1 &&
|
||||
|
|
@ -1150,7 +1154,7 @@ const DBSearchPageFiltersComponent = ({
|
|||
(isRootSpanFilter?.included.size === 1 &&
|
||||
isRootSpanFilter?.included.has(true))
|
||||
);
|
||||
}, [filterState, source]);
|
||||
}, [filterState, source, parentSpanIdExpr]);
|
||||
|
||||
return (
|
||||
<Box className={classes.filtersPanel} style={{ width: `${size}%` }}>
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import { useMemo } from 'react';
|
||||
import Link from 'next/link';
|
||||
import { isTraceSource, SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { Loader } from '@mantine/core';
|
||||
|
||||
import useFieldExpressionGenerator from '@/hooks/useFieldExpressionGenerator';
|
||||
|
|
@ -19,8 +20,10 @@ export const useSessionId = ({
|
|||
dateRange: [Date, Date];
|
||||
enabled?: boolean;
|
||||
}) => {
|
||||
// trace source
|
||||
const { data: source } = useSource({ id: sourceId });
|
||||
const { data: source } = useSource({
|
||||
id: sourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
|
||||
const { getFieldExpression } = useFieldExpressionGenerator(source);
|
||||
|
||||
|
|
@ -99,9 +102,16 @@ export const DBSessionPanel = ({
|
|||
serviceName: string;
|
||||
setSubDrawerOpen: (open: boolean) => void;
|
||||
}) => {
|
||||
const { data: traceSource } = useSource({ id: traceSourceId });
|
||||
const { data: traceSource } = useSource({
|
||||
id: traceSourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { data: sessionSource, isLoading: isSessionSourceLoading } = useSource({
|
||||
id: traceSource?.sessionSourceId,
|
||||
id:
|
||||
traceSource && isTraceSource(traceSource)
|
||||
? traceSource.sessionSourceId
|
||||
: undefined,
|
||||
kinds: [SourceKind.Session],
|
||||
});
|
||||
|
||||
if (!traceSource || (!sessionSource && isSessionSourceLoading)) {
|
||||
|
|
|
|||
|
|
@ -2,7 +2,11 @@ import { useEffect, useState } from 'react';
|
|||
import { useQueryState } from 'nuqs';
|
||||
import { useForm, useWatch } from 'react-hook-form';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Button,
|
||||
Center,
|
||||
|
|
@ -112,14 +116,22 @@ export default function DBTracePanel({
|
|||
setValue: traceIdSetValue,
|
||||
} = useForm<{ traceIdExpression: string }>({
|
||||
defaultValues: {
|
||||
traceIdExpression: parentSourceData?.traceIdExpression ?? '',
|
||||
traceIdExpression:
|
||||
(parentSourceData &&
|
||||
(isLogSource(parentSourceData) || isTraceSource(parentSourceData)) &&
|
||||
parentSourceData.traceIdExpression) ||
|
||||
'',
|
||||
},
|
||||
});
|
||||
useEffect(() => {
|
||||
if (parentSourceData?.traceIdExpression) {
|
||||
if (
|
||||
parentSourceData &&
|
||||
(isLogSource(parentSourceData) || isTraceSource(parentSourceData)) &&
|
||||
parentSourceData.traceIdExpression
|
||||
) {
|
||||
traceIdSetValue('traceIdExpression', parentSourceData.traceIdExpression);
|
||||
}
|
||||
}, [parentSourceData?.traceIdExpression, traceIdSetValue]);
|
||||
}, [parentSourceData, traceIdSetValue]);
|
||||
|
||||
const [showTraceIdInput, setShowTraceIdInput] = useState(false);
|
||||
|
||||
|
|
@ -137,8 +149,11 @@ export default function DBTracePanel({
|
|||
<Flex align="center" justify="space-between" mb="sm">
|
||||
<Flex align="center">
|
||||
<Text size="xs" me="xs">
|
||||
{parentSourceData?.traceIdExpression}:{' '}
|
||||
{traceId || 'No trace id found for event'}
|
||||
{parentSourceData &&
|
||||
(isLogSource(parentSourceData) || isTraceSource(parentSourceData))
|
||||
? parentSourceData.traceIdExpression
|
||||
: ''}
|
||||
: {traceId || 'No trace id found for event'}
|
||||
</Text>
|
||||
{traceId != null && (
|
||||
<Button
|
||||
|
|
@ -175,7 +190,11 @@ export default function DBTracePanel({
|
|||
ms="sm"
|
||||
variant="primary"
|
||||
onClick={traceIdHandleSubmit(({ traceIdExpression }) => {
|
||||
if (parentSourceData != null) {
|
||||
if (
|
||||
parentSourceData &&
|
||||
(isLogSource(parentSourceData) ||
|
||||
isTraceSource(parentSourceData))
|
||||
) {
|
||||
updateTableSource({
|
||||
source: {
|
||||
...parentSourceData,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,9 @@ import {
|
|||
ChartConfigWithDateRange,
|
||||
SelectList,
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Anchor,
|
||||
|
|
@ -121,24 +123,37 @@ function getTableBody(tableModel: TSource) {
|
|||
}
|
||||
|
||||
function getConfig(
|
||||
source: TSource,
|
||||
source: TTraceSource | TLogSource,
|
||||
traceId: string,
|
||||
hiddenRowExpression?: string,
|
||||
) {
|
||||
const alias: Record<string, string> = {
|
||||
Body: getTableBody(source),
|
||||
Timestamp: getDisplayedTimestampValueExpression(source),
|
||||
Duration: source.durationExpression
|
||||
? getDurationSecondsExpression(source)
|
||||
: '',
|
||||
Duration:
|
||||
source.kind === SourceKind.Trace && source.durationExpression
|
||||
? getDurationSecondsExpression(source)
|
||||
: '',
|
||||
TraceId: source.traceIdExpression ?? '',
|
||||
SpanId: source.spanIdExpression ?? '',
|
||||
ParentSpanId: source.parentSpanIdExpression ?? '',
|
||||
StatusCode: source.statusCodeExpression ?? '',
|
||||
ParentSpanId:
|
||||
source.kind === SourceKind.Trace
|
||||
? (source.parentSpanIdExpression ?? '')
|
||||
: '',
|
||||
StatusCode:
|
||||
source.kind === SourceKind.Trace
|
||||
? (source.statusCodeExpression ?? '')
|
||||
: '',
|
||||
ServiceName: source.serviceNameExpression ?? '',
|
||||
SeverityText: source.severityTextExpression ?? '',
|
||||
SeverityText:
|
||||
source.kind === SourceKind.Log
|
||||
? (source.severityTextExpression ?? '')
|
||||
: '',
|
||||
SpanAttributes: source.eventAttributesExpression ?? '',
|
||||
SpanEvents: source.spanEventsValueExpression ?? '',
|
||||
SpanEvents:
|
||||
source.kind === SourceKind.Trace
|
||||
? (source.spanEventsValueExpression ?? '')
|
||||
: '',
|
||||
};
|
||||
|
||||
// Aliases for trace attributes must be added here to ensure
|
||||
|
|
@ -287,7 +302,7 @@ export function useEventsAroundFocus({
|
|||
enabled,
|
||||
hiddenRowExpression,
|
||||
}: {
|
||||
tableSource: TSource;
|
||||
tableSource: TTraceSource | TLogSource;
|
||||
focusDate: Date;
|
||||
dateRange: [Date, Date];
|
||||
traceId: string;
|
||||
|
|
@ -375,8 +390,8 @@ export function DBTraceWaterfallChartContainer({
|
|||
highlightedRowWhere,
|
||||
initialRowHighlightHint,
|
||||
}: {
|
||||
traceTableSource: TSource;
|
||||
logTableSource: TSource | null;
|
||||
traceTableSource: TTraceSource;
|
||||
logTableSource: TLogSource | null;
|
||||
traceId: string;
|
||||
dateRange: [Date, Date];
|
||||
focusDate: Date;
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import {
|
|||
DateRange,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
TSource,
|
||||
TLogSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Badge, Group, Text, Timeline } from '@mantine/core';
|
||||
import { useQuery, UseQueryOptions } from '@tanstack/react-query';
|
||||
|
|
@ -49,7 +49,7 @@ export const useV2LogBatch = <T = any,>(
|
|||
dateRange: DateRange['dateRange'];
|
||||
extraSelects?: BuilderChartConfigWithDateRange['select'];
|
||||
limit?: number;
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
order: 'asc' | 'desc';
|
||||
where: SearchCondition;
|
||||
whereLanguage: SearchConditionLanguage;
|
||||
|
|
@ -117,7 +117,7 @@ export const useV2LogBatch = <T = any,>(
|
|||
});
|
||||
};
|
||||
|
||||
const renderKubeEvent = (source: TSource) => (event: KubeEvent) => {
|
||||
const renderKubeEvent = (source: TLogSource) => (event: KubeEvent) => {
|
||||
let href = '#';
|
||||
try {
|
||||
// FIXME: should check if it works in v2
|
||||
|
|
@ -171,7 +171,7 @@ export const KubeTimeline = ({
|
|||
dateRange,
|
||||
}: {
|
||||
q: string;
|
||||
logSource: TSource;
|
||||
logSource: TLogSource;
|
||||
dateRange?: [Date, Date];
|
||||
anchorEvent?: AnchorEvent;
|
||||
}) => {
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import { useForm, useWatch } from 'react-hook-form';
|
|||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
TSource,
|
||||
TMetricSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Box, Group, Select } from '@mantine/core';
|
||||
|
||||
|
|
@ -12,13 +12,13 @@ import { useGetKeyValues } from '@/hooks/useMetadata';
|
|||
|
||||
type KubernetesFiltersProps = {
|
||||
dateRange: [Date, Date];
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
searchQuery: string;
|
||||
setSearchQuery: (query: string) => void;
|
||||
};
|
||||
|
||||
type FilterSelectProps = {
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
placeholder: string;
|
||||
fieldName: string;
|
||||
value: string | null;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { useState } from 'react';
|
||||
import {
|
||||
BuilderChartConfigWithOptDateRange,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { ActionIcon, Badge, Tooltip } from '@mantine/core';
|
||||
|
|
@ -65,7 +66,10 @@ export default function MVOptimizationIndicator({
|
|||
const [modalOpen, setModalOpen] = useState(false);
|
||||
const { data } = useMVOptimizationExplanation(config);
|
||||
|
||||
const mvConfigs = source.materializedViews ?? [];
|
||||
const mvConfigs =
|
||||
((source.kind === SourceKind.Log || source.kind === SourceKind.Trace) &&
|
||||
source.materializedViews) ||
|
||||
[];
|
||||
if (!mvConfigs?.length) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Badge,
|
||||
Box,
|
||||
|
|
@ -36,7 +36,7 @@ interface MetricAttributeHelperPanelProps {
|
|||
databaseName: string;
|
||||
metricType: string;
|
||||
metricName: string;
|
||||
tableSource: TSource | undefined;
|
||||
tableSource: TMetricSource | undefined;
|
||||
attributeKeys: AttributeKey[];
|
||||
isLoading?: boolean;
|
||||
language: 'sql' | 'lucene';
|
||||
|
|
@ -165,7 +165,7 @@ interface AttributeValueListProps {
|
|||
databaseName: string;
|
||||
metricType: string;
|
||||
metricName: string;
|
||||
tableSource: TSource | undefined;
|
||||
tableSource: TMetricSource | undefined;
|
||||
attribute: AttributeKey;
|
||||
language: 'sql' | 'lucene';
|
||||
onAddToWhere: (clause: string) => void;
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import { addDays, differenceInDays, subDays } from 'date-fns';
|
|||
import {
|
||||
DateRange,
|
||||
MetricsDataType,
|
||||
TSource,
|
||||
TMetricSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Select } from '@mantine/core';
|
||||
|
||||
|
|
@ -19,7 +19,7 @@ const chartConfigByMetricType = ({
|
|||
metricType,
|
||||
}: {
|
||||
dateRange?: DateRange['dateRange'];
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
metricType: MetricsDataType;
|
||||
}) => {
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
|
|
@ -57,7 +57,7 @@ const chartConfigByMetricType = ({
|
|||
};
|
||||
|
||||
function useMetricNames(
|
||||
metricSource: TSource,
|
||||
metricSource: TMetricSource,
|
||||
dateRange?: DateRange['dateRange'],
|
||||
) {
|
||||
const { gaugeConfig, histogramConfig, sumConfig } = useMemo(() => {
|
||||
|
|
@ -163,7 +163,7 @@ export function MetricNameSelect({
|
|||
setMetricName: (metricName: string) => void;
|
||||
isLoading?: boolean;
|
||||
isError?: boolean;
|
||||
metricSource: TSource;
|
||||
metricSource: TMetricSource;
|
||||
error?: string;
|
||||
onFocus?: () => void;
|
||||
'data-testid'?: string;
|
||||
|
|
|
|||
|
|
@ -1,10 +1,14 @@
|
|||
import { memo, useCallback, useEffect, useState } from 'react';
|
||||
import { ClickhouseClient } from '@hyperdx/common-utils/dist/clickhouse/browser';
|
||||
import {
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
MetricsDataType,
|
||||
MetricTable,
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Button, Divider, Flex, Loader, Modal, Text } from '@mantine/core';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
|
|
@ -68,16 +72,16 @@ async function addOtelDemoSources({
|
|||
traceSourceDatabaseName: string;
|
||||
traceSourceName: string;
|
||||
traceSourceTableName: string;
|
||||
traceSourceHighlightedTraceAttributes?: TSource['highlightedTraceAttributeExpressions'];
|
||||
traceSourceMaterializedViews?: TSource['materializedViews'];
|
||||
traceSourceHighlightedTraceAttributes?: TTraceSource['highlightedTraceAttributeExpressions'];
|
||||
traceSourceMaterializedViews?: TTraceSource['materializedViews'];
|
||||
}) {
|
||||
const hasLogSource =
|
||||
logSourceDatabaseName && logSourceName && logSourceTableName;
|
||||
const hasMetricsSource = metricsSourceDatabaseName && metricsSourceName;
|
||||
|
||||
let logSource: TSource | undefined;
|
||||
let logSource: TLogSource | undefined;
|
||||
if (hasLogSource) {
|
||||
logSource = await createSourceMutation.mutateAsync({
|
||||
const newSource = await createSourceMutation.mutateAsync({
|
||||
source: {
|
||||
kind: SourceKind.Log,
|
||||
name: logSourceName,
|
||||
|
|
@ -99,6 +103,9 @@ async function addOtelDemoSources({
|
|||
displayedTimestampValueExpression: 'Timestamp',
|
||||
},
|
||||
});
|
||||
if (isLogSource(newSource)) {
|
||||
logSource = newSource;
|
||||
}
|
||||
}
|
||||
const traceSource = await createSourceMutation.mutateAsync({
|
||||
source: {
|
||||
|
|
@ -132,6 +139,10 @@ async function addOtelDemoSources({
|
|||
materializedViews: traceSourceMaterializedViews,
|
||||
},
|
||||
});
|
||||
if (!isTraceSource(traceSource)) {
|
||||
// Should be impossible
|
||||
throw new Error('Source that is not trace was somehow created');
|
||||
}
|
||||
let metricsSource: TSource | undefined;
|
||||
if (hasMetricsSource) {
|
||||
metricsSource = await createSourceMutation.mutateAsync({
|
||||
|
|
@ -168,15 +179,8 @@ async function addOtelDemoSources({
|
|||
tableName: sessionSourceTableName,
|
||||
},
|
||||
timestampValueExpression: 'TimestampTime',
|
||||
defaultTableSelectExpression: 'Timestamp, ServiceName, Body',
|
||||
serviceNameExpression: 'ServiceName',
|
||||
severityTextExpression: 'SeverityText',
|
||||
eventAttributesExpression: 'LogAttributes',
|
||||
resourceAttributesExpression: 'ResourceAttributes',
|
||||
traceSourceId: traceSource.id,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
implicitColumnExpression: 'Body',
|
||||
},
|
||||
});
|
||||
await Promise.all([
|
||||
|
|
@ -185,7 +189,6 @@ async function addOtelDemoSources({
|
|||
updateSourceMutation.mutateAsync({
|
||||
source: {
|
||||
...logSource,
|
||||
sessionSourceId: sessionSource.id,
|
||||
traceSourceId: traceSource.id,
|
||||
...(hasMetricsSource && metricsSource
|
||||
? { metricSourceId: metricsSource.id }
|
||||
|
|
@ -319,16 +322,19 @@ function OnboardingModalComponent({
|
|||
// Create Log Source if available
|
||||
if (otelTables.tables.logs) {
|
||||
const inferredConfig = await inferTableSourceConfig({
|
||||
kind: SourceKind.Log,
|
||||
databaseName: otelTables.database,
|
||||
tableName: otelTables.tables.logs,
|
||||
connectionId,
|
||||
metadata,
|
||||
});
|
||||
|
||||
if (inferredConfig.timestampValueExpression != null) {
|
||||
if (
|
||||
inferredConfig.kind === SourceKind.Log &&
|
||||
inferredConfig.timestampValueExpression != null
|
||||
) {
|
||||
const logSource = await createSourceMutation.mutateAsync({
|
||||
source: {
|
||||
kind: SourceKind.Log,
|
||||
name: 'Logs',
|
||||
connection: connectionId,
|
||||
from: {
|
||||
|
|
@ -338,6 +344,8 @@ function OnboardingModalComponent({
|
|||
...inferredConfig,
|
||||
timestampValueExpression:
|
||||
inferredConfig.timestampValueExpression,
|
||||
defaultTableSelectExpression:
|
||||
inferredConfig.defaultTableSelectExpression ?? '',
|
||||
},
|
||||
});
|
||||
createdSources.push(logSource);
|
||||
|
|
@ -352,16 +360,19 @@ function OnboardingModalComponent({
|
|||
// Create Trace Source if available
|
||||
if (otelTables.tables.traces) {
|
||||
const inferredConfig = await inferTableSourceConfig({
|
||||
kind: SourceKind.Trace,
|
||||
databaseName: otelTables.database,
|
||||
tableName: otelTables.tables.traces,
|
||||
connectionId,
|
||||
metadata,
|
||||
});
|
||||
|
||||
if (inferredConfig.timestampValueExpression != null) {
|
||||
if (
|
||||
inferredConfig.kind === SourceKind.Trace &&
|
||||
inferredConfig.timestampValueExpression != null
|
||||
) {
|
||||
const traceSource = await createSourceMutation.mutateAsync({
|
||||
source: {
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Traces',
|
||||
connection: connectionId,
|
||||
from: {
|
||||
|
|
@ -370,8 +381,18 @@ function OnboardingModalComponent({
|
|||
},
|
||||
...inferredConfig,
|
||||
// Help typescript understand it's not null
|
||||
defaultTableSelectExpression:
|
||||
inferredConfig.defaultTableSelectExpression ?? '',
|
||||
timestampValueExpression:
|
||||
inferredConfig.timestampValueExpression,
|
||||
durationExpression: inferredConfig.durationExpression ?? '',
|
||||
durationPrecision: inferredConfig.durationPrecision ?? 9,
|
||||
traceIdExpression: inferredConfig.traceIdExpression ?? '',
|
||||
spanIdExpression: inferredConfig.spanIdExpression ?? '',
|
||||
parentSpanIdExpression:
|
||||
inferredConfig.parentSpanIdExpression ?? '',
|
||||
spanNameExpression: inferredConfig.spanNameExpression ?? '',
|
||||
spanKindExpression: inferredConfig.spanKindExpression ?? '',
|
||||
},
|
||||
});
|
||||
createdSources.push(traceSource);
|
||||
|
|
@ -425,7 +446,6 @@ function OnboardingModalComponent({
|
|||
tableName: '',
|
||||
},
|
||||
timestampValueExpression: 'TimeUnix',
|
||||
serviceNameExpression: 'ServiceName',
|
||||
metricTables,
|
||||
resourceAttributesExpression: 'ResourceAttributes',
|
||||
},
|
||||
|
|
@ -436,6 +456,7 @@ function OnboardingModalComponent({
|
|||
// Create Session Source if available
|
||||
if (otelTables.tables.sessions) {
|
||||
const inferredConfig = await inferTableSourceConfig({
|
||||
kind: SourceKind.Session,
|
||||
databaseName: otelTables.database,
|
||||
tableName: otelTables.tables.sessions,
|
||||
connectionId,
|
||||
|
|
@ -446,12 +467,12 @@ function OnboardingModalComponent({
|
|||
);
|
||||
|
||||
if (
|
||||
inferredConfig.kind === SourceKind.Session &&
|
||||
inferredConfig.timestampValueExpression != null &&
|
||||
traceSource != null
|
||||
) {
|
||||
const sessionSource = await createSourceMutation.mutateAsync({
|
||||
source: {
|
||||
kind: SourceKind.Session,
|
||||
name: 'Sessions',
|
||||
connection: connectionId,
|
||||
from: {
|
||||
|
|
@ -501,7 +522,6 @@ function OnboardingModalComponent({
|
|||
...logSource,
|
||||
...(traceSource ? { traceSourceId: traceSource.id } : {}),
|
||||
...(metricsSource ? { metricSourceId: metricsSource.id } : {}),
|
||||
...(sessionSource ? { sessionSourceId: sessionSource.id } : {}),
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import * as React from 'react';
|
||||
import { JSDataType } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Card, Drawer, Stack, Text } from '@mantine/core';
|
||||
|
||||
import DBRowSidePanel from '@/components/DBRowSidePanel';
|
||||
|
|
@ -37,7 +37,10 @@ export default function PatternSidePanel({
|
|||
const [selectedRowWhere, setSelectedRowWhere] =
|
||||
React.useState<RowWhereResult | null>(null);
|
||||
|
||||
const serviceNameExpression = source?.serviceNameExpression || 'Service';
|
||||
const serviceNameExpression =
|
||||
((source?.kind === SourceKind.Log || source?.kind === SourceKind.Trace) &&
|
||||
source.serviceNameExpression) ||
|
||||
'Service';
|
||||
|
||||
const columnTypeMap = React.useMemo(() => {
|
||||
const map = new Map<string, { _type: JSDataType | null }>([
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { useMemo, useState } from 'react';
|
||||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
|
|
@ -43,8 +44,10 @@ export default function PatternTable({
|
|||
config,
|
||||
samples: SAMPLES,
|
||||
bodyValueExpression,
|
||||
severityTextExpression: source?.severityTextExpression ?? '',
|
||||
statusCodeExpression: source?.statusCodeExpression ?? '',
|
||||
severityTextExpression:
|
||||
(source?.kind === SourceKind.Log && source.severityTextExpression) || '',
|
||||
statusCodeExpression:
|
||||
(source?.kind === SourceKind.Trace && source.statusCodeExpression) || '',
|
||||
totalCount,
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ import {
|
|||
import {
|
||||
BuilderChartConfigWithDateRange,
|
||||
DisplayType,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
ActionIcon,
|
||||
|
|
@ -54,7 +54,7 @@ export function DBSearchHeatmapChart({
|
|||
onAddFilter,
|
||||
}: {
|
||||
chartConfig: BuilderChartConfigWithDateRange;
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
isReady: boolean;
|
||||
onAddFilter?: AddFilterFn;
|
||||
}) {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
import { useCallback, useMemo } from 'react';
|
||||
import { pick } from 'lodash';
|
||||
import { parseAsString, useQueryState } from 'nuqs';
|
||||
import { DisplayType, type Filter } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
DisplayType,
|
||||
type Filter,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Drawer, Grid, Text } from '@mantine/core';
|
||||
import { IconServer } from '@tabler/icons-react';
|
||||
|
||||
|
|
@ -25,7 +29,10 @@ export default function ServiceDashboardDbQuerySidePanel({
|
|||
service?: string;
|
||||
searchedTimeRange: [Date, Date];
|
||||
}) {
|
||||
const { data: source } = useSource({ id: sourceId });
|
||||
const { data: source } = useSource({
|
||||
id: sourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
const [dbQuery, setDbQuery] = useQueryState('dbquery', parseAsString);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { pick } from 'lodash';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import { MS_NUMBER_FORMAT } from '@/ChartUtils';
|
||||
import { ChartBox } from '@/components/ChartBox';
|
||||
|
|
@ -18,7 +18,7 @@ export default function ServiceDashboardEndpointPerformanceChart({
|
|||
service,
|
||||
endpoint,
|
||||
}: {
|
||||
source?: TSource;
|
||||
source?: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
service?: string;
|
||||
endpoint?: string;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
import { useCallback, useMemo } from 'react';
|
||||
import { pick } from 'lodash';
|
||||
import { parseAsString, useQueryState } from 'nuqs';
|
||||
import { DisplayType, type Filter } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
DisplayType,
|
||||
type Filter,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { Drawer, Grid, Text } from '@mantine/core';
|
||||
import { IconServer } from '@tabler/icons-react';
|
||||
|
||||
|
|
@ -30,7 +34,10 @@ export default function ServiceDashboardEndpointSidePanel({
|
|||
service?: string;
|
||||
searchedTimeRange: [Date, Date];
|
||||
}) {
|
||||
const { data: source } = useSource({ id: sourceId });
|
||||
const { data: source } = useSource({
|
||||
id: sourceId,
|
||||
kinds: [SourceKind.Trace],
|
||||
});
|
||||
const { expressions } = useServiceDashboardExpressions({ source });
|
||||
|
||||
const [endpoint, setEndpoint] = useQueryState('endpoint', parseAsString);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { pick } from 'lodash';
|
||||
import { ClickHouseQueryError } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import type { Filter, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import type { Filter, TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Box, Code, Group, Text } from '@mantine/core';
|
||||
|
||||
import { ChartBox } from '@/components/ChartBox';
|
||||
|
|
@ -19,7 +19,7 @@ export default function SlowestEventsTile({
|
|||
enabled = true,
|
||||
extraFilters = [],
|
||||
}: {
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
height?: number;
|
||||
title: React.ReactNode;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import dagre from '@dagrejs/dagre';
|
||||
import { ClickHouseQueryError } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Box, Center, Code, Loader, Text } from '@mantine/core';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import {
|
||||
|
|
@ -75,7 +75,7 @@ interface ServiceMapPresentationProps {
|
|||
isLoading: boolean;
|
||||
error: Error | null;
|
||||
dateRange: [Date, Date];
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
isSingleTrace?: boolean;
|
||||
}
|
||||
|
||||
|
|
@ -242,7 +242,7 @@ function ServiceMapPresentation({
|
|||
|
||||
interface ServiceMapProps {
|
||||
traceId?: string;
|
||||
traceTableSource: TSource;
|
||||
traceTableSource: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
samplingFactor?: number;
|
||||
isSingleTrace?: boolean;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
BaseEdge,
|
||||
Edge,
|
||||
|
|
@ -13,7 +13,7 @@ export type ServiceMapEdgeData = {
|
|||
totalRequests: number;
|
||||
errorPercentage: number;
|
||||
dateRange: [Date, Date];
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
serviceName: string;
|
||||
isSingleTrace?: boolean;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Text } from '@mantine/core';
|
||||
import { Handle, Node, NodeProps, NodeToolbar, Position } from '@xyflow/react';
|
||||
|
||||
|
|
@ -11,7 +11,7 @@ import styles from './ServiceMap.module.scss';
|
|||
|
||||
export type ServiceMapNodeData = ServiceAggregation & {
|
||||
dateRange: [Date, Date];
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
maxErrorPercentage: number;
|
||||
isSingleTrace?: boolean;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { Badge, Group, Stack, Text } from '@mantine/core';
|
||||
|
||||
import { useSource } from '@/source';
|
||||
|
|
@ -27,7 +28,7 @@ export default function ServiceMapSidePanel({
|
|||
Beta
|
||||
</Badge>
|
||||
</Group>
|
||||
{traceTableSource ? (
|
||||
{traceTableSource && traceTableSource.kind === SourceKind.Trace ? (
|
||||
<ServiceMap
|
||||
traceTableSource={traceTableSource}
|
||||
traceId={traceId}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import { useCallback } from 'react';
|
||||
import SqlString from 'sqlstring';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Button, Group, Stack, UnstyledButton } from '@mantine/core';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { Button, Stack } from '@mantine/core';
|
||||
import { IconSearch } from '@tabler/icons-react';
|
||||
|
||||
import { formatApproximateNumber, navigateToTraceSearch } from './utils';
|
||||
|
|
@ -18,7 +18,7 @@ export default function ServiceMapTooltip({
|
|||
}: {
|
||||
totalRequests: number;
|
||||
errorPercentage: number;
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
serviceName: string;
|
||||
isSingleTrace?: boolean;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import router from 'next/router';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import {
|
||||
formatApproximateNumber,
|
||||
|
|
@ -16,7 +16,7 @@ jest.mock('next/router', () => ({
|
|||
}));
|
||||
|
||||
describe('navigateToTraceSearch', () => {
|
||||
const mockSource: TSource = {
|
||||
const mockSource: TTraceSource = {
|
||||
id: 'test-source-id',
|
||||
name: 'Test Source',
|
||||
from: {
|
||||
|
|
@ -24,8 +24,16 @@ describe('navigateToTraceSearch', () => {
|
|||
databaseName: 'test_db',
|
||||
},
|
||||
timestampValueExpression: 'timestamp',
|
||||
defaultTableSelectExpression: 'timestamp',
|
||||
connection: 'test-connection',
|
||||
kind: SourceKind.Trace,
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import router from 'next/router';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
export function navigateToTraceSearch({
|
||||
dateRange,
|
||||
|
|
@ -7,7 +7,7 @@ export function navigateToTraceSearch({
|
|||
where,
|
||||
}: {
|
||||
dateRange: [Date, Date];
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
where: string;
|
||||
}) {
|
||||
const from = dateRange[0].getTime().toString();
|
||||
|
|
|
|||
|
|
@ -1,5 +1,10 @@
|
|||
import { useState } from 'react';
|
||||
import { MetricsDataType, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
MetricsDataType,
|
||||
TLogSource,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
Modal,
|
||||
Paper,
|
||||
|
|
@ -118,9 +123,17 @@ const TableSchemaPreview = ({
|
|||
);
|
||||
};
|
||||
|
||||
export interface SourceSchemaPreviewSource {
|
||||
connection: TSource['connection'];
|
||||
from: TSource['from'];
|
||||
metricTables?: TMetricSource['metricTables'];
|
||||
kind?: TSource['kind'];
|
||||
name?: TSource['name'];
|
||||
materializedViews?: TLogSource['materializedViews'];
|
||||
}
|
||||
|
||||
export interface SourceSchemaPreviewProps {
|
||||
source?: Pick<TSource, 'connection' | 'from' | 'metricTables'> &
|
||||
Partial<Pick<TSource, 'kind' | 'name' | 'materializedViews'>>;
|
||||
source?: SourceSchemaPreviewSource;
|
||||
iconStyles?: Pick<TextProps, 'size' | 'color'>;
|
||||
variant?: 'icon' | 'text';
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,14 +18,13 @@ import { ClickHouseQueryError } from '@hyperdx/common-utils/dist/clickhouse';
|
|||
import {
|
||||
MetricsDataType,
|
||||
SourceKind,
|
||||
sourceSchemaWithout,
|
||||
SourceSchema,
|
||||
SourceSchemaNoId,
|
||||
TSource,
|
||||
TSourceUnion,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
ActionIcon,
|
||||
Anchor,
|
||||
Badge,
|
||||
Box,
|
||||
Button,
|
||||
Center,
|
||||
|
|
@ -60,7 +59,6 @@ import { useMetadataWithSettings } from '@/hooks/useMetadata';
|
|||
import {
|
||||
inferTableSourceConfig,
|
||||
isValidMetricTable,
|
||||
isValidSessionsTable,
|
||||
useCreateSource,
|
||||
useDeleteSource,
|
||||
useSource,
|
||||
|
|
@ -82,6 +80,74 @@ import { ErrorCollapse } from '../Error/ErrorCollapse';
|
|||
import { InputControlled } from '../InputControlled';
|
||||
import SelectControlled from '../SelectControlled';
|
||||
|
||||
type CorrelationField =
|
||||
| 'logSourceId'
|
||||
| 'traceSourceId'
|
||||
| 'sessionSourceId'
|
||||
| 'metricSourceId';
|
||||
|
||||
function getCorrelationFieldValue(
|
||||
source: TSource,
|
||||
field: CorrelationField,
|
||||
): string | undefined {
|
||||
switch (field) {
|
||||
case 'logSourceId':
|
||||
if (source.kind === SourceKind.Trace)
|
||||
return source.logSourceId ?? undefined;
|
||||
if (source.kind === SourceKind.Metric)
|
||||
return source.logSourceId ?? undefined;
|
||||
return undefined;
|
||||
case 'traceSourceId':
|
||||
if (source.kind === SourceKind.Log)
|
||||
return source.traceSourceId ?? undefined;
|
||||
if (source.kind === SourceKind.Session) return source.traceSourceId;
|
||||
return undefined;
|
||||
case 'sessionSourceId':
|
||||
if (source.kind === SourceKind.Trace)
|
||||
return source.sessionSourceId ?? undefined;
|
||||
return undefined;
|
||||
case 'metricSourceId':
|
||||
if (source.kind === SourceKind.Log)
|
||||
return source.metricSourceId ?? undefined;
|
||||
if (source.kind === SourceKind.Trace)
|
||||
return source.metricSourceId ?? undefined;
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function setCorrelationFieldValue(
|
||||
source: TSource,
|
||||
field: CorrelationField,
|
||||
value: string | undefined,
|
||||
): TSource {
|
||||
switch (source.kind) {
|
||||
case SourceKind.Log:
|
||||
if (field === 'traceSourceId' || field === 'metricSourceId') {
|
||||
return { ...source, [field]: value };
|
||||
}
|
||||
return source;
|
||||
case SourceKind.Trace:
|
||||
if (
|
||||
field === 'logSourceId' ||
|
||||
field === 'sessionSourceId' ||
|
||||
field === 'metricSourceId'
|
||||
) {
|
||||
return { ...source, [field]: value };
|
||||
}
|
||||
return source;
|
||||
case SourceKind.Session:
|
||||
if (field === 'traceSourceId') {
|
||||
return { ...source, traceSourceId: value ?? '' };
|
||||
}
|
||||
return source;
|
||||
case SourceKind.Metric:
|
||||
if (field === 'logSourceId') {
|
||||
return { ...source, [field]: value };
|
||||
}
|
||||
return source;
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_DATABASE = 'default';
|
||||
|
||||
const MV_AGGREGATE_FUNCTION_OPTIONS = MV_AGGREGATE_FUNCTIONS.map(fn => ({
|
||||
|
|
@ -97,7 +163,12 @@ const OTEL_CLICKHOUSE_EXPRESSIONS = {
|
|||
|
||||
const CORRELATION_FIELD_MAP: Record<
|
||||
SourceKind,
|
||||
Record<string, { targetKind: SourceKind; targetField: keyof TSource }[]>
|
||||
Partial<
|
||||
Record<
|
||||
CorrelationField,
|
||||
{ targetKind: SourceKind; targetField: CorrelationField }[]
|
||||
>
|
||||
>
|
||||
> = {
|
||||
[SourceKind.Log]: {
|
||||
metricSourceId: [
|
||||
|
|
@ -889,7 +960,7 @@ function OrderByFormRow({
|
|||
tableName,
|
||||
connectionId,
|
||||
}: {
|
||||
control: Control<TSourceUnion>;
|
||||
control: Control<TSource>;
|
||||
databaseName: string;
|
||||
tableName: string;
|
||||
connectionId: string;
|
||||
|
|
@ -1554,37 +1625,6 @@ export function SessionTableModelForm({ control }: TableModelProps) {
|
|||
});
|
||||
const connectionId = useWatch({ control, name: 'connection' });
|
||||
const tableName = useWatch({ control, name: 'from.tableName' });
|
||||
const prevTableNameRef = useRef(tableName);
|
||||
const metadata = useMetadataWithSettings();
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
try {
|
||||
if (tableName && tableName !== prevTableNameRef.current) {
|
||||
prevTableNameRef.current = tableName;
|
||||
const isValid = await isValidSessionsTable({
|
||||
databaseName,
|
||||
tableName,
|
||||
connectionId,
|
||||
metadata,
|
||||
});
|
||||
|
||||
if (!isValid) {
|
||||
notifications.show({
|
||||
color: 'red',
|
||||
message: `${tableName} is not a valid Sessions schema.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
notifications.show({
|
||||
color: 'red',
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
})();
|
||||
}, [tableName, databaseName, connectionId, metadata]);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
|
@ -1610,14 +1650,26 @@ export function SessionTableModelForm({ control }: TableModelProps) {
|
|||
disableKeywordAutocomplete
|
||||
/>
|
||||
</FormRow>
|
||||
<FormRow label={'Resource Attributes Expression'}>
|
||||
<SQLInlineEditorControlled
|
||||
tableConnection={{
|
||||
databaseName,
|
||||
tableName,
|
||||
connectionId,
|
||||
}}
|
||||
control={control}
|
||||
name="resourceAttributesExpression"
|
||||
placeholder="ResourceAttributes"
|
||||
/>
|
||||
</FormRow>
|
||||
</Stack>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
interface TableModelProps {
|
||||
control: Control<TSourceUnion>;
|
||||
setValue: UseFormSetValue<TSourceUnion>;
|
||||
control: Control<TSource>;
|
||||
setValue: UseFormSetValue<TSource>;
|
||||
}
|
||||
|
||||
export function MetricTableModelForm({ control, setValue }: TableModelProps) {
|
||||
|
|
@ -1718,8 +1770,8 @@ function TableModelForm({
|
|||
setValue,
|
||||
kind,
|
||||
}: {
|
||||
control: Control<TSourceUnion>;
|
||||
setValue: UseFormSetValue<TSourceUnion>;
|
||||
control: Control<TSource>;
|
||||
setValue: UseFormSetValue<TSource>;
|
||||
kind: SourceKind;
|
||||
}) {
|
||||
switch (kind) {
|
||||
|
|
@ -1753,7 +1805,7 @@ export function TableSourceForm({
|
|||
const { data: connections } = useConnections();
|
||||
|
||||
const { control, setValue, handleSubmit, resetField, setError, clearErrors } =
|
||||
useForm<TSourceUnion>({
|
||||
useForm<TSource>({
|
||||
defaultValues: {
|
||||
kind: SourceKind.Log,
|
||||
name: defaultName,
|
||||
|
|
@ -1764,8 +1816,7 @@ export function TableSourceForm({
|
|||
},
|
||||
querySettings: source?.querySettings,
|
||||
},
|
||||
// TODO: HDX-1768 remove type assertion
|
||||
values: source as TSourceUnion,
|
||||
values: source,
|
||||
resetOptions: {
|
||||
keepDirtyValues: true,
|
||||
keepErrors: true,
|
||||
|
|
@ -1812,6 +1863,7 @@ export function TableSourceForm({
|
|||
tableName:
|
||||
watchedKind !== SourceKind.Metric ? watchedTableName : '',
|
||||
connectionId: watchedConnection,
|
||||
kind: watchedKind,
|
||||
metadata,
|
||||
});
|
||||
if (Object.keys(config).length > 0) {
|
||||
|
|
@ -1882,28 +1934,28 @@ export function TableSourceForm({
|
|||
|
||||
// Check each field for changes
|
||||
const changedFields: Array<{
|
||||
name: keyof TSourceUnion;
|
||||
name: CorrelationField;
|
||||
value: string | undefined;
|
||||
}> = [];
|
||||
|
||||
if (logSourceId !== prevLogSourceIdRef.current) {
|
||||
prevLogSourceIdRef.current = logSourceId;
|
||||
changedFields.push({
|
||||
name: 'logSourceId' as keyof TSourceUnion,
|
||||
name: 'logSourceId',
|
||||
value: logSourceId ?? undefined,
|
||||
});
|
||||
}
|
||||
if (traceSourceId !== prevTraceSourceIdRef.current) {
|
||||
prevTraceSourceIdRef.current = traceSourceId;
|
||||
changedFields.push({
|
||||
name: 'traceSourceId' as keyof TSourceUnion,
|
||||
name: 'traceSourceId',
|
||||
value: traceSourceId ?? undefined,
|
||||
});
|
||||
}
|
||||
if (metricSourceId !== prevMetricSourceIdRef.current) {
|
||||
prevMetricSourceIdRef.current = metricSourceId;
|
||||
changedFields.push({
|
||||
name: 'metricSourceId' as keyof TSourceUnion,
|
||||
name: 'metricSourceId',
|
||||
value: metricSourceId ?? undefined,
|
||||
});
|
||||
}
|
||||
|
|
@ -1913,7 +1965,7 @@ export function TableSourceForm({
|
|||
) {
|
||||
prevSessionTraceSourceIdRef.current = sessionTraceSourceId;
|
||||
changedFields.push({
|
||||
name: 'traceSourceId' as keyof TSourceUnion,
|
||||
name: 'traceSourceId',
|
||||
value: sessionTraceSourceId ?? undefined,
|
||||
});
|
||||
}
|
||||
|
|
@ -1922,14 +1974,15 @@ export function TableSourceForm({
|
|||
name: fieldName,
|
||||
value: newTargetSourceId,
|
||||
} of changedFields) {
|
||||
if (!(fieldName in correlationFields)) continue;
|
||||
|
||||
const targetConfigs = correlationFields[fieldName];
|
||||
if (!targetConfigs) continue;
|
||||
|
||||
for (const { targetKind, targetField } of targetConfigs) {
|
||||
// Find the previously linked source if any
|
||||
const previouslyLinkedSource = sources.find(
|
||||
s => s.kind === targetKind && s[targetField] === currentSourceId,
|
||||
s =>
|
||||
s.kind === targetKind &&
|
||||
getCorrelationFieldValue(s, targetField) === currentSourceId,
|
||||
);
|
||||
|
||||
// If there was a previously linked source and it's different from the new one, unlink it
|
||||
|
|
@ -1938,10 +1991,11 @@ export function TableSourceForm({
|
|||
previouslyLinkedSource.id !== newTargetSourceId
|
||||
) {
|
||||
await updateSource.mutateAsync({
|
||||
source: {
|
||||
...previouslyLinkedSource,
|
||||
[targetField]: undefined,
|
||||
} as TSource,
|
||||
source: setCorrelationFieldValue(
|
||||
previouslyLinkedSource,
|
||||
targetField,
|
||||
undefined,
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -1950,12 +2004,13 @@ export function TableSourceForm({
|
|||
const targetSource = sources.find(s => s.id === newTargetSourceId);
|
||||
if (targetSource && targetSource.kind === targetKind) {
|
||||
// Only update if the target field is empty to avoid overwriting existing correlations
|
||||
if (!targetSource[targetField]) {
|
||||
if (!getCorrelationFieldValue(targetSource, targetField)) {
|
||||
await updateSource.mutateAsync({
|
||||
source: {
|
||||
...targetSource,
|
||||
[targetField]: currentSourceId,
|
||||
} as TSource,
|
||||
source: setCorrelationFieldValue(
|
||||
targetSource,
|
||||
targetField,
|
||||
currentSourceId,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -1974,9 +2029,8 @@ export function TableSourceForm({
|
|||
updateSource,
|
||||
]);
|
||||
|
||||
const sourceFormSchema = sourceSchemaWithout({ id: true });
|
||||
const handleError = useCallback(
|
||||
({ errors }: z.ZodError<TSourceUnion>, eventName: 'create' | 'save') => {
|
||||
({ errors }: z.ZodError<TSource>, eventName: 'create' | 'save') => {
|
||||
const notificationMsgs: string[] = [];
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
|
|
@ -1988,7 +2042,7 @@ export function TableSourceForm({
|
|||
|
||||
for (const err of errors) {
|
||||
const errorPath: string = err.path.join('.');
|
||||
// TODO: HDX-1768 get rid of this type assertion if possible
|
||||
// react-hook-form requires a static path type; dynamic errorPath needs assertion
|
||||
setError(errorPath as any, { ...err });
|
||||
|
||||
const message =
|
||||
|
|
@ -2022,15 +2076,14 @@ export function TableSourceForm({
|
|||
const _onCreate = useCallback(() => {
|
||||
clearErrors();
|
||||
handleSubmit(async data => {
|
||||
const parseResult = sourceFormSchema.safeParse(data);
|
||||
const parseResult = SourceSchemaNoId.safeParse(data);
|
||||
if (parseResult.error) {
|
||||
handleError(parseResult.error, 'create');
|
||||
return;
|
||||
}
|
||||
|
||||
createSource.mutate(
|
||||
// TODO: HDX-1768 get rid of this type assertion
|
||||
{ source: data as TSource },
|
||||
{ source: parseResult.data },
|
||||
{
|
||||
onSuccess: async newSource => {
|
||||
// Handle bidirectional linking for new sources
|
||||
|
|
@ -2039,7 +2092,10 @@ export function TableSourceForm({
|
|||
for (const [fieldName, targetConfigs] of Object.entries(
|
||||
correlationFields,
|
||||
)) {
|
||||
const targetSourceId = (newSource as any)[fieldName];
|
||||
const targetSourceId = getCorrelationFieldValue(
|
||||
newSource,
|
||||
fieldName as CorrelationField,
|
||||
);
|
||||
if (targetSourceId) {
|
||||
for (const { targetKind, targetField } of targetConfigs) {
|
||||
const targetSource = sources.find(
|
||||
|
|
@ -2047,12 +2103,15 @@ export function TableSourceForm({
|
|||
);
|
||||
if (targetSource && targetSource.kind === targetKind) {
|
||||
// Only update if the target field is empty to avoid overwriting existing correlations
|
||||
if (!targetSource[targetField]) {
|
||||
if (
|
||||
!getCorrelationFieldValue(targetSource, targetField)
|
||||
) {
|
||||
await updateSource.mutateAsync({
|
||||
source: {
|
||||
...targetSource,
|
||||
[targetField]: newSource.id,
|
||||
} as TSource,
|
||||
source: setCorrelationFieldValue(
|
||||
targetSource,
|
||||
targetField,
|
||||
newSource.id,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -2079,7 +2138,6 @@ export function TableSourceForm({
|
|||
}, [
|
||||
clearErrors,
|
||||
handleError,
|
||||
sourceFormSchema,
|
||||
handleSubmit,
|
||||
createSource,
|
||||
onCreate,
|
||||
|
|
@ -2090,14 +2148,13 @@ export function TableSourceForm({
|
|||
const _onSave = useCallback(() => {
|
||||
clearErrors();
|
||||
handleSubmit(data => {
|
||||
const parseResult = sourceFormSchema.safeParse(data);
|
||||
const parseResult = SourceSchema.safeParse(data);
|
||||
if (parseResult.error) {
|
||||
handleError(parseResult.error, 'save');
|
||||
return;
|
||||
}
|
||||
updateSource.mutate(
|
||||
// TODO: HDX-1768 get rid of this type assertion
|
||||
{ source: data as TSource },
|
||||
{ source: parseResult.data },
|
||||
{
|
||||
onSuccess: () => {
|
||||
onSave?.();
|
||||
|
|
@ -2115,14 +2172,7 @@ export function TableSourceForm({
|
|||
},
|
||||
);
|
||||
})();
|
||||
}, [
|
||||
handleSubmit,
|
||||
updateSource,
|
||||
onSave,
|
||||
clearErrors,
|
||||
handleError,
|
||||
sourceFormSchema,
|
||||
]);
|
||||
}, [handleSubmit, updateSource, onSave, clearErrors, handleError]);
|
||||
|
||||
const databaseName = useWatch({
|
||||
control,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,9 @@
|
|||
import React from 'react';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { screen, waitFor } from '@testing-library/react';
|
||||
import { renderHook } from '@testing-library/react';
|
||||
|
||||
|
|
@ -49,30 +53,34 @@ const MockTimelineChart = TimelineChart as any;
|
|||
|
||||
describe('DBTraceWaterfallChartContainer', () => {
|
||||
// Common test data
|
||||
const mockTraceTableSource: TSource = {
|
||||
const mockTraceTableSource: TTraceSource = {
|
||||
id: 'trace-source-id',
|
||||
kind: SourceKind.Trace,
|
||||
name: 'trace-source',
|
||||
from: { databaseName: 'test_db', tableName: 'trace_table' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Timestamp',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
statusCodeExpression: 'StatusCode',
|
||||
serviceNameExpression: 'ServiceName',
|
||||
severityTextExpression: 'SeverityText',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
eventAttributesExpression: 'SpanAttributes',
|
||||
implicitColumnExpression: 'Body',
|
||||
connection: 'conn1',
|
||||
};
|
||||
|
||||
const mockLogTableSource: TSource = {
|
||||
const mockLogTableSource: TLogSource = {
|
||||
id: 'log-source-id',
|
||||
kind: SourceKind.Log,
|
||||
name: 'log-source',
|
||||
from: { databaseName: 'test_db', tableName: 'log_table' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Timestamp',
|
||||
implicitColumnExpression: 'Body',
|
||||
connection: 'conn2',
|
||||
};
|
||||
|
|
@ -267,19 +275,22 @@ describe('DBTraceWaterfallChartContainer', () => {
|
|||
|
||||
describe('useEventsAroundFocus', () => {
|
||||
// Test data
|
||||
const mockTableSource: TSource = {
|
||||
const mockTableSource: TTraceSource = {
|
||||
id: 'test-table-source-id',
|
||||
kind: SourceKind.Trace,
|
||||
name: 'trace-source',
|
||||
from: { databaseName: 'test_db', tableName: 'trace_table' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Timestamp',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
statusCodeExpression: 'StatusCode',
|
||||
serviceNameExpression: 'ServiceName',
|
||||
severityTextExpression: 'SeverityText',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
eventAttributesExpression: 'SpanAttributes',
|
||||
implicitColumnExpression: 'Body',
|
||||
connection: 'conn1',
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import { Metadata } from '@hyperdx/common-utils/dist/core/metadata';
|
|||
import {
|
||||
DashboardFilter,
|
||||
MetricsDataType,
|
||||
SourceKind,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
|
|
@ -34,6 +35,7 @@ describe('useDashboardFilterValues', () => {
|
|||
const mockSources: Partial<TSource>[] = [
|
||||
{
|
||||
id: 'logs-source',
|
||||
kind: SourceKind.Log,
|
||||
name: 'Logs',
|
||||
timestampValueExpression: 'timestamp',
|
||||
connection: 'clickhouse-conn',
|
||||
|
|
@ -44,6 +46,7 @@ describe('useDashboardFilterValues', () => {
|
|||
},
|
||||
{
|
||||
id: 'traces-source',
|
||||
kind: SourceKind.Trace,
|
||||
name: 'Traces',
|
||||
timestampValueExpression: 'timestamp',
|
||||
connection: 'clickhouse-conn',
|
||||
|
|
@ -54,6 +57,7 @@ describe('useDashboardFilterValues', () => {
|
|||
},
|
||||
{
|
||||
id: 'metric-source',
|
||||
kind: SourceKind.Metric,
|
||||
name: 'Metrics',
|
||||
timestampValueExpression: 'timestamp',
|
||||
connection: 'clickhouse-conn',
|
||||
|
|
@ -235,6 +239,7 @@ describe('useDashboardFilterValues', () => {
|
|||
]),
|
||||
);
|
||||
|
||||
// Only Log and Trace sources use optimizeGetKeyValuesCalls (Metric uses direct fetch)
|
||||
expect(optimizeGetKeyValuesCalls).toHaveBeenCalledTimes(3);
|
||||
expect(optimizeGetKeyValuesCalls).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
|
|
@ -495,6 +500,7 @@ describe('useDashboardFilterValues', () => {
|
|||
tableName: 'logs',
|
||||
},
|
||||
id: 'logs-source',
|
||||
kind: SourceKind.Log,
|
||||
name: 'Logs',
|
||||
timestampValueExpression: 'timestamp',
|
||||
},
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import {
|
|||
ResponseJSON,
|
||||
tableExpr,
|
||||
} from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
import { getClickhouseClient } from '@/clickhouse';
|
||||
|
|
@ -19,7 +19,7 @@ interface MetricAttributeValuesProps {
|
|||
attributeName: string;
|
||||
attributeCategory: AttributeCategory;
|
||||
searchTerm?: string;
|
||||
tableSource: TSource | undefined;
|
||||
tableSource: TMetricSource | undefined;
|
||||
metricType: string;
|
||||
enabled?: boolean;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import {
|
|||
ResponseJSON,
|
||||
tableExpr,
|
||||
} from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
import { getClickhouseClient } from '@/clickhouse';
|
||||
|
|
@ -18,7 +18,7 @@ interface MetricMetadataProps {
|
|||
databaseName: string;
|
||||
metricType?: string;
|
||||
metricName?: string;
|
||||
tableSource: TSource | undefined;
|
||||
tableSource: TMetricSource | undefined;
|
||||
}
|
||||
|
||||
interface MetricMetadataResponse {
|
||||
|
|
|
|||
|
|
@ -3,8 +3,7 @@ import {
|
|||
ResponseJSON,
|
||||
tableExpr,
|
||||
} from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { SourceKind } from '@hyperdx/common-utils/dist/types';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TMetricSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
import { getClickhouseClient } from '@/clickhouse';
|
||||
|
|
@ -130,7 +129,7 @@ interface MetricResourceAttrsProps {
|
|||
databaseName: string;
|
||||
metricType?: string;
|
||||
metricName?: string;
|
||||
tableSource: TSource | undefined;
|
||||
tableSource: TMetricSource | undefined;
|
||||
isSql: boolean;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,10 @@ import {
|
|||
MVOptimizationExplanation,
|
||||
tryOptimizeConfigWithMaterializedViewWithExplanations,
|
||||
} from '@hyperdx/common-utils/dist/core/materializedViews';
|
||||
import { BuilderChartConfigWithOptDateRange } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
BuilderChartConfigWithOptDateRange,
|
||||
SourceKind,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
keepPreviousData,
|
||||
useQuery,
|
||||
|
|
@ -39,7 +42,11 @@ export function useMVOptimizationExplanation<
|
|||
return useQuery<MVOptimizationExplanationResult<C>>({
|
||||
queryKey: ['optimizationExplanation', config],
|
||||
queryFn: async ({ signal }) => {
|
||||
if (!config || !source) {
|
||||
if (
|
||||
!config ||
|
||||
!source ||
|
||||
(source.kind !== SourceKind.Log && source.kind !== SourceKind.Trace)
|
||||
) {
|
||||
return {
|
||||
explanations: [],
|
||||
};
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import SqlString from 'sqlstring';
|
|||
import { chSql } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { Metadata } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { renderChartConfig } from '@hyperdx/common-utils/dist/core/renderChartConfig';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
import { useClickhouseClient } from '@/clickhouse';
|
||||
|
|
@ -23,7 +23,7 @@ async function getServiceMapQuery({
|
|||
metadata,
|
||||
samplingFactor,
|
||||
}: {
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
traceId?: string;
|
||||
metadata: Metadata;
|
||||
|
|
@ -247,7 +247,7 @@ export default function useServiceMap({
|
|||
traceId,
|
||||
samplingFactor,
|
||||
}: {
|
||||
source: TSource;
|
||||
source: TTraceSource;
|
||||
dateRange: [Date, Date];
|
||||
traceId?: string;
|
||||
samplingFactor: number;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import { useMemo } from 'react';
|
||||
import { ColumnMeta } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { tcFromSource } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { SourceKind, TTraceSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import { useColumns, useJsonColumns } from './hooks/useMetadata';
|
||||
|
||||
|
|
@ -139,7 +139,7 @@ function getDefaults({
|
|||
const ENDPOINT_MATERIALIZED_COLUMN_NAME = 'endpoint';
|
||||
|
||||
export function getExpressions(
|
||||
source: TSource,
|
||||
source: TTraceSource,
|
||||
columns: ColumnMeta[],
|
||||
jsonColumns: string[],
|
||||
) {
|
||||
|
|
@ -160,7 +160,7 @@ export function getExpressions(
|
|||
service: source.serviceNameExpression || defaults.service,
|
||||
spanName: source.spanNameExpression || defaults.spanName,
|
||||
spanKind: source.spanKindExpression || defaults.spanKind,
|
||||
severityText: source.severityTextExpression || defaults.severityText,
|
||||
severityText: source.statusCodeExpression || defaults.severityText,
|
||||
|
||||
// HTTP
|
||||
httpHost: defaults.httpHost,
|
||||
|
|
@ -201,7 +201,7 @@ export function getExpressions(
|
|||
export function useServiceDashboardExpressions({
|
||||
source,
|
||||
}: {
|
||||
source: TSource | undefined;
|
||||
source: TTraceSource | undefined;
|
||||
}) {
|
||||
const tableConnection = useMemo(() => tcFromSource(source), [source]);
|
||||
|
||||
|
|
@ -214,6 +214,7 @@ export function useServiceDashboardExpressions({
|
|||
|
||||
const expressions = useMemo(() => {
|
||||
if (isLoading || !jsonColumns || !columns) return undefined;
|
||||
if (source?.kind !== SourceKind.Trace) return undefined;
|
||||
|
||||
return getExpressions(source, columns, jsonColumns);
|
||||
}, [source, columns, jsonColumns, isLoading]);
|
||||
|
|
|
|||
|
|
@ -7,7 +7,8 @@ import {
|
|||
DateRange,
|
||||
SearchCondition,
|
||||
SearchConditionLanguage,
|
||||
TSource,
|
||||
TSessionSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { useQuery, UseQueryOptions } from '@tanstack/react-query';
|
||||
|
||||
|
|
@ -43,8 +44,8 @@ export function useSessions(
|
|||
where,
|
||||
whereLanguage,
|
||||
}: {
|
||||
traceSource?: TSource;
|
||||
sessionSource?: TSource;
|
||||
traceSource?: TTraceSource;
|
||||
sessionSource?: TSessionSource;
|
||||
dateRange: DateRange['dateRange'];
|
||||
where?: SearchCondition;
|
||||
whereLanguage?: SearchConditionLanguage;
|
||||
|
|
@ -171,16 +172,24 @@ export function useSessions(
|
|||
{
|
||||
select: [
|
||||
{
|
||||
valueExpression: `DISTINCT ${getSessionsSourceFieldExpression(sessionSource.resourceAttributesExpression ?? 'ResourceAttributes', 'rum.sessionId')}`,
|
||||
valueExpression: `DISTINCT ${getSessionsSourceFieldExpression(
|
||||
sessionSource.resourceAttributesExpression ??
|
||||
'ResourceAttributes',
|
||||
'rum.sessionId',
|
||||
)}`,
|
||||
alias: 'sessionId',
|
||||
},
|
||||
],
|
||||
from: sessionSource.from,
|
||||
dateRange,
|
||||
where: `${getSessionsSourceFieldExpression(sessionSource.resourceAttributesExpression ?? 'ResourceAttributes', 'rum.sessionId')} IN (SELECT sessions.sessionId FROM ${SESSIONS_CTE_NAME})`,
|
||||
where: `${getSessionsSourceFieldExpression(
|
||||
sessionSource.resourceAttributesExpression ??
|
||||
'ResourceAttributes',
|
||||
'rum.sessionId',
|
||||
)} IN (SELECT sessions.sessionId FROM ${SESSIONS_CTE_NAME})`,
|
||||
whereLanguage: 'sql',
|
||||
timestampValueExpression: sessionSource.timestampValueExpression,
|
||||
implicitColumnExpression: sessionSource.implicitColumnExpression,
|
||||
implicitColumnExpression: undefined,
|
||||
connection: sessionSource.connection,
|
||||
},
|
||||
metadata,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
// TODO: HDX-1768 Change TSource here to TSourceUnion and adjust as needed. Then, go to
|
||||
// SourceForm.tsx and remove type assertions for TSource and TSourceUnion
|
||||
import React from 'react';
|
||||
import pick from 'lodash/pick';
|
||||
import objectHash from 'object-hash';
|
||||
import {
|
||||
|
|
@ -13,10 +12,21 @@ import { splitAndTrimWithBracket } from '@hyperdx/common-utils/dist/core/utils';
|
|||
import {
|
||||
MetricsDataType,
|
||||
SourceKind,
|
||||
SourceSchema,
|
||||
TLogSource,
|
||||
TMetricSource,
|
||||
TSessionSource,
|
||||
TSource,
|
||||
TSourceUnion,
|
||||
TSourceNoId,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import {
|
||||
useMutation,
|
||||
useQuery,
|
||||
useQueryClient,
|
||||
UseQueryResult,
|
||||
} from '@tanstack/react-query';
|
||||
|
||||
import { hdxServer } from '@/api';
|
||||
import { IS_LOCAL_MODE } from '@/config';
|
||||
|
|
@ -41,36 +51,45 @@ export function getFirstTimestampValueExpression(valueExpression: string) {
|
|||
return splitAndTrimWithBracket(valueExpression)[0];
|
||||
}
|
||||
|
||||
export function getSpanEventBody(eventModel: TSource) {
|
||||
export function getSpanEventBody(eventModel: TTraceSource) {
|
||||
return eventModel.spanNameExpression;
|
||||
}
|
||||
|
||||
export function getDisplayedTimestampValueExpression(eventModel: TSource) {
|
||||
const displayed =
|
||||
eventModel.kind === SourceKind.Log || eventModel.kind === SourceKind.Trace
|
||||
? eventModel.displayedTimestampValueExpression
|
||||
: undefined;
|
||||
return (
|
||||
eventModel.displayedTimestampValueExpression ??
|
||||
displayed ??
|
||||
getFirstTimestampValueExpression(eventModel.timestampValueExpression)
|
||||
);
|
||||
}
|
||||
|
||||
export function getEventBody(eventModel: TSource) {
|
||||
const expression =
|
||||
eventModel.kind === SourceKind.Trace
|
||||
? (eventModel.spanNameExpression ?? undefined)
|
||||
: (eventModel.bodyExpression ?? eventModel.implicitColumnExpression);
|
||||
let expression: string | undefined;
|
||||
if (eventModel.kind === SourceKind.Trace) {
|
||||
expression = eventModel.spanNameExpression ?? undefined;
|
||||
} else if (eventModel.kind === SourceKind.Log) {
|
||||
expression =
|
||||
eventModel.bodyExpression ?? eventModel.implicitColumnExpression;
|
||||
}
|
||||
const multiExpr = splitAndTrimWithBracket(expression ?? '');
|
||||
return multiExpr.length === 1 ? expression : multiExpr[0];
|
||||
}
|
||||
|
||||
function addDefaultsToSource(source: TSourceUnion): TSource {
|
||||
return {
|
||||
...source,
|
||||
// Session sources have optional timestampValueExpressions, with default
|
||||
timestampValueExpression:
|
||||
source.kind === SourceKind.Session
|
||||
? source.timestampValueExpression ||
|
||||
SESSION_TABLE_EXPRESSIONS.timestampValueExpression
|
||||
: source.timestampValueExpression,
|
||||
};
|
||||
// This function is for supporting legacy sources, which did not require this field.
|
||||
// Will be defaulted to `TimestampTime` when queried, if undefined.
|
||||
function addDefaultsToSource(source: TSource): TSource {
|
||||
if (source.kind === SourceKind.Session) {
|
||||
return {
|
||||
...source,
|
||||
timestampValueExpression:
|
||||
source.timestampValueExpression ||
|
||||
SESSION_TABLE_EXPRESSIONS.timestampValueExpression,
|
||||
};
|
||||
}
|
||||
return source;
|
||||
}
|
||||
|
||||
export function useSources() {
|
||||
|
|
@ -80,24 +99,67 @@ export function useSources() {
|
|||
if (IS_LOCAL_MODE) {
|
||||
return localSources.getAll();
|
||||
}
|
||||
const rawSources = await hdxServer('sources').json<TSourceUnion[]>();
|
||||
return rawSources.map(addDefaultsToSource);
|
||||
const rawSources = await hdxServer('sources').json<TSource[]>();
|
||||
const sources = rawSources.map(addDefaultsToSource);
|
||||
|
||||
sources.forEach(source => {
|
||||
const result = SourceSchema.safeParse(source);
|
||||
if (!result.success) {
|
||||
const fields = result.error.issues
|
||||
.map(issue => issue.path.join('.'))
|
||||
.join(', ');
|
||||
notifications.show({
|
||||
color: 'yellow',
|
||||
title: `Source "${source.name}" has validation issues`,
|
||||
message: React.createElement(
|
||||
React.Fragment,
|
||||
null,
|
||||
fields ? `Fields: ${fields}. ` : '',
|
||||
React.createElement(
|
||||
'a',
|
||||
{ href: '/team#sources' },
|
||||
'Edit sources',
|
||||
),
|
||||
' to ensure compatibility.',
|
||||
),
|
||||
autoClose: false,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return sources;
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export function useSource({ id }: { id?: string | null }) {
|
||||
export function useSource<K extends SourceKind>(opts: {
|
||||
id?: string | null;
|
||||
kinds: K[];
|
||||
}): UseQueryResult<Extract<TSource, { kind: K }> | undefined>;
|
||||
export function useSource(opts: {
|
||||
id?: string | null;
|
||||
}): UseQueryResult<TSource | undefined>;
|
||||
export function useSource({
|
||||
id,
|
||||
kinds,
|
||||
}: {
|
||||
id?: string | null;
|
||||
kinds?: SourceKind[];
|
||||
}) {
|
||||
return useQuery({
|
||||
queryKey: ['sources'],
|
||||
queryFn: async () => {
|
||||
if (IS_LOCAL_MODE) {
|
||||
return localSources.getAll();
|
||||
}
|
||||
const rawSources = await hdxServer('sources').json<TSourceUnion[]>();
|
||||
const rawSources = await hdxServer('sources').json<TSource[]>();
|
||||
return rawSources.map(addDefaultsToSource);
|
||||
},
|
||||
select: (data: TSource[]): TSource => {
|
||||
return data.filter((s: any) => s.id === id)[0];
|
||||
select: (data: TSource[]) => {
|
||||
const source = data.find(s => s.id === id);
|
||||
if (source && kinds?.length && !kinds.includes(source.kind))
|
||||
return undefined;
|
||||
return source;
|
||||
},
|
||||
enabled: id != null,
|
||||
});
|
||||
|
|
@ -127,7 +189,7 @@ export function useCreateSource() {
|
|||
const queryClient = useQueryClient();
|
||||
|
||||
const mut = useMutation({
|
||||
mutationFn: async ({ source }: { source: Omit<TSource, 'id'> }) => {
|
||||
mutationFn: async ({ source }: { source: TSourceNoId }) => {
|
||||
if (IS_LOCAL_MODE) {
|
||||
const existing = localSources
|
||||
.getAll()
|
||||
|
|
@ -139,7 +201,7 @@ export function useCreateSource() {
|
|||
if (existing) {
|
||||
// Replace the existing source in-place rather than duplicating
|
||||
localSources.update(existing.id, source);
|
||||
return { ...source, id: existing.id } as TSource;
|
||||
return { ...source, id: existing.id };
|
||||
}
|
||||
return localSources.create(source);
|
||||
}
|
||||
|
|
@ -183,19 +245,28 @@ function hasAllColumns(columns: ColumnMeta[], requiredColumns: string[]) {
|
|||
return missingColumns.length === 0;
|
||||
}
|
||||
|
||||
type TStrippedSource<T extends TSource> = Partial<
|
||||
Omit<T, 'id' | 'name' | 'from' | 'connection'>
|
||||
> & { kind: T['kind'] };
|
||||
type InferredSourceConfig =
|
||||
| TStrippedSource<TLogSource>
|
||||
| TStrippedSource<TTraceSource>
|
||||
| TStrippedSource<TMetricSource>
|
||||
| TStrippedSource<TSessionSource>;
|
||||
|
||||
export async function inferTableSourceConfig({
|
||||
databaseName,
|
||||
tableName,
|
||||
connectionId,
|
||||
kind,
|
||||
metadata,
|
||||
}: {
|
||||
databaseName: string;
|
||||
tableName: string;
|
||||
connectionId: string;
|
||||
kind: SourceKind;
|
||||
metadata: Metadata;
|
||||
}): Promise<
|
||||
Partial<Omit<TSource, 'id' | 'name' | 'from' | 'connection' | 'kind'>>
|
||||
> {
|
||||
}): Promise<InferredSourceConfig> {
|
||||
const columns = await metadata.getColumns({
|
||||
databaseName,
|
||||
tableName,
|
||||
|
|
@ -213,6 +284,33 @@ export async function inferTableSourceConfig({
|
|||
? new Set(extractColumnReferencesFromKey(primaryKeys))
|
||||
: new Set();
|
||||
|
||||
const timestampColumns = filterColumnMetaByType(columns, [JSDataType.Date]);
|
||||
const primaryKeyTimestampColumn = timestampColumns?.find(c =>
|
||||
primaryKeyColumns.has(c.name),
|
||||
);
|
||||
|
||||
const baseConfig = {
|
||||
...(primaryKeyTimestampColumn != null
|
||||
? { timestampValueExpression: primaryKeyTimestampColumn.name }
|
||||
: {}),
|
||||
kind,
|
||||
};
|
||||
|
||||
if (kind === SourceKind.Session) {
|
||||
const isSessionSchema =
|
||||
hasAllColumns(columns, Object.values(SESSION_TABLE_EXPRESSIONS)) ||
|
||||
hasAllColumns(columns, Object.values(JSON_SESSION_TABLE_EXPRESSIONS));
|
||||
|
||||
if (isSessionSchema) {
|
||||
return {
|
||||
...baseConfig,
|
||||
resourceAttributesExpression:
|
||||
SESSION_TABLE_EXPRESSIONS.resourceAttributesExpression,
|
||||
};
|
||||
}
|
||||
return baseConfig;
|
||||
}
|
||||
|
||||
const isOtelLogSchema = hasAllColumns(columns, [
|
||||
'Timestamp',
|
||||
'Body',
|
||||
|
|
@ -242,17 +340,8 @@ export async function inferTableSourceConfig({
|
|||
// Check if SpanEvents column is available
|
||||
const hasSpanEvents = columns.some(col => col.name === 'Events.Timestamp');
|
||||
|
||||
const timestampColumns = filterColumnMetaByType(columns, [JSDataType.Date]);
|
||||
const primaryKeyTimestampColumn = timestampColumns?.find(c =>
|
||||
primaryKeyColumns.has(c.name),
|
||||
);
|
||||
|
||||
return {
|
||||
...(primaryKeyTimestampColumn != null
|
||||
? {
|
||||
timestampValueExpression: primaryKeyTimestampColumn.name,
|
||||
}
|
||||
: {}),
|
||||
...baseConfig,
|
||||
...(isOtelLogSchema
|
||||
? {
|
||||
defaultTableSelectExpression:
|
||||
|
|
@ -295,11 +384,11 @@ export async function inferTableSourceConfig({
|
|||
};
|
||||
}
|
||||
|
||||
export function getDurationMsExpression(source: TSource) {
|
||||
export function getDurationMsExpression(source: TTraceSource) {
|
||||
return `(${source.durationExpression})/1e${(source.durationPrecision ?? 9) - 3}`;
|
||||
}
|
||||
|
||||
export function getDurationSecondsExpression(source: TSource) {
|
||||
export function getDurationSecondsExpression(source: TTraceSource) {
|
||||
return `(${source.durationExpression})/1e${source.durationPrecision ?? 9}`;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,11 @@ import { formatDistanceToNowStrict } from 'date-fns';
|
|||
import numbro from 'numbro';
|
||||
import type { MutableRefObject, SetStateAction } from 'react';
|
||||
import { TableConnection } from '@hyperdx/common-utils/dist/core/metadata';
|
||||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
SourceKind,
|
||||
TMetricSource,
|
||||
TSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
import { SortingState } from '@tanstack/react-table';
|
||||
|
||||
import { NOW } from './config';
|
||||
|
|
@ -830,11 +834,15 @@ export function getMetricTableName(
|
|||
source: TSource,
|
||||
metricType?: string,
|
||||
): string | undefined {
|
||||
return metricType == null
|
||||
? source.from.tableName
|
||||
: source.metricTables?.[
|
||||
metricType.toLowerCase() as keyof typeof source.metricTables
|
||||
];
|
||||
if (metricType == null) {
|
||||
return source.from.tableName;
|
||||
}
|
||||
if (source.kind === SourceKind.Metric) {
|
||||
return source.metricTables?.[
|
||||
metricType.toLowerCase() as keyof typeof source.metricTables
|
||||
];
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function getAllMetricTables(source: TSource): TableConnection[] {
|
||||
|
|
@ -843,15 +851,17 @@ export function getAllMetricTables(source: TSource): TableConnection[] {
|
|||
return Object.values(MetricsDataType)
|
||||
.filter(
|
||||
metricType =>
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
||||
!!source.metricTables![metricType as keyof TSource['metricTables']],
|
||||
!!source.metricTables[
|
||||
metricType as unknown as keyof TMetricSource['metricTables']
|
||||
],
|
||||
)
|
||||
.map(
|
||||
metricType =>
|
||||
({
|
||||
tableName:
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
||||
source.metricTables![metricType as keyof TSource['metricTables']],
|
||||
source.metricTables[
|
||||
metricType as unknown as keyof TMetricSource['metricTables']
|
||||
],
|
||||
databaseName: source.from.databaseName,
|
||||
connectionId: source.connection,
|
||||
}) satisfies TableConnection,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,8 @@
|
|||
import { SourceKind, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import {
|
||||
SourceKind,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import {
|
||||
getHighlightedAttributesFromData,
|
||||
|
|
@ -8,17 +12,25 @@ import {
|
|||
describe('getHighlightedAttributesFromData', () => {
|
||||
const createBasicSource = (
|
||||
highlightedTraceAttributeExpressions: any[] = [],
|
||||
): TSource => ({
|
||||
): TTraceSource => ({
|
||||
kind: SourceKind.Trace,
|
||||
from: {
|
||||
databaseName: 'default',
|
||||
tableName: 'otel_traces',
|
||||
},
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: 'Timestamp',
|
||||
connection: 'test-connection',
|
||||
name: 'Traces',
|
||||
highlightedTraceAttributeExpressions,
|
||||
id: 'test-source-id',
|
||||
durationExpression: 'Duration',
|
||||
durationPrecision: 9,
|
||||
traceIdExpression: 'TraceId',
|
||||
spanIdExpression: 'SpanId',
|
||||
parentSpanIdExpression: 'ParentSpanId',
|
||||
spanNameExpression: 'SpanName',
|
||||
spanKindExpression: 'SpanKind',
|
||||
});
|
||||
|
||||
const basicMeta = [
|
||||
|
|
@ -481,7 +493,7 @@ describe('getHighlightedAttributesFromData', () => {
|
|||
});
|
||||
|
||||
it('extracts highlightedRowAttributeExpressions correctly', () => {
|
||||
const source: TSource = {
|
||||
const source: TTraceSource = {
|
||||
...createBasicSource(),
|
||||
highlightedRowAttributeExpressions: [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,12 +1,13 @@
|
|||
import { ResponseJSON } from '@hyperdx/common-utils/dist/clickhouse';
|
||||
import { TSource } from '@hyperdx/common-utils/dist/types';
|
||||
import { TLogSource, TSource } from '@hyperdx/common-utils/dist/types';
|
||||
|
||||
import { getJSONColumnNames } from '@/components/DBRowDataPanel';
|
||||
|
||||
type HighlightedAttributeExpressions =
|
||||
TLogSource['highlightedRowAttributeExpressions'];
|
||||
|
||||
export function getSelectExpressionsForHighlightedAttributes(
|
||||
expressions: TSource[
|
||||
| 'highlightedRowAttributeExpressions'
|
||||
| 'highlightedTraceAttributeExpressions'] = [],
|
||||
expressions: HighlightedAttributeExpressions = [],
|
||||
) {
|
||||
return expressions.map(({ sqlExpression, alias }) => ({
|
||||
valueExpression: sqlExpression,
|
||||
|
|
@ -16,9 +17,7 @@ export function getSelectExpressionsForHighlightedAttributes(
|
|||
|
||||
export function getHighlightedAttributesFromData(
|
||||
source: TSource,
|
||||
attributes: TSource[
|
||||
| 'highlightedRowAttributeExpressions'
|
||||
| 'highlightedTraceAttributeExpressions'] = [],
|
||||
attributes: HighlightedAttributeExpressions = [],
|
||||
data: Record<string, unknown>[],
|
||||
meta: ResponseJSON['meta'],
|
||||
) {
|
||||
|
|
|
|||
|
|
@ -3,18 +3,25 @@ import { ClickHouseClient } from '@clickhouse/client-common';
|
|||
|
||||
import { ClickhouseClient as HdxClickhouseClient } from '@/clickhouse/node';
|
||||
import { Metadata, MetadataCache } from '@/core/metadata';
|
||||
import { ChartConfigWithDateRange, TSource } from '@/types';
|
||||
import { ChartConfigWithDateRange, SourceKind, TSource } from '@/types';
|
||||
|
||||
describe('Metadata Integration Tests', () => {
|
||||
let client: ClickHouseClient;
|
||||
let hdxClient: HdxClickhouseClient;
|
||||
|
||||
const source = {
|
||||
const source: TSource = {
|
||||
id: 'test-source',
|
||||
name: 'Test',
|
||||
kind: SourceKind.Log,
|
||||
connection: 'conn-1',
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
querySettings: [
|
||||
{ setting: 'optimize_read_in_order', value: '0' },
|
||||
{ setting: 'cast_keep_nullable', value: '0' },
|
||||
],
|
||||
} as TSource;
|
||||
};
|
||||
|
||||
beforeAll(() => {
|
||||
const host = process.env.CLICKHOUSE_HOST || 'http://localhost:8123';
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { ClickhouseClient } from '../clickhouse/node';
|
|||
import { Metadata, MetadataCache } from '../core/metadata';
|
||||
import * as renderChartConfigModule from '../core/renderChartConfig';
|
||||
import { isBuilderChartConfig } from '../guards';
|
||||
import { BuilderChartConfigWithDateRange, TSource } from '../types';
|
||||
import { BuilderChartConfigWithDateRange, SourceKind, TSource } from '../types';
|
||||
|
||||
// Mock ClickhouseClient
|
||||
const mockClickhouseClient = {
|
||||
|
|
@ -21,12 +21,19 @@ jest.mock('../core/renderChartConfig', () => ({
|
|||
.mockResolvedValue({ sql: 'SELECT 1', params: {} }),
|
||||
}));
|
||||
|
||||
const source = {
|
||||
const source: TSource = {
|
||||
id: 'test-source',
|
||||
name: 'Test',
|
||||
kind: SourceKind.Log,
|
||||
connection: 'conn-1',
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
timestampValueExpression: 'Timestamp',
|
||||
defaultTableSelectExpression: '*',
|
||||
querySettings: [
|
||||
{ setting: 'optimize_read_in_order', value: '0' },
|
||||
{ setting: 'cast_keep_nullable', value: '0' },
|
||||
],
|
||||
} as TSource;
|
||||
};
|
||||
|
||||
describe('MetadataCache', () => {
|
||||
let metadataCache: MetadataCache;
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import {
|
|||
DashboardSchema,
|
||||
MetricsDataType,
|
||||
SourceKind,
|
||||
TSourceUnion,
|
||||
TSource,
|
||||
} from '@/types';
|
||||
|
||||
import {
|
||||
|
|
@ -507,7 +507,7 @@ describe('utils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
const sources: TSourceUnion[] = [
|
||||
const sources: TSource[] = [
|
||||
{
|
||||
id: 'source1',
|
||||
name: 'Logs',
|
||||
|
|
@ -628,7 +628,7 @@ describe('utils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
const sources: TSourceUnion[] = [
|
||||
const sources: TSource[] = [
|
||||
{
|
||||
id: 'source1',
|
||||
name: 'Logs',
|
||||
|
|
@ -727,7 +727,7 @@ describe('utils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
const sources: TSourceUnion[] = [
|
||||
const sources: TSource[] = [
|
||||
{
|
||||
id: 'source1',
|
||||
name: 'Logs',
|
||||
|
|
@ -851,7 +851,7 @@ describe('utils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
const sources: TSourceUnion[] = [
|
||||
const sources: TSource[] = [
|
||||
{
|
||||
id: 'source1',
|
||||
kind: SourceKind.Log,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ import { isBuilderChartConfig } from '@/guards';
|
|||
import {
|
||||
ChartConfigWithOptDateRange,
|
||||
MaterializedViewConfiguration,
|
||||
TSource,
|
||||
SourceKind,
|
||||
TLogSource,
|
||||
} from '@/types';
|
||||
|
||||
import { ColumnMeta } from '..';
|
||||
|
|
@ -83,7 +84,7 @@ describe('materializedViews', () => {
|
|||
const SOURCE = {
|
||||
from: { databaseName: 'default', tableName: 'otel_spans' },
|
||||
materializedViews: [MV_CONFIG_METRIC_ROLLUP_1M],
|
||||
} as TSource;
|
||||
} as TLogSource;
|
||||
|
||||
describe('tryConvertConfigToMaterializedViewSelect', () => {
|
||||
it('should return empty object if selecting a string instead of an array of aggregates', async () => {
|
||||
|
|
@ -1094,7 +1095,7 @@ describe('materializedViews', () => {
|
|||
{} as any,
|
||||
{
|
||||
from: { databaseName: 'default', tableName: 'table_without_mv' },
|
||||
} as TSource,
|
||||
} as TLogSource,
|
||||
);
|
||||
|
||||
expect(actual).toEqual(chartConfig);
|
||||
|
|
@ -1509,7 +1510,7 @@ describe('materializedViews', () => {
|
|||
{} as any,
|
||||
{
|
||||
from: { databaseName: 'default', tableName: 'table_without_mv' },
|
||||
} as TSource,
|
||||
} as TLogSource,
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
|
|
@ -1916,6 +1917,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment', 'service', 'status_code'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_LOGS_1M],
|
||||
};
|
||||
|
|
@ -1958,6 +1960,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment', 'service', 'region'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_LOGS_1M, MV_CONFIG_LOGS_1H],
|
||||
};
|
||||
|
|
@ -2022,6 +2025,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment', 'unsupported_key'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_LOGS_1M],
|
||||
};
|
||||
|
|
@ -2121,6 +2125,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_LOGS_1M, MV_CONFIG_LOGS_1H],
|
||||
};
|
||||
|
|
@ -2345,6 +2350,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment', 'service', 'status_code'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_LOGS_1M],
|
||||
};
|
||||
|
|
@ -2398,6 +2404,7 @@ describe('materializedViews', () => {
|
|||
|
||||
const keys = ['environment', 'service'];
|
||||
const source = {
|
||||
kind: SourceKind.Log,
|
||||
from: { databaseName: 'default', tableName: 'logs' },
|
||||
materializedViews: [MV_CONFIG_WITH_DIFFERENT_TIMESTAMP],
|
||||
timestampValueExpression: 'source_timestamp',
|
||||
|
|
|
|||
|
|
@ -6,10 +6,17 @@ import {
|
|||
CteChartConfig,
|
||||
InternalAggregateFunction,
|
||||
InternalAggregateFunctionSchema,
|
||||
isLogSource,
|
||||
isTraceSource,
|
||||
MaterializedViewConfiguration,
|
||||
TLogSource,
|
||||
TSource,
|
||||
TTraceSource,
|
||||
} from '@/types';
|
||||
|
||||
// Source types that support materialized views
|
||||
type TMVSource = TLogSource | TTraceSource;
|
||||
|
||||
import { Metadata, TableConnection } from './metadata';
|
||||
import {
|
||||
convertDateRangeToGranularityString,
|
||||
|
|
@ -383,7 +390,7 @@ async function tryOptimizeConfig<C extends BuilderChartConfigWithOptDateRange>(
|
|||
clickhouseClient: BaseClickhouseClient,
|
||||
signal: AbortSignal | undefined,
|
||||
mvConfig: MaterializedViewConfiguration,
|
||||
source: Omit<TSource, 'connection'>, // for overlap with ISource type
|
||||
source: Omit<TMVSource, 'connection'>, // for overlap with ISource type
|
||||
) {
|
||||
const errors: string[] = [];
|
||||
// Attempt to optimize any CTEs that exist in the config
|
||||
|
|
@ -487,7 +494,7 @@ export async function tryOptimizeConfigWithMaterializedViewWithExplanations<
|
|||
metadata: Metadata,
|
||||
clickhouseClient: BaseClickhouseClient,
|
||||
signal: AbortSignal | undefined,
|
||||
source: Omit<TSource, 'connection'>, // for overlap with ISource type
|
||||
source: Omit<TMVSource, 'connection'>, // for overlap with ISource type
|
||||
): Promise<{
|
||||
optimizedConfig?: C;
|
||||
explanations: MVOptimizationExplanation[];
|
||||
|
|
@ -541,7 +548,7 @@ export async function tryOptimizeConfigWithMaterializedView<
|
|||
metadata: Metadata,
|
||||
clickhouseClient: BaseClickhouseClient,
|
||||
signal: AbortSignal | undefined,
|
||||
source: Omit<TSource, 'connection'>, // for overlap with ISource type
|
||||
source: Omit<TMVSource, 'connection'>, // for overlap with ISource type
|
||||
) {
|
||||
const { optimizedConfig } =
|
||||
await tryOptimizeConfigWithMaterializedViewWithExplanations(
|
||||
|
|
@ -603,7 +610,10 @@ export async function optimizeGetKeyValuesCalls<
|
|||
signal?: AbortSignal;
|
||||
}): Promise<GetKeyValueCall<C>[]> {
|
||||
// Get the MVs from the source
|
||||
const mvs = source?.materializedViews || [];
|
||||
const mvs =
|
||||
((isTraceSource(source) || isLogSource(source)) &&
|
||||
source?.materializedViews) ||
|
||||
[];
|
||||
const mvsById = new Map(mvs.map(mv => [toMvId(mv), mv]));
|
||||
|
||||
// Identify keys which can be queried from a materialized view
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ import type {
|
|||
BuilderChartConfigWithDateRange,
|
||||
TSource,
|
||||
} from '@/types';
|
||||
import { SourceKind } from '@/types';
|
||||
|
||||
import { optimizeGetKeyValuesCalls } from './materializedViews';
|
||||
import { getLocalTableFromDistributedTable, objectHash } from './utils';
|
||||
|
|
@ -1251,7 +1252,10 @@ export class Metadata {
|
|||
if (keys.length === 0) return [];
|
||||
|
||||
const defaultKeyValueCall = { chartConfig, keys };
|
||||
const getKeyValueCalls = source
|
||||
const canHaveMVs =
|
||||
source &&
|
||||
(source.kind === SourceKind.Log || source.kind === SourceKind.Trace);
|
||||
const getKeyValueCalls = canHaveMVs
|
||||
? await optimizeGetKeyValuesCalls({
|
||||
chartConfig,
|
||||
keys,
|
||||
|
|
|
|||
|
|
@ -1122,7 +1122,12 @@ async function translateMetricChartConfig(
|
|||
}
|
||||
|
||||
const { metricType, metricName, metricNameSql, ..._select } = select[0]; // Initial impl only supports one metric select per chart config
|
||||
if (metricType === MetricsDataType.Gauge && metricName) {
|
||||
if (
|
||||
metricType === MetricsDataType.Gauge &&
|
||||
metricName &&
|
||||
MetricsDataType.Gauge in metricTables &&
|
||||
metricTables[MetricsDataType.Gauge]
|
||||
) {
|
||||
const timeBucketCol = '__hdx_time_bucket2';
|
||||
const timeExpr = timeBucketExpr({
|
||||
interval: chartConfig.granularity || 'auto',
|
||||
|
|
@ -1209,7 +1214,12 @@ async function translateMetricChartConfig(
|
|||
timestampValueExpression: timeBucketCol,
|
||||
settings: chSql`short_circuit_function_evaluation = 'force_enable'`,
|
||||
};
|
||||
} else if (metricType === MetricsDataType.Sum && metricName) {
|
||||
} else if (
|
||||
metricType === MetricsDataType.Sum &&
|
||||
metricName &&
|
||||
MetricsDataType.Sum in metricTables &&
|
||||
metricTables[MetricsDataType.Sum]
|
||||
) {
|
||||
const timeBucketCol = '__hdx_time_bucket2';
|
||||
const valueHighCol = '`__hdx_value_high`';
|
||||
const valueHighPrevCol = '`__hdx_value_high_prev`';
|
||||
|
|
@ -1331,7 +1341,12 @@ async function translateMetricChartConfig(
|
|||
where: '', // clear up the condition since the where clause is already applied at the upstream CTE
|
||||
timestampValueExpression: `\`${timeBucketCol}\``,
|
||||
};
|
||||
} else if (metricType === MetricsDataType.Histogram && metricName) {
|
||||
} else if (
|
||||
metricType === MetricsDataType.Histogram &&
|
||||
metricName &&
|
||||
MetricsDataType.Histogram in metricTables &&
|
||||
metricTables[MetricsDataType.Histogram]
|
||||
) {
|
||||
const { alias } = _select;
|
||||
// Use the alias from the select, defaulting to 'Value' for backwards compatibility
|
||||
const valueAlias = alias || 'Value';
|
||||
|
|
@ -1358,7 +1373,7 @@ async function translateMetricChartConfig(
|
|||
Array.isArray(chartConfig.dateRange)
|
||||
? convertDateRangeToGranularityString(chartConfig.dateRange)
|
||||
: chartConfig.granularity,
|
||||
} as BuilderChartConfigWithOptDateRangeEx;
|
||||
} satisfies BuilderChartConfigWithOptDateRangeEx;
|
||||
|
||||
const timeBucketSelect = isUsingGranularity(cteChartConfig)
|
||||
? timeBucketExpr({
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import {
|
|||
QuerySettings,
|
||||
SQLInterval,
|
||||
TileTemplateSchema,
|
||||
TSourceUnion,
|
||||
TSource,
|
||||
} from '@/types';
|
||||
|
||||
import { SkipIndexMetadata, TableMetadata } from './metadata';
|
||||
|
|
@ -460,7 +460,7 @@ type TileTemplate = z.infer<typeof TileTemplateSchema>;
|
|||
|
||||
export function convertToDashboardTemplate(
|
||||
input: Dashboard,
|
||||
sources: TSourceUnion[],
|
||||
sources: TSource[],
|
||||
connections: Connection[] = [],
|
||||
): DashboardTemplate {
|
||||
const output: DashboardTemplate = {
|
||||
|
|
@ -471,7 +471,7 @@ export function convertToDashboardTemplate(
|
|||
|
||||
const convertToTileTemplate = (
|
||||
input: Dashboard['tiles'][0],
|
||||
sources: TSourceUnion[],
|
||||
sources: TSource[],
|
||||
connections: Connection[],
|
||||
): TileTemplate => {
|
||||
const tile = TileTemplateSchema.strip().parse(structuredClone(input));
|
||||
|
|
@ -497,7 +497,7 @@ export function convertToDashboardTemplate(
|
|||
|
||||
const convertToFilterTemplate = (
|
||||
input: DashboardFilter,
|
||||
sources: TSourceUnion[],
|
||||
sources: TSource[],
|
||||
): DashboardFilter => {
|
||||
const filter = DashboardFilterSchema.strip().parse(structuredClone(input));
|
||||
// Extract name from source or default to '' if not found
|
||||
|
|
|
|||
|
|
@ -820,8 +820,12 @@ const QuerySettingsSchema = z
|
|||
|
||||
export type QuerySettings = z.infer<typeof QuerySettingsSchema>;
|
||||
|
||||
const RequiredTimestampColumnSchema = z
|
||||
.string()
|
||||
.min(1, 'Timestamp Column is required');
|
||||
|
||||
// Base schema with fields common to all source types
|
||||
const SourceBaseSchema = z.object({
|
||||
export const BaseSourceSchema = z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1, 'Name is required'),
|
||||
kind: z.nativeEnum(SourceKind),
|
||||
|
|
@ -831,12 +835,9 @@ const SourceBaseSchema = z.object({
|
|||
tableName: z.string().min(1, 'Table is required'),
|
||||
}),
|
||||
querySettings: QuerySettingsSchema.optional(),
|
||||
timestampValueExpression: RequiredTimestampColumnSchema,
|
||||
});
|
||||
|
||||
const RequiredTimestampColumnSchema = z
|
||||
.string()
|
||||
.min(1, 'Timestamp Column is required');
|
||||
|
||||
const HighlightedAttributeExpressionsSchema = z.array(
|
||||
z.object({
|
||||
sqlExpression: z.string().min(1, 'Attribute SQL Expression is required'),
|
||||
|
|
@ -879,13 +880,11 @@ export type MaterializedViewConfiguration = z.infer<
|
|||
>;
|
||||
|
||||
// Log source form schema
|
||||
const LogSourceAugmentation = {
|
||||
export const LogSourceSchema = BaseSourceSchema.extend({
|
||||
kind: z.literal(SourceKind.Log),
|
||||
defaultTableSelectExpression: z.string({
|
||||
message: 'Default Table Select Expression is required',
|
||||
}),
|
||||
timestampValueExpression: RequiredTimestampColumnSchema,
|
||||
|
||||
defaultTableSelectExpression: z
|
||||
.string()
|
||||
.min(1, 'Default Select Expression is required'),
|
||||
// Optional fields for logs
|
||||
serviceNameExpression: z.string().optional(),
|
||||
severityTextExpression: z.string().optional(),
|
||||
|
|
@ -906,13 +905,14 @@ const LogSourceAugmentation = {
|
|||
HighlightedAttributeExpressionsSchema.optional(),
|
||||
materializedViews: z.array(MaterializedViewConfigurationSchema).optional(),
|
||||
orderByExpression: z.string().optional(),
|
||||
};
|
||||
});
|
||||
|
||||
// Trace source form schema
|
||||
const TraceSourceAugmentation = {
|
||||
export const TraceSourceSchema = BaseSourceSchema.extend({
|
||||
kind: z.literal(SourceKind.Trace),
|
||||
defaultTableSelectExpression: z.string().optional(),
|
||||
timestampValueExpression: RequiredTimestampColumnSchema,
|
||||
defaultTableSelectExpression: z
|
||||
.string()
|
||||
.min(1, 'Default Select Expression is required'),
|
||||
|
||||
// Required fields for traces
|
||||
durationExpression: z.string().min(1, 'Duration Expression is required'),
|
||||
|
|
@ -943,26 +943,25 @@ const TraceSourceAugmentation = {
|
|||
HighlightedAttributeExpressionsSchema.optional(),
|
||||
materializedViews: z.array(MaterializedViewConfigurationSchema).optional(),
|
||||
orderByExpression: z.string().optional(),
|
||||
};
|
||||
});
|
||||
|
||||
// Session source form schema
|
||||
const SessionSourceAugmentation = {
|
||||
export const SessionSourceSchema = BaseSourceSchema.extend({
|
||||
kind: z.literal(SourceKind.Session),
|
||||
|
||||
// Optional to support legacy sources, which did not require this field.
|
||||
// Will be defaulted to `TimestampTime` when queried, if undefined.
|
||||
timestampValueExpression: z.string().optional(),
|
||||
|
||||
// Required fields for sessions
|
||||
traceSourceId: z
|
||||
.string({ message: 'Correlated Trace Source is required' })
|
||||
.min(1, 'Correlated Trace Source is required'),
|
||||
};
|
||||
|
||||
// Optional fields for sessions
|
||||
resourceAttributesExpression: z.string().optional(),
|
||||
});
|
||||
|
||||
// Metric source form schema
|
||||
const MetricSourceAugmentation = {
|
||||
export const MetricSourceSchema = BaseSourceSchema.extend({
|
||||
kind: z.literal(SourceKind.Metric),
|
||||
// override from SourceBaseSchema
|
||||
// override from BaseSourceSchema
|
||||
from: z.object({
|
||||
databaseName: z.string().min(1, 'Database is required'),
|
||||
tableName: z.string(),
|
||||
|
|
@ -970,87 +969,51 @@ const MetricSourceAugmentation = {
|
|||
|
||||
// Metric tables - at least one should be provided
|
||||
metricTables: MetricTableSchema,
|
||||
timestampValueExpression: RequiredTimestampColumnSchema,
|
||||
resourceAttributesExpression: z
|
||||
.string()
|
||||
.min(1, 'Resource Attributes is required'),
|
||||
|
||||
// Optional fields for metrics
|
||||
serviceNameExpression: z.string().optional(),
|
||||
logSourceId: z.string().optional(),
|
||||
};
|
||||
});
|
||||
|
||||
// Union of all source form schemas for validation
|
||||
export const SourceSchema = z.discriminatedUnion('kind', [
|
||||
SourceBaseSchema.extend(LogSourceAugmentation),
|
||||
SourceBaseSchema.extend(TraceSourceAugmentation),
|
||||
SourceBaseSchema.extend(SessionSourceAugmentation),
|
||||
SourceBaseSchema.extend(MetricSourceAugmentation),
|
||||
LogSourceSchema,
|
||||
TraceSourceSchema,
|
||||
SessionSourceSchema,
|
||||
MetricSourceSchema,
|
||||
]);
|
||||
export type TSourceUnion = z.infer<typeof SourceSchema>;
|
||||
export type TSource = z.infer<typeof SourceSchema>;
|
||||
|
||||
// This function exists to perform schema validation with omission of a certain
|
||||
// value. It is not possible to do on the discriminatedUnion directly
|
||||
export function sourceSchemaWithout(
|
||||
omissions: { [k in keyof z.infer<typeof SourceBaseSchema>]?: true } = {},
|
||||
) {
|
||||
// TODO: Make these types work better if possible
|
||||
return z.discriminatedUnion('kind', [
|
||||
SourceBaseSchema.omit(omissions).extend(LogSourceAugmentation),
|
||||
SourceBaseSchema.omit(omissions).extend(TraceSourceAugmentation),
|
||||
SourceBaseSchema.omit(omissions).extend(SessionSourceAugmentation),
|
||||
SourceBaseSchema.omit(omissions).extend(MetricSourceAugmentation),
|
||||
]);
|
||||
export const SourceSchemaNoId = z.discriminatedUnion('kind', [
|
||||
LogSourceSchema.omit({ id: true }),
|
||||
TraceSourceSchema.omit({ id: true }),
|
||||
SessionSourceSchema.omit({ id: true }),
|
||||
MetricSourceSchema.omit({ id: true }),
|
||||
]);
|
||||
export type TSourceNoId = z.infer<typeof SourceSchemaNoId>;
|
||||
|
||||
// Per-kind source types extracted from the Zod discriminated union
|
||||
export type TLogSource = Extract<TSource, { kind: SourceKind.Log }>;
|
||||
export type TTraceSource = Extract<TSource, { kind: SourceKind.Trace }>;
|
||||
export type TSessionSource = Extract<TSource, { kind: SourceKind.Session }>;
|
||||
export type TMetricSource = Extract<TSource, { kind: SourceKind.Metric }>;
|
||||
|
||||
// Type guards for narrowing TSource by kind
|
||||
export function isLogSource(source: TSource): source is TLogSource {
|
||||
return source.kind === SourceKind.Log;
|
||||
}
|
||||
export function isTraceSource(source: TSource): source is TTraceSource {
|
||||
return source.kind === SourceKind.Trace;
|
||||
}
|
||||
export function isSessionSource(source: TSource): source is TSessionSource {
|
||||
return source.kind === SourceKind.Session;
|
||||
}
|
||||
export function isMetricSource(source: TSource): source is TMetricSource {
|
||||
return source.kind === SourceKind.Metric;
|
||||
}
|
||||
|
||||
// Helper types for better union flattening
|
||||
type AllKeys<T> = T extends any ? keyof T : never;
|
||||
// This is Claude Opus's explanation of this type magic to extract the required
|
||||
// parameters:
|
||||
//
|
||||
// 1. [K in keyof T]-?:
|
||||
// Maps over all keys in T. The -? removes the optional modifier, making all
|
||||
// properties required in this mapped type
|
||||
// 2. {} extends Pick<T, K> ? never : K
|
||||
// Pick<T, K> creates a type with just property K from T.
|
||||
// {} extends Pick<T, K> checks if an empty object can satisfy the picked property.
|
||||
// If the property is optional, {} can extend it (returns never)
|
||||
// If the property is required, {} cannot extend it (returns K)
|
||||
// 3. [keyof T]
|
||||
// Indexes into the mapped type to get the union of all non-never values
|
||||
type NonOptionalKeysPresentInEveryUnionBranch<T> = {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
|
||||
[K in keyof T]-?: {} extends Pick<T, K> ? never : K;
|
||||
}[keyof T];
|
||||
|
||||
// Helper to check if a key is required in ALL branches of the union
|
||||
type RequiredInAllBranches<T, K extends AllKeys<T>> = T extends any
|
||||
? K extends NonOptionalKeysPresentInEveryUnionBranch<T>
|
||||
? true
|
||||
: false
|
||||
: never;
|
||||
|
||||
// This type gathers the Required Keys across the discriminated union TSourceUnion
|
||||
// and keeps them as required in a non-unionized type, and also gathers all possible
|
||||
// optional keys from the union branches and brings them into one unified flattened type.
|
||||
// This is done to maintain compatibility with the legacy zod schema.
|
||||
type FlattenUnion<T> = {
|
||||
// If a key is required in all branches of a union, make it a required key
|
||||
[K in AllKeys<T> as RequiredInAllBranches<T, K> extends true
|
||||
? K
|
||||
: never]: T extends infer U ? (K extends keyof U ? U[K] : never) : never;
|
||||
} & {
|
||||
// If a key is not required in all branches of a union, make it an optional
|
||||
// key and join the possible types
|
||||
[K in AllKeys<T> as RequiredInAllBranches<T, K> extends true
|
||||
? never
|
||||
: K]?: T extends infer U ? (K extends keyof U ? U[K] : never) : never;
|
||||
};
|
||||
type TSourceWithoutDefaults = FlattenUnion<z.infer<typeof SourceSchema>>;
|
||||
|
||||
// Type representing a TSourceWithoutDefaults object which has been augmented with default values
|
||||
export type TSource = TSourceWithoutDefaults & {
|
||||
timestampValueExpression: string;
|
||||
};
|
||||
|
||||
export const AssistantLineTableConfigSchema = z.object({
|
||||
displayType: z.enum([DisplayType.Line, DisplayType.Table]),
|
||||
|
|
|
|||
Loading…
Reference in a new issue