mirror of
https://github.com/hyperdxio/hyperdx
synced 2026-04-21 13:37:15 +00:00
fix: Fix query error when searching nested JSON values (#2044)
## Summary
Fixes a query error when clicking Search, Add to Filters, Chart, or Column on nested JSON values inside Map column attributes (e.g., LogAttributes['config'] containing '{"host": "localhost"}').
buildJSONExtractQuery was using only the last element of parsedJsonRootPath as the base column, producing invalid ClickHouse expressions like JSONExtractString(config, 'host') instead of JSONExtractString(LogAttributes['config'], 'host'). Fixed by using mergePath to construct the full column path and passing jsonColumns through so both Map (bracket notation) and JSON (dot notation) columns are handled correctly.
## How to test locally or on Vercel
1. Open a log or trace side panel where a Map column attribute (e.g., LogAttributes) has a value containing a JSON string (e.g., {"host": "localhost", "port": 5432})
2. Expand the JSON value in the side panel by clicking "Expand JSON"
3. Hover over a nested key (e.g., host) and click Search — verify the search page opens without a query error and the WHERE clause uses the full column path (e.g., JSONExtractString(LogAttributes['config'], 'host') = 'localhost')
4. Repeat step 3 with Add to Filters, Column, and Chart (for numeric values) to verify all actions produce valid queries
## References
Linear Issue: Fixes [HDX-3906](https://linear.app/clickhouse/issue/HDX-3906/clicking-search-on-nested-logattributes-map-key-results-in-query-error)
This commit is contained in:
parent
bfc938118d
commit
2bb8ccdc5a
3 changed files with 45 additions and 2 deletions
.changeset/odd-plants-grin.md — new file, 5 additions
@@ -0,0 +1,5 @@
+---
+"@hyperdx/app": patch
+---
+
+fix: Fix query error when searching nested JSON values
|
@@ -227,6 +227,7 @@ describe('DBRowJsonViewer', () => {
       buildJSONExtractQuery(
         ['SpanAttributes', 'count'],
         ['SpanAttributes'],
+        [],
         'JSONExtractFloat',
       ),
     ).toBe("JSONExtractFloat(SpanAttributes, 'count')");
|
@@ -237,6 +238,7 @@ describe('DBRowJsonViewer', () => {
       buildJSONExtractQuery(
         ['LogAttributes', 'enabled'],
         ['LogAttributes'],
+        [],
         'JSONExtractBool',
       ),
     ).toBe("JSONExtractBool(LogAttributes, 'enabled')");
@@ -247,5 +249,33 @@ describe('DBRowJsonViewer', () => {
       buildJSONExtractQuery(['LogAttributes', '0', 'id'], ['LogAttributes']),
     ).toBe("JSONExtractString(LogAttributes, '0', 'id')");
   });
+
+  it('uses full column path for Map column with parsed JSON value', () => {
+    expect(
+      buildJSONExtractQuery(
+        ['LogAttributes', 'config', 'host'],
+        ['LogAttributes', 'config'],
+      ),
+    ).toBe("JSONExtractString(LogAttributes['config'], 'host')");
+  });
+
+  it('uses full column path for deeply nested Map column with parsed JSON', () => {
+    expect(
+      buildJSONExtractQuery(
+        ['LogAttributes', 'config', 'database', 'host'],
+        ['LogAttributes', 'config'],
+      ),
+    ).toBe("JSONExtractString(LogAttributes['config'], 'database', 'host')");
+  });
+
+  it('uses JSON dot notation for JSON column with parsed JSON value', () => {
+    expect(
+      buildJSONExtractQuery(
+        ['LogAttributes', 'config', 'host'],
+        ['LogAttributes', 'config'],
+        ['LogAttributes'],
+      ),
+    ).toBe("JSONExtractString(LogAttributes.`config`, 'host')");
+  });
 });
@@ -38,6 +38,7 @@ type JSONExtractFn =
 export function buildJSONExtractQuery(
   keyPath: string[],
   parsedJsonRootPath: string[],
+  jsonColumns: string[] = [],
   jsonExtractFn: JSONExtractFn = 'JSONExtractString',
 ): string | null {
   const nestedPath = keyPath.slice(parsedJsonRootPath.length);
@@ -45,7 +46,7 @@ export function buildJSONExtractQuery(
     return null; // No nested path to extract
   }

-  const baseColumn = parsedJsonRootPath[parsedJsonRootPath.length - 1];
+  const baseColumn = mergePath(parsedJsonRootPath, jsonColumns);
   const jsonPathArgs = nestedPath.map(p => `'${p}'`).join(', ');
   return `${jsonExtractFn}(${baseColumn}, ${jsonPathArgs})`;
 }
@@ -387,6 +388,7 @@ export function DBRowJsonViewer({
       const jsonQuery = buildJSONExtractQuery(
         keyPath,
         parsedJsonRootPath,
+        jsonColumns,
       );
       if (jsonQuery) {
         filterFieldPath = jsonQuery;
@@ -438,6 +440,7 @@ export function DBRowJsonViewer({
       const jsonQuery = buildJSONExtractQuery(
         keyPath,
         parsedJsonRootPath,
+        jsonColumns,
         jsonExtractFn,
       );
@@ -481,6 +484,7 @@ export function DBRowJsonViewer({
       const jsonQuery = buildJSONExtractQuery(
         keyPath,
         parsedJsonRootPath,
+        jsonColumns,
       );
       if (jsonQuery) {
         chartFieldPath = jsonQuery;
@@ -504,7 +508,11 @@ export function DBRowJsonViewer({

     // Handle parsed JSON from string columns using JSONExtractString
     if (isInParsedJson && parsedJsonRootPath) {
-      const jsonQuery = buildJSONExtractQuery(keyPath, parsedJsonRootPath);
+      const jsonQuery = buildJSONExtractQuery(
+        keyPath,
+        parsedJsonRootPath,
+        jsonColumns,
+      );
       if (jsonQuery) {
         columnFieldPath = jsonQuery;
       }
Loading…
Reference in a new issue