mirror of
https://github.com/hyperdxio/hyperdx
synced 2026-04-21 13:37:15 +00:00
fix: Compatibility with lowercase text skip index (#1089)
Expects users to create schemas in the form ``` INDEX inv_idx lower(Body) TYPE text(tokenizer = 'default') GRANULARITY 64 ``` — i.e. the default tokenizer, and `lower(Body)` specifically
This commit is contained in:
parent
3636fc570d
commit
0f242558b3
3 changed files with 9 additions and 3 deletions
5
.changeset/little-mayflies-tie.md
Normal file
5
.changeset/little-mayflies-tie.md
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
"@hyperdx/common-utils": patch
|
||||
---
|
||||
|
||||
fix: Compatibility with lowercase text skip index
|
||||
|
|
@ -54,7 +54,7 @@ services:
|
|||
ch-server:
|
||||
condition: service_healthy
|
||||
ch-server:
|
||||
image: clickhouse/clickhouse-server:25.6-alpine
|
||||
image: clickhouse/clickhouse-server:25.7-alpine
|
||||
ports:
|
||||
- 8123:8123 # http api
|
||||
- 9000:9000 # native
|
||||
|
|
|
|||
|
|
@ -369,13 +369,14 @@ export abstract class SQLSerializer implements Serializer {
|
|||
],
|
||||
);
|
||||
} else {
|
||||
// TODO: Check case sensitivity of the index before lowering by default
|
||||
// We can't search multiple tokens with `hasToken`, so we need to split up the term into tokens
|
||||
const hasSeperators = this.termHasSeperators(term);
|
||||
if (hasSeperators) {
|
||||
const tokens = this.tokenizeTerm(term);
|
||||
return `(${isNegatedField ? 'NOT (' : ''}${[
|
||||
...tokens.map(token =>
|
||||
SqlString.format(`hasTokenCaseInsensitive(?, ?)`, [
|
||||
SqlString.format(`hasToken(lower(?), lower(?))`, [
|
||||
SqlString.raw(column ?? ''),
|
||||
token,
|
||||
]),
|
||||
|
|
@ -388,7 +389,7 @@ export abstract class SQLSerializer implements Serializer {
|
|||
].join(' AND ')}${isNegatedField ? ')' : ''})`;
|
||||
} else {
|
||||
return SqlString.format(
|
||||
`(${isNegatedField ? 'NOT ' : ''}hasTokenCaseInsensitive(?, ?))`,
|
||||
`(${isNegatedField ? 'NOT ' : ''}hasToken(lower(?), lower(?)))`,
|
||||
[SqlString.raw(column ?? ''), term],
|
||||
);
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in a new issue