Add support for WKB geometry columns, @within, @intersects & @contains spatial list queries, and GeoJSON FeatureCollection output.

This commit is contained in:
Sebastian Jeltsch 2026-02-17 17:47:36 +01:00
parent def4c72b49
commit 63210e2e7c
25 changed files with 1680 additions and 226 deletions

315
Cargo.lock generated
View file

@ -24,7 +24,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
dependencies = [
"crypto-common",
"generic-array",
"generic-array 0.14.7",
]
[[package]]
@ -168,6 +168,15 @@ version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
[[package]]
name = "approx"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
dependencies = [
"num-traits",
]
[[package]]
name = "ar_archive_writer"
version = "0.5.1"
@ -207,6 +216,18 @@ dependencies = [
"password-hash",
]
[[package]]
name = "as-slice"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45403b49e3954a4b8428a0ac21a4b7afadccf92bfd96273f1a58cd4812496ae0"
dependencies = [
"generic-array 0.12.4",
"generic-array 0.13.3",
"generic-array 0.14.7",
"stable_deref_trait",
]
[[package]]
name = "askama"
version = "0.15.4"
@ -475,6 +496,15 @@ dependencies = [
"tungstenite",
]
[[package]]
name = "atomic-polyfill"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4"
dependencies = [
"critical-section",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
@ -737,7 +767,7 @@ version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
"generic-array 0.14.7",
]
[[package]]
@ -805,6 +835,12 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bd91ee7b2422bcb158d90ef4d14f75ef67f340943fc4149891dcce8f8b972a3"
[[package]]
name = "c_vec"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd7a427adc0135366d99db65b36dae9237130997e560ed61118041fb72be6e8"
[[package]]
name = "camino"
version = "1.2.2"
@ -1437,6 +1473,12 @@ dependencies = [
"itertools 0.13.0",
]
[[package]]
name = "critical-section"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b"
[[package]]
name = "cron"
version = "0.15.0"
@ -1494,7 +1536,7 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"generic-array 0.14.7",
"rand_core 0.6.4",
"subtle",
"zeroize",
@ -1506,7 +1548,7 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"generic-array 0.14.7",
"rand_core 0.6.4",
"typenum",
]
@ -1782,7 +1824,7 @@ dependencies = [
"crypto-bigint",
"digest",
"ff",
"generic-array",
"generic-array 0.14.7",
"group",
"hkdf",
"pem-rfc7468",
@ -2253,6 +2295,24 @@ dependencies = [
"serde_json",
]
[[package]]
name = "generic-array"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd"
dependencies = [
"typenum",
]
[[package]]
name = "generic-array"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f797e67af32588215eaaab8327027ee8e71b9dd0b2b26996aedf20c030fce309"
dependencies = [
"typenum",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@ -2264,6 +2324,70 @@ dependencies = [
"zeroize",
]
[[package]]
name = "geo-traits"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e7c353d12a704ccfab1ba8bfb1a7fe6cb18b665bf89d37f4f7890edcd260206"
dependencies = [
"geo-types",
]
[[package]]
name = "geo-types"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24f8647af4005fa11da47cd56252c6ef030be8fa97bdbf355e7dfb6348f0a82c"
dependencies = [
"approx",
"num-traits",
"rstar 0.10.0",
"rstar 0.11.0",
"rstar 0.12.2",
"rstar 0.8.4",
"rstar 0.9.3",
"serde",
]
[[package]]
name = "geojson"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e26f3c45b36fccc9cf2805e61d4da6bc4bbd5a3a9589b01afa3a40eff703bd79"
dependencies = [
"geo-types",
"log",
"serde",
"serde_json",
"thiserror 2.0.18",
]
[[package]]
name = "geos"
version = "10.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0473e63acafe4109b096ab8c1e6b8151e1cb25397811525779a9bc7187382a7b"
dependencies = [
"c_vec",
"geo-types",
"geojson",
"geos-sys",
"libc",
"num",
"wkt 0.10.3",
]
[[package]]
name = "geos-sys"
version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dc873d24aefc72aa94c3c1c251afb82beb7be5926002746c0e1f585fef9854c"
dependencies = [
"libc",
"pkg-config",
"semver",
]
[[package]]
name = "getrandom"
version = "0.2.17"
@ -2398,6 +2522,33 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "hash32"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4041af86e63ac4298ce40e5cca669066e75b6f1aa3390fe2561ffa5e1d9f4cc"
dependencies = [
"byteorder",
]
[[package]]
name = "hash32"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67"
dependencies = [
"byteorder",
]
[[package]]
name = "hash32"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606"
dependencies = [
"byteorder",
]
[[package]]
name = "hashbrown"
version = "0.14.5"
@ -2438,6 +2589,41 @@ dependencies = [
"hashbrown 0.16.1",
]
[[package]]
name = "heapless"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634bd4d29cbf24424d0a4bfcbf80c6960129dc24424752a7d1d1390607023422"
dependencies = [
"as-slice",
"generic-array 0.14.7",
"hash32 0.1.1",
"stable_deref_trait",
]
[[package]]
name = "heapless"
version = "0.7.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f"
dependencies = [
"atomic-polyfill",
"hash32 0.2.1",
"rustc_version",
"spin",
"stable_deref_trait",
]
[[package]]
name = "heapless"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad"
dependencies = [
"hash32 0.3.1",
"stable_deref_trait",
]
[[package]]
name = "heck"
version = "0.5.0"
@ -2803,7 +2989,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
dependencies = [
"generic-array",
"generic-array 0.14.7",
]
[[package]]
@ -3151,6 +3337,17 @@ version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
[[package]]
name = "litegis"
version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23501e336b60b13990828e72aec7c11e219f7afe9e57d306b18fe5bd05c7f64d"
dependencies = [
"geos",
"rusqlite",
"rustc_tools_util",
]
[[package]]
name = "litemap"
version = "0.8.1"
@ -3865,6 +4062,12 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pdqselect"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec91767ecc0a0bbe558ce8c9da33c068066c57ecc8bb8477ef8c1ad3ef77c27"
[[package]]
name = "pem"
version = "3.0.6"
@ -4861,6 +5064,67 @@ dependencies = [
"thiserror 2.0.18",
]
[[package]]
name = "rstar"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a45c0e8804d37e4d97e55c6f258bc9ad9c5ee7b07437009dd152d764949a27c"
dependencies = [
"heapless 0.6.1",
"num-traits",
"pdqselect",
"serde",
"smallvec",
]
[[package]]
name = "rstar"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b40f1bfe5acdab44bc63e6699c28b74f75ec43afb59f3eda01e145aff86a25fa"
dependencies = [
"heapless 0.7.17",
"num-traits",
"serde",
"smallvec",
]
[[package]]
name = "rstar"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f39465655a1e3d8ae79c6d9e007f4953bfc5d55297602df9dc38f9ae9f1359a"
dependencies = [
"heapless 0.7.17",
"num-traits",
"serde",
"smallvec",
]
[[package]]
name = "rstar"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73111312eb7a2287d229f06c00ff35b51ddee180f017ab6dec1f69d62ac098d6"
dependencies = [
"heapless 0.7.17",
"num-traits",
"serde",
"smallvec",
]
[[package]]
name = "rstar"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "421400d13ccfd26dfa5858199c30a5d76f9c54e0dba7575273025b43c5175dbb"
dependencies = [
"heapless 0.8.0",
"num-traits",
"serde",
"smallvec",
]
[[package]]
name = "rusqlite"
version = "0.38.0"
@ -4948,6 +5212,12 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc_tools_util"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3b75158011a63889ba12084cf1224baad7bcad50f6ee7c842f772b74aa148ed"
[[package]]
name = "rustc_version"
version = "0.4.1"
@ -5173,7 +5443,7 @@ checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct",
"der",
"generic-array",
"generic-array 0.14.7",
"pkcs8",
"subtle",
"zeroize",
@ -5495,6 +5765,9 @@ name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
dependencies = [
"lock_api",
]
[[package]]
name = "spinning_top"
@ -6203,6 +6476,7 @@ dependencies = [
"fallible-iterator",
"form_urlencoded",
"futures-util",
"geos",
"http-body-util",
"hyper",
"hyper-util",
@ -6215,6 +6489,7 @@ dependencies = [
"kanal",
"lazy_static",
"lettre",
"litegis",
"log",
"mini-moka",
"minijinja",
@ -6391,6 +6666,7 @@ dependencies = [
"serde-value",
"serde_qs",
"uuid",
"wkt 0.14.0",
]
[[package]]
@ -6422,6 +6698,7 @@ dependencies = [
"itertools 0.14.0",
"jsonschema",
"lazy_static",
"litegis",
"log",
"parking_lot",
"rand 0.10.0",
@ -8288,6 +8565,30 @@ dependencies = [
"wasmparser 0.245.1",
]
[[package]]
name = "wkt"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c2252781f8927974e8ba6a67c965a759a2b88ea2b1825f6862426bbb1c8f41"
dependencies = [
"geo-types",
"log",
"num-traits",
"thiserror 1.0.69",
]
[[package]]
name = "wkt"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efb2b923ccc882312e559ffaa832a055ba9d1ac0cc8e86b3e25453247e4b81d7"
dependencies = [
"geo-traits",
"log",
"num-traits",
"thiserror 1.0.69",
]
[[package]]
name = "writeable"
version = "0.6.2"

View file

@ -77,6 +77,7 @@ axum = { version = "^0.8.1", features = ["multipart"] }
base64 = { version = "0.22.1", default-features = false, features = ["alloc", "std"] }
env_logger = { version = "^0.11.8", default-features = false, features = ["auto-color", "humantime"] }
libsqlite3-sys = { version = "0.36.0", default-features = false, features = ["bundled", "preupdate_hook"] }
litegis = { version = "0.0.2" }
minijinja = { version = "2.1.2", default-features = false }
parking_lot = { version = "0.12.3", default-features = false, features = ["send_guard", "arc_lock"] }
rand = "^0.10.0"

View file

@ -0,0 +1,11 @@
-- Example table with a WKB geometry column. The ST_IsValid() CHECK constraint
-- is what marks `geom` as a geometry column to the rest of the stack.
CREATE TABLE geometry (
id INTEGER PRIMARY KEY,
description TEXT,
geom BLOB NOT NULL CHECK(ST_IsValid(geom))
) STRICT;
-- NOTE(review): plain B-tree index over the raw WKB blob — presumably a
-- placeholder rather than a spatial (R-tree) index; confirm intent.
CREATE INDEX _geometry_geom ON geometry(geom);
-- Seed rows: WKT literals converted to WKB geometries with SRID 4326 (WGS 84).
INSERT INTO geometry (description, geom) VALUES
('Colloseo', ST_GeomFromText('POINT(12.4924 41.8902)', 4326)),
('A Line', ST_GeomFromText('LINESTRING(10 20, 20 30)', 4326));

View file

@ -32,6 +32,7 @@
"@tanstack/solid-query": "^5.90.23",
"@tanstack/solid-table": "^8.21.3",
"@tanstack/table-core": "^8.21.3",
"@terraformer/wkt": "^2.2.1",
"chart.js": "^4.5.1",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
@ -57,6 +58,7 @@
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/user-event": "^14.6.1",
"@types/geojson": "^7946.0.16",
"@types/terraformer__wkt": "^2.0.3",
"@types/wicg-file-system-access": "^2023.10.7",
"autoprefixer": "^10.4.24",
"eslint": "^9.39.2",

View file

@ -27,6 +27,7 @@ import type {
} from "@tanstack/solid-table";
import { createColumnHelper } from "@tanstack/solid-table";
import type { DialogTriggerProps } from "@kobalte/core/dialog";
import { geojsonToWKT } from "@terraformer/wkt";
import { urlSafeBase64Decode } from "trailbase";
import { Header } from "@/components/Header";
@ -77,6 +78,7 @@ import {
UploadedFiles,
} from "@/components/tables/Files";
import { parseWkb } from "@/lib/wkb";
import { createConfigQuery } from "@/lib/api/config";
import type { Record, ArrayRecord } from "@/lib/record";
import { hashSqlValue } from "@/lib/value";
@ -89,6 +91,7 @@ import {
findPrimaryKeyColumnIndex,
getForeignKey,
isFileUploadColumn,
isGeometryColumn,
isFileUploadsColumn,
isJSONColumn,
isNotNull,
@ -151,13 +154,19 @@ function renderCell(
if ("Blob" in value) {
const blob = value.Blob;
if ("Base64UrlSafe" in blob) {
if (cell.type === "UUID") {
return (
<Uuid
base64UrlSafeBlob={blob.Base64UrlSafe}
blobEncoding={blobEncoding}
/>
);
switch (cell.type) {
case "UUID": {
return (
<Uuid
base64UrlSafeBlob={blob.Base64UrlSafe}
blobEncoding={blobEncoding}
/>
);
}
case "Geometry": {
const geometry = parseWkb(urlSafeBase64Decode(blob.Base64UrlSafe));
return geojsonToWKT(geometry);
}
}
if (blobEncoding === "hex") {
@ -451,12 +460,21 @@ function TableHeader(props: {
);
}
type CellType = "UUID" | "JSON" | "File" | "File[]" | ColumnDataType;
type CellType =
| "UUID"
| "JSON"
| "File"
| "File[]"
| "Geometry"
| ColumnDataType;
function deriveCellType(column: Column): CellType {
if (isUUIDColumn(column)) {
return "UUID";
}
if (isGeometryColumn(column)) {
return "Geometry";
}
if (isFileUploadColumn(column)) {
return "File";
}

View file

@ -213,6 +213,14 @@ export function isFileUploadColumn(column: Column): boolean {
return false;
}
/**
 * A column is treated as a geometry column iff it is a BLOB whose CHECK
 * constraint expression starts with `ST_IsValid(`.
 */
export function isGeometryColumn(column: Column): boolean {
  if (column.data_type !== "Blob") {
    return false;
  }
  const check = getCheckValue(column.options);
  return /^ST_IsValid\s*\(/.test(check ?? "");
}
export function isFileUploadsColumn(column: Column): boolean {
if (column.data_type === "Text") {
const check = getCheckValue(column.options);

View file

@ -0,0 +1,220 @@
// Derived from: https://github.com/conveyal/osmix/blob/8cc2d43a12a722449c63c71e0d8b7d77583ca82c/packages/geoparquet/src/wkb.ts (MIT)
/**
* WKB (Well-Known Binary) geometry parsing utilities.
*
* Browser-compatible WKB parser using DataView instead of Node.js Buffer.
* Supports standard WKB and EWKB (with SRID) formats.
*
* @module
*/
import type {
Geometry,
GeometryCollection,
LineString,
MultiLineString,
MultiPoint,
MultiPolygon,
Point,
Polygon,
Position,
} from "geojson";
/** WKB geometry type codes */
const WKB_POINT = 1;
const WKB_LINESTRING = 2;
const WKB_POLYGON = 3;
const WKB_MULTIPOINT = 4;
const WKB_MULTILINESTRING = 5;
const WKB_MULTIPOLYGON = 6;
const WKB_GEOMETRYCOLLECTION = 7;
/** EWKB flags */
const EWKB_SRID_FLAG = 0x20000000;
const EWKB_Z_FLAG = 0x80000000;
const EWKB_M_FLAG = 0x40000000;
/**
* Binary reader using DataView for browser compatibility.
*/
/**
 * Sequential binary reader over a Uint8Array, built on DataView so it works
 * in browsers (no Node.js Buffer). Tracks a cursor and the active byte order.
 */
class WkbReader {
  private readonly view: DataView;
  private offset: number = 0;
  private littleEndian: boolean = true;

  constructor(data: Uint8Array) {
    // Respect the array's view window into its underlying ArrayBuffer.
    this.view = new DataView(data.buffer, data.byteOffset, data.byteLength);
  }

  /** Consume a single unsigned byte. */
  readByte(): number {
    return this.view.getUint8(this.offset++);
  }

  /** Consume a 32-bit unsigned integer in the active byte order. */
  readUint32(): number {
    const result = this.view.getUint32(this.offset, this.littleEndian);
    this.offset += 4;
    return result;
  }

  /** Consume a 64-bit IEEE-754 float in the active byte order. */
  readDouble(): number {
    const result = this.view.getFloat64(this.offset, this.littleEndian);
    this.offset += 8;
    return result;
  }

  /** Switch byte order; WKB encodes it per-geometry in the leading byte. */
  setLittleEndian(littleEndian: boolean): void {
    this.littleEndian = littleEndian;
  }
}
/**
* Parse a WKB geometry into a GeoJSON Geometry object.
*
* Browser-compatible implementation using DataView.
* Supports Point, LineString, Polygon, MultiPoint, MultiLineString,
* MultiPolygon, and GeometryCollection. Also handles EWKB with SRID.
*
* @param wkb - WKB-encoded geometry as Uint8Array
* @returns Parsed GeoJSON Geometry
* @throws Error if geometry type is unsupported
*/
export function parseWkb(wkb: Uint8Array): Geometry {
const reader = new WkbReader(wkb);
return parseGeometry(reader);
}
/**
 * Parse one geometry from the reader's current position.
 *
 * Handles the per-geometry byte-order marker, EWKB flag bits (SRID/Z/M) and
 * ISO WKB dimension offsets (base type + 1000 for Z, + 2000 for M, + 3000
 * for ZM). Extra Z/M ordinates are consumed but dropped — output is 2D.
 */
function parseGeometry(reader: WkbReader): Geometry {
  // Byte order marker: 1 = little-endian, 0 = big-endian.
  const byteOrder = reader.readByte();
  reader.setLittleEndian(byteOrder === 1);

  // Geometry type word; may carry EWKB flag bits or an ISO dimension offset.
  let geometryType = reader.readUint32();

  // EWKB: an embedded SRID follows the type word when this flag is set.
  if (geometryType & EWKB_SRID_FLAG) {
    // Skip SRID (4 bytes); GeoJSON coordinates are implicitly WGS 84.
    reader.readUint32();
    geometryType &= ~EWKB_SRID_FLAG;
  }

  // EWKB encodes extra dimensions as high flag bits; mask them out.
  let hasZ = (geometryType & EWKB_Z_FLAG) !== 0;
  let hasM = (geometryType & EWKB_M_FLAG) !== 0;
  geometryType &= 0x0000ffff;

  // ISO WKB encodes extra dimensions as type offsets instead:
  // 1000-1999 => Z, 2000-2999 => M, 3000-3999 => ZM.
  const isoDimension = Math.floor(geometryType / 1000);
  if (isoDimension >= 1 && isoDimension <= 3) {
    hasZ = hasZ || isoDimension === 1 || isoDimension === 3;
    hasM = hasM || isoDimension === 2 || isoDimension === 3;
    geometryType %= 1000;
  }

  // Number of doubles per coordinate tuple.
  const dimensions = 2 + (hasZ ? 1 : 0) + (hasM ? 1 : 0);

  switch (geometryType) {
    case WKB_POINT:
      return parsePoint(reader, dimensions);
    case WKB_LINESTRING:
      return parseLineString(reader, dimensions);
    case WKB_POLYGON:
      return parsePolygon(reader, dimensions);
    case WKB_MULTIPOINT:
      return parseMultiPoint(reader);
    case WKB_MULTILINESTRING:
      return parseMultiLineString(reader);
    case WKB_MULTIPOLYGON:
      return parseMultiPolygon(reader);
    case WKB_GEOMETRYCOLLECTION:
      return parseGeometryCollection(reader);
    default:
      throw new Error(`Unsupported WKB geometry type: ${geometryType}`);
  }
}
/**
 * Read one coordinate tuple and project it down to GeoJSON's 2D [lon, lat].
 * Trailing Z/M ordinates are consumed from the stream but discarded.
 */
function readCoordinate(reader: WkbReader, dimensions: number): Position {
  const position: Position = [reader.readDouble(), reader.readDouble()];
  let extra = dimensions - 2;
  while (extra-- > 0) {
    reader.readDouble(); // Drop Z and/or M.
  }
  return position;
}
/** Read a count-prefixed list of coordinate tuples. */
function readCoordinates(reader: WkbReader, dimensions: number): Position[] {
  const count = reader.readUint32();
  // Array.from's mapper runs sequentially, so reads stay in stream order.
  return Array.from({ length: count }, () =>
    readCoordinate(reader, dimensions),
  );
}
function parsePoint(reader: WkbReader, dimensions: number): Point {
const coordinates = readCoordinate(reader, dimensions);
return { type: "Point", coordinates };
}
function parseLineString(reader: WkbReader, dimensions: number): LineString {
const coordinates = readCoordinates(reader, dimensions);
return { type: "LineString", coordinates };
}
function parsePolygon(reader: WkbReader, dimensions: number): Polygon {
const numRings = reader.readUint32();
const coordinates: Position[][] = [];
for (let i = 0; i < numRings; i++) {
coordinates.push(readCoordinates(reader, dimensions));
}
return { type: "Polygon", coordinates };
}
/** Decode a MultiPoint body: each member is a complete nested WKB Point. */
function parseMultiPoint(reader: WkbReader): MultiPoint {
  const count = reader.readUint32();
  const coordinates: Position[] = [];
  for (let index = 0; index < count; index++) {
    coordinates.push((parseGeometry(reader) as Point).coordinates);
  }
  return { type: "MultiPoint", coordinates };
}
/** Decode a MultiLineString body: each member is a nested WKB LineString. */
function parseMultiLineString(reader: WkbReader): MultiLineString {
  const count = reader.readUint32();
  const coordinates: Position[][] = [];
  for (let index = 0; index < count; index++) {
    coordinates.push((parseGeometry(reader) as LineString).coordinates);
  }
  return { type: "MultiLineString", coordinates };
}
/** Decode a MultiPolygon body: each member is a nested WKB Polygon. */
function parseMultiPolygon(reader: WkbReader): MultiPolygon {
  const count = reader.readUint32();
  const coordinates: Position[][][] = [];
  for (let index = 0; index < count; index++) {
    coordinates.push((parseGeometry(reader) as Polygon).coordinates);
  }
  return { type: "MultiPolygon", coordinates };
}
/** Decode a GeometryCollection body: count-prefixed nested geometries. */
function parseGeometryCollection(reader: WkbReader): GeometryCollection {
  const count = reader.readUint32();
  return {
    type: "GeometryCollection",
    // Sequential mapper keeps nested reads in stream order.
    geometries: Array.from({ length: count }, () => parseGeometry(reader)),
  };
}

View file

@ -317,7 +317,7 @@ test("Expand foreign records", async () => {
},
});
expect(response.records.length).toBe(1);
expect(response.records).toHaveLength(1);
const comment = response.records[0];
expect(comment.id).toBe(2);

View file

@ -48,7 +48,7 @@ ed25519-dalek = { version = "2.1.1", features = ["pkcs8", "pem", "rand_core"] }
fallible-iterator = "0.3.0"
form_urlencoded = "1.2.1"
futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
# geos = { version = "10.0.0", default-features = false, features = ["geo", "json"] }
geos = { version = "10.0.0", default-features = false, features = ["geo", "json"] }
http-body-util = "0.1.3"
hyper = "1.6.0"
hyper-util = "0.1.7"
@ -60,6 +60,7 @@ jsonwebtoken = { version = "^10.2.0", default-features = false, features = ["use
kanal = "0.1.1"
lazy_static = "1.4.0"
lettre = { version = "^0.11.7", default-features = false, features = ["tokio1-rustls-tls", "sendmail-transport", "smtp-transport", "builder"] }
litegis = { workspace = true }
log = { version = "^0.4.21", default-features = false }
mini-moka = "0.10.3"
minijinja = { workspace = true }

View file

@ -342,6 +342,8 @@ fn init_main_db_impl(
let mut conn =
trailbase_extension::connect_sqlite(main_path.clone(), json_registry.clone())?;
litegis::register(&conn)?;
if main_migrations {
new_db.fetch_or(
apply_main_migrations(&mut conn, migrations_path.as_ref())?,
@ -377,6 +379,8 @@ fn init_main_db_impl(
let mut secondary =
trailbase_extension::connect_sqlite(Some(path.clone()), json_registry.clone())?;
litegis::register(&secondary)?;
apply_base_migrations(&mut secondary, Some(migrations_path), &schema_name)?;
}

View file

@ -33,30 +33,37 @@ pub(crate) fn build_filter_where_clause(
});
};
let convert = |column_name: &str,
value: trailbase_qs::Value|
-> Result<trailbase_sqlite::Value, WhereClauseError> {
// Param validation first.
// NOTE: This is separate step is important, because the value mapping below
// is **not** applied to all parameters unlike the visitor here.
filter_params.visit_values(|column_op_value| -> Result<(), WhereClauseError> {
let column_name = &column_op_value.column;
if column_name.starts_with("_") {
return Err(WhereClauseError::UnrecognizedParam(format!(
"Invalid parameter: {column_name}"
)));
}
return Ok(());
})?;
let (sql, params) = filter_params.into_sql(Some(table_name), |column_op_value| {
let Some(meta) = column_metadata
.iter()
.find(|meta| meta.column.name == column_name)
.find(|meta| meta.column.name == column_op_value.column)
else {
return Err(WhereClauseError::UnrecognizedParam(format!(
"Unrecognized parameter: {column_name}"
"Filter on unknown column: {}",
column_op_value.column
)));
};
// TODO: Improve hacky error handling.
return crate::records::filter::qs_value_to_sql_with_constraints(&meta.column, value)
.map_err(|err| WhereClauseError::UnrecognizedParam(err.to_string()));
};
let (sql, params) = filter_params.into_sql(Some(table_name), &convert)?;
return crate::records::filter::qs_value_to_sql_with_constraints(
&meta.column,
column_op_value.value,
)
.map_err(|err| WhereClauseError::UnrecognizedParam(err.to_string()));
})?;
return Ok(WhereClause {
clause: sql,

View file

@ -17,8 +17,10 @@ pub enum JsonError {
Finite,
#[error("Value not found")]
ValueNotFound,
#[error("Unsupported type")]
#[error("UnsupportedType")]
NotSupported,
#[error("ColumnMismatch")]
ColumnMismatch,
#[error("Decoding")]
Decode(#[from] base64::DecodeError),
#[error("Unexpected type: {0}, expected {1:?}")]
@ -30,6 +32,8 @@ pub enum JsonError {
// NOTE: This is the only extra error to schema::JsonError. Can we collapse?
#[error("SerdeJson error: {0}")]
SerdeJson(#[from] serde_json::Error),
#[error("Geos: {0}")]
Geos(#[from] geos::Error),
}
impl From<trailbase_schema::json::JsonError> for JsonError {
@ -64,7 +68,7 @@ pub(crate) fn row_to_json_expand(
) -> Result<serde_json::Value, JsonError> {
// Row may contain extra columns like trailing "_rowid_" or excluded columns.
if column_metadata.len() > row.column_count() {
return Err(JsonError::NotSupported);
return Err(JsonError::ColumnMismatch);
}
return Ok(serde_json::Value::Object(
@ -76,7 +80,7 @@ pub(crate) fn row_to_json_expand(
|(i, meta)| -> Result<(String, serde_json::Value), JsonError> {
let column = &meta.column;
if column.name.as_str() != row.column_name(i).unwrap_or_default() {
return Err(JsonError::NotSupported);
return Err(JsonError::ColumnMismatch);
}
let value = row.get_value(i).ok_or(JsonError::ValueNotFound)?;
@ -106,32 +110,45 @@ pub(crate) fn row_to_json_expand(
});
}
// Deserialize JSON.
if let types::Value::Text(str) = value {
match meta.json.as_ref() {
Some(JsonColumnMetadata::SchemaName(x)) if x == "std.FileUpload" => {
// De-serialize JSON.
if let types::Value::Text(str) = value
&& let Some(ref json) = meta.json
{
return match json {
JsonColumnMetadata::SchemaName(x) if x == "std.FileUpload" => {
#[allow(unused_mut)]
let mut value: serde_json::Value = serde_json::from_str(str)?;
#[cfg(not(test))]
value.as_object_mut().map(|o| o.remove("id"));
return Ok((column.name.clone(), value));
let mut file_metadata: serde_json::Value = serde_json::from_str(str)?;
strip_file_metadata_id(&mut file_metadata);
Ok((column.name.clone(), file_metadata))
}
Some(JsonColumnMetadata::SchemaName(x)) if x == "std.FileUploads" => {
JsonColumnMetadata::SchemaName(x) if x == "std.FileUploads" => {
#[allow(unused_mut)]
let mut values: Vec<serde_json::Value> = serde_json::from_str(str)?;
#[cfg(not(test))]
for value in &mut values {
value.as_object_mut().map(|o| o.remove("id"));
let mut file_metadata_list: Vec<serde_json::Value> = serde_json::from_str(str)?;
for file_metadata in &mut file_metadata_list {
strip_file_metadata_id(file_metadata);
}
return Ok((column.name.clone(), serde_json::Value::Array(values)));
Ok((
column.name.clone(),
serde_json::Value::Array(file_metadata_list),
))
}
Some(JsonColumnMetadata::SchemaName(_)) | Some(JsonColumnMetadata::Pattern(_)) => {
return Ok((column.name.clone(), serde_json::from_str(str)?));
JsonColumnMetadata::SchemaName(_) | JsonColumnMetadata::Pattern(_) => {
Ok((column.name.clone(), serde_json::from_str(str)?))
}
None => {}
};
}
// De-serialize WKB Geometry.
if let types::Value::Blob(wkb) = value
&& meta.is_geometry
{
let geometry = geos::Geometry::new_from_wkb(wkb)?;
let json_geometry: geos::geojson::Geometry = geometry.try_into()?;
return Ok((column.name.clone(), serde_json::to_value(json_geometry)?));
}
debug_assert!(!meta.is_geometry);
return Ok((column.name.clone(), value_to_flat_json(value)?));
},
)
@ -139,6 +156,12 @@ pub(crate) fn row_to_json_expand(
));
}
fn strip_file_metadata_id(file_metadata: &mut serde_json::Value) {
if !cfg!(test) {
file_metadata.as_object_mut().map(|o| o.remove("id"));
}
}
pub(crate) struct ExpandedTable<'a> {
pub metadata: &'a TableMetadata,
pub local_column_name: String,

View file

@ -107,12 +107,15 @@ pub(crate) fn qs_filter_to_record_filter(
};
}
/// Mimics the `WHERE` filter behavior we use in list-queries but for subscriptions, where can't
/// query directly.
#[inline]
fn compare_values(
op: &CompareOp,
record_value: &rusqlite::types::Value,
filter_value: &rusqlite::types::Value,
) -> bool {
use geos::Geom;
use rusqlite::types::Value;
return match op {
@ -171,9 +174,48 @@ fn compare_values(
}
_ => false,
},
CompareOp::StWithin => match (record_value, filter_value) {
(Value::Blob(record), Value::Text(filter)) => {
let Some((record_geometry, filter_geometry)) = parse_geometries(record, filter) else {
return false;
};
return record_geometry.within(&filter_geometry).unwrap_or(false);
}
_ => false,
},
CompareOp::StIntersects => match (record_value, filter_value) {
(Value::Blob(record), Value::Text(filter)) => {
let Some((record_geometry, filter_geometry)) = parse_geometries(record, filter) else {
return false;
};
return record_geometry
.intersects(&filter_geometry)
.unwrap_or(false);
}
_ => false,
},
CompareOp::StContains => match (record_value, filter_value) {
(Value::Blob(record), Value::Text(filter)) => {
let Some((record_geometry, filter_geometry)) = parse_geometries(record, filter) else {
return false;
};
return record_geometry.contains(&filter_geometry).unwrap_or(false);
}
_ => false,
},
};
}
#[inline]
fn parse_geometries(record: &[u8], filter: &str) -> Option<(geos::Geometry, geos::Geometry)> {
  // The record value arrives as WKB, the filter as WKT text; yields `None` if
  // either fails to parse.
  // TODO: We should memoize the filter geometry with the subscription to not reparse it over and
  // over again.
  return geos::Geometry::new_from_wkb(record)
    .ok()
    .zip(geos::Geometry::new_from_wkt(filter).ok());
}
pub(crate) fn apply_filter_recursively_to_record(
filter: &ValueOrComposite,
record: &indexmap::IndexMap<&str, rusqlite::types::Value>,

View file

@ -1,16 +1,17 @@
use askama::Template;
use axum::{
Json,
extract::{Path, RawQuery, State},
extract::{Path, Query, RawQuery, State},
};
use base64::prelude::*;
use itertools::Itertools;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::convert::TryInto;
use std::sync::LazyLock;
use trailbase_qs::{OrderPrecedent, Query};
use trailbase_qs::OrderPrecedent;
use trailbase_schema::QualifiedNameEscaped;
use trailbase_schema::metadata::ColumnMetadata;
use trailbase_sqlite::Value;
use crate::app_state::AppState;
@ -33,19 +34,28 @@ pub struct ListResponse {
pub records: Vec<serde_json::Value>,
}
#[derive(Template)]
#[template(escape = "none", path = "list_record_query.sql")]
struct ListRecordQueryTemplate<'a> {
table_name: &'a QualifiedNameEscaped,
column_names: &'a [&'a str],
read_access_clause: &'a str,
filter_clause: &'a str,
cursor_clause: Option<&'a str>,
order_clause: &'a str,
expanded_tables: &'a [ExpandedTable<'a>],
count: bool,
offset: bool,
is_table: bool,
/// Response payload for the record list endpoint: either the regular
/// paginated list shape or, when the `geojson` query parameter names a
/// geometry column, a GeoJSON `FeatureCollection`.
#[derive(Debug)]
pub enum ListOrGeoJSONResponse {
/// Default list response (cursor, total_count, records).
List(ListResponse),
/// GeoJSON FeatureCollection built from a geometry column.
GeoJSON(geos::geojson::FeatureCollection),
}
// Transparent serializer. We could probably have an Either instead.
impl Serialize for ListOrGeoJSONResponse {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
return match self {
Self::List(v) => v.serialize(serializer),
Self::GeoJSON(v) => v.serialize(serializer),
};
}
}
/// Extra query parameters for the list endpoint, deserialized separately from
/// the `trailbase_qs` filter/order/cursor syntax.
#[derive(Debug, Default, Deserialize)]
pub struct ListRecordsQuery {
/// Name of a geometry column; when set, the response is rendered as a
/// GeoJSON FeatureCollection over that column instead of a record list.
pub geojson: Option<String>,
}
/// Lists records matching the given filters
@ -60,9 +70,10 @@ struct ListRecordQueryTemplate<'a> {
pub async fn list_records_handler(
State(state): State<AppState>,
Path(api_name): Path<String>,
Query(query): Query<ListRecordsQuery>,
RawQuery(raw_url_query): RawQuery,
user: Option<User>,
) -> Result<Json<ListResponse>, RecordError> {
) -> Result<Json<ListOrGeoJSONResponse>, RecordError> {
let Some(api) = state.lookup_record_api(&api_name) else {
return Err(RecordError::ApiNotFound);
};
@ -76,7 +87,20 @@ pub async fn list_records_handler(
let pk_column = &pk_meta.column;
let is_table = api.is_table();
let Query {
let geojson_geometry_column = if let Some(column) = query.geojson {
let meta = api.column_metadata_by_name(&column).ok_or_else(|| {
return RecordError::BadRequest("Invalid geometry column");
})?;
if !meta.is_geometry {
return Err(RecordError::BadRequest("Invalid geometry column"));
}
Some(meta)
} else {
None
};
let trailbase_qs::Query {
limit,
cursor,
count,
@ -86,7 +110,10 @@ pub async fn list_records_handler(
offset,
} = raw_url_query
.as_ref()
.map_or_else(|| Ok(Query::default()), |query| Query::parse(query))
.map_or_else(
|| Ok(Default::default()),
|query| trailbase_qs::Query::parse(query),
)
.map_err(|_err| {
return RecordError::BadRequest("Invalid query");
})?;
@ -230,11 +257,11 @@ pub async fn list_records_handler(
let Some(last_row) = rows.last() else {
// Query result is empty:
return Ok(Json(ListResponse {
return Ok(Json(ListOrGeoJSONResponse::List(ListResponse {
cursor: None,
total_count: Some(0),
records: vec![],
}));
})));
};
let total_count = if count == Some(true) {
@ -256,11 +283,11 @@ pub async fn list_records_handler(
// For ?limit=0 we still query one record to get the total count.
if limit == 0 {
return Ok(Json(ListResponse {
return Ok(Json(ListOrGeoJSONResponse::List(ListResponse {
cursor: None,
total_count,
records: vec![],
}));
})));
}
let cursor: Option<String> = if supports_cursor {
@ -318,11 +345,68 @@ pub async fn list_records_handler(
.collect::<Result<Vec<_>, RecordError>>()?
};
return Ok(Json(ListResponse {
if let Some(meta) = geojson_geometry_column {
return Ok(Json(ListOrGeoJSONResponse::GeoJSON(
build_feature_collection(meta, &pk_column.name, cursor, total_count, records)?,
)));
}
return Ok(Json(ListOrGeoJSONResponse::List(ListResponse {
cursor,
total_count,
records,
}));
})));
}
/// Converts listed records into a GeoJSON `FeatureCollection`.
///
/// The geometry is taken (and removed) from the column described by `meta`; the
/// remaining fields become the feature's `properties`. Pagination metadata
/// (`cursor`, `total_count`) is attached as foreign members on the collection.
fn build_feature_collection(
  meta: &ColumnMetadata,
  pk_column_name: &str,
  cursor: Option<String>,
  total_count: Option<usize>,
  records: Vec<serde_json::Value>,
) -> Result<geos::geojson::FeatureCollection, RecordError> {
  // Pagination metadata travels as GeoJSON "foreign members" on the collection.
  let mut members = serde_json::Map::<String, serde_json::Value>::new();
  if let Some(c) = cursor {
    members.insert("cursor".to_string(), serde_json::Value::String(c));
  }
  if let Some(n) = total_count {
    members.insert("total_count".to_string(), serde_json::json!(n));
  }

  let mut features = Vec::with_capacity(records.len());
  for record in records {
    let serde_json::Value::Object(mut properties) = record else {
      return Err(RecordError::Internal("Not an object".into()));
    };

    // Feature id from the primary key. It is read (not removed), so the pk also
    // remains part of `properties`.
    let id = match properties.get(pk_column_name) {
      Some(serde_json::Value::Number(n)) => Some(geos::geojson::feature::Id::Number(n.clone())),
      Some(serde_json::Value::String(s)) => Some(geos::geojson::feature::Id::String(s.clone())),
      _ => None,
    };
    debug_assert!(id.is_some());

    // NOTE: Geometry may be NULL for nullable columns.
    let geometry = properties
      .remove(&meta.column.name)
      .and_then(|g| geos::geojson::Geometry::from_json_value(g).ok());

    features.push(geos::geojson::Feature {
      id,
      geometry,
      properties: Some(properties),
      bbox: None,
      foreign_members: None,
    });
  }

  return Ok(geos::geojson::FeatureCollection {
    bbox: None,
    features,
    foreign_members: Some(members),
  });
}
fn fmt_order(col: &str, order: OrderPrecedent) -> String {
@ -365,6 +449,21 @@ fn decrypt_cursor(key: &KeyType, api_name: &str, encoded: &str) -> Result<i64, R
.map_err(|_| RecordError::BadRequest("Bad cursor"));
}
/// Askama template assembling the record-listing SQL (see `list_record_query.sql`).
///
/// The clause fields are pre-rendered SQL fragments; escaping is disabled
/// (`escape = "none"`), so every fragment must already be safe SQL.
#[derive(Template)]
#[template(escape = "none", path = "list_record_query.sql")]
struct ListRecordQueryTemplate<'a> {
  table_name: &'a QualifiedNameEscaped,
  column_names: &'a [&'a str],
  read_access_clause: &'a str,
  filter_clause: &'a str,
  // Absent on the first page, i.e. when no cursor was supplied.
  cursor_clause: Option<&'a str>,
  order_clause: &'a str,
  // Foreign tables joined in for `expand=` support.
  expanded_tables: &'a [ExpandedTable<'a>],
  // Whether to also compute a total count.
  count: bool,
  // Whether an OFFSET clause is emitted.
  offset: bool,
  // Views differ from tables in cursor support; the template branches on this.
  is_table: bool,
}
// Ephemeral key for encrypting cursors, i.e. cursors cannot be re-used across TB restarts.
static EPHEMERAL_CURSOR_KEY: LazyLock<KeyType> = LazyLock::new(generate_random_key);
@ -588,29 +687,37 @@ mod tests {
.await
.unwrap();
let response = list_records_handler(
let ListOrGeoJSONResponse::List(response) = list_records_handler(
State(state.clone()),
Path("api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(None),
None,
)
.await
.unwrap()
.0;
.0
else {
panic!("not a list");
};
assert_eq!(3, response.records.len());
let first: Entry = serde_json::from_value(response.records[0].clone()).unwrap();
let response = list_records_handler(
let ListOrGeoJSONResponse::List(response) = list_records_handler(
State(state.clone()),
Path("api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some(format!("filter[id]={}", first.id))),
None,
)
.await
.unwrap()
.0;
.0
else {
panic!("not a list");
};
assert_eq!(1, response.records.len());
assert_eq!(
@ -618,26 +725,35 @@ mod tests {
serde_json::from_value(response.records[0].clone()).unwrap()
);
let null_response = list_records_handler(
let ListOrGeoJSONResponse::List(null_response) = list_records_handler(
State(state.clone()),
Path("api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some("filter[nullable][$is]=NULL".to_string())),
None,
)
.await
.unwrap()
.0;
.0
else {
panic!("not a list");
};
assert_eq!(2, null_response.records.len());
let not_null_response = list_records_handler(
let ListOrGeoJSONResponse::List(not_null_response) = list_records_handler(
State(state.clone()),
Path("api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some("filter[nullable][$is]=!NULL".to_string())),
None,
)
.await
.unwrap()
.0;
.0
else {
panic!("not a list");
};
assert_eq!(1, not_null_response.records.len());
}
@ -1028,13 +1144,16 @@ mod tests {
let json_response = list_records_handler(
State(state.clone()),
Path("messages_api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(query),
auth_token.and_then(|token| User::from_auth_token(&state, token)),
)
.await?;
let response: ListResponse = json_response.0;
return Ok(response);
if let ListOrGeoJSONResponse::List(response) = json_response.0 {
return Ok(response);
};
panic!("not a list response: {json_response:?}");
}
#[tokio::test]
@ -1079,14 +1198,19 @@ mod tests {
.await
.unwrap();
let resp = list_records_handler(
let ListOrGeoJSONResponse::List(resp) = list_records_handler(
State(state.clone()),
Path("data_view_api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some("count=TRUE".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(3, resp.records.len());
assert_eq!(3, resp.total_count.unwrap());
@ -1103,28 +1227,155 @@ mod tests {
.await
.unwrap();
let resp_filtered0 = list_records_handler(
let ListOrGeoJSONResponse::List(resp_filtered0) = list_records_handler(
State(state.clone()),
Path("data_view_filtered_api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some("count=TRUE&offset=0".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(2, resp_filtered0.records.len());
assert_eq!(2, resp_filtered0.total_count.unwrap());
let resp_filtered1 = list_records_handler(
let ListOrGeoJSONResponse::List(resp_filtered1) = list_records_handler(
State(state.clone()),
Path("data_view_filtered_api".to_string()),
Query(ListRecordsQuery::default()),
RawQuery(Some("count=TRUE&filter[prefixed]=prefix_msg0".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(1, resp_filtered1.records.len());
assert_eq!(1, resp_filtered1.total_count.unwrap());
}
#[tokio::test]
async fn test_record_api_geojson_list() {
  let state = test_state(None).await.unwrap();
  let name = "geometry";

  // Table with a WKB geometry column; the CHECK constraint marks `geom` as a
  // geometry column for the schema metadata.
  state
    .conn()
    .execute_batch(format!(
      r#"
      CREATE TABLE {name} (
        id          INTEGER PRIMARY KEY,
        description TEXT,
        geom        BLOB CHECK(ST_IsValid(geom))
      ) STRICT;

      INSERT INTO {name} (id, description, geom) VALUES
        ( 3, 'Colloseo',    ST_GeomFromText('POINT(12.4924 41.8902)', 4326)),
        ( 7, 'A Line',      ST_GeomFromText('LINESTRING(10 20, 20 30)', 4326)),
        ( 8, 'br-quadrant', ST_MakeEnvelope(0, -0, 180, -90)),
        (21, 'null',        NULL);
      "#
    ))
    .await
    .unwrap();

  // Re-read the schema so the new geometry column is picked up.
  state.rebuild_connection_metadata().await.unwrap();

  add_record_api_config(
    &state,
    RecordApiConfig {
      name: Some(name.to_string()),
      table_name: Some(name.to_string()),
      acl_world: [PermissionFlag::Read as i32].into(),
      ..Default::default()
    },
  )
  .await
  .unwrap();

  {
    // `?geojson=geom` flips the response into a FeatureCollection. All 4 rows,
    // including the NULL geometry, become features.
    let ListOrGeoJSONResponse::GeoJSON(response) = list_records_handler(
      State(state.clone()),
      Path(name.to_string()),
      Query(ListRecordsQuery {
        geojson: Some("geom".to_string()),
      }),
      RawQuery(None),
      None,
    )
    .await
    .unwrap()
    .0
    else {
      panic!("not GeoJSON");
    };

    assert_eq!(4, response.features.len());
  }

  {
    // Test that stored point is `within` a filter bounding box.
    let ListOrGeoJSONResponse::GeoJSON(response) = list_records_handler(
      State(state.clone()),
      Path(name.to_string()),
      Query(ListRecordsQuery {
        geojson: Some("geom".to_string()),
      }),
      RawQuery(Some(format!(
        "filter[geom][@within]={polygon}",
        // Colloseo @ 12.4924 41.8902
        polygon = urlencode("POLYGON ((12 40, 12 42, 13 42, 13 40, 12 40))")
      ))),
      None,
    )
    .await
    .unwrap()
    .0
    else {
      panic!("not GeoJSON");
    };

    assert_eq!(1, response.features.len());
    assert_eq!(
      Some(geos::geojson::feature::Id::Number(3.into())),
      response.features[0].id
    );
  }

  {
    // Test that stored polygon `contains` a filter point.
    let ListOrGeoJSONResponse::GeoJSON(response) = list_records_handler(
      State(state.clone()),
      Path(name.to_string()),
      Query(ListRecordsQuery {
        geojson: Some("geom".to_string()),
      }),
      RawQuery(Some(format!(
        "filter[geom][@contains]={point}",
        // Inside the bottom-right quadrant envelope of row 8.
        point = urlencode("POINT (12 -40)")
      ))),
      None,
    )
    .await
    .unwrap()
    .0
    else {
      panic!("not GeoJSON");
    };

    assert_eq!(1, response.features.len());
    assert_eq!(
      Some(geos::geojson::feature::Id::Number(8.into())),
      response.features[0].id
    );
  }
}
}

View file

@ -42,8 +42,8 @@ pub enum ParamsError {
Storage(Arc<object_store::Error>),
#[error("SqlValueDecode: {0}")]
SqlValueDecode(#[from] trailbase_sqlvalue::DecodeError),
// #[error("Geos: {0}")]
// Geos(#[from] geos::Error),
#[error("Geos: {0}")]
Geos(#[from] geos::Error),
}
impl From<serde_json::Error> for ParamsError {
@ -528,20 +528,20 @@ fn extract_params_and_files_from_json(
) -> Result<(Value, Option<FileMetadataContents>), ParamsError> {
// If this is *not* a JSON column convert the value trivially.
let Some(json_metadata) = json_metadata else {
// if is_geometry && col.data_type == ColumnDataType::Blob {
// use geos::Geom;
//
// let json_geometry = geos::geojson::Geometry::from_json_value(value)
// .map_err(|err| ParamsError::UnexpectedType("", format!("GeoJSON: {err}")))?;
// let geometry: geos::Geometry = json_geometry.try_into()?;
//
// let mut writer = geos::WKBWriter::new()?;
// if let Some(_) = geometry.get_srid().ok() {
// writer.set_include_SRID(true);
// }
//
// return Ok((Value::Blob(writer.write_wkb(&geometry)?.into()), None));
// }
if is_geometry && col.data_type == ColumnDataType::Blob {
use geos::Geom;
let json_geometry = geos::geojson::Geometry::from_json_value(value)
.map_err(|err| ParamsError::UnexpectedType("", format!("GeoJSON: {err}")))?;
let geometry: geos::Geometry = json_geometry.try_into()?;
let mut writer = geos::WKBWriter::new()?;
if geometry.get_srid().is_ok() {
writer.set_include_SRID(true);
}
return Ok((Value::Blob(writer.write_wkb(&geometry)?.into()), None));
}
debug_assert!(!is_geometry);

View file

@ -99,7 +99,7 @@ pub async fn read_record_handler(
.map_err(|err| RecordError::Internal(err.into()))?;
let result = expand.insert(col_name.to_string(), foreign_value);
assert!(result.is_some());
debug_assert!(result.is_some(), "{col_name} duplicate");
}
return Ok(Json(
@ -1265,4 +1265,91 @@ mod test {
assert_eq!(read_response, record);
}
#[tokio::test]
async fn test_geometry_columns_and_geojson() {
  let state = test_state(None).await.unwrap();
  let name = "with_geo".to_string();

  // The ST_IsValid CHECK marks `geo` as a geometry column.
  state
    .conn()
    .execute(
      format!(
        r#"CREATE TABLE '{name}' (
          id   INTEGER PRIMARY KEY,
          geo  BLOB CHECK(ST_IsValid(geo))
        ) STRICT"#
      ),
      (),
    )
    .await
    .unwrap();

  // Re-read the schema so the new geometry column is picked up.
  state.rebuild_connection_metadata().await.unwrap();

  add_record_api_config(
    &state,
    RecordApiConfig {
      name: Some(name.clone()),
      table_name: Some(name.clone()),
      acl_world: [
        PermissionFlag::Create as i32,
        PermissionFlag::Read as i32,
        PermissionFlag::Delete as i32,
        PermissionFlag::Update as i32,
      ]
      .into(),
      ..Default::default()
    },
  )
  .await
  .unwrap();

  // Build a GeoJSON point via geos and embed it in the create payload.
  let coords = geos::CoordSeq::new_from_vec(&[&[12.4924, 41.8902]]).unwrap();
  let geometry = geos::Geometry::create_point(coords).unwrap();
  let json_geometry: geos::geojson::Geometry = geometry.try_into().unwrap();

  let record = json!({
    "id": 1,
    "geo": json_geometry,
  });

  // Sanity-check the serialized payload shape before sending it.
  let geojson = record
    .as_object()
    .unwrap()
    .get("geo")
    .unwrap()
    .as_object()
    .unwrap();
  assert_eq!(
    geojson.get("type").unwrap().as_str().unwrap(),
    "Point",
    "{geojson:?}"
  );

  let create_response: CreateRecordResponse = unpack_json_response(
    create_record_handler(
      State(state.clone()),
      Path(name.clone()),
      Query(CreateRecordQuery::default()),
      None,
      Either::Json(record.clone()),
    )
    .await
    .unwrap(),
  )
  .await
  .unwrap();

  // Round-trip: reading the record back must yield the original GeoJSON.
  let Json(read_response) = read_record_handler(
    State(state),
    Path((name.clone(), create_response.ids[0].clone())),
    Query(ReadRecordQuery::default()),
    None,
  )
  .await
  .unwrap();

  assert_eq!(read_response, record);
}
}

View file

@ -144,7 +144,7 @@ mod tests {
use crate::app_state::*;
use crate::config::proto::{PermissionFlag, RecordApiConfig};
use crate::connection::ConnectionEntry;
use crate::records::list_records::list_records_handler;
use crate::records::list_records::{ListOrGeoJSONResponse, list_records_handler};
use crate::records::read_record::{ReadRecordQuery, read_record_handler};
use crate::records::test_utils::add_record_api_config;
@ -287,6 +287,7 @@ mod tests {
assert_eq!(
schema,
json!({
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": table_name.name,
"type": "object",
"properties": {
@ -347,6 +348,7 @@ mod tests {
let list_response = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(Some("expand=UNKNOWN".to_string())),
None,
)
@ -375,14 +377,19 @@ mod tests {
assert_eq!(expected, value);
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(None),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(vec![expected.clone()], list_response.records);
validator.validate(&list_response.records[0]).unwrap();
@ -416,28 +423,38 @@ mod tests {
}
{
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(Some("expand=fk".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(vec![expected.clone()], list_response.records);
validator.validate(&list_response.records[0]).unwrap();
}
{
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(Some("count=TRUE&expand=fk".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(Some(1), list_response.total_count);
assert_eq!(vec![expected], list_response.records);
@ -511,14 +528,19 @@ mod tests {
assert_eq!(expected, value);
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(None),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(vec![expected], list_response.records);
}
@ -550,14 +572,19 @@ mod tests {
assert_eq!(expected, value);
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(Some("expand=fk1".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(vec![expected], list_response.records);
}
@ -600,14 +627,19 @@ mod tests {
.await
.unwrap();
let Json(list_response) = list_records_handler(
let ListOrGeoJSONResponse::List(list_response) = list_records_handler(
State(state.clone()),
Path("test_table_api".to_string()),
Query(Default::default()),
RawQuery(Some("expand=fk0,fk1".to_string())),
None,
)
.await
.unwrap();
.unwrap()
.0
else {
panic!("not a list");
};
assert_eq!(
vec![

View file

@ -15,6 +15,7 @@ serde = { workspace = true }
serde-value = "0.7.0"
serde_qs = { workspace = true }
uuid = { workspace = true }
wkt = { version = "0.14.0", default-features = false }
[dev-dependencies]
rusqlite = { workspace = true }

View file

@ -1,5 +1,6 @@
use base64::prelude::*;
use serde::de::{Deserializer, Error};
use std::str::FromStr;
use crate::value::Value;
@ -14,6 +15,11 @@ pub enum CompareOp {
Is,
Like,
Regexp,
// Spatial Types:
StWithin,
StIntersects,
StContains,
}
impl CompareOp {
@ -28,22 +34,30 @@ impl CompareOp {
"$is" => Some(Self::Is),
"$like" => Some(Self::Like),
"$re" => Some(Self::Regexp),
// Spatial Types:
"@within" => Some(Self::StWithin),
"@intersects" => Some(Self::StIntersects),
"@contains" => Some(Self::StContains),
_ => None,
};
}
#[inline]
pub fn as_sql(&self) -> &'static str {
pub fn as_sql(&self, column: &str, param: &str) -> String {
return match self {
Self::GreaterThanEqual => ">=",
Self::GreaterThan => ">",
Self::LessThanEqual => "<=",
Self::LessThan => "<",
Self::NotEqual => "<>",
Self::Is => "IS",
Self::Like => "LIKE",
Self::Regexp => "REGEXP",
Self::Equal => "=",
Self::GreaterThanEqual => format!("{column} >= {param}"),
Self::GreaterThan => format!("{column} > {param}"),
Self::LessThanEqual => format!("{column} <= {param}"),
Self::LessThan => format!("{column} < {param}"),
Self::NotEqual => format!("{column} <> {param}"),
Self::Is => format!("{column} IS {param}"),
Self::Like => format!("{column} LIKE {param}"),
Self::Regexp => format!("{column} REGEXP {param}"),
Self::Equal => format!("{column} = {param}"),
// Spatial Types:
Self::StWithin => format!("ST_Within({column}, {param})"),
Self::StIntersects => format!("ST_Intersects({column}, {param})"),
Self::StContains => format!("ST_Contains({column}, {param})"),
};
}
@ -59,6 +73,10 @@ impl CompareOp {
Self::Is => "$is",
Self::Like => "$like",
Self::Regexp => "$re",
// Spatial Types:
Self::StWithin => "@within",
Self::StIntersects => "@intersects",
Self::StContains => "@contains",
};
}
}
@ -85,6 +103,13 @@ where
}
_ => Err(Error::invalid_type(unexpected(&value), &"NULL or !NULL")),
},
CompareOp::StWithin | CompareOp::StIntersects | CompareOp::StContains => {
// WARN: The assumption here is that valid WKTs cannot be used for SQL injection.
match value {
serde_value::Value::String(v) if validate_wkt(&v) => Ok(Value::String(v)),
_ => Err(Error::invalid_type(unexpected(&value), &"WKT Geometry")),
}
}
_ => match value {
serde_value::Value::String(value) => Ok(Value::unparse(value)),
serde_value::Value::Bytes(bytes) => Ok(Value::String(BASE64_URL_SAFE.encode(bytes))),
@ -105,6 +130,14 @@ where
};
}
/// Returns true iff `s` parses as WKT and contains neither `;` nor `'`.
///
/// Rejecting those two characters up front is what allows WKT filter values to
/// be inlined into SQL without enabling injection.
#[inline]
fn validate_wkt(s: &str) -> bool {
  if s.contains(';') || s.contains('\'') {
    return false;
  }
  return wkt::Wkt::<f64>::from_str(s).is_ok();
}
pub fn serde_value_to_single_column_rel_value<'de, D>(
key: String,
value: serde_value::Value,

View file

@ -26,91 +26,83 @@ pub enum ValueOrComposite {
}
impl ValueOrComposite {
pub fn visit_values<E>(&self, f: impl Fn(&ColumnOpValue) -> Result<(), E>) -> Result<(), E> {
fn recurse<E>(
f: &dyn Fn(&ColumnOpValue) -> Result<(), E>,
v: &ValueOrComposite,
) -> Result<(), E> {
match v {
ValueOrComposite::Value(v) => f(v)?,
ValueOrComposite::Composite(_combiner, vec) => {
for value_or_composite in vec {
recurse(f, value_or_composite)?;
}
}
}
return Ok(());
}
return recurse(&f, self);
}
/// Returns SQL query, and a list of (param_name, param_value).
///
/// The column_prefix can be used to refer to non-main schemas, e.g. `foo."param" IS NULL`.
///
/// Returns the resulting SQL query string and a mapping from param name to param value.
///
/// NOTE: The value type is generic to avoid a dependency on rusqlite
/// and not hard-code "Value -> Sql::Value" conversion. For example,
/// TB does some "String -> Blob" decoding depending on the column type.
/// NOTE: Do **not** use this for parameter validation, `map` is **not** applied
/// to all leafs. Use `visit_values` instead for validation.
pub fn into_sql<V, E>(
self,
column_prefix: Option<&str>,
convert: &dyn Fn(&str, Value) -> Result<V, E>,
map: impl Fn(ColumnOpValue) -> Result<V, E>,
) -> Result<(String, Vec<(String, V)>), E> {
fn render_value<V, E>(
column_op_value: ColumnOpValue,
column_prefix: Option<&str>,
convert: &dyn Fn(&str, Value) -> Result<V, E>,
index: &mut usize,
) -> Result<(String, Option<(String, V)>), E> {
let v = column_op_value.value;
let c = column_op_value.column;
return match column_op_value.op {
CompareOp::Is => {
debug_assert!(matches!(v, Value::String(_)), "{v:?}");
Ok(match column_prefix {
Some(p) => (format!(r#"{p}."{c}" IS {v}"#), None),
None => (format!(r#""{c}" IS {v}"#), None),
})
}
op => {
let param = param_name(*index);
*index += 1;
Ok(match column_prefix {
Some(p) => (
format!(r#"{p}."{c}" {o} {param}"#, o = op.as_sql()),
Some((param, convert(&c, v)?)),
),
None => (
format!(r#""{c}" {o} {param}"#, o = op.as_sql()),
Some((param, convert(&c, v)?)),
),
})
}
};
}
fn recurse<V, E>(
v: ValueOrComposite,
column_prefix: Option<&str>,
convert: &dyn Fn(&str, Value) -> Result<V, E>,
map: &dyn Fn(ColumnOpValue) -> Result<V, E>,
index: &mut usize,
) -> Result<(String, Vec<(String, V)>), E> {
match v {
ValueOrComposite::Value(v) => {
return Ok(match render_value(v, column_prefix, convert, index)? {
return Ok(match render_sql_fragment(v, column_prefix, map, index)? {
(sql, Some(param)) => (sql, vec![param]),
(sql, None) => (sql, vec![]),
});
}
ValueOrComposite::Composite(combiner, vec) => {
let mut fragments = Vec::<String>::with_capacity(vec.len());
let mut params = Vec::<(String, V)>::with_capacity(vec.len());
let mut params: Vec<(String, V)> = vec![];
let fragments: Vec<String> = vec
.into_iter()
.map(|value_or_composite| {
let (f, p) = recurse(value_or_composite, column_prefix, map, index)?;
params.extend(p);
return Ok(f);
})
.collect::<Result<Vec<_>, _>>()?;
for value_or_composite in vec {
let (f, p) = recurse(value_or_composite, column_prefix, convert, index)?;
fragments.push(f);
params.extend(p);
}
let sub_clause = fragments.join(match combiner {
Combiner::And => " AND ",
Combiner::Or => " OR ",
});
let fragment = format!(
"({})",
fragments.join(match combiner {
Combiner::And => " AND ",
Combiner::Or => " OR ",
}),
);
return Ok((fragment, params));
return Ok((format!("({sub_clause})"), params));
}
};
}
let mut index: usize = 0;
return recurse(self, column_prefix, convert, &mut index);
return recurse(self, column_prefix, &map, &mut index);
}
/// Return a query-string fragment for this filter (no leading '&').
pub fn to_query(&self) -> String {
/// Return a (key, value) pair suitable for query-string serialization (not percent-encoded).
fn render_value(prefix: &str, v: &ColumnOpValue) -> String {
fn render_param(prefix: &str, v: &ColumnOpValue) -> String {
let value: std::borrow::Cow<str> = match (&v.op, &v.value) {
(CompareOp::Is, Value::String(s)) if s == "NOT NULL" => "!NULL".into(),
(CompareOp::Is, Value::String(s)) if s == "NULL" => "NULL".into(),
@ -129,7 +121,7 @@ impl ValueOrComposite {
fn recurse(v: &ValueOrComposite, prefix: &str) -> Vec<String> {
return match v {
ValueOrComposite::Value(v) => vec![render_value(prefix, v)],
ValueOrComposite::Value(v) => vec![render_param(prefix, v)],
ValueOrComposite::Composite(combiner, vec) => {
let comb = match combiner {
Combiner::And => "$and",
@ -264,6 +256,48 @@ fn param_name(index: usize) -> String {
return s;
}
/// Renders a single `column op value` leaf into a SQL fragment.
///
/// Returns the fragment plus, when the value is bound as a SQL parameter, its
/// `(param_name, mapped_value)` pair. `index` is the running counter used to
/// produce unique parameter names across the whole filter tree; it is only
/// incremented for ops that actually bind a parameter.
fn render_sql_fragment<V, E>(
  column_op_value: ColumnOpValue,
  column_prefix: Option<&str>,
  map: &dyn Fn(ColumnOpValue) -> Result<V, E>,
  index: &mut usize,
) -> Result<(String, Option<(String, V)>), E> {
  let c = &column_op_value.column;
  // Optionally qualify the quoted column name, e.g. `p."col"` vs `"col"`.
  let column_name = match column_prefix {
    Some(p) => format!(r#"{p}."{c}""#),
    None => format!(r#""{c}""#),
  };
  return match (column_op_value.op, &column_op_value.value) {
    (CompareOp::Is, Value::String(s)) if s == "NULL" || s == "NOT NULL" => {
      // We need to inline NULL/NOT NULL, since `IS [NOT ]NULL` is an operator and not a `TEXT`
      // literal.
      Ok((column_op_value.op.as_sql(&column_name, s), None))
    }
    (CompareOp::StWithin | CompareOp::StIntersects | CompareOp::StContains, Value::String(s)) => {
      // QUESTION: should we pass the string as a parameter instead? Right now we can't because
      // the value `map` function tries to decode strings as Base64 for Blob columns.
      // NOTE: this should already not allow SQL injections, since we validated the string
      // during Filter parsing as WKT.
      Ok((
        column_op_value
          .op
          .as_sql(&column_name, &format!("ST_GeomFromText('{s}')")),
        None,
      ))
    }
    (op, _) => {
      // Generic case: bind the value as a named parameter and advance the counter.
      let param = param_name(*index);
      *index += 1;
      Ok((
        op.as_sql(&column_name, &param),
        Some((param, map(column_op_value)?)),
      ))
    }
  };
}
#[cfg(test)]
mod tests {
use super::*;
@ -373,22 +407,17 @@ mod tests {
value: Value::String("val0".to_string()),
});
let convert = |_: &str, value: Value| -> Result<SqlValue, String> {
return Ok(match value {
fn map(cov: ColumnOpValue) -> Result<SqlValue, String> {
return Ok(match cov.value {
Value::String(s) => SqlValue::Text(s),
Value::Integer(i) => SqlValue::Integer(i),
Value::Double(d) => SqlValue::Real(d),
});
};
}
let sql0 = v0
.clone()
.into_sql(/* column_prefix= */ None, &convert)
.unwrap();
let sql0 = v0.clone().into_sql(/* column_prefix= */ None, map).unwrap();
assert_eq!(sql0.0, r#""col0" = :__p0"#);
let sql0 = v0
.into_sql(/* column_prefix= */ Some("p"), &convert)
.unwrap();
let sql0 = v0.into_sql(/* column_prefix= */ Some("p"), map).unwrap();
assert_eq!(sql0.0, r#"p."col0" = :__p0"#);
let v1 = ValueOrComposite::Value(ColumnOpValue {
@ -396,7 +425,7 @@ mod tests {
op: CompareOp::Is,
value: Value::String("NULL".to_string()),
});
let sql1 = v1.into_sql(None, &convert).unwrap();
let sql1 = v1.into_sql(None, map).unwrap();
assert_eq!(sql1.0, r#""col0" IS NULL"#, "{sql1:?}",);
}
}

View file

@ -473,15 +473,15 @@ mod tests {
)
);
fn convert(_: &str, value: Value) -> Result<SqlValue, String> {
return Ok(match value {
fn map(cov: ColumnOpValue) -> Result<SqlValue, String> {
return Ok(match cov.value {
Value::String(s) => SqlValue::Text(s),
Value::Integer(i) => SqlValue::Integer(i),
Value::Double(d) => SqlValue::Real(d),
});
}
let (sql, params) = q1.filter.clone().unwrap().into_sql(None, &convert).unwrap();
let (sql, params) = q1.filter.clone().unwrap().into_sql(None, map).unwrap();
assert_eq!(
sql,
r#"(("col2" = :__p0 OR "col0" <> :__p1) AND "col1" = :__p2)"#
@ -494,7 +494,7 @@ mod tests {
(":__p2".to_string(), SqlValue::Integer(1)),
]
);
let (sql, _) = q1.filter.unwrap().into_sql(Some("p"), &convert).unwrap();
let (sql, _) = q1.filter.unwrap().into_sql(Some("p"), map).unwrap();
assert_eq!(
sql,
r#"((p."col2" = :__p0 OR p."col0" <> :__p1) AND p."col1" = :__p2)"#
@ -589,4 +589,48 @@ mod tests {
}
);
}
#[test]
fn test_geometry_filter() {
  let polygon = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))";

  // Make sure ";" cannot be used for SQL injection.
  assert!(Query::parse(&format!("filter[col][@within]={polygon};")).is_err());

  // Each spatial operator must parse into the matching ColumnOpValue leaf.
  let expect = |query: &str, op: CompareOp| {
    assert_eq!(
      Query::parse(query).unwrap().filter.unwrap(),
      ValueOrComposite::Value(ColumnOpValue {
        column: "col".to_string(),
        op,
        value: Value::String(polygon.to_string()),
      })
    );
  };

  expect(&format!("filter[col][@within]={polygon}"), CompareOp::StWithin);
  expect(
    &format!("filter[col][@intersects]={polygon}"),
    CompareOp::StIntersects,
  );
  expect(
    &format!("filter[col][@contains]={polygon}"),
    CompareOp::StContains,
  );
}
}

View file

@ -32,5 +32,6 @@ uuid = { workspace = true }
[dev-dependencies]
anyhow = "1.0.97"
indoc = "2.0.6"
litegis = { workspace = true }
tokio = { workspace = true }
trailbase-sqlite = { workspace = true }

View file

@ -0,0 +1,216 @@
{
"title": "GeoJSON Geometry",
"oneOf": [
{
"title": "GeoJSON Point",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"Point"
]
},
"coordinates": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
},
{
"title": "GeoJSON LineString",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"LineString"
]
},
"coordinates": {
"type": "array",
"minItems": 2,
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
},
{
"title": "GeoJSON Polygon",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"Polygon"
]
},
"coordinates": {
"type": "array",
"items": {
"type": "array",
"minItems": 4,
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
}
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
},
{
"title": "GeoJSON MultiPoint",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"MultiPoint"
]
},
"coordinates": {
"type": "array",
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
},
{
"title": "GeoJSON MultiLineString",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"MultiLineString"
]
},
"coordinates": {
"type": "array",
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
}
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
},
{
"title": "GeoJSON MultiPolygon",
"type": "object",
"required": [
"type",
"coordinates"
],
"properties": {
"type": {
"type": "string",
"enum": [
"MultiPolygon"
]
},
"coordinates": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "array",
"minItems": 4,
"items": {
"type": "array",
"minItems": 2,
"items": {
"type": "number"
}
}
}
}
},
"bbox": {
"type": "array",
"minItems": 4,
"items": {
"type": "number"
}
}
}
}
]
}

View file

@ -2,6 +2,7 @@ use jsonschema::Validator;
use log::*;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::sync::LazyLock;
use trailbase_extension::jsonschema::JsonSchemaRegistry;
use crate::metadata::{
@ -54,12 +55,34 @@ pub fn build_json_schema_expanded(
mode: JsonSchemaMode,
expand: Option<Expand<'_>>,
) -> Result<(Validator, serde_json::Value), JsonSchemaError> {
let mut schema =
build_json_schema_expanded_impl(registry, title, columns_metadata, mode, expand)?;
if let Some(obj) = schema.as_object_mut() {
const SCHEMA_STD: &str = "https://json-schema.org/draft/2020-12/schema";
obj.insert("$schema".to_string(), SCHEMA_STD.into());
}
return Ok((
Validator::new(&schema).map_err(|err| JsonSchemaError::SchemaCompile(err.to_string()))?,
schema,
));
}
fn build_json_schema_expanded_impl(
registry: &JsonSchemaRegistry,
title: &str,
columns_metadata: &[ColumnMetadata],
mode: JsonSchemaMode,
expand: Option<Expand<'_>>,
) -> Result<serde_json::Value, JsonSchemaError> {
let mut properties = serde_json::Map::new();
let mut defs = serde_json::Map::new();
let mut required_cols: Vec<String> = vec![];
for meta in columns_metadata {
let col = &meta.column;
let mut def_name: Option<String> = None;
let mut not_null = false;
let mut default = false;
@ -72,6 +95,7 @@ pub fn build_json_schema_expanded(
let Some(json_metadata) = extract_json_metadata(registry, opt)? else {
continue;
};
debug_assert_eq!(Some(&json_metadata), meta.json.as_ref());
match json_metadata {
JsonColumnMetadata::SchemaName(name) => {
@ -159,8 +183,13 @@ pub fn build_json_schema_expanded(
continue;
};
let (_validator, schema) =
build_json_schema(registry, foreign_table, &table.column_metadata, mode)?;
let nested_schema = build_json_schema_expanded_impl(
registry,
foreign_table,
&table.column_metadata,
mode,
None,
)?;
let new_def_name = foreign_table.clone();
defs.insert(
@ -171,7 +200,7 @@ pub fn build_json_schema_expanded(
"id": {
"type": column_data_type_to_json_type(pk_column.data_type),
},
"data": schema,
"data": nested_schema,
},
"required": ["id"],
}),
@ -183,6 +212,12 @@ pub fn build_json_schema_expanded(
}
}
if meta.is_geometry {
const KEY: &str = "_geojson_geometry";
defs.insert(KEY.to_string(), GEOJSON_GEOMETRY.clone());
def_name = Some(KEY.to_string());
}
match mode {
JsonSchemaMode::Insert => {
if not_null && !default {
@ -211,27 +246,22 @@ pub fn build_json_schema_expanded(
);
}
let schema = if defs.is_empty() {
serde_json::json!({
if defs.is_empty() {
return Ok(serde_json::json!({
"title": title,
"type": "object",
"properties": serde_json::Value::Object(properties),
"required": serde_json::json!(required_cols),
})
} else {
serde_json::json!({
"title": title,
"type": "object",
"properties": serde_json::Value::Object(properties),
"required": serde_json::json!(required_cols),
"$defs": serde_json::Value::Object(defs),
})
};
}));
}
return Ok((
Validator::new(&schema).map_err(|err| JsonSchemaError::SchemaCompile(err.to_string()))?,
schema,
));
return Ok(serde_json::json!({
"title": title,
"type": "object",
"properties": serde_json::Value::Object(properties),
"required": serde_json::json!(required_cols),
"$defs": serde_json::Value::Object(defs),
}));
}
fn column_data_type_to_json_type(data_type: ColumnDataType) -> Value {
@ -252,6 +282,11 @@ fn column_data_type_to_json_type(data_type: ColumnDataType) -> Value {
};
}
/// GeoJSON `Geometry` JSON schema, embedded at compile time and parsed lazily
/// on first use. Inserted under `$defs` for geometry-typed (WKB) columns.
static GEOJSON_GEOMETRY: LazyLock<Value> = LazyLock::new(|| {
  // Raw bytes of the bundled schema file; parsing is infallible by construction,
  // hence the `expect`.
  const SCHEMA_BYTES: &[u8] = include_bytes!("../schemas/Geometry.json");
  return serde_json::from_slice(SCHEMA_BYTES).expect("valid");
});
#[cfg(test)]
mod tests {
use parking_lot::RwLock;
@ -299,7 +334,12 @@ mod tests {
)
.unwrap();
let (table, schema) = get_and_build_table_schema(&conn, &registry.read(), "test_table");
let (table, schema, _value) = get_and_build_table_schema(
&conn,
&registry.read(),
"test_table",
JsonSchemaMode::Insert,
);
let col = table.columns.first().unwrap();
let check_expr = col
@ -420,27 +460,91 @@ mod tests {
)
.unwrap();
let (_table, schema) = get_and_build_table_schema(&conn, &registry.read(), "test_table");
let (_table, schema, _value) = get_and_build_table_schema(
&conn,
&registry.read(),
"test_table",
JsonSchemaMode::Insert,
);
assert!(schema.is_valid(&json!({})));
}
#[test]
fn test_geojson_schema() {
  // Fresh registry + in-memory SQLite connection with the GIS extension loaded.
  let registry = Arc::new(RwLock::new(
    crate::registry::build_json_schema_registry(vec![]).unwrap(),
  ));
  let conn = trailbase_extension::connect_sqlite(None, Some(registry.clone())).unwrap();
  litegis::register(&conn).unwrap();

  conn
    .execute_batch("CREATE TABLE test_table (geom BLOB NOT NULL CHECK(ST_IsValid(geom))) STRICT;")
    .unwrap();

  {
    // Insert mode: `geom` is NOT NULL without a default, so it must be required
    // and must validate as a GeoJSON geometry.
    let (_table, schema, _value) = get_and_build_table_schema(
      &conn,
      &registry.read(),
      "test_table",
      JsonSchemaMode::Insert,
    );

    let point = json!({
      "type": "Point",
      "coordinates": [125.6, 10.1]
    });
    assert!(schema.is_valid(&json!({
      "geom": point,
    })));

    // Missing the required `geom` property.
    assert!(!schema.is_valid(&json!({})));

    // A Point needs at least two coordinate components.
    let bad_point = json!({
      "type": "Point",
      "coordinates": [125.6]
    });
    assert!(
      !schema.is_valid(&json!({
        "geom": bad_point,
      })),
      "{schema:?},\n{}",
      serde_json::to_string_pretty(&_value).unwrap()
    );
  }

  {
    // Update mode: all properties are optional, so the empty object passes.
    let (_table, schema, _value) = get_and_build_table_schema(
      &conn,
      &registry.read(),
      "test_table",
      JsonSchemaMode::Update,
    );
    assert!(schema.is_valid(&json!({})));
  }
}
fn get_and_build_table_schema(
conn: &rusqlite::Connection,
registry: &JsonSchemaRegistry,
table_name: &str,
) -> (Table, Validator) {
mode: JsonSchemaMode,
) -> (Table, Validator, Value) {
let table = lookup_and_parse_table_schema(conn, table_name).unwrap();
let table_metadata = TableMetadata::new(&registry, table.clone(), &[table.clone()]).unwrap();
let (schema, _) = build_json_schema(
let (schema, value) = build_json_schema(
&registry,
&table_metadata.name().name,
&table_metadata.column_metadata,
JsonSchemaMode::Insert,
mode,
)
.unwrap();
return (table, schema);
return (table, schema, value);
}
}

View file

@ -96,6 +96,9 @@ importers:
'@tanstack/table-core':
specifier: ^8.21.3
version: 8.21.3
'@terraformer/wkt':
specifier: ^2.2.1
version: 2.2.1
chart.js:
specifier: ^4.5.1
version: 4.5.1
@ -166,6 +169,9 @@ importers:
'@types/geojson':
specifier: ^7946.0.16
version: 7946.0.16
'@types/terraformer__wkt':
specifier: ^2.0.3
version: 2.0.3
'@types/wicg-file-system-access':
specifier: ^2023.10.7
version: 2023.10.7
@ -2804,6 +2810,9 @@ packages:
peerDependencies:
typescript: '>=4.7'
'@terraformer/wkt@2.2.1':
resolution: {integrity: sha512-XDUsW/lvbMzFi7GIuRD9+UqR4QyP+5C+TugeJLMDczKIRbaHoE9J3N8zLSdyOGmnJL9B6xTS3YMMlBnMU0Ar5A==}
'@testing-library/dom@10.4.1':
resolution: {integrity: sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==}
engines: {node: '>=18'}
@ -2953,6 +2962,9 @@ packages:
'@types/supercluster@7.1.3':
resolution: {integrity: sha512-Z0pOY34GDFl3Q6hUFYf3HkTwKEE02e7QgtJppBt+beEAxnyOpJua+voGFvxINBHa06GwLFFym7gRPY2SiKIfIA==}
'@types/terraformer__wkt@2.0.3':
resolution: {integrity: sha512-60CGvi30kMIKl2QERrE6LD5iPm4lutZ1M/mqBY4wrn6H/QlZQa/5CN1e6trZ6ZtSRHLbHLwG+egt/nAIDbPG0A==}
'@types/unist@2.0.11':
resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==}
@ -8670,6 +8682,8 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@terraformer/wkt@2.2.1': {}
'@testing-library/dom@10.4.1':
dependencies:
'@babel/code-frame': 7.29.0
@ -8840,6 +8854,10 @@ snapshots:
dependencies:
'@types/geojson': 7946.0.16
'@types/terraformer__wkt@2.0.3':
dependencies:
'@types/geojson': 7946.0.16
'@types/unist@2.0.11': {}
'@types/unist@3.0.3': {}