refactor: shared internal postgres package around slonik (#7887)

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: n1ru4l <14338007+n1ru4l@users.noreply.github.com>
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
Co-authored-by: Laurin Quast <laurinquast@googlemail.com>
This commit is contained in:
Copilot 2026-03-27 10:20:05 +01:00 committed by GitHub
parent e83bc29e2a
commit aac23596ec
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
194 changed files with 3635 additions and 3333 deletions

View file

@ -20,6 +20,7 @@
"@graphql-hive/core": "workspace:*",
"@graphql-typed-document-node/core": "3.2.0",
"@hive/commerce": "workspace:*",
"@hive/postgres": "workspace:*",
"@hive/schema": "workspace:*",
"@hive/server": "workspace:*",
"@hive/service-common": "workspace:*",
@ -42,7 +43,6 @@
"human-id": "4.1.1",
"ioredis": "5.8.2",
"set-cookie-parser": "2.7.1",
"slonik": "30.4.4",
"strip-ansi": "7.1.2",
"tslib": "2.8.1",
"vitest": "4.0.9",

View file

@ -1,10 +1,10 @@
import { DatabasePool } from 'slonik';
import {
AccessTokenKeyContainer,
hashPassword,
} from '@hive/api/modules/auth/lib/supertokens-at-home/crypto';
import { SuperTokensStore } from '@hive/api/modules/auth/providers/supertokens-store';
import { NoopLogger } from '@hive/api/modules/shared/providers/logger';
import { PostgresDatabasePool } from '@hive/postgres';
import type { InternalApi } from '@hive/server';
import { createNewSession } from '@hive/server/supertokens-at-home/shared';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
@ -76,7 +76,7 @@ const tokenResponsePromise: {
} = {};
export async function authenticate(
pool: DatabasePool,
pool: PostgresDatabasePool,
email: string,
oidcIntegrationId?: string,
): Promise<{ access_token: string; refresh_token: string; supertokensUserId: string }> {

View file

@ -1,9 +1,9 @@
import type { AddressInfo } from 'node:net';
import humanId from 'human-id';
import setCookie from 'set-cookie-parser';
import { sql, type DatabasePool } from 'slonik';
import z from 'zod';
import formDataPlugin from '@fastify/formbody';
import { psql, type PostgresDatabasePool } from '@hive/postgres';
import { createServer, type FastifyReply, type FastifyRequest } from '@hive/service-common';
import { graphql } from './gql';
import { execute } from './graphql';
@ -157,7 +157,7 @@ const VerifyEmailMutation = graphql(`
export async function createOIDCIntegration(args: {
organizationId: string;
accessToken: string;
getPool: () => Promise<DatabasePool>;
getPool: () => Promise<PostgresDatabasePool>;
}) {
const { accessToken: authToken, getPool } = args;
const result = await execute({
@ -192,7 +192,7 @@ export async function createOIDCIntegration(args: {
}) + '.local';
const pool = await getPool();
const query = sql`
const query = psql`
INSERT INTO "oidc_integration_domains" (
"organization_id"
, "oidc_integration_id"

View file

@ -1,8 +1,9 @@
import { formatISO, subHours } from 'date-fns';
import { humanId } from 'human-id';
import { createPool, sql } from 'slonik';
import z from 'zod';
import { NoopLogger } from '@hive/api/modules/shared/providers/logger';
import { createRedisClient } from '@hive/api/modules/shared/providers/redis';
import { createPostgresDatabasePool, psql } from '@hive/postgres';
import type { Report } from '../../packages/libraries/core/src/client/usage.js';
import { authenticate, userEmail } from './auth';
import {
@ -82,9 +83,9 @@ function createConnectionPool() {
db: ensureEnv('POSTGRES_DB'),
};
return createPool(
`postgres://${pg.user}:${pg.password}@${pg.host}:${pg.port}/${pg.db}?sslmode=disable`,
);
return createPostgresDatabasePool({
connectionParameters: `postgres://${pg.user}:${pg.password}@${pg.host}:${pg.port}/${pg.db}?sslmode=disable`,
});
}
async function createDbConnection() {
@ -97,9 +98,9 @@ async function createDbConnection() {
};
}
export function initSeed() {
let sharedDBPoolPromise: ReturnType<typeof createDbConnection>;
let sharedDBPoolPromise: ReturnType<typeof createDbConnection>;
export function initSeed() {
function getPool() {
if (!sharedDBPoolPromise) {
sharedDBPoolPromise = createDbConnection();
@ -118,7 +119,7 @@ export function initSeed() {
if (opts?.verifyEmail ?? true) {
const pool = await getPool();
await pool.query(sql`
await pool.query(psql`
INSERT INTO "email_verifications" ("user_identity_id", "email", "verified_at")
VALUES (${auth.supertokensUserId}, ${email}, NOW())
`);
@ -142,18 +143,22 @@ export function initSeed() {
pollForEmailVerificationLink,
async purgeOIDCDomains() {
const pool = await getPool();
await pool.query(sql`
await pool.query(psql`
TRUNCATE "oidc_integration_domains"
`);
},
async forgeOIDCDNSChallenge(orgSlug: string) {
const pool = await getPool();
const domainChallengeId = await pool.oneFirst<string>(sql`
const domainChallengeId = await pool
.oneFirst(
psql`
SELECT "oidc_integration_domains"."id"
FROM "oidc_integration_domains" INNER JOIN "organizations" ON "oidc_integration_domains"."organization_id" = "organizations"."id"
WHERE "organizations"."clean_id" = ${orgSlug}
`);
`,
)
.then(z.string().parse);
const key = `hive:oidcDomainChallenge:${domainChallengeId}`;
const challenge = {
@ -208,7 +213,7 @@ export function initSeed() {
async overrideOrgPlan(plan: 'PRO' | 'ENTERPRISE' | 'HOBBY') {
const pool = await createConnectionPool();
await pool.query(sql`
await pool.query(psql`
UPDATE organizations SET plan_name = ${plan} WHERE id = ${organization.id}
`);
@ -260,8 +265,8 @@ export function initSeed() {
async setFeatureFlag(name: string, value: boolean | string[]) {
const pool = await createConnectionPool();
await pool.query(sql`
UPDATE organizations SET feature_flags = ${sql.jsonb({
await pool.query(psql`
UPDATE organizations SET feature_flags = ${psql.jsonb({
[name]: value,
})}
WHERE id = ${organization.id}
@ -272,7 +277,7 @@ export function initSeed() {
async setDataRetention(days: number) {
const pool = await createConnectionPool();
await pool.query(sql`
await pool.query(psql`
UPDATE organizations SET limit_retention_days = ${days} WHERE id = ${organization.id}
`);
@ -340,13 +345,15 @@ export function initSeed() {
/** Expires tokens */
async forceExpireTokens(tokenIds: string[]) {
const pool = await createConnectionPool();
const result = await pool.query(sql`
const result = await pool.any(psql`
UPDATE "organization_access_tokens"
SET "expires_at"=NOW()
WHERE id IN (${sql.join(tokenIds, sql`, `)}) AND organization_id=${organization.id}
SET "expires_at" = NOW()
WHERE id IN (${psql.join(tokenIds, psql`, `)}) AND organization_id = ${organization.id}
RETURNING
"id"
`);
await pool.end();
expect(result.rowCount).toBe(tokenIds.length);
expect(result.length).toBe(tokenIds.length);
for (const id of tokenIds) {
await purgeOrganizationAccessTokenById(id);
}
@ -390,7 +397,7 @@ export function initSeed() {
async setNativeFederation(enabled: boolean) {
const pool = await createConnectionPool();
await pool.query(sql`
await pool.query(psql`
UPDATE projects SET native_federation = ${enabled} WHERE id = ${project.id}
`);

View file

@ -1,11 +1,12 @@
import { buildASTSchema, parse } from 'graphql';
import { createLogger } from 'graphql-yoga';
import { sql } from 'slonik';
import { pollFor } from 'testkit/flow';
import { initSeed } from 'testkit/seed';
import { getServiceHost } from 'testkit/utils';
import z from 'zod';
import { createHive } from '@graphql-hive/core';
import { clickHouseInsert, clickHouseQuery } from '../../testkit/clickhouse';
import { psql } from '@hive/postgres';
import { clickHouseInsert } from '../../testkit/clickhouse';
import { graphql } from '../../testkit/gql';
import { execute } from '../../testkit/graphql';
@ -2056,12 +2057,20 @@ test('activeAppDeployments works for > 1000 records with a date filter (neverUse
);
// insert into postgres
const result = await conn.pool.query(sql`
const result = await conn.pool
.any(
psql`
INSERT INTO app_deployments ("target_id", "name", "version", "activated_at")
SELECT * FROM ${sql.unnest(appDeploymentRows, ['uuid', 'text', 'text', 'timestamptz'])}
SELECT * FROM ${psql.unnest(appDeploymentRows, ['uuid', 'text', 'text', 'timestamptz'])}
RETURNING "id", "target_id", "name", "version"
`);
expect(result.rowCount).toBe(1200);
`,
)
.then(
z.array(
z.object({ id: z.string(), target_id: z.string(), name: z.string(), version: z.string() }),
).parse,
);
expect(result.length).toBe(1200);
// insert into clickhouse and activate
const query = `INSERT INTO app_deployments (
@ -2071,7 +2080,7 @@ test('activeAppDeployments works for > 1000 records with a date filter (neverUse
,"app_version"
,"is_active"
) VALUES
${result.rows
${result
.map(
r => `(
'${r['target_id']}'

View file

@ -1,21 +1,28 @@
import 'reflect-metadata';
import { sql, type CommonQueryMethods } from 'slonik';
/* eslint-disable no-process-env */
import { ProjectType } from 'testkit/gql/graphql';
import { test } from 'vitest';
import z from 'zod';
import { psql, type CommonQueryMethods } from '@hive/postgres';
import { initSeed } from '../../../testkit/seed';
async function fetchCoordinates(db: CommonQueryMethods, target: { id: string }) {
const result = await db.query<{
coordinate: string;
created_in_version_id: string;
deprecated_in_version_id: string | null;
}>(sql`
const result = await db
.any(
psql`
SELECT coordinate, created_in_version_id, deprecated_in_version_id
FROM schema_coordinate_status WHERE target_id = ${target.id}
`);
`,
)
.then(
z.object({
coordinate: z.string(),
created_in_version_id: z.string(),
deprecated_in_version_id: z.string().nullable(),
}).parse,
);
return result.rows;
return result;
}
describe.skip('schema cleanup tracker', () => {

View file

@ -1,10 +1,11 @@
import 'reflect-metadata';
import { createPool, sql } from 'slonik';
import { graphql } from 'testkit/gql';
/* eslint-disable no-process-env */
import { ProjectType } from 'testkit/gql/graphql';
import { execute } from 'testkit/graphql';
import { assertNonNull, getServiceHost } from 'testkit/utils';
import z from 'zod';
import { createPostgresDatabasePool, psql } from '@hive/postgres';
// eslint-disable-next-line import/no-extraneous-dependencies
import { createStorage } from '@hive/storage';
import { createTarget, publishSchema, updateSchemaComposition } from '../../../testkit/flow';
@ -3820,7 +3821,7 @@ test.concurrent(
);
const insertLegacyVersion = async (
pool: Awaited<ReturnType<typeof createPool>>,
pool: Awaited<ReturnType<typeof createPostgresDatabasePool>>,
args: {
sdl: string;
projectId: string;
@ -3828,7 +3829,9 @@ const insertLegacyVersion = async (
serviceUrl: string;
},
) => {
const logId = await pool.oneFirst<string>(sql`
const logId = await pool
.oneFirst(
psql`
INSERT INTO schema_log
(
author,
@ -3854,9 +3857,13 @@ const insertLegacyVersion = async (
'PUSH'
)
RETURNING id
`);
`,
)
.then(z.string().parse);
const versionId = await pool.oneFirst<string>(sql`
const versionId = await pool
.oneFirst(
psql`
INSERT INTO schema_versions
(
is_composable,
@ -3870,9 +3877,11 @@ const insertLegacyVersion = async (
${logId}
)
RETURNING "id"
`);
`,
)
.then(z.string().parse);
await pool.query(sql`
await pool.query(psql`
INSERT INTO
schema_version_to_log
(version_id, action_id)
@ -3886,7 +3895,7 @@ const insertLegacyVersion = async (
test.concurrent(
'service url change from legacy to new version is displayed correctly',
async ({ expect }) => {
let pool: Awaited<ReturnType<typeof createPool>> | undefined;
let pool: Awaited<ReturnType<typeof createPostgresDatabasePool>> | undefined;
try {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
@ -3899,7 +3908,9 @@ test.concurrent(
// We need to seed a legacy entry in the database
const conn = connectionString();
pool = await createPool(conn);
pool = await createPostgresDatabasePool({
connectionParameters: conn,
});
const sdl = 'type Query { ping: String! }';
@ -3950,7 +3961,7 @@ test.concurrent(
test.concurrent(
'service url change from legacy to legacy version is displayed correctly',
async ({ expect }) => {
let pool: Awaited<ReturnType<typeof createPool>> | undefined;
let pool: Awaited<ReturnType<typeof createPostgresDatabasePool>> | undefined;
try {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
@ -3963,7 +3974,7 @@ test.concurrent(
// We need to seed a legacy entry in the database
const conn = connectionString();
pool = await createPool(conn);
pool = await createPostgresDatabasePool({ connectionParameters: conn });
const sdl = 'type Query { ping: String! }';

View file

@ -2,12 +2,13 @@ import { existsSync, rmSync, writeFileSync } from 'node:fs';
import { createServer } from 'node:http';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { MaybePromise } from 'slonik/dist/src/types';
import { ProjectType } from 'testkit/gql/graphql';
import { initSeed } from 'testkit/seed';
import { getServiceHost } from 'testkit/utils';
import { execa } from '@esm2cjs/execa';
/** Local replacement for the `MaybePromise` helper previously imported from slonik's internal types. */
type MaybePromise<T> = T | Promise<T>;
describe('Apollo Router Integration', () => {
const getAvailablePort = () =>
new Promise<number>(resolve => {

View file

@ -74,7 +74,6 @@ test('update-retention script skips gracefully when no env vars are set', async
delete process.env.CLICKHOUSE_TTL_HOURLY_MV_TABLES;
delete process.env.CLICKHOUSE_TTL_MINUTELY_MV_TABLES;
vi.resetModules();
const { updateRetention } = await import(
'../../../packages/migrations/src/scripts/update-retention'
);

View file

@ -0,0 +1,3 @@
# Internal Postgres Client
This is a lightweight internal package that wraps Slonik with shared configuration (query logging, error annotation, and transaction tracing) for ease of use.

View file

@ -0,0 +1,16 @@
{
"name": "@hive/postgres",
"type": "module",
"license": "MIT",
"private": true,
"exports": {
".": "./src/index.ts"
},
"dependencies": {
"slonik": "30.4.4",
"slonik-interceptor-query-logging": "46.4.0"
},
"devDependencies": {
"@hive/service-common": "workspace:*"
}
}

View file

@ -1,11 +1,14 @@
export function createConnectionString(config: {
export type PostgresConnectionParamaters = {
host: string;
port: number;
password: string | undefined;
user: string;
db: string;
ssl: boolean;
}) {
};
/** Create a Postgres Connection String */
export function createConnectionString(config: PostgresConnectionParamaters) {
// prettier-ignore
const encodedUser = encodeURIComponent(config.user);
const encodedPassword =

View file

@ -0,0 +1,18 @@
export {
PostgresDatabasePool,
createPostgresDatabasePool,
type CommonQueryMethods,
} from './postgres-database-pool';
export { type PostgresConnectionParamaters, createConnectionString } from './connection-string';
export { psql } from './psql';
export {
UniqueIntegrityConstraintViolationError,
ForeignKeyIntegrityConstraintViolationError,
type TaggedTemplateLiteralInvocation,
type PrimitiveValueExpression,
type SerializableValue,
type Interceptor,
type Query,
type QueryContext,
} from 'slonik';
export { toDate } from './utils';

View file

@ -0,0 +1,246 @@
import {
createPool,
type DatabasePool,
type Interceptor,
type PrimitiveValueExpression,
type QueryResultRow,
type QueryResultRowColumn,
type CommonQueryMethods as SlonikCommonQueryMethods,
type TaggedTemplateLiteralInvocation,
} from 'slonik';
import { createQueryLoggingInterceptor } from 'slonik-interceptor-query-logging';
import { context, SpanKind, SpanStatusCode, trace } from '@hive/service-common';
import { createConnectionString, type PostgresConnectionParamaters } from './connection-string';
const tracer = trace.getTracer('storage');
/**
 * The query-method surface shared by the root pool and transaction scopes.
 *
 * Row-returning methods are typed as `unknown` (rather than a generic row
 * type) so callers validate results at runtime — e.g. with zod's
 * `.then(z.string().parse)` as done throughout the test suite — instead of
 * trusting a compile-time-only row shape.
 */
export interface CommonQueryMethods {
  /** Resolves `true` if the query yields at least one row. */
  exists(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<boolean>;
  /** Returns all result rows (possibly an empty array). */
  any(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<ReadonlyArray<unknown>>;
  /** Returns the single result row, or no row (see slonik `maybeOne` semantics). */
  maybeOne(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown>;
  /** Executes a query for its side effects; the result set is discarded. */
  query(sql: TaggedTemplateLiteralInvocation, values?: PrimitiveValueExpression[]): Promise<void>;
  /** Returns the first column of the single expected result row. */
  oneFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown>;
  /** Returns the single expected result row. */
  one(sql: TaggedTemplateLiteralInvocation, values?: PrimitiveValueExpression[]): Promise<unknown>;
  /** Returns the first column of every result row. */
  anyFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<ReadonlyArray<unknown>>;
  /** Returns the first column of the single result row, if any. */
  maybeOneFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown>;
}
/**
 * Wrapper around a Slonik `DatabasePool` that exposes only the untyped
 * `CommonQueryMethods` surface, and adds an OpenTelemetry span around each
 * transaction.
 */
export class PostgresDatabasePool implements CommonQueryMethods {
  constructor(private pool: DatabasePool) {}

  /** Retrieve the raw PgPool instance. Refrain from using this API. It only exists for postgraphile workers */
  getRawPgPool() {
    return this.pool.pool;
  }

  /** Retrieve the raw Slonik instance. Refrain from using this API. */
  getSlonikPool() {
    return this.pool;
  }

  /** Resolves `true` if the query yields at least one row. */
  async exists(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<boolean> {
    return this.pool.exists(sql, values);
  }

  /** Returns all result rows, widened to `unknown` for runtime validation by the caller. */
  async any(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<ReadonlyArray<unknown>> {
    return this.pool.any<unknown>(sql, values);
  }

  /** Returns the single result row, if any. */
  async maybeOne(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown> {
    return this.pool.maybeOne(sql, values);
  }

  /** Executes a query for its side effects; the underlying result set is deliberately discarded. */
  async query(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<void> {
    await this.pool.query<unknown>(sql, values);
  }

  /** Returns the first column of the single expected result row. */
  async oneFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown> {
    return await this.pool.oneFirst(sql, values);
  }

  /** Returns the first column of the single result row, if any. */
  async maybeOneFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown> {
    return await this.pool.maybeOneFirst(sql, values);
  }

  /** Returns the single expected result row. */
  async one(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<unknown> {
    return await this.pool.one(sql, values);
  }

  /** Returns the first column of every result row. */
  async anyFirst(
    sql: TaggedTemplateLiteralInvocation,
    values?: PrimitiveValueExpression[],
  ): Promise<ReadonlyArray<unknown>> {
    return await this.pool.anyFirst(sql, values);
  }

  /**
   * Runs `handler` inside a database transaction, wrapped in an OpenTelemetry
   * span named after `name`. The handler receives a narrowed
   * `CommonQueryMethods` view of the transaction connection; on error the
   * span is annotated and the error re-thrown; the span always ends.
   */
  async transaction<T = void>(
    name: string,
    handler: (methods: CommonQueryMethods) => Promise<T>,
  ): Promise<T> {
    const span = tracer.startSpan(`PG Transaction: ${name}`, {
      kind: SpanKind.INTERNAL,
    });
    // Make the span active for the duration of the transaction so nested
    // spans (if any) attach to it.
    return context.with(trace.setSpan(context.active(), span), async () => {
      return await this.pool.transaction(async methods => {
        try {
          // Expose only the CommonQueryMethods surface of the transaction
          // connection to the handler.
          return await handler({
            async exists(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<boolean> {
              return methods.exists(sql, values);
            },
            async any(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<ReadonlyArray<unknown>> {
              return methods.any<unknown>(sql, values);
            },
            async maybeOne(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<unknown> {
              return methods.maybeOne(sql, values);
            },
            async query(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<void> {
              await methods.query<unknown>(sql, values);
            },
            async oneFirst(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<unknown> {
              return await methods.oneFirst(sql, values);
            },
            async maybeOneFirst(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<unknown> {
              return await methods.maybeOneFirst(sql, values);
            },
            async anyFirst(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<ReadonlyArray<unknown>> {
              return await methods.anyFirst(sql, values);
            },
            async one(
              sql: TaggedTemplateLiteralInvocation,
              values?: PrimitiveValueExpression[],
            ): Promise<unknown> {
              return await methods.one(sql, values);
            },
          });
        } catch (err) {
          // Record error details on the span before re-throwing so traces
          // show why the transaction failed.
          span.setAttribute('error', 'true');
          if (err instanceof Error) {
            span.setAttribute('error.type', err.name);
            span.setAttribute('error.message', err.message);
            span.setStatus({
              code: SpanStatusCode.ERROR,
              message: err.message,
            });
          }
          throw err;
        } finally {
          span.end();
        }
      });
    });
  }

  /** Closes the underlying Slonik pool. */
  end(): Promise<void> {
    return this.pool.end();
  }
}
/** Interceptors applied to every pool created by this package (currently: query logging). */
const dbInterceptors: Interceptor[] = [createQueryLoggingInterceptor()];

/**
 * Create a `PostgresDatabasePool` from either a raw connection string or
 * structured connection parameters.
 *
 * The underlying Slonik pool gets the shared query-logging interceptor plus
 * any `additionalInterceptors`, a 30s idle timeout, and optional statement
 * timeout / maximum pool size. The pool's `one` and `many` methods are
 * patched so that rejected queries carry the offending SQL text and bound
 * values on the error object.
 */
export async function createPostgresDatabasePool(args: {
  connectionParameters: PostgresConnectionParamaters | string;
  maximumPoolSize?: number;
  additionalInterceptors?: Interceptor[];
  statementTimeout?: number;
}) {
  const connectionString =
    typeof args.connectionParameters === 'string'
      ? args.connectionParameters
      : createConnectionString(args.connectionParameters);

  const pool = await createPool(connectionString, {
    interceptors: [...dbInterceptors, ...(args.additionalInterceptors ?? [])],
    captureStackTrace: false,
    maximumPoolSize: args.maximumPoolSize,
    idleTimeout: 30000,
    statementTimeout: args.statementTimeout,
  });

  // Patch a pool method so that any rejected query annotates its error with
  // the SQL text and bound values before re-throwing.
  function attachQueryContextToErrors<
    K extends Exclude<keyof SlonikCommonQueryMethods, 'transaction'>,
  >(methodName: K) {
    const original: SlonikCommonQueryMethods[K] = pool[methodName];
    function wrapped<T extends QueryResultRow>(
      this: any,
      sql: TaggedTemplateLiteralInvocation<T>,
      values?: QueryResultRowColumn[],
    ): any {
      return (original as any).call(this, sql, values).catch((error: any) => {
        error.sql = sql.sql;
        error.values = sql.values || values;
        return Promise.reject(error);
      });
    }
    pool[methodName] = wrapped;
  }

  for (const method of ['one', 'many'] as const) {
    attachQueryContextToErrors(method);
  }

  return new PostgresDatabasePool(pool);
}

View file

@ -0,0 +1,3 @@
import { createSqlTag } from 'slonik';
/** Shared Slonik SQL tagged-template (`psql\`...\``) used to build all queries in this package's consumers. */
export const psql = createSqlTag();

View file

@ -0,0 +1,5 @@
import { psql } from './psql';
/**
 * Convert a JavaScript `Date` into a Postgres `to_timestamp(...)` SQL
 * fragment. Epoch milliseconds are divided by 1000, so sub-second precision
 * is preserved as a fractional seconds value.
 */
export function toDate(date: Date) {
  const epochSeconds = date.getTime() / 1000;
  return psql`to_timestamp(${epochSeconds})`;
}

View file

@ -13,10 +13,12 @@
"db:init": "pnpm db:create && pnpm migration:run",
"db:migrator": "tsx src/index.ts",
"migration:run": "pnpm db:migrator up",
"test": "WATCH=0 tsup-node --config ../../configs/tsup/dev.config.node.ts ./test/root.ts"
"test": "WATCH=0 tsup-node --config ../../configs/tsup/dev.config.node.ts ./test/root.ts",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@graphql-hive/core": "workspace:*",
"@hive/postgres": "workspace:*",
"@hive/service-common": "workspace:*",
"@types/bcryptjs": "2.4.6",
"@types/node": "24.10.9",
@ -30,7 +32,6 @@
"graphql": "16.9.0",
"p-limit": "6.2.0",
"pg-promise": "11.10.2",
"slonik": "30.4.4",
"tslib": "2.8.1",
"tsx": "4.19.2",
"typescript": "5.7.3",

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-03-05T19-06-23.initial.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--initial (up)
-- Extensions
CREATE EXTENSION

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-03-08T11-02-26.urls.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--urls (up)
ALTER TABLE
projects

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-03-09T10-30-35.roles.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--roles (up)
CREATE TYPE
user_role AS ENUM('ADMIN', 'MEMBER');

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-03-09T14-02-34.activities.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--activities (up)
CREATE TABLE
activities (

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-03-15T19-32-01.commit-project-id.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--commit-project-id (up)
ALTER TABLE
commits

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-04-20T11-30-30.tokens.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--tokens (up)
ALTER TABLE
tokens

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-04-30T07-01-57.token-per-target.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--token-per-target (up)
ALTER TABLE
tokens

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-04-30T11-47-26.validation.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--validation (up)
ALTER TABLE
targets

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-04-30T18-30-00.persisted-operations.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--persisted_operations (up)
CREATE TYPE
operation_kind AS ENUM('query', 'mutation', 'subscription');

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-05-07T07-28-07.token-last-used-at.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--token-last-used-at (up)
ALTER TABLE
tokens

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-06-11T10-46-24.slack-integration.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--slack-integration (up)
ALTER TABLE
organizations

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-06-11T15-38-28.alerts.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--alerts (up)
CREATE TYPE
alert_channel_type AS ENUM('SLACK', 'WEBHOOK');

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-08-18T13-20-45.urls.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--urls (up)
ALTER TABLE
version_commit

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021-08-27T14-19-48.non-unique-emails.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--non-unique-emails (up)
DROP INDEX
email_idx;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021.09.17T14.45.36.token-deleted.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
tokens
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021.10.07T12.11.13.access-scopes.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Adds scopes to tokens
ALTER TABLE
tokens

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021.11.22T11.23.44.base-schema.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Adds a base schema column in target table and versions table
ALTER TABLE
targets

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2021.12.20T14.05.30.commits-with-targets.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--creates and fills a target_id column on commits
ALTER TABLE
commits

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.01.21T12.34.46.validation-targets.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE
target_validation (
target_id UUID NOT NULL REFERENCES targets (id) ON DELETE CASCADE,

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.03.28T10.31.26.github-integration.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--slack-integration (up)
ALTER TABLE
organizations

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.04.15T14.24.17.hash-tokens.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
tokens
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.05.03T15.58.13.org_rate_limits.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
organizations
ADD COLUMN

View file

@ -2,10 +2,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.05.04T11.01.22.billing_plans.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE
organizations_billing (
organization_id UUID NOT NULL REFERENCES organizations (id) ON DELETE CASCADE, -- org id
organization_id UUID NOT NULL REFERENCES organizations (id) ON DELETE CASCADE, -- org id
external_billing_reference_id VARCHAR(255) NOT NULL, -- stripe customer id
billing_email_address VARCHAR(255),
PRIMARY KEY (organization_id)

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.05.05T08.05.35.commits-metadata.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
commits
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.07.07T12.15.10.no-schema-pushes-limit.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
organizations
DROP COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.07.11T10.09.41.get-started-wizard.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Tracks feature discovery progress
ALTER TABLE
organizations

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.07.11T20.09.37.migrate-pro-hobby-retention.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Update Hobby with 3d to 7d
UPDATE
organizations

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.07.18T10.10.44.target-validation-client-exclusion.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
targets
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.08.25T09.59.16.multiple-invitation-codes.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
organizations
DROP COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.08.26T06.23.24.add-supertokens-id.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
users
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.09.14T16.09.43.external-projects.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
projects
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.10.20T08.00.46.oidc-integrations.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE IF NOT EXISTS
"oidc_integrations" (
"id" UUID PRIMARY KEY DEFAULT uuid_generate_v4 (),

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.11.07T09.30.47.user-table-varchar-to-text.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
"users"
ALTER COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.12.03T09.12.28.organization-transfer.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
organizations
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2022.12.20T09.20.36.oidc-columns.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
"oidc_integrations"
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.01.04T17.00.23.hobby-7-by-default.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Update Hobby with 3d to 7d - personal orgs were created with the default value of 3d
UPDATE
organizations

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.01.12T17.00.23.cdn-tokens.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE
"cdn_access_tokens" (
"id" UUID PRIMARY KEY DEFAULT uuid_generate_v4 (),

View file

@ -39,7 +39,7 @@ type Cursor = {
lastCreatedAt: string;
};
const run: MigrationExecutor['run'] = async ({ connection, sql }) => {
const run: MigrationExecutor['run'] = async ({ connection, psql }) => {
// eslint-disable-next-line no-process-env
const eenv = shouldRunModel.parse(process.env);
const shouldRun = eenv.RUN_S3_LEGACY_CDN_KEY_IMPORT === '1';
@ -71,7 +71,7 @@ const run: MigrationExecutor['run'] = async ({ connection, sql }) => {
// Also all this code runs inside a database transaction.
// This will block any other writes to the table.
// As the table should not be heavily in use when this is being run, it does not really matter.
const query = sql`
const query = psql`
SELECT
"id"
, to_json("created_at") as "created_at_cursor"
@ -79,12 +79,12 @@ const run: MigrationExecutor['run'] = async ({ connection, sql }) => {
"targets"
${
cursor
? sql`
? psql`
WHERE
("created_at" = ${cursor.lastCreatedAt} AND "id" > ${cursor.lastId})
OR "created_at" > ${cursor.lastCreatedAt}
`
: sql``
: psql``
}
ORDER BY
"created_at" ASC
@ -93,8 +93,8 @@ const run: MigrationExecutor['run'] = async ({ connection, sql }) => {
200
`;
const items = await connection.query(query);
return TargetsModel.parse(items.rows);
const items = await connection.any(query);
return TargetsModel.parse(items);
}
let lastCursor: null | Cursor = null;
@ -120,7 +120,7 @@ const run: MigrationExecutor['run'] = async ({ connection, sql }) => {
throw new Error(`Unexpected Status for storing key. (status=${response.status})`);
}
const query = sql`
const query = psql`
INSERT INTO
"cdn_access_tokens"
(

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.01.18T11.03.41.registry-v2.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
--
CREATE INDEX
IF NOT EXISTS version_commit_cid_vid_idx ON version_commit (commit_id, version_id);

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.02.22T09.27.02.delete-personal-org.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
-- Find and delete all organizations of type PERSONAL that have no projects
DELETE FROM
organizations AS o

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.03.14T12.14.23.schema-policy.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TYPE
schema_policy_resource AS ENUM('ORGANIZATION', 'PROJECT');

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.03.29T11.42.44.feature-flags.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
organizations
ADD COLUMN

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.04.03T12.51.36.schema-versions-meta.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE
"schema_version_changes" (
"id" UUID PRIMARY KEY DEFAULT uuid_generate_v4 (),

View file

@ -2,11 +2,11 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.05.08T12.23.45.clean-invalid-schema-version-changes.sql',
run: ({ sql }) => sql`
DELETE
run: ({ psql }) => psql`
DELETE
FROM
"schema_version_changes" "svc"
WHERE
WHERE
"svc"."change_type" = 'REGISTRY_SERVICE_URL_CHANGED'
AND (
NOT "svc"."meta"->'serviceUrls' ? 'new'

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.05.12T08.29.06.store-supergraph-on-schema-versions.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_versions"
ADD COLUMN "supergraph_sdl" text
;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.06.01T09.07.53.create_collections.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE "document_collections" (
"id" uuid NOT NULL DEFAULT uuid_generate_v4(),
"title" text NOT NULL,

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.06.06T11.26.04.schema-checks.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE "schema_checks" (
"id" uuid PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4()
, "created_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.07.10T11.26.04.schema-checks-manual-approval.sql',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_checks"
ADD COLUMN "github_check_run_id" bigint
, ADD COLUMN "is_manually_approved" boolean

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.07.27T11.44.36.graphql-endpoint.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "targets"
ADD COLUMN "graphql_endpoint_url" text
;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.08.01T11.44.36.schema-checks-expires-at.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_checks"
ADD COLUMN "expires_at" TIMESTAMP WITH TIME ZONE
;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.09.01T09.54.00.zendesk-support.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "users" ADD COLUMN "zendesk_user_id" TEXT UNIQUE DEFAULT NULL;
CREATE INDEX "users_by_zendesk_user_id" ON "users" ("zendesk_user_id" ASC);

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.09.25T15.23.00.github-check-with-project-name.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "projects" ADD COLUMN "github_check_with_project_name" BOOLEAN;
UPDATE "projects" SET "github_check_with_project_name" = FALSE WHERE "github_check_with_project_name" IS NULL;
ALTER TABLE "projects"

View file

@ -2,5 +2,6 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.09.28T14.14.14.native-fed-v2.ts',
run: ({ sql }) => sql`ALTER TABLE "projects" ADD COLUMN native_federation BOOLEAN DEFAULT FALSE;`,
run: ({ psql }) =>
psql`ALTER TABLE "projects" ADD COLUMN native_federation BOOLEAN DEFAULT FALSE;`,
} satisfies MigrationExecutor;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.08.03T11.44.36.schema-checks-github-repository.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_checks"
ADD COLUMN "github_repository" text
, ADD COLUMN "github_sha" text

View file

@ -3,10 +3,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.10.26T12.44.36.schema-checks-filters-index.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'schema_checks_connection_pagination_with_changes',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_checks_connection_pagination_with_changes" ON "schema_checks" (
"target_id" ASC
, "created_at" DESC
@ -20,7 +20,7 @@ export default {
},
{
name: 'schema_checks_connection_pagination_with_no_success',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_checks_connection_pagination_with_no_success" ON "schema_checks" (
"target_id" ASC
, "created_at" DESC
@ -33,7 +33,7 @@ export default {
},
{
name: 'schema_checks_connection_pagination_with_no_success_and_changes',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_checks_connection_pagination_with_no_success_and_changes" ON "schema_checks" (
"target_id" ASC
, "created_at" DESC

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.10.30T00-00-00.drop-persisted-operations.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
DROP TABLE IF EXISTS "persisted_operations";
DROP TYPE IF EXISTS "operation_kind";
`,

View file

@ -3,15 +3,15 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.10.25T14.41.41.schema-checks-dedup.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'create sdl_store and alter schema_checks',
query: sql`
query: psql`
CREATE TABLE "sdl_store" (
"id" text PRIMARY KEY NOT NULL,
"sdl" text NOT NULL
);
ALTER TABLE "schema_checks"
ADD COLUMN "schema_sdl_store_id" text REFERENCES "sdl_store" ("id"),
ADD COLUMN "supergraph_sdl_store_id" text REFERENCES "sdl_store" ("id"),
@ -25,25 +25,25 @@ export default {
},
{
name: 'Create sdl_store_unique_id index',
query: sql`
query: psql`
CREATE UNIQUE INDEX sdl_store_unique_id ON "sdl_store" ("id");
`,
},
{
name: 'Create schema_check_by_schema_sdl_store_id index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_check_by_schema_sdl_store_id" ON "schema_checks" ("schema_sdl_store_id" ASC)
`,
},
{
name: 'Create schema_check_by_supergraph_sdl_store_id index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_check_by_supergraph_sdl_store_id" ON "schema_checks" ("supergraph_sdl_store_id" ASC)
`,
},
{
name: 'Create schema_check_by_composite_schema_sdl_store_id index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "schema_check_by_composite_schema_sdl_store_id" ON "schema_checks" ("composite_schema_sdl_store_id" ASC);
`,
},

View file

@ -2,7 +2,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.11.09T00.00.00.schema-check-approval.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE "schema_change_approvals" (
"target_id" UUID NOT NULL REFERENCES "targets" ("id") ON DELETE CASCADE,
"context_id" text NOT NULL,

View file

@ -3,10 +3,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2023.11.20T10-00-00.organization-member-roles.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'Create organization_roles and alter organization_member table',
query: sql`
query: psql`
CREATE TABLE organization_member_roles (
"id" uuid NOT NULL UNIQUE DEFAULT uuid_generate_v4(),
"organization_id" uuid NOT NULL REFERENCES "organizations" ("id") ON DELETE CASCADE,
@ -26,7 +26,7 @@ export default {
},
{
name: 'Create Admin role',
query: sql`
query: psql`
INSERT INTO organization_member_roles
(
organization_id,
@ -68,7 +68,7 @@ export default {
},
{
name: 'Create Contributor role',
query: sql`
query: psql`
INSERT INTO organization_member_roles
(
organization_id,
@ -104,7 +104,7 @@ export default {
},
{
name: 'Create Viewer role',
query: sql`
query: psql`
INSERT INTO organization_member_roles
(
organization_id,
@ -133,12 +133,12 @@ export default {
},
{
name: 'Assign roles to users with matching scopes',
query: sql`
query: psql`
UPDATE organization_member
SET role_id = (
SELECT id
FROM organization_member_roles
WHERE
WHERE
organization_member_roles.organization_id = organization_member.organization_id
AND
ARRAY(SELECT unnest(organization_member_roles.scopes) ORDER BY 1)
@ -150,14 +150,14 @@ export default {
},
{
name: 'Migrate organization_invitations table to use Viewer role',
query: sql`
query: psql`
ALTER TABLE organization_invitations ADD COLUMN "role_id" uuid REFERENCES "organization_member_roles" ("id");
UPDATE organization_invitations
SET role_id = (
SELECT id
FROM organization_member_roles
WHERE
WHERE
organization_member_roles.organization_id = organization_invitations.organization_id
AND
locked = true

View file

@ -3,10 +3,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.01.08T10-00-00.schema-version-diff-schema-version-id',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'add diff_schema_version_id column',
query: sql`
query: psql`
ALTER TABLE "schema_versions"
ADD COLUMN IF NOT EXISTS "diff_schema_version_id" uuid REFERENCES "schema_versions" ("id")
, ADD COLUMN IF NOT EXISTS "record_version" text
@ -15,7 +15,7 @@ export default {
},
{
name: 'create schema_versions_cursor_pagination index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_versions_cursor_pagination" ON "schema_versions" (
"target_id" ASC
, "created_at" DESC
@ -25,7 +25,7 @@ export default {
},
{
name: 'create schema_versions_cursor_pagination index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_versions_cursor_pagination_composable" ON "schema_versions" (
"target_id" ASC
, "created_at" DESC

View file

@ -2,7 +2,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.01.26T00.00.00.contracts.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_versions"
ADD COLUMN "tags" text[]
, ADD COLUMN "has_contract_composition_errors" boolean

View file

@ -3,10 +3,10 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.01.26T00.00.01.schema-check-pagination-index-update',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'create index schema_checks_connection_pagination_with_changes_new',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_checks_connection_pagination_with_changes_new" ON "schema_checks" (
"target_id" ASC
, "created_at" DESC
@ -21,7 +21,7 @@ export default {
},
{
name: 'create index schema_checks_connection_pagination_with_no_success_and_changes_new',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_checks_connection_pagination_with_no_success_and_changes_new" ON "schema_checks" (
"target_id" ASC
, "created_at" DESC
@ -39,19 +39,19 @@ export default {
},
{
name: 'drop index schema_checks_connection_pagination_with_changes',
query: sql`
query: psql`
DROP INDEX CONCURRENTLY IF EXISTS "schema_checks_connection_pagination_with_changes";
`,
},
{
name: 'drop index schema_checks_connection_pagination_with_no_success_and_changes',
query: sql`
query: psql`
DROP INDEX CONCURRENTLY IF EXISTS "schema_checks_connection_pagination_with_no_success_and_changes";
`,
},
{
name: 'create index contract_checks_supergraph_sdl_store_id index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "contract_checks_supergraph_sdl_store_id" ON "contract_checks" (
"supergraph_sdl_store_id" ASC
);
@ -59,7 +59,7 @@ export default {
},
{
name: 'create index contract_checks_composite_schema_sdl_store_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY "contract_checks_composite_schema_sdl_store_id" ON "contract_checks" (
"composite_schema_sdl_store_id" ASC
);

View file

@ -3,7 +3,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.02.19T00.00.01.schema-check-store-breaking-change-metadata.ts',
noTransaction: true,
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_checks"
ADD COLUMN IF NOT EXISTS "conditional_breaking_change_metadata" JSONB
;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.04.09T10.10.00.check-approval-comment.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_checks" ADD COLUMN IF NOT EXISTS "manual_approval_comment" text;
`,
} satisfies MigrationExecutor;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.06.11T10-10-00.ms-teams-webhook.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TYPE alert_channel_type ADD VALUE 'MSTEAMS_WEBHOOK';
`,
} satisfies MigrationExecutor;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.07.16T13-44-00.oidc-only-access.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "oidc_integrations"
ADD COLUMN "oidc_user_access_only" BOOLEAN NOT NULL DEFAULT TRUE;
`,

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.07.17T00-00-00.app-deployments.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE IF NOT EXISTS "app_deployments" (
"id" UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
"target_id" UUID NOT NULL REFERENCES "targets" ("id") ON DELETE CASCADE,

View file

@ -10,14 +10,15 @@ import {
isScalarType,
isUnionType,
} from 'graphql';
import { sql, type CommonQueryMethods } from 'slonik';
import z from 'zod';
import { psql, type CommonQueryMethods } from '@hive/postgres';
import { env } from '../environment';
import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.07.23T09.36.00.schema-cleanup-tracker.ts',
async run({ connection }) {
await connection.query(sql`
await connection.query(psql`
CREATE TABLE IF NOT EXISTS "schema_coordinate_status" (
coordinate text NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
@ -27,7 +28,7 @@ export default {
"target_id" UUID NOT NULL REFERENCES "targets" ("id") ON DELETE CASCADE,
PRIMARY KEY (coordinate, target_id)
);
CREATE INDEX IF NOT EXISTS idx_schema_coordinate_status_by_target_timestamp
ON schema_coordinate_status(
target_id,
@ -40,7 +41,7 @@ export default {
coordinate,
created_at,
deprecated_at
);
);
`);
if (env.isHiveCloud) {
@ -48,9 +49,13 @@ export default {
return;
}
const schemaVersionsTotal = await connection.oneFirst<number>(sql`
const schemaVersionsTotal = await connection
.oneFirst(
psql`
SELECT count(*) as total FROM schema_versions
`);
`,
)
.then(z.number().parse);
console.log(`Found ${schemaVersionsTotal} schema versions`);
if (schemaVersionsTotal > 1000) {
@ -93,24 +98,24 @@ function diffSchemaCoordinates(
export async function schemaCoordinateStatusMigration(connection: CommonQueryMethods) {
// Fetch targets
const targetResult = await connection.query<{ id: string }>(sql`
const targetResult = await connection
.any(
psql`
SELECT id FROM targets WHERE ID NOT IN (SELECT target_id FROM schema_coordinate_status)
`);
`,
)
.then(z.array(z.object({ id: z.string() })).parse);
console.log(`Found ${targetResult.rowCount} targets`);
console.log(`Found ${targetResult.length} targets`);
let i = 0;
for await (const target of targetResult.rows) {
for await (const target of targetResult) {
try {
console.log(`Processing target (${i++}/${targetResult.rowCount}) - ${target.id}`);
console.log(`Processing target (${i++}/${targetResult.length}) - ${target.id}`);
const latestSchema = await connection.maybeOne<{
id: string;
created_at: number;
is_composable: boolean;
sdl?: string;
previous_schema_version_id?: string;
}>(sql`
const latestSchema = await connection
.maybeOne(
psql`
SELECT
id,
created_at,
@ -121,7 +126,19 @@ export async function schemaCoordinateStatusMigration(connection: CommonQueryMet
WHERE target_id = ${target.id} AND is_composable = true
ORDER BY created_at DESC
LIMIT 1
`);
`,
)
.then(
z
.object({
id: z.string(),
created_at: z.number(),
is_composable: z.boolean(),
sdl: z.string().nullable(),
previous_schema_version_id: z.string().nullable(),
})
.nullable().parse,
);
if (!latestSchema) {
console.log('[SKIPPING] No latest composable schema found for target %s', target.id);
@ -270,10 +287,10 @@ async function insertRemainingCoordinates(
console.log(
`Adding remaining ${targetCoordinates.coordinates.size} coordinates for target ${targetId}`,
);
await connection.query(sql`
await connection.query(psql`
INSERT INTO schema_coordinate_status
( target_id, coordinate, created_at, created_in_version_id )
SELECT * FROM ${sql.unnest(
SELECT * FROM ${psql.unnest(
Array.from(targetCoordinates.coordinates).map(coordinate => [
targetId,
coordinate,
@ -290,10 +307,10 @@ async function insertRemainingCoordinates(
console.log(
`Deprecating remaining ${remainingDeprecated.size} coordinates for target ${targetId}`,
);
await connection.query(sql`
await connection.query(psql`
INSERT INTO schema_coordinate_status
( target_id, coordinate, created_at, created_in_version_id, deprecated_at, deprecated_in_version_id )
SELECT * FROM ${sql.unnest(
SELECT * FROM ${psql.unnest(
Array.from(remainingDeprecated).map(coordinate => [
targetId,
coordinate,
@ -343,13 +360,9 @@ async function processVersion(
return;
}
const versionBefore = await connection.maybeOne<{
id: string;
sdl?: string;
previous_schema_version_id?: string;
created_at: number;
is_composable: boolean;
}>(sql`
const versionBefore = await connection
.maybeOne(
psql`
SELECT
id,
composite_schema_sdl as sdl,
@ -358,7 +371,19 @@ async function processVersion(
is_composable
FROM schema_versions
WHERE id = ${previousVersionId} AND target_id = ${targetId}
`);
`,
)
.then(
z
.object({
id: z.string(),
created_at: z.number(),
is_composable: z.boolean(),
sdl: z.string().nullable(),
previous_schema_version_id: z.string().nullable(),
})
.nullable().parse,
);
if (!versionBefore) {
console.error(
@ -440,10 +465,10 @@ async function processVersion(
if (added.length) {
console.log(`Adding ${added.length} coordinates for target ${targetId}`);
await connection.query(sql`
await connection.query(psql`
INSERT INTO schema_coordinate_status
( target_id, coordinate, created_at, created_in_version_id )
SELECT * FROM ${sql.unnest(
SELECT * FROM ${psql.unnest(
added.map(coordinate => [targetId, coordinate, datePG, after.versionId]),
['uuid', 'text', 'date', 'uuid'],
)}
@ -456,10 +481,10 @@ async function processVersion(
if (deprecated.length) {
console.log(`deprecating ${deprecated.length} coordinates for target ${targetId}`);
await connection.query(sql`
await connection.query(psql`
INSERT INTO schema_coordinate_status
( target_id, coordinate, created_at, created_in_version_id, deprecated_at, deprecated_in_version_id )
SELECT * FROM ${sql.unnest(
SELECT * FROM ${psql.unnest(
deprecated.map(coordinate => [
targetId,
coordinate,

View file

@ -4,14 +4,14 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.12.23T00-00-00.improve-version-index.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: `create "schema_log"."action" with "created_at" sort index`,
query: sql`CREATE INDEX CONCURRENTLY idx_schema_log_action_created ON schema_log(action, created_at DESC);`,
query: psql`CREATE INDEX CONCURRENTLY idx_schema_log_action_created ON schema_log(action, created_at DESC);`,
},
{
name: `create "schema_log"."action" + "service_name" index`,
query: sql`CREATE INDEX CONCURRENTLY idx_schema_log_action_service ON schema_log(action, lower(service_name));`,
query: psql`CREATE INDEX CONCURRENTLY idx_schema_log_action_service ON schema_log(action, lower(service_name));`,
},
],
} satisfies MigrationExecutor;

View file

@ -3,10 +3,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.12.24T00-00-00.improve-version-index-2.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: `create "schema_version_changes"."schema_version_id" lookup index`,
query: sql`CREATE INDEX CONCURRENTLY idx_schema_version_changes_id ON schema_version_changes(schema_version_id);`,
query: psql`CREATE INDEX CONCURRENTLY idx_schema_version_changes_id ON schema_version_changes(schema_version_id);`,
},
],
} satisfies MigrationExecutor;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2024.12.27T00.00.00.create-preflight-scripts.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE IF NOT EXISTS "document_preflight_scripts" (
"id" uuid NOT NULL DEFAULT uuid_generate_v4(),
"source_code" text NOT NULL,

View file

@ -7,10 +7,10 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.01.02T00-00-00.cascade-deletion-indices.ts',
noTransaction: true,
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'index schema_checks_manual_approval_user_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_checks_manual_approval_user_id"
ON "schema_checks"("manual_approval_user_id")
WHERE "manual_approval_user_id" is not null
@ -18,106 +18,106 @@ export default {
},
{
name: 'index organization_member_user_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "organization_member_user_id"
ON "organization_member"("user_id")
`,
},
{
name: 'index organization_member_organization_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "organization_member_organization_id"
ON "organization_member"("organization_id")
`,
},
{
name: 'index organization_member_roles_organization_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "organization_member_roles_organization_id"
ON "organization_member_roles"("organization_id")
`,
},
{
name: 'index projects_org_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "projects_org_id" ON "projects"("org_id")
`,
},
{
name: 'index targets_project_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "targets_project_id" ON "targets"("project_id")
`,
},
{
name: 'index schema_versions_target_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_versions_target_id" ON "schema_versions"("target_id")
`,
},
{
name: 'index schema_checks_target_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_checks_target_id" ON "schema_checks"("target_id")
`,
},
{
name: 'index schema_log_target_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_log_target_id" ON "schema_log"("target_id")
`,
},
{
name: 'index schema_log_project_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_log_project_id" ON "schema_log"("project_id")
`,
},
{
name: 'index contract_versions_schema_version_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "contract_versions_schema_version_id" ON "contract_versions"("schema_version_id")
`,
},
{
name: 'index schema_version_to_log_action_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_version_to_log_action_id" ON "schema_version_to_log"("action_id")
`,
},
{
name: 'index schema_version_to_log_version_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_version_to_log_version_id" ON "schema_version_to_log"("version_id")
`,
},
{
name: 'index contract_schema_change_approvals_schema_change_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "contract_schema_change_approvals_schema_change_id" ON "contract_schema_change_approvals"("schema_change_id")
`,
},
{
name: 'index schema_checks_schema_version_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_checks_schema_version_id" ON "schema_checks"("schema_version_id")
`,
},
{
name: 'index schema_versions_diff_schema_version_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "schema_versions_diff_schema_version_id" ON "schema_versions"("diff_schema_version_id")
`,
},
{
name: 'index organizations_ownership_transfer_user_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "organizations_ownership_transfer_user_id" ON "organizations"("ownership_transfer_user_id")
`,
},
{
name: 'index users_supertoken_user_id',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "users_supertoken_user_id_missing"
ON "users"("supertoken_user_id")
WHERE 'supertoken_user_id' IS NULL

View file

@ -7,16 +7,16 @@ export default {
// we do not run this in a transaction as each user processing delete takes around 300ms.
// and we do not want to mess with live traffic.
noTransaction: true,
async run({ sql, connection }) {
async run({ psql, connection }) {
const userIds = await connection
.anyFirst(
sql`
psql`
SELECT
"id"
FROM
"users"
WHERE
"supertoken_user_id" IS NULL
"supertoken_user_id" IS NULL
`,
)
.then(value => z.array(z.string()).parse(value));
@ -32,7 +32,7 @@ export default {
`processing userId="${userId}" (${counter.toPrecision().padStart(padAmount, '0')}/${total})`,
);
// ON DELETE SET null constraint is missing, so we need to first update it manually
await connection.query(sql`
await connection.query(psql`
UPDATE
"organizations"
SET
@ -41,14 +41,14 @@ export default {
"ownership_transfer_user_id" = ${userId}
`);
// Delete the organizations of these users
await connection.query(sql`
await connection.query(psql`
DELETE
FROM
"organizations"
WHERE
"user_id" = ${userId}
`);
await connection.query(sql`
await connection.query(psql`
DELETE
FROM
"users"

View file

@ -24,8 +24,8 @@ const QUERY_RESULT = z.array(
export default {
name: '2025.01.09T00-00-00.legacy-member-scopes.ts',
noTransaction: true,
async run({ sql, connection }) {
const queryResult = await connection.query(sql`
async run({ psql, connection }) {
const queryResult = await connection.any(psql`
SELECT
organization_id as "organizationId",
sorted_scopes as "sortedScopes",
@ -49,7 +49,7 @@ export default {
ORDER BY organization_id;
`);
if (queryResult.rowCount === 0) {
if (queryResult.length === 0) {
console.log('No members without role_id found.');
return;
}
@ -57,7 +57,7 @@ export default {
// rows are sorted by organization_id
// and grouped by scopes
// so we can process them in order
const rows = QUERY_RESULT.parse(queryResult.rows);
const rows = QUERY_RESULT.parse(queryResult);
let counter = 1;
let previousOrganizationId: string | null = null;
@ -70,12 +70,12 @@ export default {
}
console.log(
`processing organization_id="${row.organizationId}" (${counter}) with ${row.userIds.length} users | ${index + 1}/${queryResult.rowCount}`,
`processing organization_id="${row.organizationId}" (${counter}) with ${row.userIds.length} users | ${index + 1}/${queryResult.length}`,
);
const startedAt = Date.now();
await connection.query(sql`
await connection.query(psql`
WITH new_role AS (
INSERT INTO organization_member_roles (
organization_id, name, description, scopes
@ -84,13 +84,13 @@ export default {
${row.organizationId},
'Auto Role ' || substring(uuid_generate_v4()::text FROM 1 FOR 8),
'Auto generated role to assign to members without a role',
${sql.array(row.sortedScopes, 'text')}
${psql.array(row.sortedScopes, 'text')}
)
RETURNING id
)
UPDATE organization_member
SET role_id = (SELECT id FROM new_role)
WHERE organization_id = ${row.organizationId} AND user_id = ANY(${sql.array(row.userIds, 'uuid')})
WHERE organization_id = ${row.organizationId} AND user_id = ANY(${psql.array(row.userIds, 'uuid')})
`);
console.log(`finished after ${Date.now() - startedAt}ms`);

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.01.10T00.00.00.breaking-changes-request-count.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TYPE
breaking_change_formula AS ENUM('PERCENTAGE', 'REQUEST_COUNT');

View file

@ -4,10 +4,10 @@ export default {
name: '2025.01.13T10-08-00.default-role.ts',
noTransaction: true,
// Adds a default role to OIDC integration and set index on "oidc_integrations"."default_role_id"
run: ({ sql }) => [
run: ({ psql }) => [
{
name: 'Add a column',
query: sql`
query: psql`
ALTER TABLE "oidc_integrations"
ADD COLUMN IF NOT EXISTS "default_role_id" UUID REFERENCES organization_member_roles(id)
ON DELETE SET NULL;
@ -15,7 +15,7 @@ export default {
},
{
name: 'Create an index',
query: sql`
query: psql`
CREATE INDEX CONCURRENTLY IF NOT EXISTS "oidc_integrations_default_role_id_idx"
ON "oidc_integrations"("default_role_id")
WHERE "default_role_id" is not null;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.01.17T10-08-00.drop-activities.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
DROP TABLE IF EXISTS "activities";
`,
} satisfies MigrationExecutor;

View file

@ -6,7 +6,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.01.20T00-00-00.legacy-registry-model-removal.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE projects DROP COLUMN IF EXISTS legacy_registry_model;
`,
} satisfies MigrationExecutor;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025-01-30T00-00-00.granular-member-role-permissions.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "organization_member_roles"
ALTER "scopes" DROP NOT NULL
, ADD COLUMN "permissions" text[]

View file

@ -2,7 +2,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.02.14T00.00.00.schema-versions-metadata.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_versions"
ADD COLUMN "schema_metadata" JSONB DEFAULT NULL
;

View file

@ -2,7 +2,7 @@ import { type MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.02.20T00-00-00.organization-access-tokens.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
CREATE TABLE IF NOT EXISTS "organization_access_tokens" (
"id" UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4()
, "organization_id" UUID NOT NULL REFERENCES "organizations" ("id") ON DELETE CASCADE

View file

@ -2,7 +2,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.02.21T00.00.00.schema-versions-metadata-attributes.ts',
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE "schema_versions"
ADD COLUMN "metadata_attributes" JSONB DEFAULT NULL
;

View file

@ -3,7 +3,7 @@ import type { MigrationExecutor } from '../pg-migrator';
export default {
name: '2025.03.20T00-00-00.dangerous_breaking.ts',
noTransaction: true,
run: ({ sql }) => sql`
run: ({ psql }) => psql`
ALTER TABLE
targets
ADD COLUMN

Some files were not shown because too many files have changed in this diff Show more