feat: persistent job queue with postgraphile worker (#7383)

Co-authored-by: jdolle <1841898+jdolle@users.noreply.github.com>
This commit is contained in:
Laurin Quast 2026-01-12 13:13:23 +01:00 committed by GitHub
parent 36dd9d3bff
commit ec77725ca1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
76 changed files with 2364 additions and 384 deletions

View file

@ -0,0 +1,38 @@
---
'hive': major
---
Add a new `workflows` service. This service consolidates and replaces the `emails` and `webhooks` services, using a Postgres-backed persistent queue for improved stability and reliability.
If you are running a self-hosted setup, the following Docker Compose changes are required:
```diff
services:
+ workflows:
+ image: '${DOCKER_REGISTRY}workflows${DOCKER_TAG}'
+ networks:
+ - 'stack'
+ depends_on:
+ db:
+ condition: service_healthy
+ environment:
+ NODE_ENV: production
+ PORT: 3014
+ POSTGRES_HOST: db
+ POSTGRES_PORT: 5432
+ POSTGRES_DB: '${POSTGRES_DB}'
+ POSTGRES_USER: '${POSTGRES_USER}'
+ POSTGRES_PASSWORD: '${POSTGRES_PASSWORD}'
+ EMAIL_FROM: no-reply@graphql-hive.com
+ EMAIL_PROVIDER: sendmail
+ LOG_LEVEL: '${LOG_LEVEL:-debug}'
+ SENTRY: '${SENTRY:-0}'
+ SENTRY_DSN: '${SENTRY_DSN:-}'
+ PROMETHEUS_METRICS: '${PROMETHEUS_METRICS:-}'
+ LOG_JSON: '1'
```
For other setups, we recommend using the snippet above as a reference.
**Note:** The workflows service will attempt to run Postgres migrations to seed the required database tables within the `graphile_worker` namespace. Please make sure the database user has sufficient permissions. For more information, please refer to the [Graphile Worker documentation](https://worker.graphile.org/).

View file

@ -30,6 +30,7 @@ import { deployTokens } from './services/tokens';
import { deployUsage } from './services/usage';
import { deployUsageIngestor } from './services/usage-ingestor';
import { deployWebhooks } from './services/webhooks';
import { deployWorkflows, PostmarkSecret } from './services/workflows';
import { configureZendesk } from './services/zendesk';
import { optimizeAzureCluster } from './utils/azure-helpers';
import { isDefined } from './utils/helpers';
@ -68,6 +69,13 @@ const docker = configureDocker();
const envName = pulumi.getStack();
const heartbeatsConfig = new pulumi.Config('heartbeats');
const emailConfig = new pulumi.Config('email');
const postmarkSecret = new PostmarkSecret('postmark', {
token: emailConfig.requireSecret('token'),
from: emailConfig.require('from'),
messageStream: emailConfig.require('messageStream'),
});
const sentry = configureSentry();
const environment = prepareEnvironment({
release: imagesTag,
@ -146,10 +154,22 @@ const emails = deployEmails({
docker,
environment,
redis,
postmarkSecret,
sentry,
observability,
});
deployWorkflows({
image: docker.factory.getImageId('workflows', imagesTag),
docker,
environment,
postgres,
postmarkSecret,
observability,
sentry,
heartbeat: heartbeatsConfig.get('workflows'),
});
const commerce = deployCommerce({
image: docker.factory.getImageId('commerce', imagesTag),
docker,

View file

@ -56,7 +56,6 @@ export function deployCommerce({
env: {
...environment.envVars,
SENTRY: sentry.enabled ? '1' : '0',
EMAILS_ENDPOINT: serviceLocalEndpoint(emails.service),
WEB_APP_URL: `https://${environment.appDns}/`,
OPENTELEMETRY_TRACE_USAGE_REQUESTS: observability.enabledForUsageService ? '1' : '',
OPENTELEMETRY_COLLECTOR_ENDPOINT:

View file

@ -7,15 +7,10 @@ import { Environment } from './environment';
import { Observability } from './observability';
import { Redis } from './redis';
import { Sentry } from './sentry';
import { PostmarkSecret } from './workflows';
export type Emails = ReturnType<typeof deployEmails>;
class PostmarkSecret extends ServiceSecret<{
token: pulumi.Output<string> | string;
from: string;
messageStream: string;
}> {}
export function deployEmails({
environment,
redis,
@ -24,22 +19,17 @@ export function deployEmails({
docker,
sentry,
observability,
postmarkSecret,
}: {
observability: Observability;
environment: Environment;
image: string;
redis: Redis;
docker: Docker;
postmarkSecret: PostmarkSecret;
heartbeat?: string;
sentry: Sentry;
}) {
const emailConfig = new pulumi.Config('email');
const postmarkSecret = new PostmarkSecret('postmark', {
token: emailConfig.requireSecret('token'),
from: emailConfig.require('from'),
messageStream: emailConfig.require('messageStream'),
});
const { deployment, service } = new ServiceDeployment(
'emails-service',
{

View file

@ -126,10 +126,8 @@ export function deployGraphQL({
REQUEST_LOGGING: '1', // disabled
COMMERCE_ENDPOINT: serviceLocalEndpoint(commerce.service),
TOKENS_ENDPOINT: serviceLocalEndpoint(tokens.service),
WEBHOOKS_ENDPOINT: serviceLocalEndpoint(webhooks.service),
SCHEMA_ENDPOINT: serviceLocalEndpoint(schema.service),
SCHEMA_POLICY_ENDPOINT: serviceLocalEndpoint(schemaPolicy.service),
EMAILS_ENDPOINT: serviceLocalEndpoint(emails.service),
WEB_APP_URL: `https://${environment.appDns}`,
GRAPHQL_PUBLIC_ORIGIN: `https://${environment.appDns}`,
CDN_CF: '1',

View file

@ -0,0 +1,73 @@
import * as pulumi from '@pulumi/pulumi';
import { ServiceSecret } from '../utils/secrets';
import { ServiceDeployment } from '../utils/service-deployment';
import { Docker } from './docker';
import { Environment } from './environment';
import { Observability } from './observability';
import { Postgres } from './postgres';
import { Sentry } from './sentry';
/**
 * Secret bundle holding the Postmark email-provider credentials shared
 * between the email-sending deployments.
 * `token` may be a Pulumi secret output; `from` and `messageStream` are
 * plain configuration strings.
 */
export class PostmarkSecret extends ServiceSecret<{
token: pulumi.Output<string> | string;
from: string;
messageStream: string;
}> {}
/**
 * Deploys the `workflows` service — a Postgres-backed (graphile-worker)
 * persistent job queue that replaces the direct emails/webhooks calls.
 *
 * Postgres connection details are injected from the PgBouncer secret and
 * Postmark credentials from the shared PostmarkSecret. The Sentry DSN is
 * only attached when Sentry is enabled.
 */
export function deployWorkflows({
  environment,
  heartbeat,
  image,
  docker,
  sentry,
  postgres,
  observability,
  postmarkSecret,
}: {
  postgres: Postgres;
  observability: Observability;
  environment: Environment;
  image: string;
  docker: Docker;
  heartbeat?: string;
  sentry: Sentry;
  postmarkSecret: PostmarkSecret;
}) {
  return (
    new ServiceDeployment(
      // Named "workflows-service" (plural) to match the "workflows" image,
      // heartbeat key, and directory naming used everywhere else.
      'workflows-service',
      {
        imagePullSecret: docker.secret,
        env: {
          ...environment.envVars,
          SENTRY: sentry.enabled ? '1' : '0',
          EMAIL_PROVIDER: 'postmark',
          HEARTBEAT_ENDPOINT: heartbeat ?? '',
          OPENTELEMETRY_COLLECTOR_ENDPOINT:
            observability.enabled && observability.tracingEndpoint
              ? observability.tracingEndpoint
              : '',
          LOG_JSON: '1',
        },
        readinessProbe: '/_readiness',
        livenessProbe: '/_health',
        startupProbe: '/_health',
        exposesMetrics: true,
        image,
        replicas: environment.podsConfig.general.replicas,
      },
      [],
    )
      // Postgres connection (via PgBouncer)
      .withSecret('POSTGRES_HOST', postgres.pgBouncerSecret, 'host')
      .withSecret('POSTGRES_PORT', postgres.pgBouncerSecret, 'port')
      .withSecret('POSTGRES_USER', postgres.pgBouncerSecret, 'user')
      .withSecret('POSTGRES_PASSWORD', postgres.pgBouncerSecret, 'password')
      .withSecret('POSTGRES_DB', postgres.pgBouncerSecret, 'database')
      .withSecret('POSTGRES_SSL', postgres.pgBouncerSecret, 'ssl')
      // Postmark email provider
      .withSecret('EMAIL_FROM', postmarkSecret, 'from')
      .withSecret('EMAIL_PROVIDER_POSTMARK_TOKEN', postmarkSecret, 'token')
      .withSecret('EMAIL_PROVIDER_POSTMARK_MESSAGE_STREAM', postmarkSecret, 'messageStream')
      .withConditionalSecret(sentry.enabled, 'SENTRY_DSN', sentry.secret, 'dsn')
      .deploy()
  );
}

View file

@ -218,10 +218,8 @@ services:
REDIS_PORT: 6379
REDIS_PASSWORD: '${REDIS_PASSWORD}'
TOKENS_ENDPOINT: http://tokens:3003
WEBHOOKS_ENDPOINT: http://webhooks:3005
SCHEMA_ENDPOINT: http://schema:3002
SCHEMA_POLICY_ENDPOINT: http://policy:3012
EMAILS_ENDPOINT: http://emails:3011
ENCRYPTION_SECRET: '${HIVE_ENCRYPTION_SECRET}'
WEB_APP_URL: '${HIVE_APP_BASE_URL}'
PORT: 3001
@ -341,11 +339,33 @@ services:
EMAIL_FROM: no-reply@graphql-hive.com
EMAIL_PROVIDER: sendmail
LOG_LEVEL: '${LOG_LEVEL:-debug}'
OPENTELEMETRY_COLLECTOR_ENDPOINT: '${OPENTELEMETRY_COLLECTOR_ENDPOINT:-}'
SENTRY: '${SENTRY:-0}'
SENTRY_DSN: '${SENTRY_DSN:-}'
PROMETHEUS_METRICS: '${PROMETHEUS_METRICS:-}'
workflows:
image: '${DOCKER_REGISTRY}workflows${DOCKER_TAG}'
networks:
- 'stack'
depends_on:
db:
condition: service_healthy
environment:
NODE_ENV: production
PORT: 3014
POSTGRES_HOST: db
POSTGRES_PORT: 5432
POSTGRES_DB: '${POSTGRES_DB}'
POSTGRES_USER: '${POSTGRES_USER}'
POSTGRES_PASSWORD: '${POSTGRES_PASSWORD}'
EMAIL_FROM: no-reply@graphql-hive.com
EMAIL_PROVIDER: sendmail
LOG_LEVEL: '${LOG_LEVEL:-debug}'
SENTRY: '${SENTRY:-0}'
SENTRY_DSN: '${SENTRY_DSN:-}'
PROMETHEUS_METRICS: '${PROMETHEUS_METRICS:-}'
LOG_JSON: '1'
usage:
image: '${DOCKER_REGISTRY}usage${DOCKER_TAG}'
networks:

View file

@ -316,6 +316,27 @@ target "webhooks" {
]
}
# Build target for the workflows service (graphile-worker based job queue).
target "workflows" {
inherits = ["service-base", get_target()]
contexts = {
dist = "${PWD}/packages/services/workflows/dist"
shared = "${PWD}/docker/shared"
}
args = {
SERVICE_DIR_NAME = "@hive/workflows"
IMAGE_TITLE = "graphql-hive/workflows"
IMAGE_DESCRIPTION = "The workflow service of the GraphQL Hive project."
# NOTE(review): the compose files run this service with PORT 3014; confirm the
# baked-in default of 3013 here is intentional and not a copy-paste slip from a
# neighboring target.
PORT = "3013"
HEALTHCHECK_CMD = "wget --spider -q http://127.0.0.1:$${PORT}/_readiness"
}
tags = [
local_image_tag("workflows"),
stable_image_tag("workflows"),
image_tag("workflows", COMMIT_SHA),
image_tag("workflows", BRANCH_NAME)
]
}
target "composition-federation-2" {
inherits = ["service-base", get_target()]
contexts = {
@ -425,6 +446,7 @@ group "build" {
"commerce",
"composition-federation-2",
"app",
"workflows",
"otel-collector"
]
}
@ -442,6 +464,7 @@ group "integration-tests" {
"webhooks",
"server",
"composition-federation-2",
"workflows",
"otel-collector"
]
}

View file

@ -138,7 +138,6 @@ services:
CLICKHOUSE_PORT: '8123'
CLICKHOUSE_USERNAME: '${CLICKHOUSE_USER}'
CLICKHOUSE_PASSWORD: '${CLICKHOUSE_PASSWORD}'
EMAILS_ENDPOINT: http://emails:3011
STRIPE_SECRET_KEY: empty
PORT: 3009
@ -246,6 +245,13 @@ services:
ports:
- '3011:3011'
workflows:
environment:
EMAIL_PROVIDER: '${EMAIL_PROVIDER}'
LOG_LEVEL: debug
ports:
- '3014:3014'
schema:
environment:
LOG_LEVEL: debug

View file

@ -6,10 +6,10 @@ export interface Email {
body: string;
}
export async function history(): Promise<Email[]> {
const emailsAddress = await getServiceHost('emails', 3011);
export async function history(forEmail?: string): Promise<Email[]> {
const workflowsAddress = await getServiceHost('workflows', 3014);
const response = await fetch(`http://${emailsAddress}/_history`, {
const response = await fetch(`http://${workflowsAddress}/_history`, {
method: 'GET',
headers: {
accept: 'application/json',
@ -17,5 +17,11 @@ export async function history(): Promise<Email[]> {
},
});
return response.json();
const result: Email[] = await response.json();
if (!forEmail) {
return result;
}
return result.filter(result => result.to === forEmail);
}

View file

@ -21,6 +21,7 @@ const LOCAL_SERVICES = {
external_composition: 3012,
mock_server: 3042,
'otel-collector': 4318,
workflows: 3014,
} as const;
export type KnownServices = keyof typeof LOCAL_SERVICES;

View file

@ -54,12 +54,16 @@ test('rate limit approaching and reached for organization', async () => {
return filterEmailsByOrg(organization.slug, sent)?.length === 1;
});
let sent = await emails.history();
expect(sent).toContainEqual({
to: ownerEmail,
subject: `${organization.slug} is approaching its rate limit`,
body: expect.any(String),
});
let sent = await emails.history(ownerEmail);
expect(sent).toEqual([
{
to: ownerEmail,
subject: `${organization.slug} is approaching its rate limit`,
body: expect.any(String),
},
]);
expect(filterEmailsByOrg(organization.slug, sent)).toHaveLength(1);
// Collect operations and check for rate-limit reached
@ -70,11 +74,11 @@ test('rate limit approaching and reached for organization', async () => {
// wait for the quota email to send...
await pollFor(async () => {
let sent = await emails.history();
let sent = await emails.history(ownerEmail);
return filterEmailsByOrg(organization.slug, sent)?.length === 2;
});
sent = await emails.history();
sent = await emails.history(ownerEmail);
expect(sent).toContainEqual({
to: ownerEmail,

View file

@ -0,0 +1,17 @@
import { type MigrationExecutor } from '../pg-migrator';
/**
 * Seeds the table the workflows service uses to deduplicate scheduled
 * graphile-worker tasks: one row per (task_name, dedupe_key) pair, with an
 * `expires_at` index to support sweeping expired entries.
 *
 * NOTE(review): the name is dated 2025.12.12 but the registry appends it
 * after later-dated migrations — confirm the migrator applies entries in
 * list order (not sorted by name) so this runs as intended.
 */
export default {
name: '2025.12.12T00-00-00.workflows-deduplication.ts',
run: ({ sql }) => sql`
CREATE TABLE "graphile_worker_deduplication" (
"task_name" text NOT NULL,
"dedupe_key" text NOT NULL,
"expires_at" timestamptz NOT NULL,
CONSTRAINT "dedupe_pk" PRIMARY KEY ("task_name", "dedupe_key")
);
CREATE INDEX "graphile_worker_deduplication_expires_at_idx"
ON "graphile_worker_deduplication" ("expires_at")
;
`,
} satisfies MigrationExecutor;

View file

@ -175,5 +175,6 @@ export const runPGMigrations = async (args: { slonik: DatabasePool; runTo?: stri
await import('./actions/2025.12.5T00-00-00.schema-check-url'),
await import('./actions/2025.12.30T00-00-00.schema-proposals-part-2'),
await import('./actions/2025.12.17T00-00-00.custom-oidc-scopes'),
await import('./actions/2025.12.12T00-00-00.workflows-deduplication'),
],
});

View file

@ -20,14 +20,13 @@
"@graphql-inspector/core": "7.1.0",
"@graphql-tools/merge": "9.1.1",
"@hive/cdn-script": "workspace:*",
"@hive/emails": "workspace:*",
"@hive/schema": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@hive/tokens": "workspace:*",
"@hive/usage-common": "workspace:*",
"@hive/usage-ingestor": "workspace:*",
"@hive/webhooks": "workspace:*",
"@hive/workflows": "workspace:*",
"@nodesecure/i18n": "^4.0.1",
"@nodesecure/js-x-ray": "8.0.0",
"@octokit/app": "15.1.4",

View file

@ -1,8 +1,8 @@
import { CONTEXT, createApplication, Provider, Scope } from 'graphql-modules';
import { Redis } from 'ioredis';
import { TaskScheduler } from '@hive/workflows/kit';
import { adminModule } from './modules/admin';
import { alertsModule } from './modules/alerts';
import { WEBHOOKS_CONFIG, WebhooksConfig } from './modules/alerts/providers/tokens';
import { appDeploymentsModule } from './modules/app-deployments';
import { APP_DEPLOYMENTS_ENABLED } from './modules/app-deployments/providers/app-deployments-enabled-token';
import { auditLogsModule } from './modules/audit-logs';
@ -49,7 +49,6 @@ import {
import { sharedModule } from './modules/shared';
import { CryptoProvider, encryptionSecretProvider } from './modules/shared/providers/crypto';
import { DistributedCache } from './modules/shared/providers/distributed-cache';
import { Emails, EMAILS_ENDPOINT } from './modules/shared/providers/emails';
import { HttpClient } from './modules/shared/providers/http-client';
import { IdTranslator } from './modules/shared/providers/id-translator';
import {
@ -92,13 +91,13 @@ const modules = [
appDeploymentsModule,
auditLogsModule,
proposalsModule,
supportModule,
];
export function createRegistry({
app,
commerce,
tokens,
webhooks,
schemaService,
schemaPolicyService,
logger,
@ -113,13 +112,13 @@ export function createRegistry({
encryptionSecret,
schemaConfig,
supportConfig,
emailsEndpoint,
organizationOIDC,
pubSub,
appDeploymentsEnabled,
schemaProposalsEnabled,
otelTracingEnabled,
prometheus,
taskScheduler,
}: {
logger: Logger;
storage: Storage;
@ -127,7 +126,6 @@ export function createRegistry({
redis: Redis;
commerce: CommerceConfig;
tokens: TokensConfig;
webhooks: WebhooksConfig;
schemaService: SchemaServiceConfig;
schemaPolicyService: SchemaPolicyServiceConfig;
githubApp: GitHubApplicationConfig | null;
@ -159,13 +157,13 @@ export function createRegistry({
} | null;
schemaConfig: SchemaModuleConfig;
supportConfig: SupportConfig | null;
emailsEndpoint?: string;
organizationOIDC: boolean;
pubSub: HivePubSub;
appDeploymentsEnabled: boolean;
schemaProposalsEnabled: boolean;
otelTracingEnabled: boolean;
prometheus: null | Record<string, unknown>;
taskScheduler: TaskScheduler;
}) {
const s3Config: S3Config = [
{
@ -215,7 +213,6 @@ export function createRegistry({
Mutex,
DistributedCache,
CryptoProvider,
Emails,
InMemoryRateLimitStore,
InMemoryRateLimiter,
{
@ -246,12 +243,6 @@ export function createRegistry({
useValue: tokens,
scope: Scope.Singleton,
},
{
provide: WEBHOOKS_CONFIG,
useValue: webhooks,
scope: Scope.Singleton,
},
{
provide: SCHEMA_SERVICE_CONFIG,
useValue: schemaService,
@ -330,16 +321,12 @@ export function createRegistry({
return new PrometheusConfig(!!prometheus);
},
},
];
if (emailsEndpoint) {
providers.push({
provide: EMAILS_ENDPOINT,
useValue: emailsEndpoint,
{
provide: TaskScheduler,
useValue: taskScheduler,
scope: Scope.Singleton,
});
modules.push(supportModule);
}
},
];
if (supportConfig) {
providers.push(provideSupportConfig(supportConfig));

View file

@ -18,7 +18,6 @@ export type {
OrganizationBilling,
OrganizationInvitation,
} from './shared/entities';
export { createTaskRunner } from './modules/shared/lib/task-runner';
export { minifySchema } from './shared/schema';
export { HiveError } from './shared/errors';
export { ProjectType } from './__generated__/types';

View file

@ -1,10 +1,7 @@
import { CONTEXT, Inject, Injectable, Scope } from 'graphql-modules';
import type { WebhooksApi } from '@hive/webhooks';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { HttpClient } from '../../../shared/providers/http-client';
import { Injectable, Scope } from 'graphql-modules';
import { TaskScheduler } from '@hive/workflows/kit';
import { SchemaChangeNotificationTask } from '@hive/workflows/tasks/schema-change-notification';
import { Logger } from '../../../shared/providers/logger';
import type { WebhooksConfig } from '../tokens';
import { WEBHOOKS_CONFIG } from '../tokens';
import type { CommunicationAdapter, SchemaChangeNotificationInput } from './common';
@Injectable({
@ -12,26 +9,12 @@ import type { CommunicationAdapter, SchemaChangeNotificationInput } from './comm
})
export class WebhookCommunicationAdapter implements CommunicationAdapter {
private logger: Logger;
private webhooksService;
constructor(
logger: Logger,
private http: HttpClient,
@Inject(WEBHOOKS_CONFIG) private config: WebhooksConfig,
@Inject(CONTEXT) context: GraphQLModules.ModuleContext,
private taskScheduler: TaskScheduler,
) {
this.logger = logger.child({ service: 'WebhookCommunicationAdapter' });
this.webhooksService = createTRPCProxyClient<WebhooksApi>({
links: [
httpLink({
url: `${config.endpoint}/trpc`,
fetch,
headers: {
'x-request-id': context.requestId,
},
}),
],
});
}
async sendSchemaChangeNotification(input: SchemaChangeNotificationInput) {
@ -41,20 +24,10 @@ export class WebhookCommunicationAdapter implements CommunicationAdapter {
input.event.project.id,
input.event.target.id,
);
try {
await this.webhooksService.schedule.mutate({
endpoint: input.channel.webhookEndpoint!,
event: input.event,
});
} catch (error) {
const errorText =
error instanceof Error
? error.toString()
: typeof error === 'string'
? error
: JSON.stringify(error);
this.logger.error(`Failed to send Webhook notification`, errorText);
}
await this.taskScheduler.scheduleTask(SchemaChangeNotificationTask, {
endpoint: input.channel.webhookEndpoint!,
event: input.event,
});
}
async sendChannelConfirmation() {

View file

@ -1,7 +0,0 @@
import { InjectionToken } from 'graphql-modules';
export interface WebhooksConfig {
endpoint: string;
}
export const WEBHOOKS_CONFIG = new InjectionToken<WebhooksConfig>('webhooks-endpoint');

View file

@ -2,11 +2,12 @@ import { stringify } from 'csv-stringify';
import { endOfDay, startOfDay } from 'date-fns';
import { Injectable, Scope } from 'graphql-modules';
import { traceFn } from '@hive/service-common';
import { TaskScheduler } from '@hive/workflows/kit';
import { AuditLogExportTask } from '@hive/workflows/tasks/audit-log-export';
import { captureException } from '@sentry/node';
import { Session } from '../../auth/lib/authz';
import { type AwsClient } from '../../cdn/providers/aws';
import { ClickHouse, sql } from '../../operations/providers/clickhouse-client';
import { Emails } from '../../shared/providers/emails';
import { Logger } from '../../shared/providers/logger';
import { Storage } from '../../shared/providers/storage';
import { formatToClickhouseDateTime } from './audit-log-recorder';
@ -33,7 +34,7 @@ export class AuditLogManager {
logger: Logger,
private clickHouse: ClickHouse,
private s3Config: AuditLogS3Config,
private emailProvider: Emails,
private taskScheduler: TaskScheduler,
private session: Session,
private storage: Storage,
) {
@ -212,7 +213,8 @@ export class AuditLogManager {
const organization = await this.storage.getOrganization({
organizationId,
});
await this.emailProvider.api?.sendAuditLogsReportEmail.mutate({
await this.taskScheduler.scheduleTask(AuditLogExportTask, {
email: email,
organizationName: organization.name,
organizationId: organization.id,

View file

@ -1,6 +1,9 @@
import { Inject, Injectable, Scope } from 'graphql-modules';
import { OrganizationReferenceInput } from 'packages/libraries/core/src/client/__generated__/types';
import { z } from 'zod';
import { TaskScheduler } from '@hive/workflows/kit';
import { OrganizationInvitationTask } from '@hive/workflows/tasks/organization-invitation';
import { OrganizationOwnershipTransferTask } from '@hive/workflows/tasks/organization-ownership-transfer';
import * as GraphQLSchema from '../../../__generated__/types';
import { Organization } from '../../../shared/entities';
import { HiveError } from '../../../shared/errors';
@ -9,7 +12,6 @@ import { Session } from '../../auth/lib/authz';
import { AuthManager } from '../../auth/providers/auth-manager';
import { BillingProvider } from '../../commerce/providers/billing.provider';
import { OIDCIntegrationsProvider } from '../../oidc-integrations/providers/oidc-integrations.provider';
import { Emails } from '../../shared/providers/emails';
import { IdTranslator } from '../../shared/providers/id-translator';
import { InMemoryRateLimiter } from '../../shared/providers/in-memory-rate-limiter';
import { Logger } from '../../shared/providers/logger';
@ -43,7 +45,7 @@ export class OrganizationManager {
private tokenStorage: TokenStorage,
private billingProvider: BillingProvider,
private oidcIntegrationProvider: OIDCIntegrationsProvider,
private emails: Emails,
private taskScheduler: TaskScheduler,
private organizationMemberRoles: OrganizationMemberRoles,
private organizationMembers: OrganizationMembers,
private resourceAssignments: ResourceAssignments,
@ -626,7 +628,7 @@ export class OrganizationManager {
step: 'invitingMembers',
}),
// schedule an email
this.emails.api?.sendOrganizationInviteEmail.mutate({
this.taskScheduler.scheduleTask(OrganizationInvitationTask, {
organizationId: invitation.organizationId,
organizationName: organization.name,
email,
@ -745,7 +747,7 @@ export class OrganizationManager {
userId: member.user.id,
});
await this.emails.api?.sendOrganizationOwnershipTransferEmail.mutate({
await this.taskScheduler.scheduleTask(OrganizationOwnershipTransferTask, {
email: member.user.email,
organizationId: organization.id,
organizationName: organization.name,

View file

@ -1,77 +0,0 @@
import { type Logger } from '../providers/logger';
/**
 * Create a task runner that repeatedly runs a task, waiting `interval`
 * milliseconds before each run. Each run is scheduled only after the
 * previous one has settled, so runs never overlap.
 *
 * @param args.run      The task to execute; rejections are caught and logged
 *                      by the underlying scheduler, and the loop continues.
 * @param args.interval Delay in milliseconds before each run.
 * @param args.logger   Logger used for scheduling/progress messages.
 * @param args.name     Optional task name used in log messages. Defaults to
 *                      'schema-purge' for backwards compatibility (it was
 *                      previously hard-coded despite the runner being generic).
 */
export const createTaskRunner = (args: {
  run: () => Promise<any>;
  interval: number;
  logger: Logger;
  name?: string;
}) => {
  let task: ReturnType<typeof scheduleTask> | null = null;
  let isStarted = false;
  let isStopped = false;

  function loop() {
    task = scheduleTask({
      runInMilliSeconds: args.interval,
      run: args.run,
      logger: args.logger,
      name: args.name ?? 'schema-purge',
    });
    // Re-arm for the next run once the current one settles, unless stop()
    // was requested in the meantime.
    void task.done.finally(() => {
      if (!isStopped) {
        loop();
      }
    });
  }

  return {
    /** Starts the loop; subsequent calls are no-ops. */
    start() {
      if (isStarted) {
        return;
      }
      isStarted = true;
      loop();
    },
    /**
     * Stops re-arming and cancels the pending timer, then waits for the
     * current task's `done` promise to settle.
     */
    async stop() {
      isStopped = true;
      if (task) {
        task.cancel();
        await task.done;
      }
    },
  };
};
/**
 * Schedules a single execution of `args.run` after `runInMilliSeconds`.
 * Returns a `done` promise that resolves when the run completes (or the
 * task is cancelled) and a `cancel` function that clears a still-pending
 * timer. Errors from `run` are logged, never rethrown.
 */
const scheduleTask = (args: {
runInMilliSeconds: number;
run: () => Promise<void>;
name: string;
logger: Logger;
}) => {
const runsAt = new Date(Date.now() + args.runInMilliSeconds).toISOString();
args.logger.info(`Scheduling task "${args.name}" to run at ${runsAt}.`);
// `timeout` is nulled once the callback fires, so cancel() only clears a
// timer that has not run yet.
let timeout: null | NodeJS.Timeout = setTimeout(async () => {
timeout = null;
args.logger.info(`Running task "${args.name}" to run at ${runsAt}.`);
try {
await args.run();
} catch (err: unknown) {
// Best-effort: failures are logged and the task still counts as done.
args.logger.error(`Failed to run task "${args.name}" to run at ${runsAt}.`, err);
}
args.logger.info(`Completed running task "${args.name}" to run at ${runsAt}.`);
deferred.resolve();
}, args.runInMilliSeconds);
// Declared after the setTimeout call; the callback only reads `deferred`
// when the timer fires, so the closure reference is safe.
const deferred = Promise.withResolvers<void>();
return {
done: deferred.promise,
cancel: () => {
if (timeout) {
clearTimeout(timeout);
}
// Release any `done` awaiters immediately; resolving again later (if the
// run was already in flight) is a no-op.
deferred.resolve();
},
};
};

View file

@ -1,23 +0,0 @@
import { Inject, Injectable, InjectionToken, Optional } from 'graphql-modules';
import type { EmailsApi } from '@hive/emails';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
// Injection token for the optional emails-service base URL.
export const EMAILS_ENDPOINT = new InjectionToken<string>('EMAILS_ENDPOINT');
/**
 * Thin tRPC client for the emails service.
 * When no endpoint is configured (the token is optional), `api` is `null`
 * and callers are expected to skip email scheduling.
 */
@Injectable()
export class Emails {
// tRPC proxy client for EmailsApi, or null when the service is disabled.
public api;
constructor(@Optional() @Inject(EMAILS_ENDPOINT) endpoint?: string) {
this.api = endpoint
? createTRPCProxyClient<EmailsApi>({
links: [
httpLink({
url: `${endpoint}/trpc`,
fetch,
}),
],
})
: null;
}
}

View file

@ -10,6 +10,5 @@ POSTGRES_PASSWORD=postgres
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=registry
EMAILS_ENDPOINT="http://localhost:6260"
WEB_APP_URL="http://localhost:3000"
STRIPE_SECRET_KEY="empty"

View file

@ -10,9 +10,9 @@
},
"devDependencies": {
"@hive/api": "workspace:*",
"@hive/emails": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@hive/workflows": "workspace:*",
"@sentry/node": "7.120.2",
"@trpc/client": "10.45.3",
"@trpc/server": "10.45.3",

View file

@ -79,7 +79,6 @@ const PostgresModel = zod.object({
});
const HiveServicesModel = zod.object({
EMAILS_ENDPOINT: emptyString(zod.string().url().optional()),
WEB_APP_URL: emptyString(zod.string().url().optional()),
});
@ -177,9 +176,6 @@ export const env = {
}
: null,
hiveServices: {
emails: {
endpoint: hiveServices.EMAILS_ENDPOINT,
},
webAppUrl: hiveServices.WEB_APP_URL,
},
rateLimit: {

View file

@ -11,6 +11,7 @@ import {
TracingInstance,
} from '@hive/service-common';
import { createConnectionString, createStorage as createPostgreSQLStorage } from '@hive/storage';
import { TaskScheduler } from '@hive/workflows/kit';
import * as Sentry from '@sentry/node';
import { commerceRouter } from './api';
import { env } from './environment';
@ -58,6 +59,8 @@ async function main() {
tracing ? [tracing.instrumentSlonik()] : undefined,
);
const taskScheduler = new TaskScheduler(postgres.pool.pool);
const usageEstimator = createEstimator({
logger: server.log,
clickhouse: {
@ -75,11 +78,7 @@ async function main() {
interval: env.rateLimit.limitCacheUpdateIntervalMs,
},
usageEstimator,
emails: env.hiveServices.emails.endpoint
? {
endpoint: env.hiveServices.emails.endpoint,
}
: undefined,
taskScheduler,
storage: postgres,
});

View file

@ -1,19 +1,9 @@
import type { EmailsApi } from '@hive/emails';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import type { TaskScheduler } from '@hive/workflows/kit';
import { UsageRateLimitExceededTask } from '@hive/workflows/tasks/usage-rate-limit-exceeded';
import { UsageRateLimitWarningTask } from '@hive/workflows/tasks/usage-rate-limit-warning';
import { env } from '../environment';
export function createEmailScheduler(config?: { endpoint: string }) {
const api = config?.endpoint
? createTRPCProxyClient<EmailsApi>({
links: [
httpLink({
url: `${config.endpoint}/trpc`,
fetch,
}),
],
})
: null;
export function createEmailScheduler(taskScheduler: TaskScheduler) {
let scheduledEmails: Promise<unknown>[] = [];
return {
@ -38,23 +28,28 @@ export function createEmailScheduler(config?: { endpoint: string }) {
current: number;
};
}) {
if (!api) {
return scheduledEmails.push(Promise.resolve());
}
return scheduledEmails.push(
api.sendRateLimitExceededEmail.mutate({
email: input.organization.email,
organizationId: input.organization.id,
organizationName: input.organization.name,
limit: input.usage.quota,
currentUsage: input.usage.current,
startDate: input.period.start,
endDate: input.period.end,
subscriptionManagementLink: `${env.hiveServices.webAppUrl}/${
input.organization.slug
}/view/subscription`,
}),
taskScheduler.scheduleTask(
UsageRateLimitExceededTask,
{
email: input.organization.email,
organizationId: input.organization.id,
organizationName: input.organization.name,
limit: input.usage.quota,
currentUsage: input.usage.current,
startDate: input.period.start,
endDate: input.period.end,
subscriptionManagementLink: `${env.hiveServices.webAppUrl}/${
input.organization.slug
}/view/subscription`,
},
{
dedupe: {
key: p => p.organizationId,
ttl: 1000 * 60 * 60 * 24 * 32,
},
},
),
);
},
@ -74,23 +69,28 @@ export function createEmailScheduler(config?: { endpoint: string }) {
current: number;
};
}) {
if (!api) {
return scheduledEmails.push(Promise.resolve());
}
return scheduledEmails.push(
api.sendRateLimitWarningEmail.mutate({
email: input.organization.email,
organizationId: input.organization.id,
organizationName: input.organization.name,
limit: input.usage.quota,
currentUsage: input.usage.current,
startDate: input.period.start,
endDate: input.period.end,
subscriptionManagementLink: `${env.hiveServices.webAppUrl}/${
input.organization.slug
}/view/subscription`,
}),
taskScheduler.scheduleTask(
UsageRateLimitWarningTask,
{
email: input.organization.email,
organizationId: input.organization.id,
organizationName: input.organization.name,
limit: input.usage.quota,
currentUsage: input.usage.current,
startDate: input.period.start,
endDate: input.period.end,
subscriptionManagementLink: `${env.hiveServices.webAppUrl}/${
input.organization.slug
}/view/subscription`,
},
{
dedupe: {
key: p => p.organizationId,
ttl: 1000 * 60 * 60 * 24 * 32,
},
},
),
);
},
};

View file

@ -2,6 +2,7 @@ import { endOfMonth, startOfMonth } from 'date-fns';
import type { Storage } from '@hive/api';
import type { ServiceLogger } from '@hive/service-common';
import { traceInline } from '@hive/service-common';
import { TaskScheduler } from '@hive/workflows/kit';
import * as Sentry from '@sentry/node';
import { UsageEstimator } from '../usage-estimator/estimator';
import { createEmailScheduler } from './emails';
@ -52,13 +53,11 @@ export function createRateLimiter(config: {
rateLimitConfig: {
interval: number;
};
emails?: {
endpoint: string;
};
taskScheduler: TaskScheduler;
usageEstimator: UsageEstimator;
storage: Storage;
}) {
const emails = createEmailScheduler(config.emails);
const emails = createEmailScheduler(config.taskScheduler);
const { logger, usageEstimator, storage } = config;
let intervalHandle: ReturnType<typeof setInterval> | null = null;
@ -184,6 +183,7 @@ export function createRateLimiter(config: {
function getOrganizationFromCache(targetId: string) {
const orgId = targetIdToOrgLookup.get(targetId);
return orgId ? cachedResult.get(orgId) : undefined;
}

View file

@ -15,8 +15,7 @@ TOKENS_ENDPOINT="http://localhost:6001"
SCHEMA_ENDPOINT="http://localhost:6500"
SCHEMA_POLICY_ENDPOINT="http://localhost:6600"
COMMERCE_ENDPOINT="http://localhost:4013"
WEBHOOKS_ENDPOINT="http://localhost:6250"
EMAILS_ENDPOINT="http://localhost:6260"
REDIS_HOST="localhost"
REDIS_PORT="6379"
REDIS_PASSWORD=""

View file

@ -10,9 +10,7 @@ The GraphQL API for GraphQL Hive.
| `ENCRYPTION_SECRET` | **Yes** | Secret for encrypting stuff. | `8ebe95cg21c1fee355e9fa32c8c33141` |
| `WEB_APP_URL` | **Yes** | The url of the web app. | `http://127.0.0.1:3000` |
| `GRAPHQL_PUBLIC_ORIGIN` | **Yes** | The origin of the GraphQL server. | `http://127.0.0.1:4013` |
| `EMAILS_ENDPOINT` | **Yes** | The endpoint of the GraphQL Hive Email service. | `http://127.0.0.1:6260` |
| `TOKENS_ENDPOINT` | **Yes** | The endpoint of the tokens service. | `http://127.0.0.1:6001` |
| `WEBHOOKS_ENDPOINT` | **Yes** | The endpoint of the webhooks service. | `http://127.0.0.1:6250` |
| `SCHEMA_ENDPOINT` | **Yes** | The endpoint of the schema service. | `http://127.0.0.1:6500` |
| `SCHEMA_POLICY_ENDPOINT` | **No** | The endpoint of the schema policy service. | `http://127.0.0.1:6600` |
| `POSTGRES_SSL` | No | Whether the postgres connection should be established via SSL. | `1` (enabled) or `0` (disabled) |

View file

@ -31,6 +31,7 @@
"@hive/schema": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@hive/workflows": "workspace:*",
"@sentry/integrations": "7.114.0",
"@sentry/node": "7.120.2",
"@swc/core": "1.13.5",

View file

@ -32,8 +32,6 @@ const EnvironmentModel = zod.object({
.url(),
SCHEMA_POLICY_ENDPOINT: emptyString(zod.string().url().optional()),
TOKENS_ENDPOINT: zod.string().url(),
EMAILS_ENDPOINT: emptyString(zod.string().url().optional()),
WEBHOOKS_ENDPOINT: zod.string().url(),
SCHEMA_ENDPOINT: zod.string().url(),
AUTH_ORGANIZATION_OIDC: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
AUTH_REQUIRE_EMAIL_VERIFICATION: emptyString(
@ -405,8 +403,6 @@ export const env = {
endpoint: base.SCHEMA_POLICY_ENDPOINT,
}
: null,
emails: base.EMAILS_ENDPOINT ? { endpoint: base.EMAILS_ENDPOINT } : null,
webhooks: { endpoint: base.WEBHOOKS_ENDPOINT },
schema: { endpoint: base.SCHEMA_ENDPOINT },
},
http: {

View file

@ -16,7 +16,6 @@ import { z } from 'zod';
import formDataPlugin from '@fastify/formbody';
import {
createRegistry,
createTaskRunner,
CryptoProvider,
LogFn,
Logger,
@ -39,10 +38,10 @@ import {
registerTRPC,
reportReadiness,
startMetrics,
traceInline,
TracingInstance,
} from '@hive/service-common';
import { createConnectionString, createStorage as createPostgreSQLStorage } from '@hive/storage';
import { TaskScheduler } from '@hive/workflows/kit';
import {
contextLinesIntegration,
dedupeIntegration,
@ -195,6 +194,7 @@ export async function main() {
10,
tracing ? [tracing.instrumentSlonik()] : [],
);
const taskScheduler = new TaskScheduler(storage.pool.pool);
const redis = createRedisClient('Redis', env.redis, server.log.child({ source: 'Redis' }));
@ -209,50 +209,6 @@ export async function main() {
}),
}) as HivePubSub;
let dbPurgeTaskRunner: null | ReturnType<typeof createTaskRunner> = null;
if (!env.hiveServices.commerce) {
server.log.debug('Commerce service is disabled. Skip scheduling purge tasks.');
} else {
server.log.debug(
`Commerce service is enabled. Start scheduling purge tasks every ${env.hiveServices.commerce.dateRetentionPurgeIntervalMinutes} minutes.`,
);
dbPurgeTaskRunner = createTaskRunner({
run: traceInline(
'Purge Task',
{
resultAttributes: result => ({
'purge.schema.check.count': result.deletedSchemaCheckCount,
'purge.sdl.store.count': result.deletedSdlStoreCount,
'purge.change.approval.count': result.deletedSchemaChangeApprovalCount,
'purge.contract.approval.count': result.deletedContractSchemaChangeApprovalCount,
}),
},
async () => {
try {
const result = await storage.purgeExpiredSchemaChecks({
expiresAt: new Date(),
});
server.log.debug(
'Finished running schema check purge task. (deletedSchemaCheckCount=%s deletedSdlStoreCount=%s)',
result.deletedSchemaCheckCount,
result.deletedSdlStoreCount,
);
return result;
} catch (error) {
captureException(error);
throw error;
}
},
),
interval: env.hiveServices.commerce.dateRetentionPurgeIntervalMinutes * 60 * 1000,
logger: server.log,
});
dbPurgeTaskRunner.start();
}
registerShutdown({
logger: server.log,
async onShutdown() {
@ -262,10 +218,6 @@ export async function main() {
await server.close();
server.log.info('Stopping Storage handler...');
await storage.destroy();
if (dbPurgeTaskRunner) {
server.log.info('Stopping expired schema check purge task...');
await dbPurgeTaskRunner.stop();
}
server.log.info('Shutdown complete.');
},
});
@ -346,10 +298,6 @@ export async function main() {
commerce: {
endpoint: env.hiveServices.commerce ? env.hiveServices.commerce.endpoint : null,
},
emailsEndpoint: env.hiveServices.emails ? env.hiveServices.emails.endpoint : undefined,
webhooks: {
endpoint: env.hiveServices.webhooks.endpoint,
},
schemaService: {
endpoint: env.hiveServices.schema.endpoint,
},
@ -425,6 +373,7 @@ export async function main() {
schemaProposalsEnabled: env.featureFlags.schemaProposalsEnabled,
otelTracingEnabled: env.featureFlags.otelTracingEnabled,
prometheus: env.prometheus,
taskScheduler,
});
const organizationAccessTokenStrategy = (logger: Logger) =>
@ -519,6 +468,7 @@ export async function main() {
crypto,
logger: server.log,
redis,
taskScheduler,
broadcastLog(id, message) {
pubSub.publish('oidcIntegrationLogs', id, {
timestamp: new Date().toISOString(),

View file

@ -11,8 +11,9 @@ import type { TypeInput as ThirdPartEmailPasswordTypeInput } from 'supertokens-n
import type { TypeInput } from 'supertokens-node/types';
import zod from 'zod';
import { type Storage } from '@hive/api';
import type { EmailsApi } from '@hive/emails';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { TaskScheduler } from '@hive/workflows/kit';
import { EmailVerificationTask } from '@hive/workflows/tasks/email-verification';
import { PasswordResetTask } from '@hive/workflows/tasks/password-reset';
import { createInternalApiCaller } from './api';
import { env } from './environment';
import {
@ -37,11 +38,10 @@ export const backendConfig = (requirements: {
logger: FastifyBaseLogger;
broadcastLog: BroadcastOIDCIntegrationLog;
redis: Redis;
taskScheduler: TaskScheduler;
}): TypeInput => {
const { logger } = requirements;
const emailsService = createTRPCProxyClient<EmailsApi>({
links: [httpLink({ url: `${env.hiveServices.emails?.endpoint}/trpc` })],
});
const internalApi = createInternalApiCaller({
storage: requirements.storage,
crypto: requirements.crypto,
@ -134,13 +134,14 @@ export const backendConfig = (requirements: {
...originalImplementation,
async sendEmail(input) {
if (input.type === 'PASSWORD_RESET') {
await emailsService.sendPasswordResetEmail.mutate({
await requirements.taskScheduler.scheduleTask(PasswordResetTask, {
user: {
id: input.user.id,
email: input.user.email,
},
passwordResetLink: input.passwordResetLink,
});
return Promise.resolve();
}
@ -160,14 +161,13 @@ export const backendConfig = (requirements: {
...originalImplementation,
async sendEmail(input) {
if (input.type === 'EMAIL_VERIFICATION') {
await emailsService.sendEmailVerificationEmail.mutate({
await requirements.taskScheduler.scheduleTask(EmailVerificationTask, {
user: {
id: input.user.id,
email: input.user.email,
},
emailVerifyLink: input.emailVerifyLink,
});
return Promise.resolve();
}
},
@ -414,6 +414,7 @@ export function initSupertokens(requirements: {
logger: FastifyBaseLogger;
broadcastLog: BroadcastOIDCIntegrationLog;
redis: Redis;
taskScheduler: TaskScheduler;
}) {
supertokens.init(backendConfig(requirements));
}

View file

@ -10,6 +10,7 @@
},
"devDependencies": {
"@fastify/cors": "9.0.1",
"@graphql-hive/logger": "1.0.9",
"@graphql-hive/plugin-opentelemetry": "1.3.0",
"@opentelemetry/api": "1.9.0",
"@opentelemetry/auto-instrumentations-node": "0.67.2",

View file

@ -1,28 +1,68 @@
import { fastify } from 'fastify';
import { fastify, type FastifyBaseLogger } from 'fastify';
import cors from '@fastify/cors';
import { Logger } from '@graphql-hive/logger';
import * as Sentry from '@sentry/node';
import { useHTTPErrorHandler } from './http-error-handler';
import { useRequestLogging } from './request-logs';
export type { FastifyBaseLogger, FastifyRequest, FastifyReply } from 'fastify';
/* eslint-disable prefer-spread */
// Using spread causes typescript errors
// I prefer to disable eslint over having to use ts-ignore and ts not catching other errors.
/**
 * Adapts a Hive `Logger` to the `FastifyBaseLogger` interface so fastify can
 * log through it.
 *
 * Notes:
 * - `fatal` is forwarded to `logger.error` (presumably because the Hive
 *   logger exposes no fatal level — confirm).
 * - `child()` returns the same bridged instance; fastify's child bindings are
 *   ignored here.
 * - A Hive log level of `false` maps to fastify's 'silent'.
 */
function bridgeHiveLoggerToFastifyLogger(logger: Logger): FastifyBaseLogger {
  return {
    debug(...args: Array<any>) {
      logger.debug.apply(logger, args as any);
    },
    error(...args: Array<any>) {
      logger.error.apply(logger, args as any);
    },
    fatal(...args: Array<any>) {
      // Downgraded: forwarded to `error`.
      logger.error.apply(logger, args as any);
    },
    trace(...args: Array<any>) {
      logger.trace.apply(logger, args as any);
    },
    info(...args: Array<any>) {
      logger.info.apply(logger, args as any);
    },
    warn(...args: Array<any>) {
      logger.warn.apply(logger, args as any);
    },
    child() {
      // Bindings passed by fastify are dropped; the same bridge is reused.
      return this;
    },
    level: logger.level === false ? 'silent' : logger.level,
    silent() {},
  };
}
/* eslint-enable prefer-spread */
export async function createServer(options: {
sentryErrorHandler: boolean;
name: string;
log: {
requests: boolean;
level: string;
};
log:
| {
requests: boolean;
level: string;
}
| Logger;
cors?: boolean;
bodyLimit?: number;
}) {
const server = fastify({
disableRequestLogging: true,
bodyLimit: options.bodyLimit ?? 30e6, // 30mb by default
logger: {
level: options.log.level,
redact: ['request.options', 'options', 'request.headers.authorization'],
},
logger:
options.log instanceof Logger
? bridgeHiveLoggerToFastifyLogger(options.log)
: {
level: options.log.level,
redact: ['request.options', 'options', 'request.headers.authorization'],
},
maxParamLength: 5000,
requestIdHeader: 'x-request-id',
trustProxy: true,
@ -44,7 +84,7 @@ export async function createServer(options: {
await useHTTPErrorHandler(server, options.sentryErrorHandler);
if (options.log.requests) {
if (options.log instanceof Logger === false && options.log.requests) {
await useRequestLogging(server);
}

View file

@ -13,7 +13,10 @@ export function reportReadiness(isReady: boolean) {
readiness.set(isReady ? 1 : 0);
}
export async function startMetrics(instanceLabel: string | undefined, port = 10_254) {
export async function startMetrics(
instanceLabel: string | undefined,
port = 10_254,
): Promise<() => Promise<void>> {
promClient.collectDefaultMetrics({
labels: { instance: instanceLabel },
});
@ -26,7 +29,7 @@ export async function startMetrics(instanceLabel: string | undefined, port = 10_
server.route({
method: 'GET',
url: '/metrics',
async handler(req, res) {
async handler(_req, res) {
try {
void res.header('Content-Type', promClient.register.contentType);
const result = await promClient.register.metrics();
@ -40,8 +43,10 @@ export async function startMetrics(instanceLabel: string | undefined, port = 10_
await server.register(cors);
return await server.listen({
await server.listen({
port,
host: '::',
});
return () => server.close();
}

View file

@ -136,6 +136,12 @@ export interface document_preflight_scripts {
updated_at: Date;
}
export interface graphile_worker_deduplication {
dedupe_key: string;
expires_at: Date;
task_name: string;
}
export interface migration {
date: Date;
hash: string;
@ -477,6 +483,7 @@ export interface DBTables {
document_collection_documents: document_collection_documents;
document_collections: document_collections;
document_preflight_scripts: document_preflight_scripts;
graphile_worker_deduplication: graphile_worker_deduplication;
migration: migration;
oidc_integrations: oidc_integrations;
organization_access_tokens: organization_access_tokens;

View file

@ -0,0 +1,19 @@
# Workflow Service
Services for running asynchronous tasks and cron jobs — e.g. sending emails, dispatching
webhooks, and other maintenance/clean-up tasks.
## Structure
```
# Definition of Webhook Payload Models using zod
src/webhooks/*
# Task Definitions
src/tasks/*
# General lib
src/lib/*
```
## References
- [Graphile Worker Documentation](https://worker.graphile.org/)

View file

@ -0,0 +1,27 @@
{
"name": "@hive/workflows",
"type": "module",
"license": "MIT",
"private": true,
"scripts": {
"build": "tsx ../../../scripts/runify.ts",
"dev": "tsup-node --config ../../../configs/tsup/dev.config.node.ts src/dev.ts",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@graphql-hive/logger": "1.0.9",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@sentry/node": "7.120.2",
"@types/mjml": "4.7.1",
"@types/nodemailer": "7.0.4",
"bentocache": "1.1.0",
"dotenv": "16.4.7",
"graphile-worker": "0.16.6",
"mjml": "4.14.0",
"nodemailer": "7.0.11",
"sendmail": "1.6.1",
"slonik": "30.4.4",
"zod": "3.25.76"
}
}

View file

@ -0,0 +1,11 @@
import type { DatabasePool } from 'slonik';
import type { Logger } from '@graphql-hive/logger';
import type { EmailProvider } from './lib/emails/providers.js';
import { RequestBroker } from './lib/webhooks/send-webhook.js';
export type Context = {
logger: Logger;
email: EmailProvider;
pg: DatabasePool;
requestBroker: RequestBroker | null;
};

View file

@ -0,0 +1,15 @@
// Load environment variables from .env before the service entrypoint runs.
import { config } from 'dotenv';

config({
  debug: true,
  encoding: 'utf8',
});

// Import the service only after dotenv has populated process.env.
await import('./index.js');

// Exit cleanly when stdout's reader disappears (e.g. output piped to `head`);
// without this, an EPIPE error on stdout crashes the dev process.
process.stdout.on('error', function (err) {
  if (err.code == 'EPIPE') {
    process.exit(0);
  }
});

View file

@ -0,0 +1,238 @@
import zod from 'zod';
import { OpenTelemetryConfigurationModel } from '@hive/service-common';
import { createConnectionString } from '@hive/storage';
import { RequestBroker } from './lib/webhooks/send-webhook.js';
const isNumberString = (input: unknown) => zod.string().regex(/^\d+$/).safeParse(input).success;
/**
 * Coerces a number, or a string of digits, to a number.
 * Returns `undefined` for any other input so that zod's `preprocess`
 * falls through to the downstream schema's own error reporting.
 */
const numberFromNumberOrNumberString = (input: unknown): number | undefined => {
  // Strict equality; the previous `==` was an accidental loose comparison.
  if (typeof input === 'number') {
    return input;
  }
  if (isNumberString(input)) {
    return Number(input);
  }
  return undefined;
};
const NumberFromString = zod.preprocess(numberFromNumberOrNumberString, zod.number().min(1));
// treat an empty string (`''`) as undefined
const emptyString = <T extends zod.ZodType>(input: T) => {
return zod.preprocess((value: unknown) => {
if (value === '') return undefined;
return value;
}, input);
};
const EnvironmentModel = zod.object({
PORT: emptyString(NumberFromString.optional()).default(3014),
ENVIRONMENT: emptyString(zod.string().optional()),
RELEASE: emptyString(zod.string().optional()),
HEARTBEAT_ENDPOINT: emptyString(zod.string().url().optional()),
EMAIL_FROM: zod.string().email(),
});
const SentryModel = zod.union([
zod.object({
SENTRY: emptyString(zod.literal('0').optional()),
}),
zod.object({
SENTRY: zod.literal('1'),
SENTRY_DSN: zod.string(),
}),
]);
const PostgresModel = zod.object({
POSTGRES_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
POSTGRES_HOST: zod.string(),
POSTGRES_PORT: NumberFromString,
POSTGRES_DB: zod.string(),
POSTGRES_USER: zod.string(),
POSTGRES_PASSWORD: emptyString(zod.string().optional()),
});
const PostmarkEmailModel = zod.object({
EMAIL_PROVIDER: zod.literal('postmark'),
EMAIL_PROVIDER_POSTMARK_TOKEN: zod.string(),
EMAIL_PROVIDER_POSTMARK_MESSAGE_STREAM: zod.string(),
});
const SMTPEmailModel = zod.object({
EMAIL_PROVIDER: zod.literal('smtp'),
EMAIL_PROVIDER_SMTP_PROTOCOL: emptyString(
zod.union([zod.literal('smtp'), zod.literal('smtps')]).optional(),
),
EMAIL_PROVIDER_SMTP_HOST: zod.string(),
EMAIL_PROVIDER_SMTP_PORT: NumberFromString,
EMAIL_PROVIDER_SMTP_AUTH_USERNAME: zod.string(),
EMAIL_PROVIDER_SMTP_AUTH_PASSWORD: zod.string(),
EMAIL_PROVIDER_SMTP_REJECT_UNAUTHORIZED: emptyString(
zod.union([zod.literal('0'), zod.literal('1')]).optional(),
),
});
const SendmailEmailModel = zod.object({
EMAIL_PROVIDER: zod.literal('sendmail'),
});
const MockEmailProviderModel = zod.object({
EMAIL_PROVIDER: zod.literal('mock'),
});
const EmailProviderModel = zod.union([
PostmarkEmailModel,
MockEmailProviderModel,
SMTPEmailModel,
SendmailEmailModel,
]);
const RequestBrokerModel = zod.union([
zod.object({
REQUEST_BROKER: emptyString(zod.literal('0').optional()),
}),
zod.object({
REQUEST_BROKER: zod.literal('1'),
REQUEST_BROKER_ENDPOINT: zod.string().min(1),
REQUEST_BROKER_SIGNATURE: zod.string().min(1),
}),
]);
const PrometheusModel = zod.object({
PROMETHEUS_METRICS: emptyString(
zod.union([zod.literal('0'), zod.literal('1')]).optional(),
).default('0'),
PROMETHEUS_METRICS_LABEL_INSTANCE: emptyString(zod.string().optional()).default('workflows'),
PROMETHEUS_METRICS_PORT: emptyString(NumberFromString.optional()).default(10254),
});
const LogModel = zod.object({
LOG_LEVEL: emptyString(
zod
.union([
zod.literal('trace'),
zod.literal('debug'),
zod.literal('info'),
zod.literal('warn'),
zod.literal('error'),
])
.optional(),
),
REQUEST_LOGGING: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()).default(
'1',
),
});
const configs = {
base: EnvironmentModel.safeParse(process.env),
email: EmailProviderModel.safeParse(process.env),
sentry: SentryModel.safeParse(process.env),
postgres: PostgresModel.safeParse(process.env),
prometheus: PrometheusModel.safeParse(process.env),
log: LogModel.safeParse(process.env),
tracing: OpenTelemetryConfigurationModel.safeParse(process.env),
requestBroker: RequestBrokerModel.safeParse(process.env),
};
const environmentErrors: Array<string> = [];
for (const config of Object.values(configs)) {
if (config.success === false) {
environmentErrors.push(JSON.stringify(config.error.format(), null, 4));
}
}
if (environmentErrors.length) {
const fullError = environmentErrors.join(`\n`);
console.error('❌ Invalid environment variables:', fullError);
process.exit(1);
}
/**
 * Unwraps a zod `safeParse` result.
 *
 * All parse failures are already collected and reported (with a process exit)
 * by the validation loop above, so throwing here indicates a programming
 * error rather than bad configuration.
 *
 * @throws Error when called with an unsuccessful parse result.
 */
function extractConfig<Input, Output>(config: zod.SafeParseReturnType<Input, Output>): Output {
  if (!config.success) {
    // Replaces the former unhelpful 'Something went wrong.' message.
    throw new Error(
      'extractConfig called with a failed parse result; environment validation should have exited earlier.',
    );
  }
  return config.data;
}
const base = extractConfig(configs.base);
const email = extractConfig(configs.email);
const postgres = extractConfig(configs.postgres);
const sentry = extractConfig(configs.sentry);
const prometheus = extractConfig(configs.prometheus);
const log = extractConfig(configs.log);
const tracing = extractConfig(configs.tracing);
const requestBroker = extractConfig(configs.requestBroker);
const emailProviderConfig =
email.EMAIL_PROVIDER === 'postmark'
? ({
provider: 'postmark' as const,
token: email.EMAIL_PROVIDER_POSTMARK_TOKEN,
messageStream: email.EMAIL_PROVIDER_POSTMARK_MESSAGE_STREAM,
} as const)
: email.EMAIL_PROVIDER === 'smtp'
? ({
provider: 'smtp' as const,
protocol: email.EMAIL_PROVIDER_SMTP_PROTOCOL ?? 'smtp',
host: email.EMAIL_PROVIDER_SMTP_HOST,
port: email.EMAIL_PROVIDER_SMTP_PORT,
auth: {
user: email.EMAIL_PROVIDER_SMTP_AUTH_USERNAME,
pass: email.EMAIL_PROVIDER_SMTP_AUTH_PASSWORD,
},
tls: {
rejectUnauthorized: email.EMAIL_PROVIDER_SMTP_REJECT_UNAUTHORIZED !== '0',
},
} as const)
: email.EMAIL_PROVIDER === 'sendmail'
? ({ provider: 'sendmail' } as const)
: ({ provider: 'mock' } as const);
export type EmailProviderConfig = typeof emailProviderConfig;
export type PostmarkEmailProviderConfig = Extract<EmailProviderConfig, { provider: 'postmark' }>;
export type SMTPEmailProviderConfig = Extract<EmailProviderConfig, { provider: 'smtp' }>;
export type SendmailEmailProviderConfig = Extract<EmailProviderConfig, { provider: 'sendmail' }>;
export type MockEmailProviderConfig = Extract<EmailProviderConfig, { provider: 'mock' }>;
/**
 * Fully-validated runtime configuration for the workflows service,
 * assembled from the zod-parsed environment models above.
 */
export const env = {
  environment: base.ENVIRONMENT,
  release: base.RELEASE ?? 'local',
  http: {
    // NOTE(review): PORT has a zod default of 3014, so `base.PORT` is never
    // undefined and the 6260 fallback (the legacy emails port) appears to be
    // dead code — confirm intended.
    port: base.PORT ?? 6260,
  },
  tracing: {
    enabled: !!tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
    collectorEndpoint: tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
  },
  email: {
    provider: emailProviderConfig,
    emailFrom: base.EMAIL_FROM,
  },
  // Sentry is only configured when SENTRY='1' (SENTRY_DSN is then required).
  sentry: sentry.SENTRY === '1' ? { dsn: sentry.SENTRY_DSN } : null,
  log: {
    level: log.LOG_LEVEL ?? 'info',
  },
  prometheus:
    prometheus.PROMETHEUS_METRICS === '1'
      ? {
          labels: {
            instance: prometheus.PROMETHEUS_METRICS_LABEL_INSTANCE ?? 'workflows',
          },
          port: prometheus.PROMETHEUS_METRICS_PORT ?? 10_254,
        }
      : null,
  postgres: {
    connectionString: createConnectionString({
      ssl: postgres.POSTGRES_SSL === '1',
      host: postgres.POSTGRES_HOST,
      db: postgres.POSTGRES_DB,
      password: postgres.POSTGRES_PASSWORD,
      port: postgres.POSTGRES_PORT,
      user: postgres.POSTGRES_USER,
    }),
  },
  // Optional broker used to proxy outgoing webhook requests.
  requestBroker:
    requestBroker.REQUEST_BROKER === '1'
      ? ({
          endpoint: requestBroker.REQUEST_BROKER_ENDPOINT,
          signature: requestBroker.REQUEST_BROKER_SIGNATURE,
        } satisfies RequestBroker)
      : null,
  httpHeartbeat: base.HEARTBEAT_ENDPOINT ? { endpoint: base.HEARTBEAT_ENDPOINT } : null,
} as const;

View file

@ -0,0 +1,154 @@
import { hostname } from 'node:os';
import { run } from 'graphile-worker';
import { createPool } from 'slonik';
import { Logger } from '@graphql-hive/logger';
import {
createServer,
registerShutdown,
reportReadiness,
startHeartbeats,
startMetrics,
} from '@hive/service-common';
import * as Sentry from '@sentry/node';
import { Context } from './context.js';
import { env } from './environment.js';
import { createEmailProvider } from './lib/emails/providers.js';
import { bridgeFastifyLogger, bridgeGraphileLogger } from './logger.js';
import { createTaskEventEmitter } from './task-events.js';
if (env.sentry) {
Sentry.init({
serverName: hostname(),
dist: 'workflows',
environment: env.environment,
dsn: env.sentry.dsn,
release: env.release,
});
}
/**
* Registered Task Definitions.
*/
const modules = await Promise.all([
import('./tasks/audit-log-export.js'),
import('./tasks/email-verification.js'),
import('./tasks/organization-invitation.js'),
import('./tasks/organization-ownership-transfer.js'),
import('./tasks/password-reset.js'),
import('./tasks/purge-expired-dedupe-keys.js'),
import('./tasks/purge-expired-schema-checks.js'),
import('./tasks/schema-change-notification.js'),
import('./tasks/usage-rate-limit-exceeded.js'),
import('./tasks/usage-rate-limit-warning.js'),
]);
const crontab = `
# Purge expired schema checks every Sunday at 10:00AM
0 10 * * 0 purgeExpiredSchemaChecks
# Every day at 3:00 AM
0 3 * * * purgeExpiredDedupeKeys
`;
const pg = await createPool(env.postgres.connectionString);
const logger = new Logger({ level: env.log.level });
logger.info({ pid: process.pid }, 'starting workflow service');
const stopHttpHeartbeat = env.httpHeartbeat
? startHeartbeats({
enabled: true,
endpoint: env.httpHeartbeat.endpoint,
intervalInMS: 20_000,
onError: error => logger.error({ error }, 'Heartbeat failed.'),
isReady: () => true,
})
: null;
const context: Context = {
logger,
email: createEmailProvider(env.email.provider, env.email.emailFrom),
pg,
requestBroker: env.requestBroker,
};
const server = await createServer({
sentryErrorHandler: !!env.sentry,
name: 'workflows',
log: logger,
});
server.route({
method: ['GET', 'HEAD'],
url: '/_health',
handler(_req, res) {
void res.status(200).send();
},
});
server.route({
method: ['GET', 'HEAD'],
url: '/_readiness',
handler(_, res) {
reportReadiness(true);
void res.status(200).send();
},
});
if (context.email.id === 'mock') {
server.route({
method: ['GET'],
url: '/_history',
handler(_, res) {
void res.status(200).send(context.email.history);
},
});
}
await server.listen({
port: env.http.port,
host: '::',
});
const shutdownMetrics = env.prometheus
? await startMetrics(env.prometheus.labels.instance, env.prometheus.port)
: null;
const runner = await run({
logger: bridgeGraphileLogger(logger),
crontab,
pgPool: pg.pool,
taskList: Object.fromEntries(modules.map(module => module.task(context))),
noHandleSignals: true,
events: createTaskEventEmitter(),
noPreparedStatements: true,
});
registerShutdown({
logger: bridgeFastifyLogger(logger),
async onShutdown() {
try {
logger.info('Stopping task runner.');
await runner.stop();
logger.info('Task runner shutdown successful.');
logger.info('Shutdown postgres connection.');
await pg.end();
logger.info('Shutdown postgres connection successful.');
if (shutdownMetrics) {
logger.info('Stopping prometheus endpoint');
await shutdownMetrics();
logger.info('Stopping prometheus endpoint successful.');
}
if (stopHttpHeartbeat) {
logger.info('Stop HTTP heartbeat');
stopHttpHeartbeat();
logger.info('HTTP heartbeat stopped');
}
logger.info('Stopping HTTP server');
await server.close();
logger.info('HTTP server stopped');
} catch (error: unknown) {
logger.error({ error }, 'Unexpected error occurred');
process.exit(1);
}
},
});

View file

@ -0,0 +1,183 @@
import { BentoCache, bentostore } from 'bentocache';
import { memoryDriver } from 'bentocache/build/src/drivers/memory';
import { makeWorkerUtils, WorkerUtils, type JobHelpers, type Task } from 'graphile-worker';
import type { Pool } from 'pg';
import { z } from 'zod';
import { Logger } from '@graphql-hive/logger';
import type { Context } from './context';
import { bridgeGraphileLogger } from './logger';
export type TaskDefinition<TName extends string, TModel> = {
name: TName;
schema: z.ZodTypeAny & { _output: TModel };
};
type TaskImplementationArgs<TPayload> = {
input: TPayload;
context: Context;
logger: Logger;
helpers: JobHelpers;
};
export type TaskImplementation<TPayload> = (
args: TaskImplementationArgs<TPayload>,
) => Promise<void>;
/**
 * Identity helper that pins the inferred name/payload types of a task
 * definition, keeping implementations and schedulers type-aligned.
 */
export function defineTask<TName extends string, TModel>(
  definition: TaskDefinition<TName, TModel>,
): TaskDefinition<TName, TModel> {
  return definition;
}
/**
 * Implement a task.
 *
 * Wraps a task definition and its implementation into the
 * `(context) => [name, Task]` shape consumed by graphile-worker's task list.
 *
 * The wrapper:
 * - validates the raw job payload against an envelope schema of
 *   `{ requestId?, input }`, where `input` uses the definition's schema;
 * - hands the implementation a child logger annotated with request and
 *   graphile-worker job metadata for correlation.
 */
export function implementTask<TPayload>(
  taskDefinition: TaskDefinition<string, TPayload>,
  implementation: TaskImplementation<TPayload>,
): (context: Context) => [string, Task] {
  // Envelope schema: the scheduler wraps every payload as { requestId?, input }.
  const schema = z.object({
    requestId: z.string().optional(),
    input: taskDefinition.schema,
  });

  return function (context) {
    return [
      taskDefinition.name,
      function (unsafePayload, helpers) {
        // Throws when the queued payload does not match the schema.
        const payload = schema.parse(unsafePayload);
        return implementation({
          input: payload.input,
          context,
          helpers,
          // Child logger carries correlation ids for tracing a single job run.
          logger: context.logger.child({
            'request.id': payload.requestId,
            'job.id': helpers.job.id,
            'job.queueId': helpers.job.job_queue_id,
            'job.attempts': helpers.job.attempts,
            'job.maxAttempts': helpers.job.max_attempts,
            'job.priority': helpers.job.priority,
            'job.taskId': helpers.job.task_id,
          }),
        });
      },
    ];
  };
}
/**
 * Schedules tasks onto the graphile-worker queue backing the workflows service.
 *
 * Supports optional de-duplication of enqueues through two layers: an
 * in-memory BentoCache (fast path) and the `graphile_worker_deduplication`
 * Postgres table (shared across instances).
 */
export class TaskScheduler {
  // graphile-worker utilities (addJob, withPgClient, ...) — created eagerly, awaited on use.
  tools: Promise<WorkerUtils>;
  // In-memory L1 cache sitting in front of the Postgres dedupe table.
  cache: BentoCache<{ store: ReturnType<typeof bentostore> }>;

  constructor(
    pgPool: Pool,
    private logger: Logger = new Logger(),
  ) {
    this.tools = makeWorkerUtils({
      pgPool,
      logger: bridgeGraphileLogger(logger),
    });
    this.cache = new BentoCache({
      default: 'taskSchedule',
      stores: {
        taskSchedule: bentostore().useL1Layer(
          memoryDriver({
            maxItems: 10_000,
            prefix: 'bentocache:graphile_worker_deduplication',
          }),
        ),
      },
    });
  }

  /**
   * Validates `payload` against the task's schema and enqueues the task.
   *
   * When `opts.dedupe` is provided, the task is enqueued at most once per
   * dedupe key until the key's TTL expires; duplicate attempts are skipped.
   */
  async scheduleTask<TPayload>(
    taskDefinition: TaskDefinition<string, TPayload>,
    payload: TPayload,
    opts?: {
      requestId?: string;
      /** Ensures the task is scheduled only once. */
      dedupe?: {
        /** dedupe key for this task */
        key: string | ((payload: TPayload) => string);
        /** how long should the task be de-duped in milliseconds */
        ttl: number;
      };
    },
  ) {
    this.logger.info(
      {
        'job.taskId': taskDefinition.name,
      },
      'attempt enqueue task',
    );
    // Throws when the payload does not satisfy the task's schema.
    const input = taskDefinition.schema.parse(payload);
    const tools = await this.tools;

    if (opts?.dedupe) {
      const dedupeKey =
        typeof opts.dedupe.key === 'string' ? opts.dedupe.key : opts.dedupe.key(payload);
      const expiresAt = new Date(new Date().getTime() + opts.dedupe.ttl).toISOString();
      // Mutated from inside the cache factory below. When the in-memory cache
      // already holds the key, the factory never runs and the attempt is skipped.
      let shouldSkip = true;
      await this.cache.getOrSet({
        key: `${taskDefinition.name}:${dedupeKey}`,
        ttl: opts.dedupe.ttl,
        async factory() {
          return await tools.withPgClient(async client => {
            // Claims the dedupe key: inserts it, or refreshes it only when the
            // previous claim has expired. No row comes back while another
            // claim is still active, which makes us skip the enqueue.
            // (xmax = 0 marks a fresh insert; only the row count is used here.)
            const result = await client.query(
              `
                INSERT INTO "graphile_worker_deduplication" ("task_name", "dedupe_key", "expires_at")
                VALUES($1, $2, $3)
                ON CONFLICT ("task_name", "dedupe_key")
                DO
                  UPDATE SET "expires_at" = EXCLUDED.expires_at
                  WHERE "graphile_worker_deduplication"."expires_at" < NOW()
                RETURNING xmax = 0 AS "inserted"
              `,
              [taskDefinition.name, dedupeKey, expiresAt],
            );
            shouldSkip = result.rows.length === 0;
            return true;
          });
        },
      });

      if (shouldSkip) {
        this.logger.info(
          {
            'job.taskId': taskDefinition.name,
          },
          'enqueue skipped due to dedupe',
        );
        return;
      }
    }

    const job = await tools.addJob(taskDefinition.name, {
      requestId: opts?.requestId,
      input,
    });
    this.logger.info(
      {
        'job.taskId': taskDefinition.name,
        'job.id': job.id,
      },
      'task enqueued.',
    );
  }

  /** Releases the worker utils' resources. Call on service shutdown. */
  async dispose() {
    await (await this.tools).release();
  }
}

View file

@ -0,0 +1,64 @@
import mjml2html from 'mjml';
import { mjml, type MJMLValue } from './mjml.js';
export { mjml };
/**
 * Renders a body paragraph (`mj-text`) with the shared typography settings.
 * Plain strings are HTML-escaped by the `mjml` tag; pass an MJMLValue to embed markup.
 */
export function paragraph(content: string | MJMLValue) {
  return mjml`
    <mj-text padding-bottom="10px" line-height="1.6" font-size="16px">
      ${content}
    </mj-text>
  `;
}
/** Renders a left-aligned call-to-action button linking to `input.url`. */
export function button(input: { url: string; text: string }) {
  return mjml`
    <mj-button align="left" href="${input.url}" font-size="16px" border-radius="3px" color="#fff" background-color="#245850">
      ${input.text}
    </mj-button>
  `;
}
/**
 * Renders a complete Hive Console email document (header, titled body
 * section, footer) to an HTML string via MJML.
 *
 * @throws Error when the MJML renderer reports any errors
 *   (all error messages are joined into one message).
 */
export function email(input: { title: string | MJMLValue; body: MJMLValue }) {
  const body = mjml`
    <mjml>
      <mj-body>
        <mj-section background-color="#e6eded">
          <mj-column>
            <mj-text color="#245850" font-size="28px" font-weight="300">
              Hive Console
            </mj-text>
          </mj-column>
        </mj-section>
        <mj-section>
          <mj-column>
            <mj-text color="#245850" font-size="24px" font-weight="300" padding-bottom="20px">
              ${input.title}
            </mj-text>
            ${input.body}
          </mj-column>
        </mj-section>
        <mj-section>
          <mj-column>
            <mj-divider border-width="1px" border-color="#eeeeee"></mj-divider>
            <mj-text align="center" padding-bottom="20px" line-height="1.6" font-size="14px" color="#888888">
              © ${mjml.raw(String(new Date().getFullYear()))} Hive. All rights reserved.
            </mj-text>
          </mj-column>
        </mj-section>
      </mj-body>
    </mjml>
  `.content;

  const rendered = mjml2html(body, {
    minify: false,
    minifyOptions: undefined,
  });

  if (rendered.errors.length > 0) {
    throw new Error(rendered.errors.map(e => e.formattedMessage).join('\n'));
  }

  return rendered.html;
}

View file

@ -0,0 +1,102 @@
export type MJMLValue = {
readonly kind: 'mjml';
readonly content: string;
};
type RawValue = {
readonly kind: 'raw';
readonly content: string;
};
type SpecialValues = RawValue;
type ValueExpression = string | SpecialValues | MJMLValue;
/**
 * Tagged-template builder for MJML fragments.
 *
 * Interpolated values are handled by kind:
 * - plain strings are HTML-escaped,
 * - `MJMLValue`s are embedded verbatim (already safe),
 * - `mjml.raw(...)` markers are embedded verbatim,
 * - `undefined` or anything else throws.
 */
export function mjml(parts: TemplateStringsArray, ...values: ValueExpression[]): MJMLValue {
  let rendered = '';

  for (let i = 0; i < parts.length; i++) {
    rendered += parts[i];

    // The final literal part has no value following it.
    if (i === parts.length - 1) {
      break;
    }

    const value = values[i];
    if (value === undefined) {
      throw new Error('MJML tag cannot be bound an undefined value.');
    } else if (isRawValue(value)) {
      rendered += value.content;
    } else if (typeof value === 'string') {
      rendered += escapeHtml(value);
    } else if (value.kind === 'mjml') {
      rendered += value.content;
    } else {
      throw new TypeError('mjml: Unexpected value expression.');
    }
  }

  return {
    kind: 'mjml',
    content: rendered,
  };
}

/** Marks a string as pre-escaped so the `mjml` tag embeds it verbatim. */
mjml.raw = function raw(content: string): RawValue {
  return {
    kind: 'raw',
    content,
  };
};
/**
 * Escapes `&`, `"`, `'`, `<` and `>` for safe interpolation into MJML/HTML.
 *
 * @source https://github.com/component/escape-html
 *
 * The upstream implementation also escapes `&` as `&amp;`; the previous port
 * missed it in both the probe regex and the switch, letting raw ampersands
 * through into the rendered HTML.
 */
function escapeHtml(input: string): string {
  const matchHtmlRegExp = /["'&<>]/;
  const match = matchHtmlRegExp.exec(input);

  // Fast path: nothing to escape.
  if (!match) {
    return input;
  }

  let escapeSequence;
  let html = '';
  let index = 0;
  let lastIndex = 0;

  // Scan from the first match onward; unescaped spans are copied in bulk.
  for (index = match.index; index < input.length; index++) {
    switch (input.charCodeAt(index)) {
      case 34: // "
        escapeSequence = '&quot;';
        break;
      case 38: // &
        escapeSequence = '&amp;';
        break;
      case 39: // '
        escapeSequence = '&#39;';
        break;
      case 60: // <
        escapeSequence = '&lt;';
        break;
      case 62: // >
        escapeSequence = '&gt;';
        break;
      default:
        continue;
    }

    if (lastIndex !== index) {
      html += input.substring(lastIndex, index);
    }

    lastIndex = index + 1;
    html += escapeSequence;
  }

  return lastIndex !== index ? html + input.substring(lastIndex, index) : html;
}
// Runtime guard for tagged-template values: checks the `kind` discriminator.
function isOfKind<T extends SpecialValues>(value: unknown, kind: T['kind']): value is T {
  if (!value || typeof value !== 'object') {
    return false;
  }
  return 'kind' in value && (value as { kind?: unknown }).kind === kind;
}
/** Narrows to a `RawValue` marker (as produced by `mjml.raw`). */
function isRawValue(value: unknown): value is RawValue {
  // Inlined discriminator check: a non-null object whose `kind` is 'raw'.
  return (
    typeof value === 'object' &&
    value !== null &&
    'kind' in value &&
    (value as { kind?: unknown }).kind === 'raw'
  );
}

View file

@ -0,0 +1,165 @@
import nodemailer from 'nodemailer';
import sm from 'sendmail';
interface Email {
to: string;
subject: string;
body: string;
}
const emailProviders = {
postmark,
mock,
smtp,
sendmail,
};
export interface EmailProvider {
id: keyof typeof emailProviders;
send(email: Email): Promise<void>;
history: Email[];
}
type PostmarkEmailProviderConfig = {
provider: 'postmark';
token: string;
messageStream: string;
};
type MockEmailProviderConfig = {
provider: 'mock';
};
type SMTPEmailProviderConfig = {
provider: 'smtp';
protocol: 'smtp' | 'smtps';
host: string;
port: number;
auth: {
user: string;
pass: string;
};
tls: {
rejectUnauthorized: boolean;
};
};
type SendmailEmailProviderConfig = {
provider: 'sendmail';
};
type EmailProviderConfig =
| SMTPEmailProviderConfig
| PostmarkEmailProviderConfig
| SendmailEmailProviderConfig
| MockEmailProviderConfig;
/**
 * Instantiates the email provider selected by configuration.
 * The switch is exhaustive over `EmailProviderConfig['provider']`.
 */
export function createEmailProvider(config: EmailProviderConfig, emailFrom: string): EmailProvider {
  switch (config.provider) {
    case 'mock':
      return mock(config, emailFrom);
    case 'postmark':
      return postmark(config, emailFrom);
    case 'smtp':
      return smtp(config, emailFrom);
    case 'sendmail':
      return sendmail(config, emailFrom);
  }
}
/**
 * Email provider backed by the Postmark HTTP API.
 *
 * @throws Error on non-2xx responses, using Postmark's `Message` field
 *   when present and falling back to the HTTP status text.
 */
function postmark(config: PostmarkEmailProviderConfig, emailFrom: string) {
  return {
    id: 'postmark' as const,
    async send(email: Email) {
      const response = await fetch('https://api.postmarkapp.com/email', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Accept: 'application/json',
          'X-Postmark-Server-Token': config.token,
        },
        body: JSON.stringify({
          From: emailFrom,
          To: email.to,
          Subject: email.subject,
          HtmlBody: email.body,
          MessageStream: config.messageStream,
        }),
      });

      if (!response.ok) {
        // Surface Postmark's error message when available.
        const details: any = await response.json();
        throw new Error(details.Message ?? response.statusText);
      }
    },
    // Only the mock provider records history; kept for interface compatibility.
    history: [],
  };
}
/**
 * In-memory email provider: records every message in `history` instead of
 * delivering it. Useful wherever real delivery must be avoided.
 */
function mock(_config: MockEmailProviderConfig, _emailFrom: string): EmailProvider {
  const sent: Email[] = [];
  return {
    id: 'mock' as const,
    send: async email => {
      sent.push(email);
    },
    history: sent,
  };
}
/**
 * Email provider backed by an SMTP server via nodemailer.
 * A single transport is created up front and reused for every send.
 */
function smtp(config: SMTPEmailProviderConfig, emailFrom: string) {
  // `secure` is true for the 'smtps' protocol variant.
  const transporter = nodemailer.createTransport({
    host: config.host,
    port: config.port,
    secure: config.protocol === 'smtps',
    auth: {
      user: config.auth.user,
      pass: config.auth.pass,
    },
    tls: {
      rejectUnauthorized: config.tls.rejectUnauthorized,
    },
  });

  return {
    id: 'smtp' as const,
    send: async (email: Email) => {
      await transporter.sendMail({
        from: emailFrom,
        to: email.to,
        subject: email.subject,
        html: email.body,
      });
    },
    history: [],
  };
}
/**
 * Email provider that delivers through the local `sendmail` package client.
 * The callback-style client is adapted to a promise per send.
 */
function sendmail(_config: SendmailEmailProviderConfig, emailFrom: string) {
  const client = sm({});
  return {
    id: 'sendmail' as const,
    async send(email: Email) {
      await new Promise((resolve, reject) =>
        client(
          {
            from: emailFrom,
            to: email.to,
            subject: email.subject,
            html: email.body,
          },
          (err, reply) => (err ? reject(err) : resolve(reply)),
        ),
      );
    },
    history: [],
  };
}

View file

@ -0,0 +1,18 @@
import { button, email, mjml, paragraph } from '../components.js';
/**
 * Renders the "audit logs ready for download" notification email.
 */
export function renderAuditLogsReportEmail(input: {
  organizationName: string;
  formattedStartDate: string;
  formattedEndDate: string;
  url: string;
}) {
  const body = mjml`
    ${paragraph(mjml`You requested audit logs for ${input.formattedStartDate} ${input.formattedEndDate}, and they are now ready for download.`)}
    ${paragraph('Click the link below to download your CSV file:')}
    ${button({ url: input.url, text: 'Download Audit Logs' })}
    ${paragraph(`If you didn't request this, please contact support@graphql-hive.com.`)}
  `;
  return email({ title: 'Your Requested Audit Logs Are Ready', body });
}

View file

@ -0,0 +1,12 @@
import { button, email, mjml, paragraph } from '../components.js';
/**
 * Renders the email-address verification email.
 * `toEmail` is part of the input contract but is not interpolated into the template.
 */
export function renderEmailVerificationEmail(input: { verificationLink: string; toEmail: string }) {
  const body = mjml`
    ${paragraph(`To complete your sign-up, please verify your email address by clicking the link below:`)}
    ${button({ url: input.verificationLink, text: 'Verify Email' })}
    ${paragraph(`If you didn't sign up, you can ignore this email.`)}
  `;
  return email({ title: `Verify Your Email Address`, body });
}

View file

@ -0,0 +1,11 @@
import { button, email, mjml, paragraph } from '../components.js';
/** Renders the organization invitation email. */
export function renderOrganizationInvitation(input: { organizationName: string; link: string }) {
  const body = mjml`
    ${paragraph(mjml`You've been invited to join ${input.organizationName} on GraphQL Hive.`)}
    ${button({ url: input.link, text: 'Accept the invitation' })}
  `;
  return email({ title: `Join ${input.organizationName}`, body });
}

View file

@ -0,0 +1,18 @@
import { button, email, mjml, paragraph } from '../components.js';
/** Renders the organization ownership-transfer email. */
export function renderOrganizationOwnershipTransferEmail(input: {
  authorName: string;
  organizationName: string;
  link: string;
}) {
  const body = mjml`
    ${paragraph(
      mjml`${input.authorName} wants to transfer the ownership of the <strong>${input.organizationName}</strong> organization.`,
    )}
    ${button({ url: input.link, text: 'Accept the transfer' })}
    ${paragraph(`This link will expire in a day.`)}
  `;
  return email({ title: 'Organization Ownership Transfer Initiated', body });
}

View file

@ -0,0 +1,12 @@
import { button, email, mjml, paragraph } from '../components.js';
/**
 * Renders the password-reset email.
 * `toEmail` is part of the input contract but is not interpolated into the template.
 */
export function renderPasswordResetEmail(input: { passwordResetLink: string; toEmail: string }) {
  const body = mjml`
    ${paragraph(`We received a request to reset your password. Click the link below to set a new password:`)}
    ${button({ url: input.passwordResetLink, text: 'Reset your password' })}
    ${paragraph(`If you didn't request a password reset, you can ignore this email.`)}
  `;
  return email({ title: `Reset Your Password`, body });
}

View file

@ -0,0 +1,25 @@
import { button, email, mjml, paragraph } from '../components.js';
const numberFormatter = new Intl.NumberFormat();

/**
 * Renders the "operations quota exceeded" email (organization at/over 100% of its limit).
 * Fix: the usage sentence previously ended with a double period ("quota.. Used").
 */
export function renderRateLimitExceededEmail(input: {
  organizationName: string;
  limit: number;
  currentUsage: number;
  subscriptionManagementLink: string;
}) {
  return email({
    title: 'Rate Limit Reached',
    body: mjml`
      ${paragraph(
        mjml`Your Hive organization <strong>${
          input.organizationName
        }</strong> has reached over 100% of the operations limit quota. Used ${numberFormatter.format(input.currentUsage)} of ${numberFormatter.format(
          input.limit,
        )}.`,
      )}
      ${paragraph(`We recommend to increase the limit.`)}
      ${button({ url: input.subscriptionManagementLink, text: 'Manage your subscription' })}
    `,
  });
}

View file

@ -0,0 +1,25 @@
import { button, email, mjml, paragraph } from '../components.js';
const numberFormatter = new Intl.NumberFormat();

/** Renders the "approaching operations quota" warning email. */
export function renderRateLimitWarningEmail(input: {
  organizationName: string;
  limit: number;
  currentUsage: number;
  subscriptionManagementLink: string;
}) {
  const used = numberFormatter.format(input.currentUsage);
  const quota = numberFormatter.format(input.limit);
  const body = mjml`
    ${paragraph(
      mjml`Your Hive organization <strong>${input.organizationName}</strong> is approaching its operations limit quota. Used ${used} of ${quota}.`,
    )}
    ${paragraph(`We recommend to increase the limit.`)}
    ${button({ url: input.subscriptionManagementLink, text: 'Manage your subscription' })}
  `;
  return email({ title: 'Approaching Rate Limit', body });
}

View file

@ -0,0 +1,181 @@
import { DatabasePool, sql } from 'slonik';
import { z } from 'zod';
// Shape of the single aggregate row produced by the findSchemaChecksToPurge
// query below: each field is a (possibly empty) array of ids collected from
// the expired schema checks and their related rows.
const SchemaCheckModel = z.object({
  schemaCheckIds: z.array(z.string()),
  sdlStoreIds: z.array(z.string()),
  contextIds: z.array(z.string()),
  targetIds: z.array(z.string()),
  contractIds: z.array(z.string()),
});
/**
 * Deletes schema checks whose `expires_at` is at or before `args.expiresAt`,
 * then garbage-collects rows that may have become orphaned by that delete:
 * `sdl_store` entries, `schema_change_approvals`, and
 * `contract_schema_change_approvals`. Everything runs in one transaction.
 *
 * @returns counts of deleted rows per affected table.
 */
export async function purgeExpiredSchemaChecks(args: { pool: DatabasePool; expiresAt: Date }) {
  return await args.pool.transaction(async pool => {
    const date = args.expiresAt.toISOString();
    // Single aggregate query collecting every id the purge may touch.
    // Each ARRAY(...) sub-select deduplicates across the expired checks; the
    // sdlStoreIds union gathers sdl-store references from both the checks
    // themselves and their contract checks.
    const rawData = await pool.maybeOne<unknown>(sql`/* findSchemaChecksToPurge */
      WITH "filtered_schema_checks" AS (
        SELECT *
        FROM "schema_checks"
        WHERE "expires_at" <= ${date}
      )
      SELECT
        ARRAY(SELECT "filtered_schema_checks"."id" FROM "filtered_schema_checks") AS "schemaCheckIds",
        ARRAY(SELECT DISTINCT "filtered_schema_checks"."target_id" FROM "filtered_schema_checks") AS "targetIds",
        ARRAY(
          SELECT DISTINCT "filtered_schema_checks"."schema_sdl_store_id"
          FROM "filtered_schema_checks"
          WHERE "filtered_schema_checks"."schema_sdl_store_id" IS NOT NULL
          UNION SELECT DISTINCT "filtered_schema_checks"."composite_schema_sdl_store_id"
          FROM "filtered_schema_checks"
          WHERE "filtered_schema_checks"."composite_schema_sdl_store_id" IS NOT NULL
          UNION SELECT DISTINCT "filtered_schema_checks"."supergraph_sdl_store_id"
          FROM "filtered_schema_checks"
          WHERE "filtered_schema_checks"."supergraph_sdl_store_id" IS NOT NULL
          UNION SELECT DISTINCT "contract_checks"."composite_schema_sdl_store_id"
          FROM "contract_checks"
          INNER JOIN "filtered_schema_checks" ON "contract_checks"."schema_check_id" = "filtered_schema_checks"."id"
          WHERE "contract_checks"."composite_schema_sdl_store_id" IS NOT NULL
          UNION SELECT DISTINCT "contract_checks"."supergraph_sdl_store_id" FROM "filtered_schema_checks"
          INNER JOIN "contract_checks" ON "contract_checks"."schema_check_id" = "filtered_schema_checks"."id"
          WHERE "contract_checks"."supergraph_sdl_store_id" IS NOT NULL
        ) AS "sdlStoreIds",
        ARRAY(
          SELECT DISTINCT "filtered_schema_checks"."context_id"
          FROM "filtered_schema_checks"
          WHERE "filtered_schema_checks"."context_id" IS NOT NULL
        ) AS "contextIds",
        ARRAY(
          SELECT DISTINCT "contract_checks"."contract_id"
          FROM "contract_checks"
          INNER JOIN "filtered_schema_checks" ON "contract_checks"."schema_check_id" = "filtered_schema_checks"."id"
        ) AS "contractIds"
    `);
    const data = SchemaCheckModel.parse(rawData);

    // Nothing expired: short-circuit with zero counts.
    if (!data.schemaCheckIds.length) {
      return {
        deletedSchemaCheckCount: 0,
        deletedSdlStoreCount: 0,
        deletedSchemaChangeApprovalCount: 0,
        deletedContractSchemaChangeApprovalCount: 0,
      };
    }

    let deletedSdlStoreCount = 0;
    let deletedSchemaChangeApprovalCount = 0;
    let deletedContractSchemaChangeApprovalCount = 0;

    // Delete the expired checks first; the follow-up deletes below rely on
    // these rows being gone when probing for remaining references.
    await pool.any<unknown>(sql`/* purgeExpiredSchemaChecks */
      DELETE
      FROM "schema_checks"
      WHERE
        "id" = ANY(${sql.array(data.schemaCheckIds, 'uuid')})
    `);

    // Remove sdl_store rows that are no longer referenced by any remaining
    // schema check or contract check.
    if (data.sdlStoreIds.length) {
      deletedSdlStoreCount = await pool.oneFirst<number>(sql`/* purgeExpiredSdlStore */
        WITH "deleted" AS (
          DELETE
          FROM
            "sdl_store"
          WHERE
            "id" = ANY(
              ${sql.array(data.sdlStoreIds, 'text')}
            )
            AND NOT EXISTS (
              SELECT
                1
              FROM
                "schema_checks"
              WHERE
                "schema_checks"."schema_sdl_store_id" = "sdl_store"."id"
                OR "schema_checks"."composite_schema_sdl_store_id" = "sdl_store"."id"
                OR "schema_checks"."supergraph_sdl_store_id" = "sdl_store"."id"
            )
            AND NOT EXISTS (
              SELECT
                1
              FROM
                "contract_checks"
              WHERE
                "contract_checks"."composite_schema_sdl_store_id" = "sdl_store"."id"
                OR "contract_checks"."supergraph_sdl_store_id" = "sdl_store"."id"
            )
          RETURNING
            "id"
        ) SELECT COUNT(*) FROM "deleted"
      `);
    }

    // Remove schema change approvals whose (target, context) pair no longer
    // has any surviving schema check.
    if (data.targetIds.length && data.contextIds.length) {
      deletedSchemaChangeApprovalCount =
        await pool.oneFirst<number>(sql`/* purgeExpiredSchemaChangeApprovals */
        WITH "deleted" AS (
          DELETE
          FROM
            "schema_change_approvals"
          WHERE
            "target_id" = ANY(
              ${sql.array(data.targetIds, 'uuid')}
            )
            AND "context_id" = ANY(
              ${sql.array(data.contextIds, 'text')}
            )
            AND NOT EXISTS (
              SELECT
                1
              FROM "schema_checks"
              WHERE
                "schema_checks"."target_id" = "schema_change_approvals"."target_id"
                AND "schema_checks"."context_id" = "schema_change_approvals"."context_id"
            )
          RETURNING
            "target_id"
        ) SELECT COUNT(*) FROM "deleted"
      `);
    }

    // Same idea for contract-level approvals, keyed by (contract, context).
    if (data.contractIds.length && data.contextIds.length) {
      deletedContractSchemaChangeApprovalCount =
        await pool.oneFirst<number>(sql`/* purgeExpiredContractSchemaChangeApprovals */
        WITH "deleted" AS (
          DELETE
          FROM
            "contract_schema_change_approvals"
          WHERE
            "contract_id" = ANY(
              ${sql.array(data.contractIds, 'uuid')}
            )
            AND "context_id" = ANY(
              ${sql.array(data.contextIds, 'text')}
            )
            AND NOT EXISTS (
              SELECT
                1
              FROM
                "schema_checks"
                INNER JOIN "contract_checks"
                  ON "contract_checks"."schema_check_id" = "schema_checks"."id"
              WHERE
                "contract_checks"."contract_id" = "contract_schema_change_approvals"."contract_id"
                AND "schema_checks"."context_id" = "contract_schema_change_approvals"."context_id"
            )
          RETURNING
            "contract_id"
        ) SELECT COUNT(*) FROM "deleted"
      `);
    }

    return {
      deletedSchemaCheckCount: data.schemaCheckIds.length,
      deletedSdlStoreCount,
      deletedSchemaChangeApprovalCount,
      deletedContractSchemaChangeApprovalCount,
    };
  });
}

View file

@ -0,0 +1,68 @@
import got from 'got';
import type { Logger } from '@graphql-hive/logger';
/** Optional proxy ("request broker") that performs the outbound HTTP call on our behalf. */
export type RequestBroker = {
  endpoint: string;
  signature: string;
};

/**
 * Delivers a JSON webhook payload, either directly or via the request broker.
 *
 * Throws on delivery failure so the caller's retry machinery can re-schedule.
 * NOTE(review): the `attempt < maxAttempts` guard silently skips the call when
 * `attempt === maxAttempts` — confirm whether `attempt` is 0- or 1-based at the
 * call site, otherwise the final allowed attempt never fires.
 */
export async function sendWebhook(
  logger: Logger,
  requestBroker: RequestBroker | null,
  args: {
    attempt: number;
    maxAttempts: number;
    /** endpoint to be called */
    endpoint: string;
    /** JSON data to be sent to the endpoint */
    data: unknown;
  },
) {
  if (args.attempt < args.maxAttempts) {
    logger.debug('Calling webhook');
    try {
      if (!requestBroker) {
        // Direct delivery: POST the payload straight to the target endpoint.
        await got.post(args.endpoint, {
          headers: {
            Accept: 'application/json',
            'Accept-Encoding': 'gzip, deflate, br',
            'Content-Type': 'application/json',
          },
          timeout: {
            request: 10_000,
          },
          json: args.data,
        });
        return;
      }
      // Brokered delivery: send the broker a description of the request to
      // perform, authenticated via the shared signature header.
      await got.post(requestBroker.endpoint, {
        headers: {
          Accept: 'text/plain',
          'x-hive-signature': requestBroker.signature,
        },
        timeout: {
          request: 10_000,
        },
        json: {
          url: args.endpoint,
          method: 'POST',
          headers: {
            Accept: 'application/json',
            'Accept-Encoding': 'gzip, deflate, br',
            'Content-Type': 'application/json',
          },
          body: JSON.stringify(args.data),
          resolveResponseBody: false,
        },
      });
    } catch (error) {
      logger.error('Failed to call webhook.');
      // so we can re-try
      throw error;
    }
  } else {
    logger.warn('Giving up on webhook.');
  }
}

View file

@ -0,0 +1,34 @@
import { Logger as GraphileLogger, type LogLevel as GraphileLogLevel } from 'graphile-worker';
import type { Logger } from '@graphql-hive/logger';
import { ServiceLogger } from '@hive/service-common';
/**
 * Maps a Graphile Worker log level to the corresponding Hive logger method name.
 * Unknown levels fall back to 'info'.
 *
 * The explicit return type keeps the result a narrow union; previously the
 * untyped trailing `return 'info'` widened the whole return type to `string`,
 * defeating the per-branch `as const` assertions.
 */
function logLevel(level: GraphileLogLevel): 'warn' | 'info' | 'debug' | 'error' {
  switch (level) {
    case 'warning':
      return 'warn';
    case 'info': {
      return 'info';
    }
    case 'debug': {
      return 'debug';
    }
    case 'error': {
      return 'error';
    }
  }
  // Defensive fallback for any level value not covered above.
  return 'info';
}
/**
 * Bridges Hive Logger to Graphile Logger
 *
 * The scope and structured meta from Graphile are intentionally dropped;
 * only the message is forwarded, at the translated level.
 */
export function bridgeGraphileLogger(logger: Logger) {
  return new GraphileLogger(_scope => (level, message, _meta) => {
    logger[logLevel(level)](message);
  });
}

/**
 * Exposes a Hive Logger under the fastify-facing ServiceLogger interface.
 * NOTE(review): unchecked double cast — assumes the two logger interfaces are
 * call-compatible at runtime; confirm if either API changes.
 */
export function bridgeFastifyLogger(logger: Logger): ServiceLogger {
  return logger as unknown as ServiceLogger;
}

View file

@ -0,0 +1,44 @@
import { metrics } from '@hive/service-common';
// Prometheus metrics for the workflows job queue. All per-job metrics are
// labelled with the Graphile Worker task identifier.
// NOTE(review): counter semantics follow graphile-worker's job events
// (complete / error / success / failed / fatalError) — see the event wiring
// where these are incremented.
export const jobCompleteCounter = new metrics.Counter({
  name: 'hive_workflow_job_complete_total',
  help: 'Total number of completed jobs',
  labelNames: ['task_identifier'],
});

export const jobErrorCounter = new metrics.Counter({
  name: 'hive_workflow_job_error_total',
  help: 'Total number of jobs with errors',
  labelNames: ['task_identifier'],
});

export const jobSuccessCounter = new metrics.Counter({
  name: 'hive_workflow_job_success_total',
  help: 'Total number of successful jobs',
  labelNames: ['task_identifier'],
});

export const jobFailedCounter = new metrics.Counter({
  name: 'hive_workflow_job_failed_total',
  help: 'Total number of failed jobs',
  labelNames: ['task_identifier'],
});

// Not per-task: a fatal error takes down the whole worker.
export const workerFatalErrorCounter = new metrics.Counter({
  name: 'hive_workflow_worker_fatal_error_total',
  help: 'Total number of worker fatal errors',
});

export const jobDuration = new metrics.Summary({
  name: 'hive_workflow_job_duration_seconds',
  help: 'Duration of jobs in seconds',
  labelNames: ['task_identifier'],
  percentiles: [0.5, 0.9, 0.95, 0.99],
});

export const jobQueueTime = new metrics.Summary({
  name: 'hive_workflow_job_queue_time_seconds',
  help: 'Time a job spends in the queue in seconds',
  labelNames: ['task_identifier'],
  percentiles: [0.5, 0.9, 0.95, 0.99],
});

View file

@ -0,0 +1,50 @@
import EventEmitter from 'events';
import type { WorkerEvents } from 'graphile-worker';
import {
jobCompleteCounter,
jobDuration,
jobErrorCounter,
jobFailedCounter,
jobQueueTime,
jobSuccessCounter,
workerFatalErrorCounter,
} from './metrics';
/**
 * Creates an event emitter with handlers for prometheus metrics for the Graphile Worker.
 *
 * Fix: the elapsed-time computation previously used
 * `new Date(job.run_at) ?? new Date(job.created_at)` — `new Date(...)` is never
 * nullish, so the `created_at` fallback was dead code. The nullish check now
 * applies to the raw field: `new Date(job.run_at ?? job.created_at)`.
 */
export function createTaskEventEmitter() {
  const events: WorkerEvents = new EventEmitter();

  /** Seconds elapsed since the job's scheduled run time (falls back to creation time). */
  const secondsSinceScheduled = (job: { run_at: Date; created_at: Date }): number =>
    (Date.now() - new Date(job.run_at ?? job.created_at).getTime()) / 1000;

  events.on('job:start', ({ job }) => {
    jobQueueTime.observe({ task_identifier: job.task_identifier }, secondsSinceScheduled(job));
  });

  events.on('job:complete', ({ job }) => {
    jobCompleteCounter.inc({ task_identifier: job.task_identifier });
    // Note: measured from run_at, so this duration includes time spent queued.
    jobDuration.observe({ task_identifier: job.task_identifier }, secondsSinceScheduled(job));
  });

  events.on('job:error', ({ job }) => {
    jobErrorCounter.inc({ task_identifier: job.task_identifier });
  });

  events.on('job:success', ({ job }) => {
    jobSuccessCounter.inc({ task_identifier: job.task_identifier });
  });

  events.on('job:failed', ({ job }) => {
    jobFailedCounter.inc({ task_identifier: job.task_identifier });
  });

  events.on('worker:fatalError', () => {
    workerFatalErrorCounter.inc();
  });

  return events;
}

View file

@ -0,0 +1,28 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderAuditLogsReportEmail } from '../lib/emails/templates/audit-logs-report.js';
/** Payload contract for the audit-log CSV export notification job. */
export const AuditLogExportTask = defineTask({
  name: 'auditLogExport',
  schema: z.object({
    organizationId: z.string(),
    organizationName: z.string(),
    formattedStartDate: z.string(),
    formattedEndDate: z.string(),
    url: z.string(),
    email: z.string(),
  }),
});

/** Sends the "your audit logs are ready" email to the requesting address. */
export const task = implementTask(AuditLogExportTask, async ({ input, context }) => {
  const body = renderAuditLogsReportEmail({
    url: input.url,
    organizationName: input.organizationName,
    formattedStartDate: input.formattedStartDate,
    formattedEndDate: input.formattedEndDate,
  });
  await context.email.send({
    to: input.email,
    subject: 'Hive - Audit Log Report',
    body,
  });
});

View file

@ -0,0 +1,25 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderEmailVerificationEmail } from '../lib/emails/templates/email-verification.js';
/** Payload contract for the sign-up email verification job. */
export const EmailVerificationTask = defineTask({
  name: 'emailVerification',
  schema: z.object({
    user: z.object({
      email: z.string(),
      id: z.string(),
    }),
    emailVerifyLink: z.string(),
  }),
});

/** Sends the verification link to the user's email address. */
export const task = implementTask(EmailVerificationTask, async ({ input, context }) => {
  const body = renderEmailVerificationEmail({
    verificationLink: input.emailVerifyLink,
    toEmail: input.user.email,
  });
  await context.email.send({
    to: input.user.email,
    subject: 'Verify your email',
    body,
  });
});

View file

@ -0,0 +1,25 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderOrganizationInvitation } from '../lib/emails/templates/organization-invitation.js';
/** Payload contract for the organization invitation email job. */
export const OrganizationInvitationTask = defineTask({
  name: 'organizationInvitation',
  schema: z.object({
    organizationId: z.string(),
    organizationName: z.string(),
    code: z.string(),
    email: z.string(),
    link: z.string(),
  }),
});

/** Sends the invitation email with the join link. */
export const task = implementTask(OrganizationInvitationTask, async ({ input, context }) => {
  const body = renderOrganizationInvitation({
    link: input.link,
    organizationName: input.organizationName,
  });
  await context.email.send({
    to: input.email,
    subject: `You have been invited to join ${input.organizationName}`,
    body,
  });
});

View file

@ -0,0 +1,26 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderOrganizationOwnershipTransferEmail } from '../lib/emails/templates/organization-ownership-transfer.js';
/** Payload contract for the organization ownership-transfer email job. */
export const OrganizationOwnershipTransferTask = defineTask({
  name: 'organizationOwnershipTransfer',
  schema: z.object({
    organizationId: z.string(),
    organizationName: z.string(),
    authorName: z.string(),
    email: z.string(),
    link: z.string(),
  }),
});

/** Sends the transfer-acceptance email. */
export const task = implementTask(OrganizationOwnershipTransferTask, async ({ input, context }) => {
  const body = renderOrganizationOwnershipTransferEmail({
    link: input.link,
    organizationName: input.organizationName,
    authorName: input.authorName,
  });
  await context.email.send({
    to: input.email,
    subject: `Organization transfer from ${input.authorName} (${input.organizationName})`,
    body,
  });
});

View file

@ -0,0 +1,25 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderPasswordResetEmail } from '../lib/emails/templates/password-reset.js';
/** Payload contract for the password-reset email job. */
export const PasswordResetTask = defineTask({
  name: 'passwordReset',
  schema: z.object({
    user: z.object({
      email: z.string(),
      id: z.string(),
    }),
    passwordResetLink: z.string(),
  }),
});

/** Sends the password-reset link to the user's email address. */
export const task = implementTask(PasswordResetTask, async ({ input, context }) => {
  const body = renderPasswordResetEmail({
    passwordResetLink: input.passwordResetLink,
    toEmail: input.user.email,
  });
  await context.email.send({
    subject: `Reset your password`,
    to: input.user.email,
    body,
  });
});

View file

@ -0,0 +1,25 @@
import { sql } from 'slonik';
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
/** Maintenance job: delete expired rows from the worker's deduplication table. */
export const PurgeExpiredDedupeKeysTask = defineTask({
  name: 'purgeExpiredDedupeKeys',
  schema: z.unknown(),
});

export const task = implementTask(PurgeExpiredDedupeKeysTask, async ({ context, logger }) => {
  logger.debug('purging expired postgraphile task dedupe keys');
  // Delete-and-count in one statement so the log line can report how much was purged.
  const result = await context.pg.oneFirst(sql`
    WITH "deleted" AS (
      DELETE FROM "graphile_worker_deduplication"
      WHERE "expires_at" < NOW()
      RETURNING 1
    )
    SELECT COUNT(*) FROM "deleted";
  `);
  const purgedCount = z.number().parse(result);
  logger.debug({ purgedCount }, 'finished purging expired postgraphile task dedupe keys');
});

View file

@ -0,0 +1,17 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { purgeExpiredSchemaChecks } from '../lib/expired-schema-checks.js';
/** Maintenance job: remove expired schema checks and related orphaned rows. */
export const PurgeExpiredSchemaChecks = defineTask({
  name: 'purgeExpiredSchemaChecks',
  schema: z.unknown(),
});

export const task = implementTask(PurgeExpiredSchemaChecks, async ({ context, logger }) => {
  logger.debug('purging expired schema checks');
  const statistics = await purgeExpiredSchemaChecks({
    pool: context.pg,
    expiresAt: new Date(),
  });
  logger.debug({ statistics }, 'finished purging schema checks');
});

View file

@ -0,0 +1,17 @@
import { defineTask, implementTask } from '../kit.js';
import { sendWebhook } from '../lib/webhooks/send-webhook.js';
import { SchemaChangeNotification } from '../webhooks/schema-change-notification.js';
/** Job that delivers the schema-change webhook; payload is validated by the shared schema. */
export const SchemaChangeNotificationTask = defineTask({
  name: 'schemaChangeNotification',
  schema: SchemaChangeNotification,
});

export const task = implementTask(SchemaChangeNotificationTask, async ({ input, context, helpers }) => {
  // Attempt counters come from the graphile job so sendWebhook can decide when to give up.
  await sendWebhook(context.logger, context.requestBroker, {
    attempt: helpers.job.attempts,
    maxAttempts: helpers.job.max_attempts,
    data: input.event,
    endpoint: input.endpoint,
  });
});

View file

@ -0,0 +1,30 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderRateLimitExceededEmail } from '../lib/emails/templates/rate-limit-exceeded.js';
/** Payload contract for the "operations quota exceeded" notification job. */
export const UsageRateLimitExceededTask = defineTask({
  name: 'usageRateLimitExceeded',
  schema: z.object({
    organizationId: z.string(),
    organizationName: z.string(),
    limit: z.number(),
    currentUsage: z.number(),
    startDate: z.number(),
    endDate: z.number(),
    subscriptionManagementLink: z.string(),
    email: z.string(),
  }),
});

/** Emails the organization contact that the operations quota was exceeded. */
export const task = implementTask(UsageRateLimitExceededTask, async ({ input, context }) => {
  const body = renderRateLimitExceededEmail({
    organizationName: input.organizationName,
    currentUsage: input.currentUsage,
    limit: input.limit,
    subscriptionManagementLink: input.subscriptionManagementLink,
  });
  await context.email.send({
    subject: `GraphQL-Hive operations quota for ${input.organizationName} exceeded`,
    to: input.email,
    body,
  });
});

View file

@ -0,0 +1,30 @@
import { z } from 'zod';
import { defineTask, implementTask } from '../kit.js';
import { renderRateLimitWarningEmail } from '../lib/emails/templates/rate-limit-warning.js';
/** Payload contract for the "approaching operations quota" warning job. */
export const UsageRateLimitWarningTask = defineTask({
  name: 'usageRateLimitWarning',
  schema: z.object({
    organizationId: z.string(),
    organizationName: z.string(),
    limit: z.number(),
    currentUsage: z.number(),
    startDate: z.number(),
    endDate: z.number(),
    subscriptionManagementLink: z.string(),
    email: z.string(),
  }),
});

/** Emails the organization contact that usage is approaching the quota. */
export const task = implementTask(UsageRateLimitWarningTask, async ({ input, context }) => {
  const body = renderRateLimitWarningEmail({
    organizationName: input.organizationName,
    limit: input.limit,
    currentUsage: input.currentUsage,
    subscriptionManagementLink: input.subscriptionManagementLink,
  });
  await context.email.send({
    subject: `${input.organizationName} is approaching its rate limit`,
    to: input.email,
    body,
  });
});

View file

@ -0,0 +1,35 @@
import { z } from 'zod';
/**
 * Webhook payload definition for schema change notifications.
 */

// Organization, project, and target references all share the same shape;
// zod schemas are immutable, so reusing one instance is safe.
const entityReference = z.object({
  id: z.string().nonempty(),
  cleanId: z.string().nonempty(),
  slug: z.string().nonempty(),
  name: z.string().nonempty(),
});

export const SchemaChangeNotification = z.object({
  endpoint: z.string().nonempty(),
  event: z.object({
    organization: entityReference,
    project: entityReference,
    target: entityReference,
    schema: z.object({
      id: z.string().nonempty(),
      valid: z.boolean(),
      commit: z.string().nonempty(),
    }),
    changes: z.array(z.any()),
    errors: z.array(z.any()),
  }),
});

View file

@ -0,0 +1,9 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"target": "ES2020",
"module": "esnext",
"rootDir": "../.."
},
"files": ["src/index.ts"]
}

View file

@ -1,3 +1,15 @@
diff --git a/dist/src/binders/bindPool.js b/dist/src/binders/bindPool.js
index ad509058bf5d26c82d4b2aea35e945df2f83f38e..1ca27403f70362f9a60abef7df6c2fdaaba1805b 100644
--- a/dist/src/binders/bindPool.js
+++ b/dist/src/binders/bindPool.js
@@ -7,6 +7,7 @@ const factories_1 = require("../factories");
const state_1 = require("../state");
const bindPool = (parentLog, pool, clientConfiguration) => {
return {
+ pool,
any: (query) => {
return (0, factories_1.createConnection)(parentLog, pool, clientConfiguration, 'IMPLICIT_QUERY', (connectionLog, connection, boundConnection) => {
return boundConnection.any(query);
diff --git a/dist/src/factories/createPool.js b/dist/src/factories/createPool.js
index b91a9fe433dc340f5cdf096ca4c568297c343ab3..401df1272d1c7f344bb956b38cc7dbde29231742 100644
--- a/dist/src/factories/createPool.js
@ -19,3 +31,39 @@ index b91a9fe433dc340f5cdf096ca4c568297c343ab3..401df1272d1c7f344bb956b38cc7dbde
state_1.poolStateMap.set(pool, {
ended: false,
mock: false,
diff --git a/dist/src/types.d.ts b/dist/src/types.d.ts
index d091b301b1df0f8d9ad9298d587081fe6d33c0be..57ea5a46fbf0878546e34debed2401efcb66d7fb 100644
--- a/dist/src/types.d.ts
+++ b/dist/src/types.d.ts
@@ -138,6 +138,7 @@ export declare type DatabasePool = CommonQueryMethods & {
readonly end: () => Promise<void>;
readonly getPoolState: () => PoolState;
readonly stream: StreamFunction;
+ readonly pool: PgPool;
};
export declare type DatabaseConnection = DatabasePool | DatabasePoolConnection;
export declare type QueryResultRowColumn = PrimitiveValueExpression;
diff --git a/src/binders/bindPool.ts b/src/binders/bindPool.ts
index d10bb50117b613f262ee715fc40745e8270a60b3..fde977dd042ec2561163f252c7f76f92cb043eb0 100644
--- a/src/binders/bindPool.ts
+++ b/src/binders/bindPool.ts
@@ -26,6 +26,7 @@ export const bindPool = (
clientConfiguration: ClientConfiguration,
): DatabasePool => {
return {
+ pool,
any: (query: TaggedTemplateLiteralInvocation) => {
return createConnection(
parentLog,
diff --git a/src/types.ts b/src/types.ts
index da293a0a4ce2583c43711cbe90d4829ec9c46fa8..962acdd5c2652e6e61147adc03204bca065cd28c 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -191,6 +191,7 @@ export type DatabasePool = CommonQueryMethods & {
readonly end: () => Promise<void>,
readonly getPoolState: () => PoolState,
readonly stream: StreamFunction,
+ readonly pool: PgPool,
};
export type DatabaseConnection = DatabasePool | DatabasePoolConnection;

View file

@ -77,7 +77,7 @@ patchedDependencies:
hash: 195ae63d47810ca4c987421948f15869356e598dccde9e8c9a4ff4efd3e88322
path: patches/p-cancelable@4.0.1.patch
slonik@30.4.4:
hash: 408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299
hash: 195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9
path: patches/slonik@30.4.4.patch
importers:
@ -384,7 +384,7 @@ importers:
version: 5.8.2
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
strip-ansi:
specifier: 7.1.2
version: 7.1.2
@ -720,7 +720,7 @@ importers:
version: 11.10.2(pg-query-stream@4.7.0(pg@8.13.1))
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
ts-node:
specifier: 10.9.2
version: 10.9.2(@swc/core@1.13.5)(@types/node@22.10.5)(typescript@5.7.3)
@ -767,9 +767,6 @@ importers:
'@hive/cdn-script':
specifier: workspace:*
version: link:../cdn-worker
'@hive/emails':
specifier: workspace:*
version: link:../emails
'@hive/schema':
specifier: workspace:*
version: link:../schema
@ -788,9 +785,9 @@ importers:
'@hive/usage-ingestor':
specifier: workspace:*
version: link:../usage-ingestor
'@hive/webhooks':
'@hive/workflows':
specifier: workspace:*
version: link:../webhooks
version: link:../workflows
'@nodesecure/i18n':
specifier: ^4.0.1
version: 4.0.1
@ -910,7 +907,7 @@ importers:
version: 5.0.0-beta.2
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
stripe:
specifier: 17.5.0
version: 17.5.0
@ -1013,15 +1010,15 @@ importers:
'@hive/api':
specifier: workspace:*
version: link:../api
'@hive/emails':
specifier: workspace:*
version: link:../emails
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@hive/workflows':
specifier: workspace:*
version: link:../workflows
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
@ -1335,6 +1332,9 @@ importers:
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@hive/workflows':
specifier: workspace:*
version: link:../workflows
'@sentry/integrations':
specifier: 7.114.0
version: 7.114.0
@ -1408,6 +1408,9 @@ importers:
'@fastify/cors':
specifier: 9.0.1
version: 9.0.1
'@graphql-hive/logger':
specifier: 1.0.9
version: 1.0.9
'@graphql-hive/plugin-opentelemetry':
specifier: 1.3.0
version: 1.3.0(encoding@0.1.13)(graphql@16.11.0)(ws@8.18.0)
@ -1467,7 +1470,7 @@ importers:
version: 15.1.3
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
zod:
specifier: 3.25.76
version: 3.25.76
@ -1512,13 +1515,13 @@ importers:
version: 11.10.2(pg-query-stream@4.7.0(pg@8.13.1))
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
slonik-interceptor-query-logging:
specifier: 46.4.0
version: 46.4.0(slonik@30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299))
version: 46.4.0(slonik@30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9))
slonik-utilities:
specifier: 1.9.4
version: 1.9.4(slonik@30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299))
version: 1.9.4(slonik@30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9))
tslib:
specifier: 2.8.1
version: 2.8.1
@ -1733,6 +1736,51 @@ importers:
specifier: 3.25.76
version: 3.25.76
packages/services/workflows:
devDependencies:
'@graphql-hive/logger':
specifier: 1.0.9
version: 1.0.9
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
'@types/mjml':
specifier: 4.7.1
version: 4.7.1
'@types/nodemailer':
specifier: 7.0.4
version: 7.0.4
bentocache:
specifier: 1.1.0
version: 1.1.0(patch_hash=98c0f93795fdd4f5eae32ee7915de8e9a346a24c3a917262b1f4551190f1a1af)(ioredis@5.8.2)
dotenv:
specifier: 16.4.7
version: 16.4.7
graphile-worker:
specifier: 0.16.6
version: 0.16.6(typescript@5.7.3)
mjml:
specifier: 4.14.0
version: 4.14.0(encoding@0.1.13)
nodemailer:
specifier: 7.0.11
version: 7.0.11
sendmail:
specifier: 1.6.1
version: 1.6.1
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
zod:
specifier: 3.25.76
version: 3.25.76
packages/web/app:
devDependencies:
'@date-fns/utc':
@ -4082,6 +4130,9 @@ packages:
graphql: ^15.5.0 || ^16.0.0 || ^17.0.0
typescript: ^5.0.0
'@graphile/logger@0.2.0':
resolution: {integrity: sha512-jjcWBokl9eb1gVJ85QmoaQ73CQ52xAaOCF29ukRbYNl6lY+ts0ErTaDYOBlejcbUs2OpaiqYLO5uDhyLFzWw4w==}
'@graphiql/plugin-explorer@4.0.0-alpha.2':
resolution: {integrity: sha512-U3pAVaSX9lKUEIpOffJL0wV8S+T5be6qN/Med+p7Jmi6fCJcBsjzOreLf5bUBAaHYIaXAgMBQXCrNTL17lN4Ag==}
peerDependencies:
@ -9716,6 +9767,9 @@ packages:
'@types/http-errors@2.0.5':
resolution: {integrity: sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==}
'@types/interpret@1.1.4':
resolution: {integrity: sha512-r+tPKWHYqaxJOYA3Eik0mMi+SEREqOXLmsooRFmc6GHv7nWUDixFtKN+cegvsPlDcEZd9wxsdp041v2imQuvag==}
'@types/ioredis-mock@8.2.5':
resolution: {integrity: sha512-cZyuwC9LGtg7s5G9/w6rpy3IOZ6F/hFR0pQlWYZESMo1xQUYbDpa6haqB4grTePjsGzcB/YLBFCjqRunK5wieg==}
@ -9821,6 +9875,9 @@ packages:
'@types/node@22.10.5':
resolution: {integrity: sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==}
'@types/node@22.19.1':
resolution: {integrity: sha512-LCCV0HdSZZZb34qifBsyWlUmok6W7ouER+oQIGBScS8EsZsQbrtFTUrDX4hOl+CS6p7cnNC4td+qrSVGSCTUfQ==}
'@types/node@24.10.1':
resolution: {integrity: sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==}
@ -9904,6 +9961,9 @@ packages:
'@types/semver@7.5.6':
resolution: {integrity: sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==}
'@types/semver@7.7.1':
resolution: {integrity: sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==}
'@types/send@0.17.5':
resolution: {integrity: sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==}
@ -12890,6 +12950,15 @@ packages:
graphemer@1.4.0:
resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==}
graphile-config@0.0.1-beta.18:
resolution: {integrity: sha512-uMdF9Rt8/NwT1wVXNleYgM5ro2hHDodHiKA3efJhgdU8iP+r/hksnghOHreMva0sF5tV73f4TpiELPUR0g7O9w==}
engines: {node: '>=16'}
graphile-worker@0.16.6:
resolution: {integrity: sha512-e7gGYDmGqzju2l83MpzX8vNG/lOtVJiSzI3eZpAFubSxh/cxs7sRrRGBGjzBP1kNG0H+c95etPpNRNlH65PYhw==}
engines: {node: '>=14.0.0'}
hasBin: true
graphiql-explorer@0.9.0:
resolution: {integrity: sha512-fZC/wsuatqiQDO2otchxriFO0LaWIo/ovF/CQJ1yOudmY0P7pzDiP+l9CEHUiWbizk3e99x6DQG4XG1VxA+d6A==}
peerDependencies:
@ -13412,6 +13481,10 @@ packages:
resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==}
engines: {node: '>=12'}
interpret@3.1.1:
resolution: {integrity: sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==}
engines: {node: '>=10.13.0'}
intl-tel-input@17.0.19:
resolution: {integrity: sha512-GBNoUT4JVgm2e1N+yFMaBQ24g5EQfZhDznGneCM9IEZwfKsMUAUa1dS+v0wOiKpRAZ5IPNLJMIEEFGgqlCI22A==}
@ -17921,6 +17994,9 @@ packages:
undici-types@6.20.0:
resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==}
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
undici-types@7.16.0:
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
@ -19119,8 +19195,8 @@ snapshots:
dependencies:
'@aws-crypto/sha256-browser': 3.0.0
'@aws-crypto/sha256-js': 3.0.0
'@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/client-sts': 3.596.0
'@aws-sdk/client-sso-oidc': 3.596.0
'@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)
'@aws-sdk/core': 3.592.0
'@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/middleware-host-header': 3.577.0
@ -19227,11 +19303,11 @@ snapshots:
transitivePeerDependencies:
- aws-crt
'@aws-sdk/client-sso-oidc@3.596.0(@aws-sdk/client-sts@3.596.0)':
'@aws-sdk/client-sso-oidc@3.596.0':
dependencies:
'@aws-crypto/sha256-browser': 3.0.0
'@aws-crypto/sha256-js': 3.0.0
'@aws-sdk/client-sts': 3.596.0
'@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)
'@aws-sdk/core': 3.592.0
'@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/middleware-host-header': 3.577.0
@ -19270,7 +19346,6 @@ snapshots:
'@smithy/util-utf8': 3.0.0
tslib: 2.8.1
transitivePeerDependencies:
- '@aws-sdk/client-sts'
- aws-crt
'@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)':
@ -19447,11 +19522,11 @@ snapshots:
transitivePeerDependencies:
- aws-crt
'@aws-sdk/client-sts@3.596.0':
'@aws-sdk/client-sts@3.596.0(@aws-sdk/client-sso-oidc@3.596.0)':
dependencies:
'@aws-crypto/sha256-browser': 3.0.0
'@aws-crypto/sha256-js': 3.0.0
'@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/client-sso-oidc': 3.596.0
'@aws-sdk/core': 3.592.0
'@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/middleware-host-header': 3.577.0
@ -19490,6 +19565,7 @@ snapshots:
'@smithy/util-utf8': 3.0.0
tslib: 2.8.1
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
'@aws-sdk/client-sts@3.723.0':
@ -19684,7 +19760,7 @@ snapshots:
'@aws-sdk/credential-provider-ini@3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0)':
dependencies:
'@aws-sdk/client-sts': 3.596.0
'@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)
'@aws-sdk/credential-provider-env': 3.587.0
'@aws-sdk/credential-provider-http': 3.596.0
'@aws-sdk/credential-provider-process': 3.587.0
@ -19874,7 +19950,7 @@ snapshots:
'@aws-sdk/credential-provider-web-identity@3.587.0(@aws-sdk/client-sts@3.596.0)':
dependencies:
'@aws-sdk/client-sts': 3.596.0
'@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)
'@aws-sdk/types': 3.577.0
'@smithy/property-provider': 3.1.11
'@smithy/types': 3.7.2
@ -20143,7 +20219,7 @@ snapshots:
'@aws-sdk/token-providers@3.587.0(@aws-sdk/client-sso-oidc@3.596.0)':
dependencies:
'@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0)
'@aws-sdk/client-sso-oidc': 3.596.0
'@aws-sdk/types': 3.577.0
'@smithy/property-provider': 3.1.11
'@smithy/shared-ini-file-loader': 3.1.12
@ -21723,6 +21799,8 @@ snapshots:
graphql: 16.9.0
typescript: 5.7.3
'@graphile/logger@0.2.0': {}
'@graphiql/plugin-explorer@4.0.0-alpha.2(@graphiql/react@1.0.0-alpha.4(patch_hash=1018befc9149cbc43bc2bf8982d52090a580e68df34b46674234f4e58eb6d0a0)(@codemirror/language@6.10.2)(@types/node@25.0.2)(@types/react-dom@18.3.5(@types/react@18.3.18))(@types/react@18.3.18)(graphql-ws@5.16.1(graphql@16.9.0))(graphql@16.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(graphql@16.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
dependencies:
'@graphiql/react': 1.0.0-alpha.4(patch_hash=1018befc9149cbc43bc2bf8982d52090a580e68df34b46674234f4e58eb6d0a0)(@codemirror/language@6.10.2)(@types/node@25.0.2)(@types/react-dom@18.3.5(@types/react@18.3.18))(@types/react@18.3.18)(graphql-ws@5.16.1(graphql@16.9.0))(graphql@16.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@ -30800,7 +30878,6 @@ snapshots:
'@types/debug@4.1.12':
dependencies:
'@types/ms': 0.7.34
optional: true
'@types/debug@4.1.7':
dependencies:
@ -30871,6 +30948,10 @@ snapshots:
'@types/http-errors@2.0.5': {}
'@types/interpret@1.1.4':
dependencies:
'@types/node': 22.10.5
'@types/ioredis-mock@8.2.5':
dependencies:
'@types/node': 22.10.5
@ -30976,6 +31057,10 @@ snapshots:
dependencies:
undici-types: 6.20.0
'@types/node@22.19.1':
dependencies:
undici-types: 6.21.0
'@types/node@24.10.1':
dependencies:
undici-types: 7.16.0
@ -31063,6 +31148,8 @@ snapshots:
'@types/semver@7.5.6': {}
'@types/semver@7.7.1': {}
'@types/send@0.17.5':
dependencies:
'@types/mime': 1.3.5
@ -34662,6 +34749,36 @@ snapshots:
graphemer@1.4.0: {}
graphile-config@0.0.1-beta.18:
dependencies:
'@types/interpret': 1.1.4
'@types/node': 22.19.1
'@types/semver': 7.7.1
chalk: 4.1.2
debug: 4.4.3(supports-color@8.1.1)
interpret: 3.1.1
semver: 7.7.2
tslib: 2.8.1
yargs: 17.7.2
transitivePeerDependencies:
- supports-color
graphile-worker@0.16.6(typescript@5.7.3):
dependencies:
'@graphile/logger': 0.2.0
'@types/debug': 4.1.12
'@types/pg': 8.11.10
cosmiconfig: 8.3.6(typescript@5.7.3)
graphile-config: 0.0.1-beta.18
json5: 2.2.3
pg: 8.13.1
tslib: 2.8.1
yargs: 17.7.2
transitivePeerDependencies:
- pg-native
- supports-color
- typescript
graphiql-explorer@0.9.0(graphql@16.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
graphql: 16.9.0
@ -35450,6 +35567,8 @@ snapshots:
internmap@2.0.3: {}
interpret@3.1.1: {}
intl-tel-input@17.0.19: {}
invariant@2.2.4:
@ -39867,14 +39986,14 @@ snapshots:
slick@1.12.2: {}
slonik-interceptor-query-logging@46.4.0(slonik@30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)):
slonik-interceptor-query-logging@46.4.0(slonik@30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)):
dependencies:
crack-json: 1.3.0
pretty-ms: 7.0.1
serialize-error: 8.1.0
slonik: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
slonik: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
slonik-utilities@1.9.4(slonik@30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)):
slonik-utilities@1.9.4(slonik@30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)):
dependencies:
core-js: 3.25.5
delay: 4.4.1
@ -39882,9 +40001,9 @@ snapshots:
lodash: 4.17.21
roarr: 7.14.3
serialize-error: 5.0.0
slonik: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
slonik: 30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9)
slonik@30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299):
slonik@30.4.4(patch_hash=195b140c0181c27a85a6026c0058087a419e38f6c5d89f5f2c608e39f5bf23e9):
dependencies:
concat-stream: 2.0.0
es6-error: 4.1.1
@ -40845,6 +40964,8 @@ snapshots:
undici-types@6.20.0: {}
undici-types@6.21.0: {}
undici-types@7.16.0: {}
undici@5.29.0:

View file

@ -48,8 +48,6 @@
"@hive/usage-ingestor": ["./packages/services/usage-ingestor/src/index.ts"],
"@hive/policy": ["./packages/services/policy/src/api.ts"],
"@hive/tokens": ["./packages/services/tokens/src/api.ts"],
"@hive/webhooks": ["./packages/services/webhooks/src/api.ts"],
"@hive/emails": ["./packages/services/emails/src/api.ts"],
"@hive/commerce": ["./packages/services/commerce/src/api.ts"],
"@hive/storage": ["./packages/services/storage/src/index.ts"],
"@hive/storage/*": ["./packages/services/storage/src/*"],
@ -66,6 +64,8 @@
"@hive/usage-ingestor/src/normalize-operation": [
"./packages/services/usage-ingestor/src/normalize-operation.ts"
],
"@hive/workflows/kit": ["./packages/services/workflows/src/kit.ts"],
"@hive/workflows/tasks/*": ["./packages/services/workflows/src/tasks/*"],
"@/*": ["./packages/web/app/src/*"],
"testkit/*": ["./integration-tests/testkit/*"],
"@graphql-hive/plugin-opentelemetry/api": [