Replaces rate-limit, stripe-billing and usage-estimator with commerce (#6540)

This commit is contained in:
Kamil Kisiela 2025-02-24 11:41:01 +01:00 committed by GitHub
parent dbbf482bad
commit f60ae6e83e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
98 changed files with 719 additions and 1840 deletions

View file

@ -52,17 +52,10 @@
"command": "pnpm dev"
},
{
"name": "usage-estimator:dev",
"description": "Run Usage Estimator Service",
"name": "commerce:dev",
"description": "Run Commerce Service",
"open": true,
"cwd": "packages/services/usage-estimator",
"command": "pnpm dev"
},
{
"name": "rate-limit:dev",
"description": "Run Rate Limiter Service",
"open": true,
"cwd": "packages/services/rate-limit",
"cwd": "packages/services/commerce",
"command": "pnpm dev"
},
{

View file

@ -1,10 +1,10 @@
import * as pulumi from '@pulumi/pulumi';
import { deployApp } from './services/app';
import { deployStripeBilling } from './services/billing';
import { deployCFBroker } from './services/cf-broker';
import { deployCFCDN } from './services/cf-cdn';
import { deployClickhouse } from './services/clickhouse';
import { deployCloudFlareSecurityTransform } from './services/cloudflare-security';
import { deployCommerce } from './services/commerce';
import { deployDatabaseCleanupJob } from './services/database-cleanup';
import { deployDbMigrations } from './services/db-migrations';
import { configureDocker } from './services/docker';
@ -17,7 +17,6 @@ import { deployObservability } from './services/observability';
import { deploySchemaPolicy } from './services/policy';
import { deployPostgres } from './services/postgres';
import { deployProxy } from './services/proxy';
import { deployRateLimit } from './services/rate-limit';
import { deployRedis } from './services/redis';
import { deployS3, deployS3AuditLog, deployS3Mirror } from './services/s3';
import { deploySchema } from './services/schema';
@ -27,7 +26,6 @@ import { configureSlackApp } from './services/slack-app';
import { deploySuperTokens } from './services/supertokens';
import { deployTokens } from './services/tokens';
import { deployUsage } from './services/usage';
import { deployUsageEstimation } from './services/usage-estimation';
import { deployUsageIngestor } from './services/usage-ingestor';
import { deployWebhooks } from './services/webhooks';
import { configureZendesk } from './services/zendesk';
@ -148,37 +146,16 @@ const emails = deployEmails({
observability,
});
const usageEstimator = deployUsageEstimation({
image: docker.factory.getImageId('usage-estimator', imagesTag),
const commerce = deployCommerce({
image: docker.factory.getImageId('commerce', imagesTag),
docker,
environment,
clickhouse,
dbMigrations,
sentry,
observability,
});
const billing = deployStripeBilling({
image: docker.factory.getImageId('stripe-billing', imagesTag),
docker,
postgres,
environment,
dbMigrations,
usageEstimator,
sentry,
observability,
});
const rateLimit = deployRateLimit({
image: docker.factory.getImageId('rate-limit', imagesTag),
docker,
environment,
dbMigrations,
usageEstimator,
emails,
postgres,
sentry,
observability,
});
const usage = deployUsage({
@ -188,7 +165,7 @@ const usage = deployUsage({
tokens,
kafka,
dbMigrations,
rateLimit,
commerce,
sentry,
observability,
});
@ -241,9 +218,7 @@ const graphql = deployGraphQL({
redis,
usage,
cdn,
usageEstimator,
rateLimit,
billing,
commerce,
emails,
supertokens,
s3,
@ -302,7 +277,7 @@ const app = deployApp({
image: docker.factory.getImageId('app', imagesTag),
docker,
zendesk,
billing,
commerce,
github: githubApp,
slackApp,
sentry,

View file

@ -2,7 +2,7 @@ import { randomUUID } from 'crypto';
import * as pulumi from '@pulumi/pulumi';
import { serviceLocalEndpoint } from '../utils/local-endpoint';
import { ServiceDeployment } from '../utils/service-deployment';
import { StripeBilling } from './billing';
import { CommerceService } from './commerce';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Environment } from './environment';
@ -23,7 +23,7 @@ export function deployApp({
zendesk,
github,
slackApp,
billing,
commerce,
sentry,
environment,
}: {
@ -35,7 +35,7 @@ export function deployApp({
zendesk: Zendesk;
github: GitHubApp;
slackApp: SlackApp;
billing: StripeBilling;
commerce: CommerceService;
sentry: Sentry;
publishAppDeploymentCommand: pulumi.Resource | undefined;
}) {
@ -79,7 +79,7 @@ export function deployApp({
)
.withSecret('INTEGRATION_SLACK_CLIENT_ID', slackApp.secret, 'clientId')
.withSecret('INTEGRATION_SLACK_CLIENT_SECRET', slackApp.secret, 'clientSecret')
.withSecret('STRIPE_PUBLIC_KEY', billing.secret, 'stripePublicKey')
.withSecret('STRIPE_PUBLIC_KEY', commerce.stripeSecret, 'stripePublicKey')
.withConditionalSecret(sentry.enabled, 'SENTRY_DSN', sentry.secret, 'dsn')
.deploy();
}

View file

@ -2,38 +2,41 @@ import * as pulumi from '@pulumi/pulumi';
import { serviceLocalEndpoint } from '../utils/local-endpoint';
import { ServiceSecret } from '../utils/secrets';
import { ServiceDeployment } from '../utils/service-deployment';
import { Clickhouse } from './clickhouse';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Emails } from './emails';
import { Environment } from './environment';
import { Observability } from './observability';
import { Postgres } from './postgres';
import { Sentry } from './sentry';
import { UsageEstimator } from './usage-estimation';
export type StripeBillingService = ReturnType<typeof deployStripeBilling>;
export type CommerceService = ReturnType<typeof deployCommerce>;
class StripeSecret extends ServiceSecret<{
stripePrivateKey: pulumi.Output<string> | string;
stripePublicKey: string | pulumi.Output<string>;
}> {}
export function deployStripeBilling({
export function deployCommerce({
observability,
environment,
dbMigrations,
usageEstimator,
emails,
image,
docker,
postgres,
clickhouse,
sentry,
}: {
observability: Observability;
usageEstimator: UsageEstimator;
image: string;
environment: Environment;
dbMigrations: DbMigrations;
docker: Docker;
emails: Emails;
postgres: Postgres;
clickhouse: Clickhouse;
sentry: Sentry;
}) {
const billingConfig = new pulumi.Config('billing');
@ -42,7 +45,7 @@ export function deployStripeBilling({
stripePublicKey: billingConfig.require('stripePublicKey'),
});
const { deployment, service } = new ServiceDeployment(
'stripe-billing',
'commerce',
{
image,
imagePullSecret: docker.secret,
@ -53,7 +56,9 @@ export function deployStripeBilling({
env: {
...environment.envVars,
SENTRY: sentry.enabled ? '1' : '0',
USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service),
EMAILS_ENDPOINT: serviceLocalEndpoint(emails.service),
WEB_APP_URL: `https://${environment.appDns}/`,
OPENTELEMETRY_TRACE_USAGE_REQUESTS: observability.enabledForUsageService ? '1' : '',
OPENTELEMETRY_COLLECTOR_ENDPOINT:
observability.enabled && observability.tracingEndpoint
? observability.tracingEndpoint
@ -62,7 +67,7 @@ export function deployStripeBilling({
exposesMetrics: true,
port: 4000,
},
[dbMigrations, usageEstimator.service, usageEstimator.deployment],
[dbMigrations],
)
.withSecret('STRIPE_SECRET_KEY', stripeSecret, 'stripePrivateKey')
.withSecret('POSTGRES_HOST', postgres.pgBouncerSecret, 'host')
@ -71,14 +76,17 @@ export function deployStripeBilling({
.withSecret('POSTGRES_PASSWORD', postgres.pgBouncerSecret, 'password')
.withSecret('POSTGRES_DB', postgres.pgBouncerSecret, 'database')
.withSecret('POSTGRES_SSL', postgres.pgBouncerSecret, 'ssl')
.withSecret('CLICKHOUSE_HOST', clickhouse.secret, 'host')
.withSecret('CLICKHOUSE_PORT', clickhouse.secret, 'port')
.withSecret('CLICKHOUSE_USERNAME', clickhouse.secret, 'username')
.withSecret('CLICKHOUSE_PASSWORD', clickhouse.secret, 'password')
.withSecret('CLICKHOUSE_PROTOCOL', clickhouse.secret, 'protocol')
.withConditionalSecret(sentry.enabled, 'SENTRY_DSN', sentry.secret, 'dsn')
.deploy();
return {
deployment,
service,
secret: stripeSecret,
stripeSecret,
};
}
export type StripeBilling = ReturnType<typeof deployStripeBilling>;

View file

@ -2,9 +2,9 @@ import * as pulumi from '@pulumi/pulumi';
import { serviceLocalEndpoint } from '../utils/local-endpoint';
import { ServiceSecret } from '../utils/secrets';
import { ServiceDeployment } from '../utils/service-deployment';
import { StripeBillingService } from './billing';
import { CDN } from './cf-cdn';
import { Clickhouse } from './clickhouse';
import { CommerceService } from './commerce';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Emails } from './emails';
@ -13,7 +13,6 @@ import { GitHubApp } from './github';
import { Observability } from './observability';
import { SchemaPolicy } from './policy';
import { Postgres } from './postgres';
import { RateLimitService } from './rate-limit';
import { Redis } from './redis';
import { S3 } from './s3';
import { Schema } from './schema';
@ -21,7 +20,6 @@ import { Sentry } from './sentry';
import { Supertokens } from './supertokens';
import { Tokens } from './tokens';
import { Usage } from './usage';
import { UsageEstimator } from './usage-estimation';
import { Webhooks } from './webhooks';
import { Zendesk } from './zendesk';
@ -43,10 +41,8 @@ export function deployGraphQL({
cdn,
redis,
usage,
usageEstimator,
commerce,
dbMigrations,
rateLimit,
billing,
emails,
supertokens,
s3,
@ -75,10 +71,8 @@ export function deployGraphQL({
s3Mirror: S3;
s3AuditLog: S3;
usage: Usage;
usageEstimator: UsageEstimator;
dbMigrations: DbMigrations;
rateLimit: RateLimitService;
billing: StripeBillingService;
commerce: CommerceService;
emails: Emails;
supertokens: Supertokens;
zendesk: Zendesk;
@ -125,15 +119,13 @@ export function deployGraphQL({
...apiEnv,
SENTRY: sentry.enabled ? '1' : '0',
REQUEST_LOGGING: '0', // disabled
BILLING_ENDPOINT: serviceLocalEndpoint(billing.service),
COMMERCE_ENDPOINT: serviceLocalEndpoint(commerce.service),
TOKENS_ENDPOINT: serviceLocalEndpoint(tokens.service),
WEBHOOKS_ENDPOINT: serviceLocalEndpoint(webhooks.service),
SCHEMA_ENDPOINT: serviceLocalEndpoint(schema.service),
SCHEMA_POLICY_ENDPOINT: serviceLocalEndpoint(schemaPolicy.service),
HIVE_USAGE_ENDPOINT: serviceLocalEndpoint(usage.service),
RATE_LIMIT_ENDPOINT: serviceLocalEndpoint(rateLimit.service),
EMAILS_ENDPOINT: serviceLocalEndpoint(emails.service),
USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service),
WEB_APP_URL: `https://${environment.appDns}`,
GRAPHQL_PUBLIC_ORIGIN: `https://${environment.appDns}`,
CDN_CF: '1',
@ -166,8 +158,8 @@ export function deployGraphQL({
redis.service,
clickhouse.deployment,
clickhouse.service,
rateLimit.deployment,
rateLimit.service,
commerce.deployment,
commerce.service,
],
)
// GitHub App

View file

@ -1,70 +0,0 @@
import { serviceLocalEndpoint } from '../utils/local-endpoint';
import { ServiceDeployment } from '../utils/service-deployment';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Emails } from './emails';
import { Environment } from './environment';
import { Observability } from './observability';
import { Postgres } from './postgres';
import { Sentry } from './sentry';
import { UsageEstimator } from './usage-estimation';
export type RateLimitService = ReturnType<typeof deployRateLimit>;

/**
 * Deploys the `rate-limiter` service.
 *
 * Wires the usage-estimator and emails service endpoints plus the web-app
 * URL into the container, mounts Postgres (pgBouncer) credentials as
 * secrets, and delays startup until the db migrations job and the
 * usage-estimator deployment/service are in place.
 */
export function deployRateLimit(args: {
  observability: Observability;
  usageEstimator: UsageEstimator;
  environment: Environment;
  dbMigrations: DbMigrations;
  emails: Emails;
  image: string;
  docker: Docker;
  postgres: Postgres;
  sentry: Sentry;
}) {
  const { environment, observability, sentry, usageEstimator } = args;

  // Tracing collector is only handed to the container when observability
  // is enabled AND an endpoint is actually configured.
  const tracingEndpoint =
    observability.enabled && observability.tracingEndpoint ? observability.tracingEndpoint : '';

  const containerEnv = {
    ...environment.envVars,
    SENTRY: sentry.enabled ? '1' : '0',
    // Refresh the limit cache every minute in production, effectively once
    // a day in every other environment.
    LIMIT_CACHE_UPDATE_INTERVAL_MS: environment.isProduction ? '60000' : '86400000',
    USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service),
    EMAILS_ENDPOINT: serviceLocalEndpoint(args.emails.service),
    WEB_APP_URL: `https://${environment.appDns}/`,
    OPENTELEMETRY_TRACE_USAGE_REQUESTS: observability.enabledForUsageService ? '1' : '',
    OPENTELEMETRY_COLLECTOR_ENDPOINT: tracingEndpoint,
  };

  const deployment = new ServiceDeployment(
    'rate-limiter',
    {
      imagePullSecret: args.docker.secret,
      replicas: environment.isProduction ? 3 : 1,
      readinessProbe: '/_readiness',
      livenessProbe: '/_health',
      startupProbe: '/_health',
      env: containerEnv,
      exposesMetrics: true,
      port: 4000,
      image: args.image,
    },
    // Dependencies: migrations plus the usage-estimator service/deployment.
    [args.dbMigrations, usageEstimator.service, usageEstimator.deployment],
  );

  // Postgres credentials come from the pgBouncer secret; the Sentry DSN is
  // only mounted when Sentry reporting is turned on.
  return deployment
    .withSecret('POSTGRES_HOST', args.postgres.pgBouncerSecret, 'host')
    .withSecret('POSTGRES_PORT', args.postgres.pgBouncerSecret, 'port')
    .withSecret('POSTGRES_USER', args.postgres.pgBouncerSecret, 'user')
    .withSecret('POSTGRES_PASSWORD', args.postgres.pgBouncerSecret, 'password')
    .withSecret('POSTGRES_DB', args.postgres.pgBouncerSecret, 'database')
    .withSecret('POSTGRES_SSL', args.postgres.pgBouncerSecret, 'ssl')
    .withConditionalSecret(sentry.enabled, 'SENTRY_DSN', sentry.secret, 'dsn')
    .deploy();
}

View file

@ -1,57 +0,0 @@
import { ServiceDeployment } from '../utils/service-deployment';
import { Clickhouse } from './clickhouse';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Environment } from './environment';
import { Observability } from './observability';
import { Sentry } from './sentry';
export type UsageEstimator = ReturnType<typeof deployUsageEstimation>;

/**
 * Deploys the `usage-estimator` service.
 *
 * ClickHouse connection details are mounted as secrets, startup waits for
 * the db migrations job, and the service exposes metrics on port 4000.
 */
export function deployUsageEstimation(args: {
  observability: Observability;
  image: string;
  docker: Docker;
  environment: Environment;
  clickhouse: Clickhouse;
  dbMigrations: DbMigrations;
  sentry: Sentry;
}) {
  const { environment, observability, sentry, clickhouse } = args;

  // Tracing collector is only handed to the container when observability
  // is enabled AND an endpoint is actually configured.
  const tracingEndpoint =
    observability.enabled && observability.tracingEndpoint ? observability.tracingEndpoint : '';

  const deployment = new ServiceDeployment(
    'usage-estimator',
    {
      image: args.image,
      imagePullSecret: args.docker.secret,
      replicas: environment.isProduction ? 3 : 1,
      readinessProbe: '/_readiness',
      livenessProbe: '/_health',
      startupProbe: '/_health',
      env: {
        ...environment.envVars,
        SENTRY: sentry.enabled ? '1' : '0',
        OPENTELEMETRY_COLLECTOR_ENDPOINT: tracingEndpoint,
      },
      exposesMetrics: true,
      port: 4000,
    },
    // Only depends on the migrations job having completed.
    [args.dbMigrations],
  );

  // ClickHouse credentials come from the clickhouse secret; the Sentry DSN
  // is only mounted when Sentry reporting is turned on.
  return deployment
    .withSecret('CLICKHOUSE_HOST', clickhouse.secret, 'host')
    .withSecret('CLICKHOUSE_PORT', clickhouse.secret, 'port')
    .withSecret('CLICKHOUSE_USERNAME', clickhouse.secret, 'username')
    .withSecret('CLICKHOUSE_PASSWORD', clickhouse.secret, 'password')
    .withSecret('CLICKHOUSE_PROTOCOL', clickhouse.secret, 'protocol')
    .withConditionalSecret(sentry.enabled, 'SENTRY_DSN', sentry.secret, 'dsn')
    .deploy();
}

View file

@ -1,12 +1,12 @@
import * as pulumi from '@pulumi/pulumi';
import { serviceLocalEndpoint } from '../utils/local-endpoint';
import { ServiceDeployment } from '../utils/service-deployment';
import { CommerceService } from './commerce';
import { DbMigrations } from './db-migrations';
import { Docker } from './docker';
import { Environment } from './environment';
import { Kafka } from './kafka';
import { Observability } from './observability';
import { RateLimitService } from './rate-limit';
import { Sentry } from './sentry';
import { Tokens } from './tokens';
@ -17,7 +17,7 @@ export function deployUsage({
tokens,
kafka,
dbMigrations,
rateLimit,
commerce,
image,
docker,
observability,
@ -29,7 +29,7 @@ export function deployUsage({
tokens: Tokens;
kafka: Kafka;
dbMigrations: DbMigrations;
rateLimit: RateLimitService;
commerce: CommerceService;
docker: Docker;
sentry: Sentry;
}) {
@ -66,7 +66,7 @@ export function deployUsage({
KAFKA_BUFFER_DYNAMIC: kafkaBufferDynamic,
KAFKA_TOPIC: kafka.config.topic,
TOKENS_ENDPOINT: serviceLocalEndpoint(tokens.service),
RATE_LIMIT_ENDPOINT: serviceLocalEndpoint(rateLimit.service),
COMMERCE_ENDPOINT: serviceLocalEndpoint(commerce.service),
OPENTELEMETRY_COLLECTOR_ENDPOINT:
observability.enabled &&
observability.enabledForUsageService &&
@ -85,13 +85,9 @@ export function deployUsage({
maxReplicas,
},
},
[
dbMigrations,
tokens.deployment,
tokens.service,
rateLimit.deployment,
rateLimit.service,
].filter(Boolean),
[dbMigrations, tokens.deployment, tokens.service, commerce.deployment, commerce.service].filter(
Boolean,
),
)
.withSecret('KAFKA_SASL_USERNAME', kafka.secret, 'saslUsername')
.withSecret('KAFKA_SASL_PASSWORD', kafka.secret, 'saslPassword')

View file

@ -123,27 +123,6 @@ target "emails" {
]
}
target "rate-limit" {
inherits = ["service-base", get_target()]
contexts = {
dist = "${PWD}/packages/services/rate-limit/dist"
shared = "${PWD}/docker/shared"
}
args = {
SERVICE_DIR_NAME = "@hive/rate-limit"
IMAGE_TITLE = "graphql-hive/rate-limit"
IMAGE_DESCRIPTION = "The rate limit service of the GraphQL Hive project."
PORT = "3009"
HEALTHCHECK_CMD = "wget --spider -q http://127.0.0.1:$${PORT}/_readiness"
}
tags = [
local_image_tag("rate-limit"),
stable_image_tag("rate-limit"),
image_tag("rate-limit", COMMIT_SHA),
image_tag("rate-limit", BRANCH_NAME)
]
}
target "schema" {
inherits = ["service-base", get_target()]
contexts = {
@ -225,24 +204,24 @@ target "storage" {
]
}
target "stripe-billing" {
target "commerce" {
inherits = ["service-base", get_target()]
contexts = {
dist = "${PWD}/packages/services/stripe-billing/dist"
dist = "${PWD}/packages/services/commerce/dist"
shared = "${PWD}/docker/shared"
}
args = {
SERVICE_DIR_NAME = "@hive/stripe-billing"
IMAGE_TITLE = "graphql-hive/stripe-billing"
IMAGE_DESCRIPTION = "The stripe billing service of the GraphQL Hive project."
SERVICE_DIR_NAME = "@hive/commerce"
IMAGE_TITLE = "graphql-hive/commerce"
IMAGE_DESCRIPTION = "The commerce service of the GraphQL Hive project."
PORT = "3010"
HEALTHCHECK_CMD = "wget --spider -q http://127.0.0.1:$${PORT}/_readiness"
}
tags = [
local_image_tag("stripe-billing"),
stable_image_tag("stripe-billing"),
image_tag("stripe-billing", COMMIT_SHA),
image_tag("stripe-billing", BRANCH_NAME)
local_image_tag("commerce"),
stable_image_tag("commerce"),
image_tag("commerce", COMMIT_SHA),
image_tag("commerce", BRANCH_NAME)
]
}
@ -267,27 +246,6 @@ target "tokens" {
]
}
target "usage-estimator" {
inherits = ["service-base", get_target()]
contexts = {
dist = "${PWD}/packages/services/usage-estimator/dist"
shared = "${PWD}/docker/shared"
}
args = {
SERVICE_DIR_NAME = "@hive/usage-estimator"
IMAGE_TITLE = "graphql-hive/usage-estimator"
IMAGE_DESCRIPTION = "The usage estimator service of the GraphQL Hive project."
PORT = "3008"
HEALTHCHECK_CMD = "wget --spider -q http://127.0.0.1:$${PORT}/_readiness"
}
tags = [
local_image_tag("usage-estimator"),
stable_image_tag("usage-estimator"),
image_tag("usage-estimator", COMMIT_SHA),
image_tag("usage-estimator", BRANCH_NAME)
]
}
target "usage-ingestor" {
inherits = ["service-base", get_target()]
contexts = {
@ -432,17 +390,15 @@ target "cli" {
group "build" {
targets = [
"emails",
"rate-limit",
"schema",
"policy",
"storage",
"tokens",
"usage-estimator",
"usage-ingestor",
"usage",
"webhooks",
"server",
"stripe-billing",
"commerce",
"composition-federation-2",
"app"
]
@ -450,13 +406,12 @@ group "build" {
group "integration-tests" {
targets = [
"commerce",
"emails",
"rate-limit",
"schema",
"policy",
"storage",
"tokens",
"usage-estimator",
"usage-ingestor",
"usage",
"webhooks",

View file

@ -15,8 +15,7 @@ CLICKHOUSE_USER=clickhouse
CLICKHOUSE_PASSWORD=wowverysecuremuchsecret
CDN_AUTH_PRIVATE_KEY=6b4721a99bd2ef6c00ce4328f34d95d7
EMAIL_PROVIDER=mock
USAGE_ESTIMATOR_ENDPOINT=http://usage-estimator:3008
RATE_LIMIT_ENDPOINT=http://rate-limit:3009
COMMERCE_ENDPOINT=http://commerce:3009
CLICKHOUSE_ASYNC_INSERT_BUSY_TIMEOUT_MS=500
CLICKHOUSE_ASYNC_INSERT_MAX_DATA_SIZE=1000
EXTERNAL_COMPOSITION_SECRET=secretsecret

View file

@ -111,30 +111,8 @@ services:
LOG_LEVEL: debug
SECRET: '${EXTERNAL_COMPOSITION_SECRET}'
usage-estimator:
image: '${DOCKER_REGISTRY}usage-estimator${DOCKER_TAG}'
networks:
- 'stack'
ports:
- 3008:3008
depends_on:
clickhouse:
condition: service_healthy
migrations:
condition: service_completed_successfully
environment:
NODE_ENV: production
LOG_LEVEL: debug
CLICKHOUSE_PROTOCOL: 'http'
CLICKHOUSE_HOST: 'clickhouse'
CLICKHOUSE_PORT: '8123'
CLICKHOUSE_USERNAME: '${CLICKHOUSE_USER}'
CLICKHOUSE_PASSWORD: '${CLICKHOUSE_PASSWORD}'
PORT: 3008
FF_CLICKHOUSE_V2_TABLES: ${FF_CLICKHOUSE_V2_TABLES:-}
rate-limit:
image: '${DOCKER_REGISTRY}rate-limit${DOCKER_TAG}'
commerce:
image: '${DOCKER_REGISTRY}commerce${DOCKER_TAG}'
networks:
- 'stack'
ports:
@ -144,8 +122,6 @@ services:
condition: service_healthy
migrations:
condition: service_completed_successfully
usage-estimator:
condition: service_healthy
emails:
condition: service_healthy
environment:
@ -157,8 +133,13 @@ services:
POSTGRES_DB: '${POSTGRES_DB}'
POSTGRES_USER: '${POSTGRES_USER}'
POSTGRES_PASSWORD: '${POSTGRES_PASSWORD}'
USAGE_ESTIMATOR_ENDPOINT: http://usage-estimator:3008
CLICKHOUSE_PROTOCOL: 'http'
CLICKHOUSE_HOST: 'clickhouse'
CLICKHOUSE_PORT: '8123'
CLICKHOUSE_USERNAME: '${CLICKHOUSE_USER}'
CLICKHOUSE_PASSWORD: '${CLICKHOUSE_PASSWORD}'
EMAILS_ENDPOINT: http://emails:3011
STRIPE_SECRET_KEY: empty
PORT: 3009
# Overrides only to `docker-compose.community.yaml` from now on:
@ -180,8 +161,7 @@ services:
GITHUB_APP_PRIVATE_KEY: 5f938d51a065476c4dc1b04aeba13afb
FEEDBACK_SLACK_TOKEN: ''
FEEDBACK_SLACK_CHANNEL: '#hive'
USAGE_ESTIMATOR_ENDPOINT: '${USAGE_ESTIMATOR_ENDPOINT}'
RATE_LIMIT_ENDPOINT: '${RATE_LIMIT_ENDPOINT}'
COMMERCE_ENDPOINT: '${COMMERCE_ENDPOINT}'
EMAIL_PROVIDER: '${EMAIL_PROVIDER}'
LOG_LEVEL: debug
# Auth
@ -239,7 +219,7 @@ services:
usage:
environment:
RATE_LIMIT_ENDPOINT: '${RATE_LIMIT_ENDPOINT}'
COMMERCE_ENDPOINT: '${COMMERCE_ENDPOINT}'
RATE_LIMIT_TTL: 1000
LOG_LEVEL: debug
depends_on:

View file

@ -17,7 +17,7 @@
"@graphql-hive/apollo": "workspace:*",
"@graphql-hive/core": "workspace:*",
"@graphql-typed-document-node/core": "3.2.0",
"@hive/rate-limit": "workspace:*",
"@hive/commerce": "workspace:*",
"@hive/schema": "workspace:*",
"@hive/server": "workspace:*",
"@hive/storage": "workspace:*",

View file

@ -64,6 +64,7 @@
"prom-client": "15.1.3",
"redlock": "5.0.0-beta.2",
"slonik": "30.4.4",
"stripe": "17.5.0",
"supertokens-node": "16.7.5",
"tslib": "2.8.1",
"undici": "6.21.1",

View file

@ -10,12 +10,15 @@ import { AuditLogRecorder } from './modules/audit-logs/providers/audit-log-recor
import { AuditLogS3Config } from './modules/audit-logs/providers/audit-logs-manager';
import { authModule } from './modules/auth';
import { Session } from './modules/auth/lib/authz';
import { billingModule } from './modules/billing';
import { BILLING_CONFIG, BillingConfig } from './modules/billing/providers/tokens';
import { cdnModule } from './modules/cdn';
import { AwsClient } from './modules/cdn/providers/aws';
import { CDN_CONFIG, CDNConfig } from './modules/cdn/providers/tokens';
import { collectionModule } from './modules/collection';
import { commerceModule } from './modules/commerce';
import {
CommerceConfig,
provideCommerceConfig,
} from './modules/commerce/providers/commerce-client';
import { integrationsModule } from './modules/integrations';
import {
GITHUB_APP_CONFIG,
@ -33,11 +36,6 @@ import {
SchemaPolicyServiceConfig,
} from './modules/policy/providers/tokens';
import { projectModule } from './modules/project';
import { rateLimitModule } from './modules/rate-limit';
import {
RATE_LIMIT_SERVICE_CONFIG,
RateLimitServiceConfig,
} from './modules/rate-limit/providers/tokens';
import { schemaModule } from './modules/schema';
import { ArtifactStorageWriter } from './modules/schema/providers/artifact-storage-writer';
import { provideSchemaModuleConfig, SchemaModuleConfig } from './modules/schema/providers/config';
@ -51,6 +49,10 @@ import { DistributedCache } from './modules/shared/providers/distributed-cache';
import { Emails, EMAILS_ENDPOINT } from './modules/shared/providers/emails';
import { HttpClient } from './modules/shared/providers/http-client';
import { IdTranslator } from './modules/shared/providers/id-translator';
import {
InMemoryRateLimiter,
InMemoryRateLimitStore,
} from './modules/shared/providers/in-memory-rate-limiter';
import { Logger } from './modules/shared/providers/logger';
import { Mutex } from './modules/shared/providers/mutex';
import { PG_POOL_CONFIG } from './modules/shared/providers/pg-pool';
@ -64,11 +66,6 @@ import { provideSupportConfig, SupportConfig } from './modules/support/providers
import { targetModule } from './modules/target';
import { tokenModule } from './modules/token';
import { TOKENS_CONFIG, TokensConfig } from './modules/token/providers/tokens';
import { usageEstimationModule } from './modules/usage-estimation';
import {
USAGE_ESTIMATION_SERVICE_CONFIG,
UsageEstimationServiceConfig,
} from './modules/usage-estimation/providers/tokens';
const modules = [
sharedModule,
@ -84,9 +81,7 @@ const modules = [
alertsModule,
cdnModule,
adminModule,
usageEstimationModule,
rateLimitModule,
billingModule,
commerceModule,
oidcIntegrationsModule,
schemaPolicyModule,
collectionModule,
@ -96,11 +91,10 @@ const modules = [
export function createRegistry({
app,
commerce,
tokens,
webhooks,
schemaService,
usageEstimationService,
rateLimitService,
schemaPolicyService,
logger,
storage,
@ -112,7 +106,6 @@ export function createRegistry({
s3Mirror,
s3AuditLogs,
encryptionSecret,
billing,
schemaConfig,
supportConfig,
emailsEndpoint,
@ -124,11 +117,10 @@ export function createRegistry({
storage: Storage;
clickHouse: ClickHouseConfig;
redis: Redis;
commerce: CommerceConfig;
tokens: TokensConfig;
webhooks: WebhooksConfig;
schemaService: SchemaServiceConfig;
usageEstimationService: UsageEstimationServiceConfig;
rateLimitService: RateLimitServiceConfig;
schemaPolicyService: SchemaPolicyServiceConfig;
githubApp: GitHubApplicationConfig | null;
cdn: CDNConfig | null;
@ -157,7 +149,6 @@ export function createRegistry({
app: {
baseUrl: string;
} | null;
billing: BillingConfig;
schemaConfig: SchemaModuleConfig;
supportConfig: SupportConfig | null;
emailsEndpoint?: string;
@ -214,6 +205,8 @@ export function createRegistry({
DistributedCache,
CryptoProvider,
Emails,
InMemoryRateLimitStore,
InMemoryRateLimiter,
{
provide: AuditLogS3Config,
useValue: auditLogS3Config,
@ -242,11 +235,7 @@ export function createRegistry({
useValue: tokens,
scope: Scope.Singleton,
},
{
provide: BILLING_CONFIG,
useValue: billing,
scope: Scope.Singleton,
},
{
provide: WEBHOOKS_CONFIG,
useValue: webhooks,
@ -257,16 +246,6 @@ export function createRegistry({
useValue: schemaService,
scope: Scope.Singleton,
},
{
provide: USAGE_ESTIMATION_SERVICE_CONFIG,
useValue: usageEstimationService,
scope: Scope.Singleton,
},
{
provide: RATE_LIMIT_SERVICE_CONFIG,
useValue: rateLimitService,
scope: Scope.Singleton,
},
{
provide: SCHEMA_POLICY_SERVICE_CONFIG,
useValue: schemaPolicyService,
@ -315,6 +294,7 @@ export function createRegistry({
{ provide: PUB_SUB_CONFIG, scope: Scope.Singleton, useValue: pubSub },
encryptionSecretProvider(encryptionSecret),
provideSchemaModuleConfig(schemaConfig),
provideCommerceConfig(commerce),
{
provide: Session,
useFactory(context: { session: Session }) {

View file

@ -1,13 +0,0 @@
import { createModule } from 'graphql-modules';
import { AuditLogManager } from '../audit-logs/providers/audit-logs-manager';
import { BillingProvider } from './providers/billing.provider';
import { resolvers } from './resolvers.generated';
import typeDefs from './module.graphql';
/**
 * graphql-modules module for billing.
 *
 * Registers the billing schema (`module.graphql`), its generated resolvers,
 * and the providers the resolvers inject (BillingProvider, AuditLogManager).
 */
export const billingModule = createModule({
  id: 'billing',
  dirname: __dirname,
  typeDefs,
  resolvers,
  providers: [BillingProvider, AuditLogManager],
});

View file

@ -1,5 +0,0 @@
import type { StripeTypes } from '@hive/stripe-billing';
// Mapper aliases for the billing GraphQL types — presumably consumed by the
// resolver codegen to pick the runtime shape backing each GraphQL type
// (TODO confirm against the codegen mappers config). All shapes come from
// the Stripe types re-exported by @hive/stripe-billing.
export type BillingPaymentMethodMapper = StripeTypes.PaymentMethod.Card;
export type BillingDetailsMapper = StripeTypes.PaymentMethod.BillingDetails;
export type BillingInvoiceMapper = StripeTypes.Invoice | StripeTypes.UpcomingInvoice;

View file

@ -1,7 +0,0 @@
import { InjectionToken } from 'graphql-modules';
// Configuration for the billing module.
export type BillingConfig = {
  // URL of the stripe-billing service; when null, consumers treat billing
  // as disabled (BillingProvider only enables itself if an endpoint is set).
  endpoint: string | null;
};

// Injection token used to provide/inject BillingConfig via graphql-modules DI.
export const BILLING_CONFIG = new InjectionToken<BillingConfig>('billing-config');

View file

@ -0,0 +1,22 @@
import { createModule } from 'graphql-modules';
import { AuditLogManager } from '../audit-logs/providers/audit-logs-manager';
import { BillingProvider } from './providers/billing.provider';
import { provideCommerceClient } from './providers/commerce-client';
import { RateLimitProvider } from './providers/rate-limit.provider';
import { UsageEstimationProvider } from './providers/usage-estimation.provider';
import { resolvers } from './resolvers.generated';
import typeDefs from './module.graphql';
/**
 * graphql-modules module for commerce.
 *
 * Bundles the former billing, rate-limit and usage-estimation providers
 * behind a single module, together with the shared tRPC client created by
 * provideCommerceClient() that they use to reach the commerce service.
 */
export const commerceModule = createModule({
  id: 'commerce',
  dirname: __dirname,
  typeDefs,
  resolvers,
  providers: [
    BillingProvider,
    RateLimitProvider,
    UsageEstimationProvider,
    AuditLogManager,
    provideCommerceClient(),
  ],
});

View file

@ -0,0 +1,5 @@
import type { Stripe } from 'stripe';
// Mapper aliases for the billing GraphQL types — presumably consumed by the
// resolver codegen to pick the runtime shape backing each GraphQL type
// (TODO confirm against the codegen mappers config). Shapes now come
// straight from the `stripe` package instead of @hive/stripe-billing.
export type BillingPaymentMethodMapper = Stripe.PaymentMethod.Card;
export type BillingDetailsMapper = Stripe.PaymentMethod.BillingDetails;
export type BillingInvoiceMapper = Stripe.Invoice | Stripe.UpcomingInvoice;

View file

@ -6,6 +6,7 @@ export default gql`
billingConfiguration: BillingConfiguration!
viewerCanDescribeBilling: Boolean!
viewerCanModifyBilling: Boolean!
rateLimit: RateLimit!
}
type BillingConfiguration {
@ -54,6 +55,7 @@ export default gql`
extend type Query {
billingPlans: [BillingPlan!]!
usageEstimation(input: UsageEstimationInput!): UsageEstimation!
}
type BillingPlan {
@ -106,4 +108,24 @@ export default gql`
newPlan: BillingPlanType!
organization: Organization!
}
type RateLimit {
limitedForOperations: Boolean!
operations: SafeInt!
retentionInDays: Int!
}
input RateLimitInput {
operations: SafeInt!
}
input UsageEstimationInput {
year: Int!
month: Int!
organizationSlug: String!
}
type UsageEstimation {
operations: SafeInt!
}
`;

View file

@ -1,52 +1,47 @@
import { Inject, Injectable, Scope } from 'graphql-modules';
import type { StripeBillingApi, StripeBillingApiInput } from '@hive/stripe-billing';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { OrganizationBilling } from '../../../shared/entities';
import { AuditLogRecorder } from '../../audit-logs/providers/audit-log-recorder';
import { Session } from '../../auth/lib/authz';
import { IdTranslator } from '../../shared/providers/id-translator';
import { Logger } from '../../shared/providers/logger';
import { Storage } from '../../shared/providers/storage';
import type { BillingConfig } from './tokens';
import { BILLING_CONFIG } from './tokens';
import {
COMMERCE_TRPC_CLIENT,
type CommerceTrpcClient,
type CommerceTrpcClientInputs,
} from './commerce-client';
type BillingInput = CommerceTrpcClientInputs['stripeBilling'];
@Injectable({
global: true,
scope: Scope.Operation,
global: true,
})
export class BillingProvider {
private logger: Logger;
private billingService;
enabled = false;
constructor(
@Inject(COMMERCE_TRPC_CLIENT) private client: CommerceTrpcClient,
logger: Logger,
private auditLog: AuditLogRecorder,
private storage: Storage,
private idTranslator: IdTranslator,
private session: Session,
@Inject(BILLING_CONFIG) billingConfig: BillingConfig,
) {
this.logger = logger.child({ source: 'BillingProvider' });
this.billingService = billingConfig.endpoint
? createTRPCProxyClient<StripeBillingApi>({
links: [httpLink({ url: `${billingConfig.endpoint}/trpc`, fetch })],
})
: null;
this.logger = logger.child({ source: 'CommerceProvider' });
if (billingConfig.endpoint) {
this.enabled = true;
}
this.enabled = !!client;
}
async upgradeToPro(input: StripeBillingApiInput['createSubscriptionForOrganization']) {
async upgradeToPro(input: BillingInput['createSubscriptionForOrganization']) {
this.logger.debug('Upgrading to PRO (input=%o)', input);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
const result = this.billingService.createSubscriptionForOrganization.mutate(input);
const result = this.client.stripeBilling.createSubscriptionForOrganization.mutate(input);
await this.auditLog.record({
eventType: 'SUBSCRIPTION_CREATED',
@ -62,21 +57,21 @@ export class BillingProvider {
return result;
}
syncOrganization(input: StripeBillingApiInput['syncOrganizationToStripe']) {
if (!this.billingService) {
syncOrganization(input: BillingInput['syncOrganizationToStripe']) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
return this.billingService.syncOrganizationToStripe.mutate(input);
return this.client.stripeBilling.syncOrganizationToStripe.mutate(input);
}
async getAvailablePrices() {
this.logger.debug('Getting available prices');
if (!this.billingService) {
if (!this.client) {
return null;
}
return await this.billingService.availablePrices.query();
return await this.client.stripeBilling.availablePrices.query();
}
async getOrganizationBillingParticipant(selector: {
@ -89,36 +84,36 @@ export class BillingProvider {
});
}
getActiveSubscription(input: StripeBillingApiInput['activeSubscription']) {
getActiveSubscription(input: BillingInput['activeSubscription']) {
this.logger.debug('Fetching active subscription (input=%o)', input);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
return this.billingService.activeSubscription.query(input);
return this.client.stripeBilling.activeSubscription.query(input);
}
invoices(input: StripeBillingApiInput['invoices']) {
invoices(input: BillingInput['invoices']) {
this.logger.debug('Fetching invoices (input=%o)', input);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
return this.billingService.invoices.query(input);
return this.client.stripeBilling.invoices.query(input);
}
upcomingInvoice(input: StripeBillingApiInput['upcomingInvoice']) {
upcomingInvoice(input: BillingInput['upcomingInvoice']) {
this.logger.debug('Fetching upcoming invoices (input=%o)', input);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
return this.billingService.upcomingInvoice.query(input);
return this.client.stripeBilling.upcomingInvoice.query(input);
}
async downgradeToHobby(input: StripeBillingApiInput['cancelSubscriptionForOrganization']) {
async downgradeToHobby(input: BillingInput['cancelSubscriptionForOrganization']) {
this.logger.debug('Downgrading to Hobby (input=%o)', input);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
@ -131,13 +126,13 @@ export class BillingProvider {
},
});
return await this.billingService.cancelSubscriptionForOrganization.mutate(input);
return await this.client.stripeBilling.cancelSubscriptionForOrganization.mutate(input);
}
public async generateStripePortalLink(args: { organizationSlug: string }) {
async generateStripePortalLink(args: { organizationSlug: string }) {
this.logger.debug('Generating Stripe portal link for id:' + args.organizationSlug);
if (!this.billingService) {
if (!this.client) {
throw new Error(`Billing service is not configured!`);
}
@ -153,7 +148,7 @@ export class BillingProvider {
},
});
return await this.billingService.generateStripePortalLink.mutate({
return await this.client.stripeBilling.generateStripePortalLink.mutate({
organizationId,
});
}

View file

@ -0,0 +1,38 @@
import { FactoryProvider, InjectionToken, Scope, ValueProvider } from 'graphql-modules';
import type { CommerceRouter } from '@hive/commerce';
import { createTRPCProxyClient, httpLink, type CreateTRPCProxyClient } from '@trpc/client';
import type { inferRouterInputs } from '@trpc/server';
export type CommerceTrpcClient = CreateTRPCProxyClient<CommerceRouter> | null;
export type CommerceTrpcClientInputs = inferRouterInputs<CommerceRouter>;
export type CommerceConfig = {
endpoint: string | null;
};
export const COMMERCE_TRPC_CLIENT = new InjectionToken<CommerceTrpcClient>('commerce-trpc-client');
export const COMMERCE_CONFIG = new InjectionToken<CommerceConfig>('commerce-config');
export function provideCommerceConfig(config: CommerceConfig): ValueProvider<CommerceConfig> {
return {
provide: COMMERCE_CONFIG,
useValue: config,
scope: Scope.Singleton,
};
}
export function provideCommerceClient(): FactoryProvider<CommerceTrpcClient> {
return {
provide: COMMERCE_TRPC_CLIENT,
scope: Scope.Singleton,
deps: [COMMERCE_CONFIG],
useFactory(config: CommerceConfig) {
if (!config.endpoint) {
return null;
}
return createTRPCProxyClient<CommerceRouter>({
links: [httpLink({ url: `${config.endpoint}/trpc`, fetch })],
});
},
};
}

View file

@ -1,10 +1,13 @@
import { Inject, Injectable, Scope } from 'graphql-modules';
import LRU from 'lru-cache';
import type { RateLimitApi, RateLimitApiInput } from '@hive/rate-limit';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { Logger } from '../../shared/providers/logger';
import type { RateLimitServiceConfig } from './tokens';
import { RATE_LIMIT_SERVICE_CONFIG } from './tokens';
import {
COMMERCE_TRPC_CLIENT,
type CommerceTrpcClient,
type CommerceTrpcClientInputs,
} from './commerce-client';
type RateLimitApiInput = CommerceTrpcClientInputs['rateLimit'];
const RETENTION_CACHE_TTL_IN_SECONDS = 120;
@ -14,7 +17,6 @@ const RETENTION_CACHE_TTL_IN_SECONDS = 120;
})
export class RateLimitProvider {
private logger: Logger;
private rateLimit;
private retentionCache = new LRU<string, number>({
max: 500,
ttl: RETENTION_CACHE_TTL_IN_SECONDS * 1000,
@ -23,24 +25,13 @@ export class RateLimitProvider {
constructor(
logger: Logger,
@Inject(RATE_LIMIT_SERVICE_CONFIG)
rateLimitServiceConfig: RateLimitServiceConfig,
@Inject(COMMERCE_TRPC_CLIENT) private client: CommerceTrpcClient,
) {
this.logger = logger.child({ service: 'RateLimitProvider' });
this.rateLimit = rateLimitServiceConfig.endpoint
? createTRPCProxyClient<RateLimitApi>({
links: [
httpLink({
url: `${rateLimitServiceConfig.endpoint}/trpc`,
fetch,
}),
],
})
: null;
}
async checkRateLimit(input: RateLimitApiInput['checkRateLimit']) {
if (this.rateLimit === null) {
if (this.client === null) {
this.logger.warn(
`Unable to check rate-limit for input: %o , service information is not available`,
input,
@ -54,11 +45,11 @@ export class RateLimitProvider {
this.logger.debug(`Checking rate limit for target id="${input.id}", type=${input.type}`);
return await this.rateLimit.checkRateLimit.query(input);
return await this.client.rateLimit.checkRateLimit.query(input);
}
async getRetention(input: RateLimitApiInput['getRetention']) {
if (this.rateLimit === null) {
if (this.client === null) {
return null;
}
@ -68,7 +59,7 @@ export class RateLimitProvider {
this.logger.debug(`Fetching retention for target id="${input.targetId}"`);
const value = await this.rateLimit.getRetention.query(input);
const value = await this.client.rateLimit.getRetention.query(input);
this.retentionCache.set(input.targetId, value);
return value;

View file

@ -1,39 +1,24 @@
import { Inject, Injectable, Scope } from 'graphql-modules';
import { traceFn } from '@hive/service-common';
import type { UsageEstimatorApi } from '@hive/usage-estimator';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { Session } from '../../auth/lib/authz';
import { IdTranslator } from '../../shared/providers/id-translator';
import { Logger } from '../../shared/providers/logger';
import type { UsageEstimationServiceConfig } from './tokens';
import { USAGE_ESTIMATION_SERVICE_CONFIG } from './tokens';
import { COMMERCE_TRPC_CLIENT, type CommerceTrpcClient } from './commerce-client';
@Injectable({
scope: Scope.Operation,
global: true,
})
export class UsageEstimationProvider {
private logger: Logger;
private usageEstimator;
constructor(
logger: Logger,
@Inject(USAGE_ESTIMATION_SERVICE_CONFIG)
usageEstimationConfig: UsageEstimationServiceConfig,
@Inject(COMMERCE_TRPC_CLIENT)
private client: CommerceTrpcClient,
private idTranslator: IdTranslator,
private session: Session,
) {
this.logger = logger.child({ service: 'UsageEstimationProvider' });
this.usageEstimator = usageEstimationConfig.endpoint
? createTRPCProxyClient<UsageEstimatorApi>({
links: [
httpLink({
url: `${usageEstimationConfig.endpoint}/trpc`,
fetch,
}),
],
})
: null;
}
@traceFn('UsageEstimation.estimateOperations', {
@ -51,13 +36,13 @@ export class UsageEstimationProvider {
}): Promise<number | null> {
this.logger.debug('Estimation operations, input: %o', input);
if (!this.usageEstimator) {
if (!this.client) {
this.logger.warn('Usage estimator is not available due to missing configuration');
return null;
}
const result = await this.usageEstimator.estimateOperationsForOrganization.query({
const result = await this.client.usageEstimator.estimateOperationsForOrganization.query({
organizationId: input.organizationId,
year: input.year,
month: input.month,

View file

@ -1,10 +1,13 @@
import { Logger } from '../../shared/providers/logger';
import { BillingProvider } from '../providers/billing.provider';
import { RateLimitProvider } from '../providers/rate-limit.provider';
import type { BillingPlanType, OrganizationResolvers } from './../../../__generated__/types';
export const Organization: Pick<
OrganizationResolvers,
| 'billingConfiguration'
| 'plan'
| 'rateLimit'
| 'viewerCanDescribeBilling'
| 'viewerCanModifyBilling'
| '__isTypeOf'
@ -110,4 +113,28 @@ export const Organization: Pick<
},
});
},
rateLimit: async (org, _args, { injector }) => {
let limitedForOperations = false;
const logger = injector.get(Logger);
try {
const operationsRateLimit = await injector.get(RateLimitProvider).checkRateLimit({
entityType: 'organization',
id: org.id,
type: 'operations-reporting',
token: null,
});
logger.debug('Fetched rate-limit info:', { orgId: org.id, operationsRateLimit });
limitedForOperations = operationsRateLimit.usagePercentage >= 1;
} catch (e) {
logger.error('Failed to fetch rate-limit info:', org.id, e);
}
return {
limitedForOperations,
operations: org.monthlyRateLimit.operations,
retentionInDays: org.monthlyRateLimit.retentionInDays,
};
},
};

View file

@ -7,7 +7,7 @@ import { cache } from '../../../shared/helpers';
import { AuditLogRecorder } from '../../audit-logs/providers/audit-log-recorder';
import { Session } from '../../auth/lib/authz';
import { AuthManager } from '../../auth/providers/auth-manager';
import { BillingProvider } from '../../billing/providers/billing.provider';
import { BillingProvider } from '../../commerce/providers/billing.provider';
import { OIDCIntegrationsProvider } from '../../oidc-integrations/providers/oidc-integrations.provider';
import { Emails, mjml } from '../../shared/providers/emails';
import { IdTranslator } from '../../shared/providers/id-translator';

View file

@ -1,6 +1,6 @@
import { z } from 'zod';
import { InMemoryRateLimiter } from '../../../rate-limit/providers/in-memory-rate-limiter';
import { IdTranslator } from '../../../shared/providers/id-translator';
import { InMemoryRateLimiter } from '../../../shared/providers/in-memory-rate-limiter';
import { OrganizationManager } from '../../providers/organization-manager';
import type { MutationResolvers } from './../../../../__generated__/types';

View file

@ -1,13 +0,0 @@
import { createModule } from 'graphql-modules';
import { InMemoryRateLimiter, InMemoryRateLimitStore } from './providers/in-memory-rate-limiter';
import { RateLimitProvider } from './providers/rate-limit.provider';
import { resolvers } from './resolvers.generated';
import typeDefs from './module.graphql';
export const rateLimitModule = createModule({
id: 'rate-limit',
dirname: __dirname,
typeDefs,
resolvers,
providers: [RateLimitProvider, InMemoryRateLimitStore, InMemoryRateLimiter],
});

View file

@ -1,17 +0,0 @@
import { gql } from 'graphql-modules';
export default gql`
type RateLimit {
limitedForOperations: Boolean!
operations: SafeInt!
retentionInDays: Int!
}
input RateLimitInput {
operations: SafeInt!
}
extend type Organization {
rateLimit: RateLimit!
}
`;

View file

@ -1,9 +0,0 @@
import { InjectionToken } from 'graphql-modules';
export interface RateLimitServiceConfig {
endpoint: string | null;
}
export const RATE_LIMIT_SERVICE_CONFIG = new InjectionToken<RateLimitServiceConfig>(
'rate-limit-service-config',
);

View file

@ -1,30 +0,0 @@
import { Logger } from '../../shared/providers/logger';
import { RateLimitProvider } from '../providers/rate-limit.provider';
import type { OrganizationResolvers } from './../../../__generated__/types';
export const Organization: Pick<OrganizationResolvers, 'rateLimit' | '__isTypeOf'> = {
rateLimit: async (org, _args, { injector }) => {
let limitedForOperations = false;
const logger = injector.get(Logger);
try {
const operationsRateLimit = await injector.get(RateLimitProvider).checkRateLimit({
entityType: 'organization',
id: org.id,
type: 'operations-reporting',
token: null,
});
logger.debug('Fetched rate-limit info:', { orgId: org.id, operationsRateLimit });
limitedForOperations = operationsRateLimit.usagePercentage >= 1;
} catch (e) {
logger.error('Failed to fetch rate-limit info:', org.id, e);
}
return {
limitedForOperations,
operations: org.monthlyRateLimit.operations,
retentionInDays: org.monthlyRateLimit.retentionInDays,
};
},
};

View file

@ -21,12 +21,12 @@ import { isGitHubRepositoryString } from '../../../shared/is-github-repository-s
import { bolderize } from '../../../shared/markdown';
import { AlertsManager } from '../../alerts/providers/alerts-manager';
import { InsufficientPermissionError, Session } from '../../auth/lib/authz';
import { RateLimitProvider } from '../../commerce/providers/rate-limit.provider';
import {
GitHubIntegrationManager,
type GitHubCheckRun,
} from '../../integrations/providers/github-integration-manager';
import { OperationsReader } from '../../operations/providers/operations-reader';
import { RateLimitProvider } from '../../rate-limit/providers/rate-limit.provider';
import { DistributedCache } from '../../shared/providers/distributed-cache';
import { IdTranslator } from '../../shared/providers/id-translator';
import { Logger } from '../../shared/providers/logger';

View file

@ -2,7 +2,7 @@ import { Injectable, Scope } from 'graphql-modules';
import LRU from 'lru-cache';
import { HiveError } from '../../../shared/errors';
import { Session } from '../../auth/lib/authz';
import { Logger } from '../../shared/providers/logger';
import { Logger } from './logger';
@Injectable({
scope: Scope.Singleton,

View file

@ -1,12 +0,0 @@
import { createModule } from 'graphql-modules';
import { UsageEstimationProvider } from './providers/usage-estimation.provider';
import { resolvers } from './resolvers.generated';
import typeDefs from './module.graphql';
export const usageEstimationModule = createModule({
id: 'usage-estimation',
dirname: __dirname,
typeDefs,
resolvers,
providers: [UsageEstimationProvider],
});

View file

@ -1,17 +0,0 @@
import { gql } from 'graphql-modules';
export default gql`
extend type Query {
usageEstimation(input: UsageEstimationInput!): UsageEstimation!
}
input UsageEstimationInput {
year: Int!
month: Int!
organizationSlug: String!
}
type UsageEstimation {
operations: SafeInt!
}
`;

View file

@ -1,9 +0,0 @@
import { InjectionToken } from 'graphql-modules';
export interface UsageEstimationServiceConfig {
endpoint: string | null;
}
export const USAGE_ESTIMATION_SERVICE_CONFIG = new InjectionToken<UsageEstimationServiceConfig>(
'usage-estimation-service-config',
);

View file

@ -1,12 +1,15 @@
PORT=4013
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"
CLICKHOUSE_PROTOCOL="http"
CLICKHOUSE_HOST="localhost"
CLICKHOUSE_PORT="8123"
CLICKHOUSE_USERNAME="test"
CLICKHOUSE_PASSWORD="test"
PORT=4011
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=registry
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"
EMAILS_ENDPOINT="http://localhost:6260"
WEB_APP_URL="http://localhost:3000"
STRIPE_SECRET_KEY="empty"

4
packages/services/commerce/.gitignore vendored Normal file
View file

@ -0,0 +1,4 @@
*.log
.DS_Store
node_modules
dist

View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 The Guild
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,7 +1,6 @@
{
"name": "@hive/stripe-billing",
"name": "@hive/commerce",
"type": "module",
"description": "A microservice for Hive Cloud, that syncs usage information to Stripe (metered billing)",
"license": "MIT",
"private": true,
"scripts": {
@ -10,6 +9,8 @@
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@hive/api": "workspace:*",
"@hive/emails": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@sentry/node": "7.120.2",
@ -17,7 +18,7 @@
"@trpc/server": "10.45.2",
"date-fns": "4.1.0",
"dotenv": "16.4.7",
"got": "14.4.5",
"fastify": "4.29.0",
"pino-pretty": "11.3.0",
"reflect-metadata": "0.2.2",
"stripe": "17.5.0",

View file

@ -0,0 +1,12 @@
import { rateLimitRouter } from './rate-limit';
import { stripeBillingRouter } from './stripe-billing';
import { router } from './trpc';
import { usageEstimatorRouter } from './usage-estimator';
export const commerceRouter = router({
usageEstimator: usageEstimatorRouter,
rateLimit: rateLimitRouter,
stripeBilling: stripeBillingRouter,
});
export type CommerceRouter = typeof commerceRouter;

View file

@ -8,10 +8,12 @@ const numberFromNumberOrNumberString = (input: unknown): number | undefined => {
if (isNumberString(input)) return Number(input);
};
const NumberFromString = zod.preprocess(numberFromNumberOrNumberString, zod.number().min(1));
export const NumberFromString = zod.preprocess(numberFromNumberOrNumberString, zod.number().min(1));
// treat an empty string (`''`) as undefined
const emptyString = <T extends zod.ZodType>(input: T) => {
/**
* treat an empty string (`''`) as undefined
*/
export const emptyString = <T extends zod.ZodType>(input: T) => {
return zod.preprocess((value: unknown) => {
if (value === '') return undefined;
return value;
@ -22,8 +24,6 @@ const EnvironmentModel = zod.object({
PORT: emptyString(NumberFromString.optional()),
ENVIRONMENT: emptyString(zod.string().optional()),
RELEASE: emptyString(zod.string().optional()),
HEARTBEAT_ENDPOINT: emptyString(zod.string().url().optional()),
USAGE_ESTIMATOR_ENDPOINT: zod.string().url(),
});
const SentryModel = zod.union([
@ -36,23 +36,10 @@ const SentryModel = zod.union([
}),
]);
const PostgresModel = zod.object({
POSTGRES_HOST: zod.string(),
POSTGRES_PORT: NumberFromString,
POSTGRES_PASSWORD: emptyString(zod.string().optional()),
POSTGRES_USER: zod.string(),
POSTGRES_DB: zod.string(),
POSTGRES_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
});
const StripeModel = zod.object({
STRIPE_SECRET_KEY: zod.string(),
STRIPE_SYNC_INTERVAL_MS: emptyString(NumberFromString.optional()),
});
const PrometheusModel = zod.object({
PROMETHEUS_METRICS: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()),
PROMETHEUS_METRICS_LABEL_INSTANCE: emptyString(zod.string().optional()),
PROMETHEUS_METRICS_PORT: emptyString(NumberFromString.optional()),
});
const LogModel = zod.object({
@ -74,20 +61,48 @@ const LogModel = zod.object({
),
});
const ClickHouseModel = zod.object({
CLICKHOUSE_PROTOCOL: zod.union([zod.literal('http'), zod.literal('https')]),
CLICKHOUSE_HOST: zod.string(),
CLICKHOUSE_PORT: NumberFromString,
CLICKHOUSE_USERNAME: zod.string(),
CLICKHOUSE_PASSWORD: zod.string(),
});
const PostgresModel = zod.object({
POSTGRES_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
POSTGRES_HOST: zod.string(),
POSTGRES_PORT: NumberFromString,
POSTGRES_DB: zod.string(),
POSTGRES_USER: zod.string(),
POSTGRES_PASSWORD: emptyString(zod.string().optional()),
});
const HiveServicesModel = zod.object({
EMAILS_ENDPOINT: emptyString(zod.string().url().optional()),
WEB_APP_URL: emptyString(zod.string().url().optional()),
});
const RateLimitModel = zod.object({
LIMIT_CACHE_UPDATE_INTERVAL_MS: emptyString(NumberFromString.optional()),
});
const StripeModel = zod.object({
STRIPE_SECRET_KEY: zod.string(),
STRIPE_SYNC_INTERVAL_MS: emptyString(NumberFromString.optional()),
});
const configs = {
base: EnvironmentModel.safeParse(process.env),
sentry: SentryModel.safeParse(process.env),
clickhouse: ClickHouseModel.safeParse(process.env),
postgres: PostgresModel.safeParse(process.env),
stripe: StripeModel.safeParse(process.env),
prometheus: PrometheusModel.safeParse(process.env),
log: LogModel.safeParse(process.env),
tracing: OpenTelemetryConfigurationModel.safeParse(process.env),
hiveServices: HiveServicesModel.safeParse(process.env),
rateLimit: RateLimitModel.safeParse(process.env),
stripe: StripeModel.safeParse(process.env),
};
const environmentErrors: Array<string> = [];
@ -112,27 +127,32 @@ function extractConfig<Input, Output>(config: zod.SafeParseReturnType<Input, Out
}
const base = extractConfig(configs.base);
const clickhouse = extractConfig(configs.clickhouse);
const postgres = extractConfig(configs.postgres);
const sentry = extractConfig(configs.sentry);
const prometheus = extractConfig(configs.prometheus);
const log = extractConfig(configs.log);
const stripe = extractConfig(configs.stripe);
const tracing = extractConfig(configs.tracing);
const hiveServices = extractConfig(configs.hiveServices);
const rateLimit = extractConfig(configs.rateLimit);
const stripe = extractConfig(configs.stripe);
export const env = {
environment: base.ENVIRONMENT,
release: base.RELEASE ?? 'local',
http: {
port: base.PORT ?? 4013,
port: base.PORT ?? 4012,
},
tracing: {
enabled: !!tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
collectorEndpoint: tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
},
hiveServices: {
usageEstimator: {
endpoint: base.USAGE_ESTIMATOR_ENDPOINT,
},
clickhouse: {
protocol: clickhouse.CLICKHOUSE_PROTOCOL,
host: clickhouse.CLICKHOUSE_HOST,
port: clickhouse.CLICKHOUSE_PORT,
username: clickhouse.CLICKHOUSE_USERNAME,
password: clickhouse.CLICKHOUSE_PASSWORD,
},
postgres: {
host: postgres.POSTGRES_HOST,
@ -142,11 +162,6 @@ export const env = {
password: postgres.POSTGRES_PASSWORD,
ssl: postgres.POSTGRES_SSL === '1',
},
stripe: {
secretKey: stripe.STRIPE_SECRET_KEY,
syncIntervalMs: stripe.STRIPE_SYNC_INTERVAL_MS ?? 10 * 60_000,
},
heartbeat: base.HEARTBEAT_ENDPOINT ? { endpoint: base.HEARTBEAT_ENDPOINT } : null,
sentry: sentry.SENTRY === '1' ? { dsn: sentry.SENTRY_DSN } : null,
log: {
level: log.LOG_LEVEL ?? 'info',
@ -156,8 +171,22 @@ export const env = {
prometheus.PROMETHEUS_METRICS === '1'
? {
labels: {
instance: prometheus.PROMETHEUS_METRICS_LABEL_INSTANCE ?? 'usage-service',
instance: prometheus.PROMETHEUS_METRICS_LABEL_INSTANCE ?? 'rate-limit',
},
port: prometheus.PROMETHEUS_METRICS_PORT ?? 10_254,
}
: null,
hiveServices: {
emails: {
endpoint: hiveServices.EMAILS_ENDPOINT,
},
webAppUrl: hiveServices.WEB_APP_URL,
},
rateLimit: {
limitCacheUpdateIntervalMs: rateLimit.LIMIT_CACHE_UPDATE_INTERVAL_MS ?? 60_000,
},
stripe: {
secretKey: stripe.STRIPE_SECRET_KEY,
syncIntervalMs: stripe.STRIPE_SYNC_INTERVAL_MS ?? 10 * 60_000,
},
} as const;

View file

@ -10,11 +10,13 @@ import {
startMetrics,
TracingInstance,
} from '@hive/service-common';
import { createConnectionString, createStorage as createPostgreSQLStorage } from '@hive/storage';
import * as Sentry from '@sentry/node';
import { createContext, usageEstimatorApiRouter } from './api';
import { commerceRouter } from './api';
import { env } from './environment';
import { createEstimator } from './estimator';
import { clickHouseElapsedDuration, clickHouseReadDuration } from './metrics';
import { createRateLimiter } from './rate-limit/limiter';
import { createStripeBilling } from './stripe-billing/billing';
import { createEstimator } from './usage-estimator/estimator';
async function main() {
let tracing: TracingInstance | undefined;
@ -22,7 +24,7 @@ async function main() {
if (env.tracing.enabled && env.tracing.collectorEndpoint) {
tracing = configureTracing({
collectorEndpoint: env.tracing.collectorEndpoint,
serviceName: 'usage-estimator',
serviceName: 'commerce',
});
tracing.instrumentNodeFetch();
@ -33,7 +35,7 @@ async function main() {
if (env.sentry) {
Sentry.init({
serverName: hostname(),
dist: 'usage-estimator',
dist: 'commerce',
enabled: !!env.sentry,
environment: env.environment,
dsn: env.sentry.dsn,
@ -42,7 +44,7 @@ async function main() {
}
const server = await createServer({
name: 'usage-estimator',
name: 'commerce',
sentryErrorHandler: true,
log: {
level: env.log.level,
@ -50,12 +52,14 @@ async function main() {
},
});
if (tracing) {
await server.register(...tracing.instrumentFastify());
}
try {
const estimator = createEstimator({
const postgres = await createPostgreSQLStorage(
createConnectionString(env.postgres),
5,
tracing ? [tracing.instrumentSlonik()] : undefined,
);
const usageEstimator = createEstimator({
logger: server.log,
clickhouse: {
protocol: env.clickhouse.protocol,
@ -63,27 +67,51 @@ async function main() {
port: env.clickhouse.port,
username: env.clickhouse.username,
password: env.clickhouse.password,
onReadEnd(query, timings) {
clickHouseReadDuration.labels({ query }).observe(timings.totalSeconds);
if (timings.elapsedSeconds !== undefined) {
clickHouseElapsedDuration.labels({ query }).observe(timings.elapsedSeconds);
}
},
},
});
const rateLimiter = createRateLimiter({
logger: server.log,
rateLimitConfig: {
interval: env.rateLimit.limitCacheUpdateIntervalMs,
},
usageEstimator,
emails: env.hiveServices.emails.endpoint
? {
endpoint: env.hiveServices.emails.endpoint,
}
: undefined,
storage: postgres,
});
const stripeBilling = createStripeBilling({
logger: server.log,
stripe: {
token: env.stripe.secretKey,
syncIntervalMs: env.stripe.syncIntervalMs,
},
usageEstimator,
storage: postgres,
});
registerShutdown({
logger: server.log,
async onShutdown() {
await Promise.all([estimator.stop(), server.close()]);
await server.close();
await Promise.all([usageEstimator.stop(), rateLimiter.stop(), stripeBilling.stop()]);
await postgres.destroy();
},
});
await registerTRPC(server, {
router: usageEstimatorApiRouter,
router: commerceRouter,
createContext({ req }) {
return createContext(estimator, req);
return {
req,
usageEstimator,
rateLimiter,
stripeBilling,
};
},
});
@ -98,8 +126,13 @@ async function main() {
server.route({
method: ['GET', 'HEAD'],
url: '/_readiness',
handler(_, res) {
const isReady = estimator.readiness();
async handler(_, res) {
const readinessChecks = await Promise.all([
usageEstimator.readiness(),
rateLimiter.readiness(),
postgres.isReady(),
]);
const isReady = readinessChecks.every(val => val === true);
reportReadiness(isReady);
void res.status(isReady ? 200 : 400).send();
},
@ -112,7 +145,7 @@ async function main() {
port: env.http.port,
host: '::',
});
await estimator.start();
await Promise.all([usageEstimator.start(), rateLimiter.start(), stripeBilling.start()]);
} catch (error) {
server.log.fatal(error);
Sentry.captureException(error, {

View file

@ -1,6 +1,6 @@
import type { EmailsApi } from '@hive/emails';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { env } from './environment';
import { env } from '../environment';
export function createEmailScheduler(config?: { endpoint: string }) {
const api = config?.endpoint

View file

@ -0,0 +1,33 @@
import { z } from 'zod';
import { publicProcedure, router } from '../trpc';
const VALIDATION = z
.object({
id: z.string().min(1),
entityType: z.enum(['organization', 'target']),
type: z.enum(['operations-reporting']),
/**
* Token is optional, and used only when an additional blocking (WAF) process is needed.
*/
token: z.string().nullish().optional(),
})
.required();
export const rateLimitRouter = router({
getRetention: publicProcedure
.input(
z
.object({
targetId: z.string().nonempty(),
})
.required(),
)
.query(({ ctx, input }) => {
return ctx.rateLimiter.getRetention(input.targetId);
}),
checkRateLimit: publicProcedure.input(VALIDATION).query(({ ctx, input }) => {
return ctx.rateLimiter.checkLimit(input);
}),
});
export type RateLimitRouter = typeof rateLimitRouter;

View file

@ -1,11 +1,9 @@
import { endOfMonth, startOfMonth } from 'date-fns';
import type { Storage } from '@hive/api';
import type { ServiceLogger } from '@hive/service-common';
import { traceInline } from '@hive/service-common';
import { createStorage as createPostgreSQLStorage, Interceptor } from '@hive/storage';
import type { UsageEstimatorApi } from '@hive/usage-estimator';
import * as Sentry from '@sentry/node';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import type { RateLimitInput } from './api';
import { UsageEstimator } from '../usage-estimator/estimator';
import { createEmailScheduler } from './emails';
import { rateLimitOperationsEventOrg } from './metrics';
@ -44,7 +42,7 @@ export type CachedRateLimitInfo = {
// to prevent applying lower retention value then expected.
const RETENTION_IN_DAYS_FALLBACK = 365;
export type Limiter = ReturnType<typeof createRateLimiter>;
export type RateLimiter = ReturnType<typeof createRateLimiter>;
type OrganizationId = string;
type TargetId = string;
@ -54,34 +52,15 @@ export function createRateLimiter(config: {
rateLimitConfig: {
interval: number;
};
rateEstimator: {
endpoint: string;
};
emails?: {
endpoint: string;
};
storage: {
connectionString: string;
additionalInterceptors?: Interceptor[];
};
usageEstimator: UsageEstimator;
storage: Storage;
}) {
const rateEstimator = createTRPCProxyClient<UsageEstimatorApi>({
links: [
httpLink({
url: `${config.rateEstimator.endpoint}/trpc`,
fetch,
}),
],
});
const emails = createEmailScheduler(config.emails);
const { logger } = config;
const postgres$ = createPostgreSQLStorage(
config.storage.connectionString,
1,
config.storage.additionalInterceptors,
);
let initialized = false;
const { logger, usageEstimator, storage } = config;
let intervalHandle: ReturnType<typeof setInterval> | null = null;
let targetIdToOrgLookup = new Map<TargetId, OrganizationId>();
@ -89,22 +68,17 @@ export function createRateLimiter(config: {
const fetchAndCalculateUsageInformation = traceInline('Calculate Rate Limit', {}, async () => {
const now = new Date();
const window = {
const timeWindow = {
startTime: startOfMonth(now),
endTime: endOfMonth(now),
};
const windowAsString = {
startTime: startOfMonth(now).toUTCString(),
endTime: endOfMonth(now).toUTCString(),
};
config.logger.info(
`Calculating rate-limit information based on window: ${windowAsString.startTime} -> ${windowAsString.endTime}`,
`Calculating rate-limit information based on window: ${timeWindow.startTime} -> ${timeWindow.endTime}`,
);
const storage = await postgres$;
const [records, operations] = await Promise.all([
storage.getGetOrganizationsAndTargetsWithLimitInfo(),
rateEstimator.estimateOperationsForAllTargets.query(windowAsString),
usageEstimator.estimateOperationsForAllTargets(timeWindow),
]);
const totalTargets = records.reduce((acc, record) => acc + record.targets.length, 0);
@ -170,8 +144,8 @@ export function createRateLimiter(config: {
email: orgRecord.orgEmail,
},
period: {
start: window.startTime.getTime(),
end: window.endTime.getTime(),
start: timeWindow.startTime.getTime(),
end: timeWindow.endTime.getTime(),
},
usage: {
quota: orgRecord.operations.quota,
@ -187,8 +161,8 @@ export function createRateLimiter(config: {
email: orgRecord.orgEmail,
},
period: {
start: window.startTime.getTime(),
end: window.endTime.getTime(),
start: timeWindow.startTime.getTime(),
end: timeWindow.endTime.getTime(),
},
usage: {
quota: orgRecord.operations.quota,
@ -213,10 +187,12 @@ export function createRateLimiter(config: {
return orgId ? cachedResult.get(orgId) : undefined;
}
let initialized = false;
return {
logger,
async readiness() {
return initialized && (await (await postgres$).isReady());
readiness() {
return initialized;
},
getRetention(targetId: string) {
const orgData = getOrganizationFromCache(targetId);
@ -236,7 +212,10 @@ export function createRateLimiter(config: {
return orgData.retentionInDays;
},
checkLimit(input: RateLimitInput): RateLimitCheckResponse {
checkLimit(input: {
entityType: 'organization' | 'target';
id: string;
}): RateLimitCheckResponse {
const orgId =
input.entityType === 'organization' ? input.id : targetIdToOrgLookup.get(input.id);
@ -258,10 +237,7 @@ export function createRateLimiter(config: {
return UNKNOWN_RATE_LIMIT_OBJ;
}
if (input.type === 'operations-reporting') {
return orgData.operations;
}
return UNKNOWN_RATE_LIMIT_OBJ;
return orgData.operations;
},
async start() {
logger.info(
@ -294,7 +270,6 @@ export function createRateLimiter(config: {
clearInterval(intervalHandle);
intervalHandle = null;
}
await (await postgres$).destroy();
logger.info('Rate Limiter stopped');
},
};

View file

@ -1,27 +1,20 @@
import { Stripe } from 'stripe';
import { Storage } from '@hive/api';
import { ServiceLogger } from '@hive/service-common';
import { createStorage as createPostgreSQLStorage, Interceptor } from '@hive/storage';
import type { UsageEstimator } from '../usage-estimator/estimator';
export type StripeBilling = ReturnType<typeof createStripeBilling>;
export function createStripeBilling(config: {
logger: ServiceLogger;
rateEstimator: {
endpoint: string;
};
storage: {
connectionString: string;
additionalInterceptors?: Interceptor[];
};
usageEstimator: UsageEstimator;
storage: Storage;
stripe: {
token: string;
syncIntervalMs: number;
};
}) {
const logger = config.logger;
const postgres$ = createPostgreSQLStorage(
config.storage.connectionString,
10,
config.storage.additionalInterceptors,
);
const stripeApi = new Stripe(config.stripe.token, {
apiVersion: '2024-12-18.acacia',
typescript: true,
@ -71,11 +64,11 @@ export function createStripeBilling(config: {
}
return {
postgres$,
loadStripeData$,
stripeApi,
storage: config.storage,
stripe: stripeApi,
stripeData$: loadStripeData$,
async readiness() {
return await (await postgres$).isReady();
return true;
},
async start() {
logger.info(
@ -86,8 +79,6 @@ export function createStripeBilling(config: {
logger.info(`Stripe is configured correctly, prices info: %o`, stripeData);
},
async stop() {
await (await postgres$).destroy();
logger.info(`Stripe Billing Sync stopped...`);
},
};

View file

@ -1,38 +1,20 @@
import { addDays, startOfMonth } from 'date-fns';
import { Stripe } from 'stripe';
import { z } from 'zod';
import { FastifyRequest, handleTRPCError } from '@hive/service-common';
import { createStorage } from '@hive/storage';
import type { inferRouterInputs } from '@trpc/server';
import { initTRPC } from '@trpc/server';
import { publicProcedure, router } from '../trpc';
export type Context = {
storage$: ReturnType<typeof createStorage>;
stripe: Stripe;
stripeData$: Promise<{
operationsPrice: Stripe.Price;
basePrice: Stripe.Price;
}>;
req: FastifyRequest;
};
export { Stripe as StripeTypes };
const t = initTRPC.context<Context>().create();
const procedure = t.procedure.use(handleTRPCError);
export const stripeBillingApiRouter = t.router({
availablePrices: procedure.query(async ({ ctx }) => {
return await ctx.stripeData$;
export const stripeBillingRouter = router({
availablePrices: publicProcedure.query(async ({ ctx }) => {
return await ctx.stripeBilling.stripeData$;
}),
invoices: procedure
invoices: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
}),
)
.query(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -41,21 +23,21 @@ export const stripeBillingApiRouter = t.router({
throw new Error(`Organization does not have a subscription record!`);
}
const invoices = await ctx.stripe.invoices.list({
const invoices = await ctx.stripeBilling.stripe.invoices.list({
customer: organizationBillingRecord.externalBillingReference,
expand: ['data.charge'],
});
return invoices.data;
}),
upcomingInvoice: procedure
upcomingInvoice: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
}),
)
.query(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -65,7 +47,7 @@ export const stripeBillingApiRouter = t.router({
}
try {
const upcomingInvoice = await ctx.stripe.invoices.retrieveUpcoming({
const upcomingInvoice = await ctx.stripeBilling.stripe.invoices.retrieveUpcoming({
customer: organizationBillingRecord.externalBillingReference,
});
@ -74,14 +56,14 @@ export const stripeBillingApiRouter = t.router({
return null;
}
}),
activeSubscription: procedure
activeSubscription: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
}),
)
.query(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -90,7 +72,7 @@ export const stripeBillingApiRouter = t.router({
throw new Error(`Organization does not have a subscription record!`);
}
const customer = await ctx.stripe.customers.retrieve(
const customer = await ctx.stripeBilling.stripe.customers.retrieve(
organizationBillingRecord.externalBillingReference,
);
@ -102,7 +84,7 @@ export const stripeBillingApiRouter = t.router({
return null;
}
const subscriptions = await ctx.stripe.subscriptions
const subscriptions = await ctx.stripeBilling.stripe.subscriptions
.list({
customer: organizationBillingRecord.externalBillingReference,
})
@ -110,7 +92,7 @@ export const stripeBillingApiRouter = t.router({
const actualSubscription = subscriptions[0] || null;
const paymentMethod = await ctx.stripe.paymentMethods.list({
const paymentMethod = await ctx.stripeBilling.stripe.paymentMethods.list({
customer: customer.id,
type: 'card',
});
@ -120,7 +102,7 @@ export const stripeBillingApiRouter = t.router({
subscription: actualSubscription,
};
}),
syncOrganizationToStripe: procedure
syncOrganizationToStripe: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
@ -133,7 +115,7 @@ export const stripeBillingApiRouter = t.router({
}),
)
.mutation(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const [organizationBillingRecord, organization, stripePrices] = await Promise.all([
storage.getOrganizationBilling({
organizationId: input.organizationId,
@ -141,11 +123,11 @@ export const stripeBillingApiRouter = t.router({
storage.getOrganization({
organizationId: input.organizationId,
}),
ctx.stripeData$,
ctx.stripeBilling.stripeData$,
]);
if (organizationBillingRecord && organization) {
const allSubscriptions = await ctx.stripe.subscriptions.list({
const allSubscriptions = await ctx.stripeBilling.stripe.subscriptions.list({
customer: organizationBillingRecord.externalBillingReference,
});
@ -154,7 +136,7 @@ export const stripeBillingApiRouter = t.router({
if (actualSubscription) {
for (const item of actualSubscription.items.data) {
if (item.plan.id === stripePrices.operationsPrice.id) {
await ctx.stripe.subscriptionItems.update(item.id, {
await ctx.stripeBilling.stripe.subscriptionItems.update(item.id, {
quantity: input.reserved.operations,
});
}
@ -168,7 +150,7 @@ export const stripeBillingApiRouter = t.router({
}
if (Object.keys(updateParams).length > 0) {
await ctx.stripe.customers.update(
await ctx.stripeBilling.stripe.customers.update(
organizationBillingRecord.externalBillingReference,
updateParams,
);
@ -179,14 +161,14 @@ export const stripeBillingApiRouter = t.router({
);
}
}),
generateStripePortalLink: procedure
generateStripePortalLink: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
}),
)
.mutation(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -197,21 +179,21 @@ export const stripeBillingApiRouter = t.router({
);
}
const session = await ctx.stripe.billingPortal.sessions.create({
const session = await ctx.stripeBilling.stripe.billingPortal.sessions.create({
customer: organizationBillingRecord.externalBillingReference,
return_url: 'https://app.graphql-hive.com/',
});
return session.url;
}),
cancelSubscriptionForOrganization: procedure
cancelSubscriptionForOrganization: publicProcedure
.input(
z.object({
organizationId: z.string().nonempty(),
}),
)
.mutation(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
const organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -222,7 +204,7 @@ export const stripeBillingApiRouter = t.router({
);
}
const subscriptions = await ctx.stripe.subscriptions
const subscriptions = await ctx.stripeBilling.stripe.subscriptions
.list({
customer: organizationBillingRecord.externalBillingReference,
})
@ -235,13 +217,13 @@ export const stripeBillingApiRouter = t.router({
}
const actualSubscription = subscriptions[0];
const response = await ctx.stripe.subscriptions.cancel(actualSubscription.id, {
const response = await ctx.stripeBilling.stripe.subscriptions.cancel(actualSubscription.id, {
prorate: true,
});
return response;
}),
createSubscriptionForOrganization: procedure
createSubscriptionForOrganization: publicProcedure
.input(
z.object({
paymentMethodId: z.string().nullish(),
@ -256,7 +238,7 @@ export const stripeBillingApiRouter = t.router({
}),
)
.mutation(async ({ ctx, input }) => {
const storage = await ctx.storage$;
const storage = ctx.stripeBilling.storage;
let organizationBillingRecord = await storage.getOrganizationBilling({
organizationId: input.organizationId,
});
@ -270,7 +252,7 @@ export const stripeBillingApiRouter = t.router({
const customerId = organizationBillingRecord?.externalBillingReference
? organizationBillingRecord.externalBillingReference
: await ctx.stripe.customers
: await ctx.stripeBilling.stripe.customers
.create({
metadata: {
external_reference_id: input.organizationId,
@ -289,7 +271,7 @@ export const stripeBillingApiRouter = t.router({
}
const existingPaymentMethods = (
await ctx.stripe.paymentMethods.list({
await ctx.stripeBilling.stripe.paymentMethods.list({
customer: customerId,
type: 'card',
})
@ -306,7 +288,7 @@ export const stripeBillingApiRouter = t.router({
paymentMethodId = paymentMethodConfiguredAlready.id;
} else {
paymentMethodId = (
await ctx.stripe.paymentMethods.attach(input.paymentMethodId, {
await ctx.stripeBilling.stripe.paymentMethods.attach(input.paymentMethodId, {
customer: customerId,
})
).id;
@ -321,9 +303,9 @@ export const stripeBillingApiRouter = t.router({
);
}
const stripePrices = await ctx.stripeData$;
const stripePrices = await ctx.stripeBilling.stripeData$;
const subscription = await ctx.stripe.subscriptions.create({
const subscription = await ctx.stripeBilling.stripe.subscriptions.create({
metadata: {
hive_subscription: 'true',
},
@ -352,6 +334,4 @@ export const stripeBillingApiRouter = t.router({
}),
});
export type StripeBillingApi = typeof stripeBillingApiRouter;
export type StripeBillingApiInput = inferRouterInputs<StripeBillingApi>;
export type StripeBillingRouter = typeof stripeBillingRouter;

View file

@ -0,0 +1,17 @@
// Shared tRPC plumbing for the commerce service: one Context type exposing the
// three sub-services to every router, plus the common router/procedure builders.
import { handleTRPCError, type FastifyRequest } from '@hive/service-common';
import { initTRPC } from '@trpc/server';
import type { RateLimiter } from './rate-limit/limiter';
import type { StripeBilling } from './stripe-billing/billing';
import type { UsageEstimator } from './usage-estimator/estimator';

/** Per-request context injected into every commerce tRPC procedure. */
export type Context = {
  // Raw Fastify request (headers, logger, etc.).
  req: FastifyRequest;
  usageEstimator: UsageEstimator;
  rateLimiter: RateLimiter;
  stripeBilling: StripeBilling;
};

const t = initTRPC.context<Context>().create();

export const router = t.router;
// Base procedure shared by all routers; handleTRPCError presumably normalizes
// thrown errors into tRPC errors — see @hive/service-common for its contract.
export const publicProcedure = t.procedure.use(handleTRPCError);

View file

@ -1,7 +1,8 @@
import { ClickHouse, HttpClient, OperationsReader, sql } from '@hive/api';
import type { ServiceLogger } from '@hive/service-common';
import { clickHouseElapsedDuration, clickHouseReadDuration } from './metrics';
export type Estimator = ReturnType<typeof createEstimator>;
export type UsageEstimator = ReturnType<typeof createEstimator>;
export function createEstimator(config: {
logger: ServiceLogger;
@ -11,18 +12,24 @@ export function createEstimator(config: {
port: number;
username: string;
password: string;
onReadEnd?: (
label: string,
timings: {
totalSeconds: number;
elapsedSeconds?: number;
},
) => void;
};
}) {
const { logger } = config;
const httpClient = new HttpClient();
const clickhouse = new ClickHouse(config.clickhouse, httpClient, config.logger);
const clickhouse = new ClickHouse(
{
...config.clickhouse,
onReadEnd(query, timings) {
clickHouseReadDuration.labels({ query }).observe(timings.totalSeconds);
if (timings.elapsedSeconds !== undefined) {
clickHouseElapsedDuration.labels({ query }).observe(timings.elapsedSeconds);
}
},
},
httpClient,
config.logger,
);
const operationsReader = new OperationsReader(clickhouse, logger);
return {
@ -43,7 +50,7 @@ export function createEstimator(config: {
},
});
return await clickhouse.query<{
const result = await clickhouse.query<{
total: string;
target: string;
}>({
@ -58,6 +65,8 @@ export function createEstimator(config: {
queryId: 'usage_estimator_count_operations_all',
timeout: 60_000,
});
return Object.fromEntries(result.data.map(item => [item.target, parseInt(item.total)]));
},
async estimateCollectedOperationsForOrganization(input: {
organizationId: string;
@ -69,7 +78,7 @@ export function createEstimator(config: {
total: string;
}>({
query: sql`
SELECT
SELECT
sum(total) as total
FROM monthly_overview
PREWHERE organization = ${input.organizationId} AND date=${startOfMonth}

View file

@ -0,0 +1,53 @@
import { z } from 'zod';
import { publicProcedure, router } from '../trpc';

/**
 * tRPC router exposing usage-estimation queries, backed by
 * `ctx.usageEstimator` (ClickHouse-based operation counts).
 */
export const usageEstimatorRouter = router({
  /**
   * Estimate the number of operations collected for one organization in a
   * given month/year. Returns `{ totalOperations: 0 }` when no data exists.
   */
  estimateOperationsForOrganization: publicProcedure
    .input(
      z
        .object({
          month: z.number().min(1).max(12),
          // NOTE(review): these bounds are evaluated once at module load, so a
          // long-lived process that crosses a year boundary keeps the stale
          // range — confirm this is acceptable.
          year: z
            .number()
            .min(new Date().getFullYear() - 1)
            .max(new Date().getFullYear()),
          organizationId: z.string().min(1),
        })
        .required(),
    )
    .query(async ({ ctx, input }) => {
      const estimationResponse =
        await ctx.usageEstimator.estimateCollectedOperationsForOrganization({
          organizationId: input.organizationId,
          month: input.month,
          year: input.year,
        });

      if (!estimationResponse.data.length) {
        return {
          totalOperations: 0,
        };
      }

      return {
        // The total arrives as a string; parse with an explicit radix so the
        // base-10 interpretation is never left to parseInt's heuristics.
        totalOperations: parseInt(estimationResponse.data[0].total, 10),
      };
    }),
  /**
   * Estimate operations for every target within the given time window.
   * Inputs are date strings parseable by `new Date(...)`.
   */
  estimateOperationsForAllTargets: publicProcedure
    .input(
      z
        .object({
          startTime: z.string().min(1),
          endTime: z.string().min(1),
        })
        .required(),
    )
    .query(async ({ ctx, input }) => {
      return await ctx.usageEstimator.estimateOperationsForAllTargets({
        startTime: new Date(input.startTime),
        endTime: new Date(input.endTime),
      });
    }),
});

export type UsageEstimatorRouter = typeof usageEstimatorRouter;

View file

@ -1,12 +0,0 @@
PORT=4012
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=registry
USAGE_ESTIMATOR_ENDPOINT=http://localhost:4011
EMAILS_ENDPOINT=http://localhost:6260
WEB_APP_URL=http://localhost:3000
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"
LIMIT_CACHE_UPDATE_INTERVAL_MS=2000
OPENTELEMETRY_TRACE_USAGE_REQUESTS=1

View file

@ -1,29 +0,0 @@
# Rate Limit
The rate limit service is responsible for enforcing account limitations. If you are self-hosting Hive,
you don't need this service.
## Configuration
| Name | Required | Description | Example Value |
| ------------------------------------ | -------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
| `PORT` | **Yes** | The HTTP port of the service. | `4012` |
| `LIMIT_CACHE_UPDATE_INTERVAL_MS` | No | The cache update interval limit in milliseconds. | `60_000` |
| `POSTGRES_HOST` | **Yes** | Host of the postgres database | `127.0.0.1` |
| `POSTGRES_PORT` | **Yes** | Port of the postgres database | `5432` |
| `POSTGRES_DB` | **Yes** | Name of the postgres database. | `registry` |
| `POSTGRES_USER` | **Yes** | User name for accessing the postgres database. | `postgres` |
| `POSTGRES_PASSWORD` | No | Password for accessing the postgres database. | `postgres` |
| `USAGE_ESTIMATOR_ENDPOINT` | **Yes** | The endpoint of the usage estimator service. | `http://127.0.0.1:4011` |
| `EMAILS_ENDPOINT` | No (if not provided no limit emails will be sent.) | The endpoint of the GraphQL Hive Email service. | `http://127.0.0.1:6260` |
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `rate-limit` |
| `PROMETHEUS_METRICS_PORT` | No | Port on which prometheus metrics are exposed | Defaults to `10254` |
| `WEB_APP_URL` | No | The base url of the web app | `https://your-instance.com` |
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
| `LOG_LEVEL` | No | The verbosity of the service logs. One of `trace`, `debug`, `info`, `warn` ,`error`, `fatal` or `silent` | `info` (default) |
| `OPENTELEMETRY_COLLECTOR_ENDPOINT` | No | OpenTelemetry Collector endpoint. The expected traces transport is HTTP (port `4318`). | `http://localhost:4318/v1/traces` |
| `OPENTELEMETRY_TRACE_USAGE_REQUESTS` | No | If enabled, requests send to this service from `usage` service will be monitored with OTEL. | `1` (enabled, or ``) |

View file

@ -1,32 +0,0 @@
{
"name": "@hive/rate-limit",
"type": "module",
"description": "A microservice for Hive Cloud, that exposes information about rate limits per given org/target.",
"license": "MIT",
"private": true,
"scripts": {
"build": "tsx ../../../scripts/runify.ts",
"dev": "tsup-node --config ../../../configs/tsup/dev.config.node.ts src/dev.ts",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@hive/emails": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/storage": "workspace:*",
"@sentry/node": "7.120.2",
"@trpc/client": "10.45.2",
"@trpc/server": "10.45.2",
"date-fns": "4.1.0",
"dotenv": "16.4.7",
"got": "14.4.5",
"pino-pretty": "11.3.0",
"reflect-metadata": "0.2.2",
"tslib": "2.8.1",
"zod": "3.24.1"
},
"buildOptions": {
"external": [
"pg-native"
]
}
}

View file

@ -1,49 +0,0 @@
import { z } from 'zod';
import { handleTRPCError } from '@hive/service-common';
import type { FastifyRequest } from '@hive/service-common';
import type { inferRouterInputs, inferRouterOutputs } from '@trpc/server';
import { initTRPC } from '@trpc/server';
import type { Limiter } from './limiter';

/** Per-request context for the rate-limit tRPC API. */
export interface Context {
  req: FastifyRequest;
  // In-memory limiter holding the cached per-org usage/limit state.
  limiter: Limiter;
}

const t = initTRPC.context<Context>().create();
const procedure = t.procedure.use(handleTRPCError);

export type RateLimitInput = z.infer<typeof VALIDATION>;

// Input schema shared by rate-limit checks.
const VALIDATION = z
  .object({
    id: z.string().min(1),
    entityType: z.enum(['organization', 'target']),
    type: z.enum(['operations-reporting']),
    /**
     * Token is optional, and used only when an additional blocking (WAF) process is needed.
     */
    // NOTE(review): the trailing `.required()` makes every key of this object
    // required, which appears to conflict with the "optional" intent stated
    // above — confirm callers always send `token` (possibly null).
    token: z.string().nullish().optional(),
  })
  .required();

export const rateLimitApiRouter = t.router({
  // Returns the usage-data retention (in days) for the target's organization.
  getRetention: procedure
    .input(
      z
        .object({
          targetId: z.string().nonempty(),
        })
        .required(),
    )
    .query(({ ctx, input }) => {
      return ctx.limiter.getRetention(input.targetId);
    }),
  // Checks whether the org/target identified by the input is within its limit.
  checkRateLimit: procedure.input(VALIDATION).query(({ ctx, input }) => {
    return ctx.limiter.checkLimit(input);
  }),
});

export type RateLimitApi = typeof rateLimitApiRouter;
export type RateLimitApiInput = inferRouterInputs<RateLimitApi>;
export type RateLimitApiOutput = inferRouterOutputs<RateLimitApi>;

View file

@ -1,161 +0,0 @@
import zod from 'zod';
import { OpenTelemetryConfigurationModel } from '@hive/service-common';
// A value counts as a "number string" when it is a string of one or more digits.
const isNumberString = (value: unknown) =>
  zod.string().regex(/^\d+$/).safeParse(value).success;

// Coerce plain numbers and digit-only strings to a number; anything else maps
// to undefined so the downstream schema rejects it.
const numberFromNumberOrNumberString = (value: unknown): number | undefined => {
  if (typeof value === 'number') {
    return value;
  }
  return isNumberString(value) ? Number(value) : undefined;
};

// Schema accepting a positive number supplied either as a number or as a
// numeric string (the common case for environment variables).
const NumberFromString = zod.preprocess(numberFromNumberOrNumberString, zod.number().min(1));
// treat an empty string (`''`) as undefined
// Wrap a schema so that an empty string (`''`) is treated as undefined,
// letting `.optional()` / defaults apply to unset-but-present env vars.
const emptyString = <T extends zod.ZodType>(schema: T) =>
  zod.preprocess((value: unknown) => (value === '' ? undefined : value), schema);
// Core service configuration: port, upstream endpoints, cache refresh interval.
const EnvironmentModel = zod.object({
  PORT: emptyString(NumberFromString.optional()),
  ENVIRONMENT: emptyString(zod.string().optional()),
  RELEASE: emptyString(zod.string().optional()),
  USAGE_ESTIMATOR_ENDPOINT: zod.string().url(),
  EMAILS_ENDPOINT: emptyString(zod.string().url().optional()),
  LIMIT_CACHE_UPDATE_INTERVAL_MS: emptyString(NumberFromString.optional()),
  WEB_APP_URL: emptyString(zod.string().url().optional()),
});

// Sentry is either disabled (SENTRY unset/'0') or enabled with a DSN.
const SentryModel = zod.union([
  zod.object({
    SENTRY: emptyString(zod.literal('0').optional()),
  }),
  zod.object({
    SENTRY: zod.literal('1'),
    SENTRY_DSN: zod.string(),
  }),
]);

const PostgresModel = zod.object({
  POSTGRES_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
  POSTGRES_HOST: zod.string(),
  POSTGRES_PORT: NumberFromString,
  POSTGRES_DB: zod.string(),
  POSTGRES_USER: zod.string(),
  POSTGRES_PASSWORD: emptyString(zod.string().optional()),
});

const PrometheusModel = zod.object({
  PROMETHEUS_METRICS: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()),
  PROMETHEUS_METRICS_LABEL_INSTANCE: zod.string().optional(),
  PROMETHEUS_METRICS_PORT: emptyString(NumberFromString.optional()),
});

const LogModel = zod.object({
  LOG_LEVEL: emptyString(
    zod
      .union([
        zod.literal('trace'),
        zod.literal('debug'),
        zod.literal('info'),
        zod.literal('warn'),
        zod.literal('error'),
        zod.literal('fatal'),
        zod.literal('silent'),
      ])
      .optional(),
  ),
  // Request logging defaults to enabled.
  REQUEST_LOGGING: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()).default(
    '1',
  ),
});

// Parse each model independently so every validation error can be reported in
// one pass (see the error-collection loop below).
const configs = {
  base: EnvironmentModel.safeParse(process.env),
  sentry: SentryModel.safeParse(process.env),
  postgres: PostgresModel.safeParse(process.env),
  prometheus: PrometheusModel.safeParse(process.env),
  log: LogModel.safeParse(process.env),
  tracing: zod
    .object({
      ...OpenTelemetryConfigurationModel.shape,
      OPENTELEMETRY_TRACE_USAGE_REQUESTS: emptyString(zod.literal('1').optional()),
    })
    .safeParse(process.env),
};
// Collect formatted validation errors from every parsed model so the operator
// sees all misconfigured variables in a single report before the process exits.
const environmentErrors: Array<string> = [];

for (const config of Object.values(configs)) {
  if (config.success === false) {
    environmentErrors.push(JSON.stringify(config.error.format(), null, 4));
  }
}

if (environmentErrors.length) {
  const fullError = environmentErrors.join(`\n`);
  console.error('❌ Invalid environment variables:', fullError);
  process.exit(1);
}

/**
 * Unwrap a successful zod parse result.
 *
 * All failures are reported and aborted above, so this should never throw in
 * practice; if it does, surface the underlying validation error rather than a
 * generic "Something went wrong." message.
 */
function extractConfig<Input, Output>(config: zod.SafeParseReturnType<Input, Output>): Output {
  if (!config.success) {
    throw new Error(`Invalid configuration: ${JSON.stringify(config.error.format(), null, 4)}`);
  }

  return config.data;
}
const base = extractConfig(configs.base);
const postgres = extractConfig(configs.postgres);
const sentry = extractConfig(configs.sentry);
const prometheus = extractConfig(configs.prometheus);
const log = extractConfig(configs.log);
const tracing = extractConfig(configs.tracing);

// Fully validated, typed environment configuration for the rate-limit service.
export const env = {
  environment: base.ENVIRONMENT,
  release: base.RELEASE ?? 'local',
  hiveServices: {
    usageEstimator: { endpoint: base.USAGE_ESTIMATOR_ENDPOINT },
    // Emails endpoint is optional; `null` disables limit-warning emails.
    emails: base.EMAILS_ENDPOINT ? { endpoint: base.EMAILS_ENDPOINT } : null,
    webAppUrl: base.WEB_APP_URL ?? 'http://localhost:3000',
  },
  // How often the cached limit data is refreshed (default: 1 minute).
  limitCacheUpdateIntervalMs: base.LIMIT_CACHE_UPDATE_INTERVAL_MS ?? 60_000,
  http: {
    port: base.PORT ?? 4012,
  },
  tracing: {
    // Tracing is on only when a collector endpoint is configured.
    enabled: !!tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
    collectorEndpoint: tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
    traceRequestsFromUsageService: tracing.OPENTELEMETRY_TRACE_USAGE_REQUESTS === '1',
  },
  postgres: {
    host: postgres.POSTGRES_HOST,
    port: postgres.POSTGRES_PORT,
    db: postgres.POSTGRES_DB,
    user: postgres.POSTGRES_USER,
    password: postgres.POSTGRES_PASSWORD,
    ssl: postgres.POSTGRES_SSL === '1',
  },
  sentry: sentry.SENTRY === '1' ? { dsn: sentry.SENTRY_DSN } : null,
  log: {
    level: log.LOG_LEVEL ?? 'info',
    requests: log.REQUEST_LOGGING === '1',
  },
  prometheus:
    prometheus.PROMETHEUS_METRICS === '1'
      ? {
          labels: {
            instance: prometheus.PROMETHEUS_METRICS_LABEL_INSTANCE ?? 'rate-limit',
          },
          port: prometheus.PROMETHEUS_METRICS_PORT ?? 10_254,
        }
      : null,
} as const;

View file

@ -1,143 +0,0 @@
#!/usr/bin/env node
import 'reflect-metadata';
import { hostname } from 'os';
import {
configureTracing,
createServer,
registerShutdown,
registerTRPC,
reportReadiness,
SamplingDecision,
startMetrics,
TracingInstance,
} from '@hive/service-common';
import { createConnectionString } from '@hive/storage';
import * as Sentry from '@sentry/node';
import { Context, rateLimitApiRouter } from './api';
import { env } from './environment';
import { createRateLimiter } from './limiter';
/**
 * Boots the rate-limit service: optional OpenTelemetry tracing, optional
 * Sentry reporting, a Fastify server exposing the rate-limit tRPC API plus
 * health/readiness endpoints and Prometheus metrics, and the background
 * limiter that periodically refreshes usage data.
 */
async function main() {
  let tracing: TracingInstance | undefined;

  if (env.tracing.enabled && env.tracing.collectorEndpoint) {
    tracing = configureTracing({
      collectorEndpoint: env.tracing.collectorEndpoint,
      serviceName: 'rate-limit',
      sampler: (ctx, traceId, spanName, spanKind, attributes) => {
        // Drop traces for calls originating from the usage service unless
        // explicitly enabled (they are high-volume).
        if (
          attributes['requesting.service'] === 'usage' &&
          !env.tracing.traceRequestsFromUsageService
        ) {
          return {
            decision: SamplingDecision.NOT_RECORD,
          };
        }

        return {
          decision: SamplingDecision.RECORD_AND_SAMPLED,
        };
      },
    });

    tracing.instrumentNodeFetch();
    tracing.build();
    tracing.start();
  }

  if (env.sentry) {
    Sentry.init({
      serverName: hostname(),
      dist: 'rate-limit',
      enabled: !!env.sentry,
      environment: env.environment,
      dsn: env.sentry.dsn,
      release: env.release,
    });
  }

  const server = await createServer({
    name: 'rate-limit',
    sentryErrorHandler: true,
    log: {
      level: env.log.level,
      requests: env.log.requests,
    },
  });

  if (tracing) {
    await server.register(...tracing.instrumentFastify());
  }

  try {
    const limiter = createRateLimiter({
      logger: server.log,
      rateLimitConfig: {
        interval: env.limitCacheUpdateIntervalMs,
      },
      rateEstimator: env.hiveServices.usageEstimator,
      emails: env.hiveServices.emails ?? undefined,
      storage: {
        connectionString: createConnectionString(env.postgres),
        // Trace Postgres queries only when tracing is configured.
        additionalInterceptors: tracing ? [tracing.instrumentSlonik()] : undefined,
      },
    });

    await registerTRPC(server, {
      router: rateLimitApiRouter,
      createContext({ req }): Context {
        return {
          req,
          limiter,
        };
      },
    });

    registerShutdown({
      logger: server.log,
      async onShutdown() {
        await Promise.all([limiter.stop(), server.close()]);
      },
    });

    // Liveness probe: always 200 once the HTTP server is up.
    server.route({
      method: ['GET', 'HEAD'],
      url: '/_health',
      handler(_, res) {
        void res.status(200).send();
      },
    });

    // Readiness probe: 200 only once the limiter reports ready.
    server.route({
      method: ['GET', 'HEAD'],
      url: '/_readiness',
      async handler(_, res) {
        const isReady = await limiter.readiness();
        reportReadiness(isReady);
        void res.status(isReady ? 200 : 400).send();
      },
    });

    if (env.prometheus) {
      await startMetrics(env.prometheus.labels.instance, env.prometheus.port);
    }

    // Begin accepting traffic, then kick off the limiter's refresh loop.
    await server.listen({
      port: env.http.port,
      host: '::',
    });
    await limiter.start();
  } catch (error) {
    // NOTE(review): startup failures are logged and reported, but the process
    // is left running without a working limiter — confirm this is intended.
    server.log.fatal(error);
    Sentry.captureException(error, {
      level: 'fatal',
    });
  }
}
// Last-resort handler: report, log, and exit non-zero if main() itself rejects.
main().catch(err => {
  Sentry.captureException(err, {
    level: 'fatal',
  });
  console.error(err);
  process.exit(1);
});

View file

@ -1,9 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"target": "ES2020",
"module": "ESNext",
"rootDir": "../.."
},
"files": ["src/index.ts"]
}

View file

@ -14,9 +14,7 @@ CLICKHOUSE_PASSWORD="test"
TOKENS_ENDPOINT="http://localhost:6001"
SCHEMA_ENDPOINT="http://localhost:6500"
SCHEMA_POLICY_ENDPOINT="http://localhost:6600"
USAGE_ESTIMATOR_ENDPOINT="http://localhost:4011"
RATE_LIMIT_ENDPOINT="http://localhost:4012"
BILLING_ENDPOINT="http://localhost:4013"
COMMERCE_ENDPOINT="http://localhost:4013"
WEBHOOKS_ENDPOINT="http://localhost:6250"
EMAILS_ENDPOINT="http://localhost:6260"
REDIS_HOST="localhost"
@ -84,4 +82,4 @@ AUTH_OKTA_CLIENT_SECRET="<sync>"
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"
HIVE_ENCRYPTION_SECRET=wowverysecuremuchsecret
HIVE_ENCRYPTION_SECRET=wowverysecuremuchsecret

View file

@ -10,7 +10,6 @@ The GraphQL API for GraphQL Hive.
| `ENCRYPTION_SECRET` | **Yes** | Secret for encrypting stuff. | `8ebe95cg21c1fee355e9fa32c8c33141` |
| `WEB_APP_URL` | **Yes** | The url of the web app. | `http://127.0.0.1:3000` |
| `GRAPHQL_PUBLIC_ORIGIN` | **Yes** | The origin of the GraphQL server. | `http://127.0.0.1:4013` |
| `RATE_LIMIT_ENDPOINT` | **Yes** | The endpoint of the rate limiting service. | `http://127.0.0.1:4012` |
| `EMAILS_ENDPOINT` | **Yes** | The endpoint of the GraphQL Hive Email service. | `http://127.0.0.1:6260` |
| `TOKENS_ENDPOINT` | **Yes** | The endpoint of the tokens service. | `http://127.0.0.1:6001` |
| `WEBHOOKS_ENDPOINT` | **Yes** | The endpoint of the webhooks service. | `http://127.0.0.1:6250` |
@ -95,15 +94,14 @@ The GraphQL API for GraphQL Hive.
If you are self-hosting GraphQL Hive, you can ignore this section. It is only required for the Cloud
version.
| Name | Required | Description | Example Value |
| -------------------------- | ----------------------------- | -------------------------------------------- | ------------------------------- |
| `BILLING_ENDPOINT` | **Yes** | The endpoint of the Hive Billing service. | `http://127.0.0.1:4013` |
| `USAGE_ESTIMATOR_ENDPOINT` | No | The endpoint of the usage estimator service. | `4011` |
| `CDN_CF` | No | Whether the CDN is enabled. | `1` (enabled) or `0` (disabled) |
| `CDN_CF_BASE_URL` | No (**Yes** if `CDN` is `1`) | The base URL of the cdn. | `https://cdn.graphql-hive.com` |
| `HIVE` | No | The internal endpoint key. | `iliketurtles` |
| `HIVE_API_TOKEN` | No (**Yes** if `HIVE` is set) | The internal endpoint key. | `iliketurtles` |
| `HIVE_USAGE` | No | The internal endpoint key. | `1` (enabled) or `0` (disabled) |
| `HIVE_USAGE_ENDPOINT` | No | The endpoint used for usage reporting. | `http://127.0.0.1:4001` |
| `HIVE_REPORTING` | No | The internal endpoint key. | `iliketurtles` |
| `HIVE_REPORTING_ENDPOINT` | No | The internal endpoint key. | `http://127.0.0.1:4000/graphql` |
| Name | Required | Description | Example Value |
| ------------------------- | ----------------------------- | -------------------------------------- | ------------------------------- |
| `COMMERCE_ENDPOINT` | **Yes** | The endpoint of the commerce service. | `http://127.0.0.1:4013` |
| `CDN_CF` | No | Whether the CDN is enabled. | `1` (enabled) or `0` (disabled) |
| `CDN_CF_BASE_URL` | No (**Yes** if `CDN` is `1`) | The base URL of the cdn. | `https://cdn.graphql-hive.com` |
| `HIVE` | No | The internal endpoint key. | `iliketurtles` |
| `HIVE_API_TOKEN` | No (**Yes** if `HIVE` is set) | The internal endpoint key. | `iliketurtles` |
| `HIVE_USAGE` | No | The internal endpoint key. | `1` (enabled) or `0` (disabled) |
| `HIVE_USAGE_ENDPOINT` | No | The endpoint used for usage reporting. | `http://127.0.0.1:4001` |
| `HIVE_REPORTING` | No | The internal endpoint key. | `iliketurtles` |
| `HIVE_REPORTING_ENDPOINT` | No | The internal endpoint key. | `http://127.0.0.1:4000/graphql` |

View file

@ -2,23 +2,12 @@ import { CryptoProvider } from 'packages/services/api/src/modules/shared/provide
import { z } from 'zod';
import type { Storage } from '@hive/api';
import { OrganizationAccessScope, ProjectAccessScope, TargetAccessScope } from '@hive/api';
import type { inferAsyncReturnType } from '@trpc/server';
import { initTRPC } from '@trpc/server';
export async function createContext({
storage,
crypto,
}: {
export type Context = {
storage: Storage;
crypto: CryptoProvider;
}) {
return {
storage,
crypto,
};
}
export type Context = inferAsyncReturnType<typeof createContext>;
};
const oidcDefaultScopes = [
OrganizationAccessScope.READ,

View file

@ -30,12 +30,8 @@ const EnvironmentModel = zod.object({
'GRAPHQL_PUBLIC_ORIGIN is required (see: https://github.com/graphql-hive/platform/pull/4288#issue-2195509699)',
})
.url(),
RATE_LIMIT_ENDPOINT: emptyString(zod.string().url().optional()),
SCHEMA_POLICY_ENDPOINT: emptyString(zod.string().url().optional()),
TOKENS_ENDPOINT: zod.string().url(),
USAGE_ESTIMATOR_ENDPOINT: emptyString(zod.string().url().optional()),
USAGE_ESTIMATOR_RETENTION_PURGE_INTERVAL_MINUTES: emptyString(NumberFromString.optional()),
BILLING_ENDPOINT: emptyString(zod.string().url().optional()),
EMAILS_ENDPOINT: emptyString(zod.string().url().optional()),
WEBHOOKS_ENDPOINT: zod.string().url(),
SCHEMA_ENDPOINT: zod.string().url(),
@ -48,6 +44,10 @@ const EnvironmentModel = zod.object({
),
});
const CommerceModel = zod.object({
COMMERCE_ENDPOINT: emptyString(zod.string().url().optional()),
});
const SentryModel = zod.union([
zod.object({
SENTRY: emptyString(zod.literal('0').optional()),
@ -261,6 +261,7 @@ const processEnv = process.env;
const configs = {
base: EnvironmentModel.safeParse(processEnv),
commerce: CommerceModel.safeParse(processEnv),
sentry: SentryModel.safeParse(processEnv),
postgres: PostgresModel.safeParse(processEnv),
clickhouse: ClickHouseModel.safeParse(processEnv),
@ -306,6 +307,7 @@ function extractConfig<Input, Output>(config: zod.SafeParseReturnType<Input, Out
}
const base = extractConfig(configs.base);
const commerce = extractConfig(configs.commerce);
const postgres = extractConfig(configs.postgres);
const sentry = extractConfig(configs.sentry);
const clickhouse = extractConfig(configs.clickhouse);
@ -370,9 +372,10 @@ export const env = {
tokens: {
endpoint: base.TOKENS_ENDPOINT,
},
rateLimit: base.RATE_LIMIT_ENDPOINT
commerce: commerce.COMMERCE_ENDPOINT
? {
endpoint: base.RATE_LIMIT_ENDPOINT,
endpoint: commerce.COMMERCE_ENDPOINT,
dateRetentionPurgeIntervalMinutes: 5,
}
: null,
schemaPolicy: base.SCHEMA_POLICY_ENDPOINT
@ -380,13 +383,6 @@ export const env = {
endpoint: base.SCHEMA_POLICY_ENDPOINT,
}
: null,
usageEstimator: base.USAGE_ESTIMATOR_ENDPOINT
? {
endpoint: base.USAGE_ESTIMATOR_ENDPOINT,
dateRetentionPurgeIntervalMinutes: 5,
}
: null,
billing: base.BILLING_ENDPOINT ? { endpoint: base.BILLING_ENDPOINT } : null,
emails: base.EMAILS_ENDPOINT ? { endpoint: base.EMAILS_ENDPOINT } : null,
webhooks: { endpoint: base.WEBHOOKS_ENDPOINT },
schema: { endpoint: base.SCHEMA_ENDPOINT },

View file

@ -57,7 +57,7 @@ import { createServerAdapter } from '@whatwg-node/server';
import { AuthN } from '../../api/src/modules/auth/lib/authz';
import { SuperTokensUserAuthNStrategy } from '../../api/src/modules/auth/lib/supertokens-strategy';
import { TargetAccessTokenStrategy } from '../../api/src/modules/auth/lib/target-access-token-strategy';
import { createContext, internalApiRouter } from './api';
import { internalApiRouter } from './api';
import { asyncStorage } from './async-storage';
import { env } from './environment';
import { graphqlHandler } from './graphql-handler';
@ -185,11 +185,11 @@ export async function main() {
let dbPurgeTaskRunner: null | ReturnType<typeof createTaskRunner> = null;
if (!env.hiveServices.usageEstimator) {
server.log.debug('Usage estimation is disabled. Skip scheduling purge tasks.');
if (!env.hiveServices.commerce) {
server.log.debug('Commerce service is disabled. Skip scheduling purge tasks.');
} else {
server.log.debug(
`Usage estimation is enabled. Start scheduling purge tasks every ${env.hiveServices.usageEstimator.dateRetentionPurgeIntervalMinutes} minutes.`,
`Commerce service is enabled. Start scheduling purge tasks every ${env.hiveServices.commerce.dateRetentionPurgeIntervalMinutes} minutes.`,
);
dbPurgeTaskRunner = createTaskRunner({
run: traceInline(
@ -220,7 +220,7 @@ export async function main() {
}
},
),
interval: env.hiveServices.usageEstimator.dateRetentionPurgeIntervalMinutes * 60 * 1000,
interval: env.hiveServices.commerce.dateRetentionPurgeIntervalMinutes * 60 * 1000,
logger: server.log,
});
@ -317,8 +317,8 @@ export async function main() {
tokens: {
endpoint: env.hiveServices.tokens.endpoint,
},
billing: {
endpoint: env.hiveServices.billing ? env.hiveServices.billing.endpoint : null,
commerce: {
endpoint: env.hiveServices.commerce ? env.hiveServices.commerce.endpoint : null,
},
emailsEndpoint: env.hiveServices.emails ? env.hiveServices.emails.endpoint : undefined,
webhooks: {
@ -327,12 +327,6 @@ export async function main() {
schemaService: {
endpoint: env.hiveServices.schema.endpoint,
},
usageEstimationService: {
endpoint: env.hiveServices.usageEstimator ? env.hiveServices.usageEstimator.endpoint : null,
},
rateLimitService: {
endpoint: env.hiveServices.rateLimit ? env.hiveServices.rateLimit.endpoint : null,
},
schemaPolicyService: {
endpoint: env.hiveServices.schemaPolicy ? env.hiveServices.schemaPolicy.endpoint : null,
},
@ -486,7 +480,10 @@ export async function main() {
await registerTRPC(server, {
router: internalApiRouter,
createContext() {
return createContext({ storage, crypto });
return {
storage,
crypto,
};
},
});

View file

@ -5,7 +5,10 @@ import { experimental_standaloneMiddleware, type AnyRouter } from '@trpc/server'
import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify';
import type { CreateFastifyContextOptions } from '@trpc/server/adapters/fastify';
export function registerTRPC<TRouter extends AnyRouter, TContext>(
export function registerTRPC<
TRouter extends AnyRouter,
TContext extends TRouter['_def']['_config']['$types']['ctx'],
>(
server: FastifyInstance,
{
router,

View file

@ -1,9 +0,0 @@
PORT=4013
USAGE_ESTIMATOR_ENDPOINT=http://localhost:4011
STRIPE_SECRET_KEY="<sync>"
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=registry
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"

View file

@ -1,24 +0,0 @@
# `@hive/stripe-billing`
Optional service for billing customers with Stripe.
## Configuration
| Name | Required | Description | Example Value |
| ---------------------------------- | -------- | -------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
| `PORT` | **Yes** | The port this service is running on. | `4013` |
| `USAGE_ESTIMATOR_ENDPOINT` | **Yes** | The endpoint of the usage estimator service. | `http://127.0.0.1:4011` |
| `POSTGRES_HOST` | **Yes** | Host of the postgres database | `127.0.0.1` |
| `POSTGRES_PORT` | **Yes** | Port of the postgres database | `5432` |
| `POSTGRES_DB` | **Yes** | Name of the postgres database. | `registry` |
| `POSTGRES_USER` | **Yes** | User name for accessing the postgres database. | `postgres` |
| `POSTGRES_PASSWORD` | No | Password for accessing the postgres database. | `postgres` |
| `POSTGRES_SSL` | No | Whether the postgres connection should be established via SSL. | `1` (enabled) or `0` (disabled) |
| `STRIPE_SECRET_KEY` | **Yes** | The stripe secret key. | `sk_test_abcd` |
| `STRIPE_SYNC_INTERVAL_MS` | No | The stripe sync interval in milliseconds (Default: `600_000`) | `1_000` |
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
| `LOG_LEVEL` | No | The verbosity of the service logs. One of `trace`, `debug`, `info`, `warn` ,`error`, `fatal` or `silent` | `info` (default) |
| `OPENTELEMETRY_COLLECTOR_ENDPOINT` | No | OpenTelemetry Collector endpoint. The expected traces transport is HTTP (port `4318`). | `http://localhost:4318/v1/traces` |

View file

@ -1,8 +0,0 @@
// Load environment variables from a local `.env` file before the service
// entrypoint is evaluated (env vars are read at module import time).
import { config } from 'dotenv';

config({
  debug: true,
  encoding: 'utf8',
});

// Dynamic import ensures `./index` (and its env parsing) runs only after
// dotenv has populated `process.env`.
await import('./index');

View file

@ -1,132 +0,0 @@
#!/usr/bin/env node
import 'reflect-metadata';
import { hostname } from 'os';
import {
configureTracing,
createServer,
registerShutdown,
registerTRPC,
reportReadiness,
startMetrics,
TracingInstance,
} from '@hive/service-common';
import { createConnectionString } from '@hive/storage';
import * as Sentry from '@sentry/node';
import { Context, stripeBillingApiRouter } from './api';
import { createStripeBilling } from './billing-sync';
import { env } from './environment';
/**
 * Boots the stripe-billing service: optional OpenTelemetry tracing and
 * Sentry reporting, a Fastify server with the tRPC billing API, health and
 * readiness probes, optional Prometheus metrics, and the Stripe sync loop.
 */
async function main() {
  let tracing: TracingInstance | undefined;

  // Tracing is opt-in: only configured when a collector endpoint is set.
  if (env.tracing.enabled && env.tracing.collectorEndpoint) {
    tracing = configureTracing({
      collectorEndpoint: env.tracing.collectorEndpoint,
      serviceName: 'stripe-billing',
    });
    tracing.instrumentNodeFetch();
    tracing.build();
    tracing.start();
  }

  if (env.sentry) {
    Sentry.init({
      serverName: hostname(),
      dist: 'stripe-billing',
      enabled: !!env.sentry,
      environment: env.environment,
      dsn: env.sentry.dsn,
      release: env.release,
    });
  }

  const server = await createServer({
    name: 'stripe-billing',
    sentryErrorHandler: true,
    log: {
      level: env.log.level,
      requests: env.log.requests,
    },
  });

  // Fastify instrumentation must be registered before routes are added.
  if (tracing) {
    await server.register(...tracing.instrumentFastify());
  }

  try {
    // Wires Stripe, the usage-estimator service, and postgres storage together.
    const { readiness, start, stop, stripeApi, postgres$, loadStripeData$ } = createStripeBilling({
      logger: server.log,
      stripe: {
        token: env.stripe.secretKey,
        syncIntervalMs: env.stripe.syncIntervalMs,
      },
      rateEstimator: {
        endpoint: env.hiveServices.usageEstimator.endpoint,
      },
      storage: {
        connectionString: createConnectionString(env.postgres),
        additionalInterceptors: tracing ? [tracing.instrumentSlonik()] : [],
      },
    });

    // Stop the sync loop and close the server together on shutdown signals.
    registerShutdown({
      logger: server.log,
      async onShutdown() {
        await Promise.all([stop(), server.close()]);
      },
    });

    await registerTRPC(server, {
      router: stripeBillingApiRouter,
      createContext({ req }): Context {
        return {
          storage$: postgres$,
          stripe: stripeApi,
          stripeData$: loadStripeData$,
          req,
        };
      },
    });

    // Liveness probe: always 200 while the process is up.
    server.route({
      method: ['GET', 'HEAD'],
      url: '/_health',
      handler(_, res) {
        void res.status(200).send();
      },
    });

    // Readiness probe: reflects whether billing dependencies are reachable.
    server.route({
      method: ['GET', 'HEAD'],
      url: '/_readiness',
      async handler(_, res) {
        const isReady = await readiness();
        reportReadiness(isReady);
        void res.status(isReady ? 200 : 400).send();
      },
    });

    if (env.prometheus) {
      await startMetrics(env.prometheus.labels.instance);
    }

    // '::' binds both IPv6 and IPv4 interfaces.
    await server.listen({
      port: env.http.port,
      host: '::',
    });

    // Kick off the recurring Stripe synchronization.
    await start();
  } catch (error) {
    server.log.fatal(error);
    Sentry.captureException(error, {
      level: 'fatal',
    });
  }
}
// Top-level failure guard: report to Sentry, log, and exit non-zero so the
// orchestrator can restart the service.
main().catch(err => {
  Sentry.captureException(err, {
    level: 'fatal',
  });
  console.error(err);
  process.exit(1);
});

View file

@ -18,7 +18,6 @@ APIs (usage service and GraphQL API).
| `REDIS_PORT` | **Yes** | The port of your redis instance. | `6379` |
| `REDIS_PASSWORD` | **Yes** | The password of your redis instance. | `"apollorocks"` |
| `REDIS_TLS_ENABLED` | **No** | Enable TLS for redis connection (rediss://). | `"0"` |
| `RATE_LIMIT_ENDPOINT` | **Yes** | The endpoint of the rate limiting service. | `http://127.0.0.1:4012` |
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |

View file

@ -1,28 +0,0 @@
# `@hive/usage-estimator`
This service takes care of estimating the usage of an account.
## Configuration
| Name | Required | Description | Example Value |
| ----------------------------------- | -------- | -------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
| `CLICKHOUSE_PROTOCOL` | **Yes** | The clickhouse protocol for connecting to the clickhouse instance. | `http` |
| `CLICKHOUSE_HOST` | **Yes** | The host of the clickhouse instance. | `127.0.0.1` |
| `CLICKHOUSE_PORT` | **Yes** | The port of the clickhouse instance | `8123` |
| `CLICKHOUSE_USERNAME` | **Yes** | The username for accessing the clickhouse instance. | `test` |
| `CLICKHOUSE_PASSWORD` | **Yes** | The password for accessing the clickhouse instance. | `test` |
| `PORT` | **Yes** | The port this service is running on. | `4011` |
| `POSTGRES_HOST` | **Yes** | Host of the postgres database | `127.0.0.1` |
| `POSTGRES_PORT` | **Yes** | Port of the postgres database | `5432` |
| `POSTGRES_DB` | **Yes** | Name of the postgres database. | `registry` |
| `POSTGRES_USER` | **Yes** | User name for accessing the postgres database. | `postgres` |
| `POSTGRES_PASSWORD` | No | Password for accessing the postgres database. | `postgres` |
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
| `SENTRY_ENABLED` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `usage-estimator` |
| `PROMETHEUS_METRICS_PORT` | No | Port on which prometheus metrics are exposed | Defaults to `10254` |
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
| `LOG_LEVEL` | No | The verbosity of the service logs. One of `trace`, `debug`, `info`, `warn` ,`error`, `fatal` or `silent` | `info` (default) |
| `OPENTELEMETRY_COLLECTOR_ENDPOINT` | No | OpenTelemetry Collector endpoint. The expected traces transport is HTTP (port `4318`). | `http://localhost:4318/v1/traces` |

View file

@ -1,29 +0,0 @@
{
"name": "@hive/usage-estimator",
"type": "module",
"description": "A microservice for Hive Cloud, that calculates and exposes usage information.",
"license": "MIT",
"private": true,
"scripts": {
"build": "tsx ../../../scripts/runify.ts",
"dev": "tsup-node --config ../../../configs/tsup/dev.config.node.ts src/dev.ts",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@hive/api": "workspace:*",
"@hive/service-common": "workspace:*",
"@sentry/node": "7.120.2",
"@trpc/server": "10.45.2",
"dotenv": "16.4.7",
"got": "14.4.5",
"pino-pretty": "11.3.0",
"reflect-metadata": "0.2.2",
"tslib": "2.8.1",
"zod": "3.24.1"
},
"buildOptions": {
"external": [
"pg-native"
]
}
}

View file

@ -1,73 +0,0 @@
import { z } from 'zod';
import { handleTRPCError } from '@hive/service-common';
import type { FastifyRequest } from '@hive/service-common';
import type { inferRouterInputs, inferRouterOutputs } from '@trpc/server';
import { initTRPC } from '@trpc/server';
import type { Estimator } from './estimator';
/**
 * Builds the per-request tRPC context for the usage-estimator service,
 * carrying the shared Estimator instance and the raw Fastify request.
 */
export function createContext(estimator: Estimator, req: FastifyRequest) {
  const context = { estimator, req };
  return context;
}
// tRPC instance typed with the context produced by `createContext`.
const t = initTRPC.context<ReturnType<typeof createContext>>().create();
// Base procedure with shared error handling applied to every endpoint.
const procedure = t.procedure.use(handleTRPCError);

/**
 * tRPC router exposing usage-estimation queries backed by ClickHouse.
 */
export const usageEstimatorApiRouter = t.router({
  // Total operations collected for one organization in a given month.
  estimateOperationsForOrganization: procedure
    .input(
      z
        .object({
          month: z.number().min(1).max(12),
          // Only the current and previous year are accepted. NOTE(review):
          // the bounds are captured once at module load time, so a process
          // running across a year boundary keeps the old limits — confirm
          // this is acceptable.
          year: z
            .number()
            .min(new Date().getFullYear() - 1)
            .max(new Date().getFullYear()),
          organizationId: z.string().min(1),
        })
        .required(),
    )
    .query(async ({ ctx, input }) => {
      const estimationResponse = await ctx.estimator.estimateCollectedOperationsForOrganization({
        organizationId: input.organizationId,
        month: input.month,
        year: input.year,
      });

      // No rows means no usage was recorded for that period.
      if (!estimationResponse.data.length) {
        return {
          totalOperations: 0,
        };
      }

      // Totals arrive as strings; convert to a number for consumers.
      return {
        totalOperations: parseInt(estimationResponse.data[0].total),
      };
    }),
  // Per-target operation counts within an arbitrary time window.
  estimateOperationsForAllTargets: procedure
    .input(
      z
        .object({
          startTime: z.string().min(1),
          endTime: z.string().min(1),
        })
        .required(),
    )
    .query(async ({ ctx, input }) => {
      const estimationResponse = await ctx.estimator.estimateOperationsForAllTargets({
        startTime: new Date(input.startTime),
        endTime: new Date(input.endTime),
      });

      // Map of target id -> total operations (totals arrive as strings).
      return Object.fromEntries(
        estimationResponse.data.map(item => [item.target, parseInt(item.total)]),
      );
    }),
});

// Public type exports consumed by tRPC clients in other services.
export type UsageEstimatorApi = typeof usageEstimatorApiRouter;
export type UsageEstimatorApiInput = inferRouterInputs<UsageEstimatorApi>;
export type UsageEstimatorApiOutput = inferRouterOutputs<UsageEstimatorApi>;

View file

@ -1,8 +0,0 @@
// Load environment variables from a local `.env` file before the service
// entrypoint is evaluated (env vars are read at module import time).
import { config } from 'dotenv';

config({
  debug: true,
  encoding: 'utf8',
});

// Dynamic import ensures `./index` (and its env parsing) runs only after
// dotenv has populated `process.env`.
await import('./index');

View file

@ -1,143 +0,0 @@
import zod from 'zod';
import { OpenTelemetryConfigurationModel } from '@hive/service-common';
// Schema matching strings that consist solely of decimal digits.
const digitsOnlySchema = zod.string().regex(/^\d+$/);

/** Whether the given value is a string made up exclusively of digits. */
const isNumberString = (input: unknown) => digitsOnlySchema.safeParse(input).success;

/** Pass numbers through; convert digit-only strings; anything else is undefined. */
const numberFromNumberOrNumberString = (input: unknown): number | undefined => {
  if (typeof input == 'number') {
    return input;
  }
  if (isNumberString(input)) {
    return Number(input);
  }
  return undefined;
};

// Accepts a positive number supplied either as a number or a numeric string.
const NumberFromString = zod.preprocess(numberFromNumberOrNumberString, zod.number().min(1));

/** Wraps a schema so that an empty string (`''`) is treated as undefined. */
const emptyString = <T extends zod.ZodType>(input: T) => {
  return zod.preprocess((value: unknown) => (value === '' ? undefined : value), input);
};
// Base service settings: listen port plus deployment metadata used for
// error reporting and release tracking.
const EnvironmentModel = zod.object({
  PORT: emptyString(NumberFromString.optional()),
  ENVIRONMENT: emptyString(zod.string().optional()),
  RELEASE: emptyString(zod.string().optional()),
});

// Sentry is either disabled (SENTRY unset or '0') or enabled ('1'), in
// which case a DSN becomes mandatory.
const SentryModel = zod.union([
  zod.object({
    SENTRY: emptyString(zod.literal('0').optional()),
  }),
  zod.object({
    SENTRY: zod.literal('1'),
    SENTRY_DSN: zod.string(),
  }),
]);

// Optional Prometheus metrics exposure: enable flag, instance label, port.
const PrometheusModel = zod.object({
  PROMETHEUS_METRICS: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()),
  PROMETHEUS_METRICS_LABEL_INSTANCE: emptyString(zod.string().optional()),
  PROMETHEUS_METRICS_PORT: emptyString(NumberFromString.optional()),
});

// Logging verbosity and HTTP request logging (request logging defaults to on).
const LogModel = zod.object({
  LOG_LEVEL: emptyString(
    zod
      .union([
        zod.literal('trace'),
        zod.literal('debug'),
        zod.literal('info'),
        zod.literal('warn'),
        zod.literal('error'),
        zod.literal('fatal'),
        zod.literal('silent'),
      ])
      .optional(),
  ),
  REQUEST_LOGGING: emptyString(zod.union([zod.literal('0'), zod.literal('1')]).optional()).default(
    '1',
  ),
});

// ClickHouse connection settings; every field is required.
const ClickHouseModel = zod.object({
  CLICKHOUSE_PROTOCOL: zod.union([zod.literal('http'), zod.literal('https')]),
  CLICKHOUSE_HOST: zod.string(),
  CLICKHOUSE_PORT: NumberFromString,
  CLICKHOUSE_USERNAME: zod.string(),
  CLICKHOUSE_PASSWORD: zod.string(),
});
// Validate each config group independently so that every problem is
// reported at once, rather than failing at the first invalid group.
const configs = {
  base: EnvironmentModel.safeParse(process.env),
  sentry: SentryModel.safeParse(process.env),
  clickhouse: ClickHouseModel.safeParse(process.env),
  prometheus: PrometheusModel.safeParse(process.env),
  log: LogModel.safeParse(process.env),
  tracing: OpenTelemetryConfigurationModel.safeParse(process.env),
};

// Human-readable message collected for every group that failed validation.
const environmentErrors: Array<string> = [];

for (const config of Object.values(configs)) {
  if (config.success === false) {
    environmentErrors.push(JSON.stringify(config.error.format(), null, 4));
  }
}

// Abort startup on any invalid environment variable; a partially configured
// service would fail later in far less obvious ways.
if (environmentErrors.length) {
  const fullError = environmentErrors.join(`\n`);
  console.error('❌ Invalid environment variables:', fullError);
  process.exit(1);
}
/**
 * Unwraps a zod `safeParse` result that is known to be valid at this point
 * (startup already exited above when any group failed validation).
 *
 * @throws Error carrying the underlying zod error, should an invalid result
 *         ever reach this function despite the startup guard.
 */
function extractConfig<Input, Output>(config: zod.SafeParseReturnType<Input, Output>): Output {
  if (!config.success) {
    // Defensive: include the actual validation error instead of the former
    // generic "Something went wrong." so the root cause is visible in logs.
    throw new Error(`Attempted to extract an invalid config: ${config.error.message}`);
  }
  return config.data;
}
const base = extractConfig(configs.base);
const clickhouse = extractConfig(configs.clickhouse);
const sentry = extractConfig(configs.sentry);
const prometheus = extractConfig(configs.prometheus);
const log = extractConfig(configs.log);
const tracing = extractConfig(configs.tracing);

// Fully-resolved, immutable service configuration assembled from the
// validated environment groups above.
export const env = {
  environment: base.ENVIRONMENT,
  release: base.RELEASE ?? 'local',
  http: {
    // NOTE(review): the service docs use 4011 as the example port, but the
    // fallback here is 4012 — confirm which default is intended.
    port: base.PORT ?? 4012,
  },
  tracing: {
    // Tracing is considered enabled purely by the presence of a collector endpoint.
    enabled: !!tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
    collectorEndpoint: tracing.OPENTELEMETRY_COLLECTOR_ENDPOINT,
  },
  clickhouse: {
    protocol: clickhouse.CLICKHOUSE_PROTOCOL,
    host: clickhouse.CLICKHOUSE_HOST,
    port: clickhouse.CLICKHOUSE_PORT,
    username: clickhouse.CLICKHOUSE_USERNAME,
    password: clickhouse.CLICKHOUSE_PASSWORD,
  },
  // Sentry is only configured when explicitly enabled via SENTRY='1'.
  sentry: sentry.SENTRY === '1' ? { dsn: sentry.SENTRY_DSN } : null,
  log: {
    level: log.LOG_LEVEL ?? 'info',
    requests: log.REQUEST_LOGGING === '1',
  },
  prometheus:
    prometheus.PROMETHEUS_METRICS === '1'
      ? {
          labels: {
            // NOTE(review): 'rate-limit' looks copy-pasted from the rate-limit
            // service; presumably should be this service's own name — confirm.
            instance: prometheus.PROMETHEUS_METRICS_LABEL_INSTANCE ?? 'rate-limit',
          },
          port: prometheus.PROMETHEUS_METRICS_PORT ?? 10_254,
        }
      : null,
} as const;

View file

@ -1,9 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"target": "ES2020",
"module": "ESNext",
"rootDir": "../.."
},
"files": ["src/index.ts"]
}

View file

@ -6,5 +6,5 @@ KAFKA_BUFFER_INTERVAL="5000"
KAFKA_BUFFER_DYNAMIC=1
KAFKA_TOPIC="usage_reports_v2"
PORT=4001
RATE_LIMIT_ENDPOINT="http://localhost:4012"
COMMERCE_ENDPOINT="http://localhost:4013"
OPENTELEMETRY_COLLECTOR_ENDPOINT="<sync>"

View file

@ -11,7 +11,7 @@ The data is written to a Kafka broker, form Kafka the data is feed into clickhou
| ----------------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
| `PORT` | No | The port this service is running on. | `4001` |
| `TOKENS_ENDPOINT` | **Yes** | The endpoint of the tokens service. | `http://127.0.0.1:6001` |
| `RATE_LIMIT_ENDPOINT` | No | The endpoint of the rate limiting service. | `http://127.0.0.1:4012` |
| `COMMERCE_ENDPOINT` | No | The endpoint of the commerce service. | `http://127.0.0.1:4012` |
| `KAFKA_TOPIC` | **Yes** | The kafka topic. | `usage_reports_v2` |
| `KAFKA_CONSUMER_GROUP` | **Yes** | The kafka consumer group. | `usage_reports_v2` |
| `KAFKA_BROKER` | **Yes** | The address of the Kafka broker. | `127.0.0.1:29092` |

View file

@ -22,7 +22,7 @@ const emptyString = <T extends zod.ZodType>(input: T) => {
const EnvironmentModel = zod.object({
PORT: emptyString(NumberFromString.optional()),
TOKENS_ENDPOINT: zod.string().url(),
RATE_LIMIT_ENDPOINT: emptyString(zod.string().url().optional()),
COMMERCE_ENDPOINT: emptyString(zod.string().url().optional()),
RATE_LIMIT_TTL: emptyString(NumberFromString.optional()).default(30_000),
ENVIRONMENT: emptyString(zod.string().optional()),
RELEASE: emptyString(zod.string().optional()),
@ -146,9 +146,9 @@ export const env = {
tokens: {
endpoint: base.TOKENS_ENDPOINT,
},
rateLimit: base.RATE_LIMIT_ENDPOINT
commerce: base.COMMERCE_ENDPOINT
? {
endpoint: base.RATE_LIMIT_ENDPOINT,
endpoint: base.COMMERCE_ENDPOINT,
ttl: base.RATE_LIMIT_TTL,
}
: null,

View file

@ -105,10 +105,10 @@ async function main() {
});
const rateLimit = createUsageRateLimit(
env.hive.rateLimit
env.hive.commerce
? {
endpoint: env.hive.rateLimit.endpoint,
ttlMs: env.hive.rateLimit.ttl,
endpoint: env.hive.commerce.endpoint,
ttlMs: env.hive.commerce.ttl,
logger: server.log,
}
: {

View file

@ -1,5 +1,5 @@
import { LRUCache } from 'lru-cache';
import type { RateLimitApi } from '@hive/rate-limit';
import type { CommerceRouter } from '@hive/commerce';
import { ServiceLogger } from '@hive/service-common';
import { createTRPCProxyClient, httpLink } from '@trpc/client';
import { rateLimitDuration } from './metrics';
@ -69,7 +69,7 @@ export function createUsageRateLimit(
};
}
const endpoint = config.endpoint.replace(/\/$/, '');
const rateLimit = createTRPCProxyClient<RateLimitApi>({
const commerceClient = createTRPCProxyClient<CommerceRouter>({
links: [
httpLink({
url: `${endpoint}/trpc`,
@ -98,7 +98,7 @@ export function createUsageRateLimit(
async fetchMethod(input) {
const { targetId, token } = rateLimitCacheKey.decodeCacheKey(input);
const timer = rateLimitDuration.startTimer();
const result = await rateLimit.checkRateLimit
const result = await commerceClient.rateLimit.checkRateLimit
.query({
id: targetId,
type: 'operations-reporting',
@ -130,7 +130,7 @@ export function createUsageRateLimit(
// even if multiple requests are waiting for it.
fetchMethod(targetId) {
const timer = rateLimitDuration.startTimer();
return rateLimit.getRetention.query({ targetId }).finally(() => {
return commerceClient.rateLimit.getRetention.query({ targetId }).finally(() => {
timer({
type: 'retention',
});

View file

@ -15,7 +15,6 @@ The following features are currently unavailable when self-hosting:
- High-Availability CDN (a replacement is available)
- Billing (payment integration)
- Usage rate-limiting and usage-estimation
</Callout>

View file

@ -295,9 +295,9 @@ importers:
'@graphql-typed-document-node/core':
specifier: 3.2.0
version: 3.2.0(graphql@16.9.0)
'@hive/rate-limit':
'@hive/commerce':
specifier: workspace:*
version: link:../packages/services/rate-limit
version: link:../packages/services/commerce
'@hive/schema':
specifier: workspace:*
version: link:../packages/services/schema
@ -831,6 +831,9 @@ importers:
slonik:
specifier: 30.4.4
version: 30.4.4(patch_hash=408d2a91c53799f60fa2e59860bc29067d20318cbf5844306888d0098b88d299)
stripe:
specifier: 17.5.0
version: 17.5.0
supertokens-node:
specifier: 16.7.5
version: 16.7.5(encoding@0.1.13)
@ -925,6 +928,51 @@ importers:
specifier: 3.24.1
version: 3.24.1
packages/services/commerce:
devDependencies:
'@hive/api':
specifier: workspace:*
version: link:../api
'@hive/emails':
specifier: workspace:*
version: link:../emails
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
'@trpc/client':
specifier: 10.45.2
version: 10.45.2(@trpc/server@10.45.2)
'@trpc/server':
specifier: 10.45.2
version: 10.45.2
date-fns:
specifier: 4.1.0
version: 4.1.0
dotenv:
specifier: 16.4.7
version: 16.4.7
fastify:
specifier: 4.29.0
version: 4.29.0
pino-pretty:
specifier: 11.3.0
version: 11.3.0
reflect-metadata:
specifier: 0.2.2
version: 0.2.2
stripe:
specifier: 17.5.0
version: 17.5.0
zod:
specifier: 3.24.1
version: 3.24.1
packages/services/demo/federation:
dependencies:
'@apollo/subgraph':
@ -1067,48 +1115,6 @@ importers:
specifier: 3.4.0
version: 3.4.0(zod@3.24.1)
packages/services/rate-limit:
devDependencies:
'@hive/emails':
specifier: workspace:*
version: link:../emails
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
'@trpc/client':
specifier: 10.45.2
version: 10.45.2(@trpc/server@10.45.2)
'@trpc/server':
specifier: 10.45.2
version: 10.45.2
date-fns:
specifier: 4.1.0
version: 4.1.0
dotenv:
specifier: 16.4.7
version: 16.4.7
got:
specifier: 14.4.5
version: 14.4.5(patch_hash=f7660444905ddadee251ff98241119fb54f5fec1e673a428192da361d5636299)
pino-pretty:
specifier: 11.3.0
version: 11.3.0
reflect-metadata:
specifier: 0.2.2
version: 0.2.2
tslib:
specifier: 2.8.1
version: 2.8.1
zod:
specifier: 3.24.1
version: 3.24.1
packages/services/schema:
devDependencies:
'@apollo/federation':
@ -1422,45 +1428,6 @@ importers:
specifier: 3.24.1
version: 3.24.1
packages/services/stripe-billing:
devDependencies:
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@hive/storage':
specifier: workspace:*
version: link:../storage
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
'@trpc/client':
specifier: 10.45.2
version: 10.45.2(@trpc/server@10.45.2)
'@trpc/server':
specifier: 10.45.2
version: 10.45.2
date-fns:
specifier: 4.1.0
version: 4.1.0
dotenv:
specifier: 16.4.7
version: 16.4.7
got:
specifier: 14.4.5
version: 14.4.5(patch_hash=f7660444905ddadee251ff98241119fb54f5fec1e673a428192da361d5636299)
pino-pretty:
specifier: 11.3.0
version: 11.3.0
reflect-metadata:
specifier: 0.2.2
version: 0.2.2
stripe:
specifier: 17.5.0
version: 17.5.0
zod:
specifier: 3.24.1
version: 3.24.1
packages/services/tokens:
devDependencies:
'@hive/service-common':
@ -1569,39 +1536,6 @@ importers:
specifier: 16.9.0
version: 16.9.0
packages/services/usage-estimator:
devDependencies:
'@hive/api':
specifier: workspace:*
version: link:../api
'@hive/service-common':
specifier: workspace:*
version: link:../service-common
'@sentry/node':
specifier: 7.120.2
version: 7.120.2
'@trpc/server':
specifier: 10.45.2
version: 10.45.2
dotenv:
specifier: 16.4.7
version: 16.4.7
got:
specifier: 14.4.5
version: 14.4.5(patch_hash=f7660444905ddadee251ff98241119fb54f5fec1e673a428192da361d5636299)
pino-pretty:
specifier: 11.3.0
version: 11.3.0
reflect-metadata:
specifier: 0.2.2
version: 0.2.2
tslib:
specifier: 2.8.1
version: 2.8.1
zod:
specifier: 3.24.1
version: 3.24.1
packages/services/usage-ingestor:
devDependencies:
'@graphql-hive/core':
@ -26913,7 +26847,7 @@ snapshots:
proxy-addr: 2.0.7
rfdc: 1.4.1
secure-json-parse: 2.7.0
semver: 7.6.2
semver: 7.6.3
toad-cache: 3.7.0
fastq@1.17.1:

View file

@ -50,17 +50,15 @@
],
"@hive/cdn-script/aws": ["./packages/services/cdn-worker/src/aws.ts"],
"@hive/server": ["./packages/services/server/src/api.ts"],
"@hive/stripe-billing": ["./packages/services/stripe-billing/src/api.ts"],
"@hive/schema": ["./packages/services/schema/src/api.ts"],
"@hive/usage-common": ["./packages/services/usage-common/src/index.ts"],
"@hive/usage-estimator": ["./packages/services/usage-estimator/src/api.ts"],
"@hive/usage": ["./packages/services/usage/src/index.ts"],
"@hive/usage-ingestor": ["./packages/services/usage-ingestor/src/index.ts"],
"@hive/rate-limit": ["./packages/services/rate-limit/src/api.ts"],
"@hive/policy": ["./packages/services/policy/src/api.ts"],
"@hive/tokens": ["./packages/services/tokens/src/api.ts"],
"@hive/webhooks": ["./packages/services/webhooks/src/api.ts"],
"@hive/emails": ["./packages/services/emails/src/api.ts"],
"@hive/commerce": ["./packages/services/commerce/src/api.ts"],
"@hive/storage": ["./packages/services/storage/src/index.ts"],
"@graphql-hive/yoga": ["./packages/libraries/yoga/src/index.ts"],
"@graphql-hive/apollo": ["./packages/libraries/apollo/src/index.ts"],