commit dc3bc1ec8782f07bb5bc9126204669550958cdf9 Author: Kamil Kisiela Date: Wed May 18 09:26:57 2022 +0200 Hello diff --git a/.changeset/README.md b/.changeset/README.md new file mode 100644 index 000000000..e5b6d8d6a --- /dev/null +++ b/.changeset/README.md @@ -0,0 +1,8 @@ +# Changesets + +Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works +with multi-package repos, or single-package repos to help you version and publish your code. You can +find the full documentation for it [in our repository](https://github.com/changesets/changesets) + +We have a quick list of common questions to get you started engaging with this project in +[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) diff --git a/.changeset/config.json b/.changeset/config.json new file mode 100644 index 000000000..db509215e --- /dev/null +++ b/.changeset/config.json @@ -0,0 +1,10 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@1.6.0/schema.json", + "changelog": "@changesets/cli/changelog", + "commit": false, + "linked": [], + "access": "restricted", + "baseBranch": "main", + "updateInternalDependencies": "patch", + "ignore": [] +} diff --git a/.eslintrc.cjs b/.eslintrc.cjs new file mode 100644 index 000000000..0f9e092a5 --- /dev/null +++ b/.eslintrc.cjs @@ -0,0 +1,46 @@ +/* eslint-env node */ + +module.exports = { + reportUnusedDisableDirectives: true, + ignorePatterns: [ + 'scripts', + 'out', + 'public', + 'packages/web/app/src/graphql/index.ts', + 'packages/libraries/cli/src/sdk.ts', + ], + parserOptions: { + ecmaVersion: 2020, + sourceType: 'module', + }, + parser: '@typescript-eslint/parser', + plugins: ['@typescript-eslint', 'import'], + extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'], + rules: { + '@typescript-eslint/no-unused-vars': [ + 'error', + { argsIgnorePattern: '^_', ignoreRestSiblings: true }, + ], + 'no-empty': ['error', { allowEmptyCatch: true }], + 
+ 'import/no-absolute-path': 'error', + 'import/no-self-import': 'error', + 'import/no-extraneous-dependencies': [ + 'error', + { + devDependencies: ['packages/services/storage/tools/*.js'], + optionalDependencies: false, + }, + ], + 'no-restricted-imports': ['error', { patterns: ['packages/*'] }], + + // 🚨 The following rules needs to be fixed and was temporarily disabled to avoid printing warning + '@typescript-eslint/no-explicit-any': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-non-null-assertion': 'off', + '@typescript-eslint/no-namespace': 'off', + '@typescript-eslint/no-empty-function': 'off', + '@typescript-eslint/ban-types': 'off', + '@typescript-eslint/triple-slash-reference': 'off', + }, +}; diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml new file mode 100644 index 000000000..db7003111 --- /dev/null +++ b/.github/workflows/integration.yaml @@ -0,0 +1,55 @@ +name: Integration Tests +on: + pull_request: + branches: + - main + +jobs: + integration-tests: + runs-on: ubuntu-latest + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - uses: actions/setup-node@v2 + with: + node-version: 16 + + - name: Install Dependencies + run: yarn --frozen-lockfile + + - uses: actions/cache@v3 + name: Turbo cache + with: + path: node_modules/.cache/turbo + key: ${{ runner.os }}-turbo-cache-v1-${{ hashFiles('yarn.lock') }} + + - name: Generate Types + run: yarn graphql:generate + + - name: Build + run: yarn workspace integration-tests run build-and-pack + env: + NEXT_PUBLIC_STRIPE_PUBLIC_KEY: ${{ secrets.TEST_STRIPE_PUBLIC_KEY }} + + - name: Pull images + run: docker-compose -f integration-tests/docker-compose.yml pull + + - name: Integration Tests + run: yarn workspace integration-tests run dockest + env: + AUTH0_DOMAIN: ${{ secrets.TEST_AUTH0_DOMAIN }} + AUTH0_CLIENT_ID: ${{ secrets.TEST_AUTH0_CLIENT_ID }} + AUTH0_CLIENT_SECRET: ${{ 
secrets.TEST_AUTH0_CLIENT_SECRET }} + AUTH0_USER_PASSWORD: ${{ secrets.AUTH0_TESTING_USER_PASSWORD }} + AUTH0_USER_MAIN_EMAIL: contact@the-guild.dev + AUTH0_USER_EXTRA_EMAIL: contact+extra@the-guild.dev + AUTH0_SECRET: ${{ secrets.TEST_AUTH0_SECRET }} + AUTH0_AUDIENCE: ${{ secrets.TEST_AUTH0_AUDIENCE }} + AUTH0_CONNECTION: Username-Password-Authentication + STRIPE_SECRET_KEY: ${{ secrets.TEST_STRIPE_SECRET_KEY }} + + - name: Dockest logs + if: always() + run: cat integration-tests/*.log diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml new file mode 100644 index 000000000..ea1cce2a3 --- /dev/null +++ b/.github/workflows/pr.yaml @@ -0,0 +1,77 @@ +name: PR Checks +on: + pull_request: + branches: + - main + +jobs: + pr-checks: + runs-on: ubuntu-latest + services: + postgres: + image: postgres:13.1-alpine + ports: + - 5432:5432 + env: + POSTGRES_PASSWORD: postgres + POSTGRES_USER: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + env: + POSTGRES_HOST: localhost + POSTGRES_PORT: 5432 + POSTGRES_PASSWORD: postgres + POSTGRES_USER: postgres + HIVE_TOKEN: ${{ secrets.HIVE_TOKEN }} + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - uses: actions/setup-node@v2 + with: + node-version: 16 + + - name: Install Dependencies + run: yarn --frozen-lockfile + + - uses: actions/cache@v3 + name: Turbo cache + with: + path: node_modules/.cache/turbo + key: ${{ runner.os }}-turbo-cache-v1-${{ hashFiles('yarn.lock') }} + + - name: Generate Types + run: yarn graphql:generate + + - name: Check PR label + if: contains(github.event.pull_request.labels.*.name, 'non-breaking') + run: echo '::set-output name=SAFE_FLAG::--forceSafe' + id: pr-label-check + + - name: Schema Check + run: ./packages/libraries/cli/bin/dev schema:check "packages/services/api/src/modules/*/module.graphql.ts" ${{ steps.pr-label-check.outputs.SAFE_FLAG }} --github + + - name: Create Database + working-directory: 
packages/services/storage + run: yarn db:create + + - name: Migrate Database + working-directory: packages/services/storage + run: yarn db:migrator up + + - name: Generate Database Types + working-directory: packages/services/storage + run: yarn db:generate + + - name: Build + run: yarn build + + - name: Test + run: yarn test + + - name: Type Check + run: yarn typecheck diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 000000000..0078854cb --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,107 @@ +name: ci/cd +on: + push: + branches: + - main + +jobs: + publish: + name: 'build' + runs-on: ubuntu-latest + + env: + HIVE_TOKEN: ${{ secrets.HIVE_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/setup-node@v2 + with: + node-version: 16 + + - name: Install Dependencies + run: yarn --frozen-lockfile + + - uses: actions/cache@v3 + name: Turbo cache + with: + path: node_modules/.cache/turbo + key: ${{ runner.os }}-turbo-cache-v1-${{ hashFiles('yarn.lock') }} + + - name: Generate GraphQL Types + run: yarn graphql:generate + + - name: Build + run: yarn build:libraries + + - name: Schema Publish + run: ./packages/libraries/cli/bin/dev schema:publish "packages/services/api/src/modules/*/module.graphql.ts" --force --github + + - name: Prepare NPM Credentials + run: | + echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> $HOME/.npmrc + npm config set always-auth true + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + + - name: Create Release Pull Request or Publish packages + id: changesets + uses: changesets/action@master + with: + publish: yarn release + commit: 'chore(release): update monorepo packages versions' + title: 'Upcoming Release Changes' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract published version + if: 
steps.changesets.outputs.published && contains(steps.changesets.outputs.publishedPackages, '"@graphql-hive/cli"') + id: cli + run: | + echo '${{steps.changesets.outputs.publishedPackages}}' > cli-ver.json + VERSION=`echo $(jq -r '.[] | select(.name | contains("@graphql-hive/cli")).version' cli-ver.json)` + echo "::set-output name=version::$VERSION" + echo "::set-output name=publish::true" + + - name: Pack tarballs + if: steps.cli.outputs.publish == 'true' + working-directory: packages/libraries/cli + run: yarn oclif pack tarballs --no-xz + + - name: Upload tarballs + if: steps.cli.outputs.publish == 'true' + working-directory: packages/libraries/cli + run: yarn oclif upload tarballs --no-xz + + - name: Promote tarballs + if: steps.cli.outputs.publish == 'true' + working-directory: packages/libraries/cli + env: + VERSION: ${{ steps.cli.outputs.version }} + run: yarn oclif promote --no-xz --sha ${GITHUB_SHA:0:7} --version $VERSION || yarn oclif promote --no-xz --sha ${GITHUB_SHA:0:8} --version $VERSION + + deploy: + name: 'deploy to staging' + needs: publish + runs-on: ubuntu-latest + + steps: + - name: Dispatch Deployment + run: | + curl --request POST \ + --url 'https://api.github.com/repos/${{ secrets.PRIVATE_REPO_OWNER }}/${{ secrets.PRIVATE_REPO_NAME }}/dispatches' \ + --header 'Accept: application/vnd.github.everest-preview+json' \ + --header 'Authorization: token ${{ secrets.GH_PAT }}' \ + --header 'Content-Type: application/json' \ + --data '{ + "event_type": "deploy", + "client_payload": { + "environment": "staging", + "ref": "${{ github.sha }}" + } + }' diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..77095120e --- /dev/null +++ b/.gitignore @@ -0,0 +1,111 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented 
libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# TypeScript v1 declaration files +typings/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env +.env.test + +# parcel-bundler cache (https://parceljs.org/) +.cache + +# Next.js build output +.next + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and *not* Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +temp + +.DS_STORE + +__generated__ + +integration-tests/testkit/gql +.turbo +.turbo/config.json + +# IntelliJ's project specific settings files +.idea/ \ No newline at end of file diff --git a/.husky/.gitignore b/.husky/.gitignore new file mode 100644 index 000000000..31354ec13 --- /dev/null +++ b/.husky/.gitignore @@ -0,0 +1 @@ +_ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 000000000..025779ed2 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. 
"$(dirname "$0")/_/husky.sh" + +yarn pre-commit diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 000000000..b6a7d89c6 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +16 diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..71c6c557d --- /dev/null +++ b/.prettierignore @@ -0,0 +1,15 @@ +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log +coverage +*.lcov +.env +.env.test +.next +out +dist +temp +__generated__ \ No newline at end of file diff --git a/.turbo/.gitkeep b/.turbo/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..e763d915c --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "fabiospampinato.vscode-terminals", + "fabiospampinato.vscode-commands", + "esbenp.prettier-vscode" + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..64878dace --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,24 @@ +{ + "commands.commands": [ + { + "command": "terminals.runTerminals", + "color": "#eab308", + "text": "$(rocket) Start Hive $(rocket)", + "tooltip": "Start dev environment" + } + ], + "files.associations": { + "*.env.template": "dotenv" + }, + "editor.codeActionsOnSave": { + "source.organizeImports": false + }, + "files.autoSave": "onFocusChange", + "eslint.format.enable": true, + "editor.formatOnSave": true, + "editor.defaultFormatter": "esbenp.prettier-vscode", + "yaml.schemas": { + "https://json.schemastore.org/github-workflow.json": ".github/workflows/deploy.yaml" + }, + "typescript.tsdk": "node_modules/typescript/lib" +} diff --git a/.vscode/terminals.json b/.vscode/terminals.json new file mode 100644 index 000000000..534705a99 --- /dev/null +++ b/.vscode/terminals.json @@ -0,0 +1,83 @@ +{ + "autorun": false, + "terminals": [ + { + "name": "server:dev", + "description": "Run server", + "focus": true, + "open": 
true, + "cwd": "packages/services/server", + "command": "yarn dev" + }, + { + "name": "app:dev", + "description": "Run application", + "open": true, + "cwd": "packages/web/app", + "command": "yarn dev" + }, + { + "name": "tokens:dev", + "description": "Run tokens service", + "open": true, + "cwd": "packages/services/tokens", + "command": "yarn dev" + }, + { + "name": "schema:dev", + "description": "Run schema service", + "open": true, + "cwd": "packages/services/schema", + "command": "yarn dev" + }, + { + "name": "usage-estimator:dev", + "description": "Run Usage Estimator Service", + "open": true, + "cwd": "packages/services/usage-estimator", + "command": "yarn dev" + }, + { + "name": "rate-limit:dev", + "description": "Run Rate Limiter Service", + "open": true, + "cwd": "packages/services/rate-limit", + "command": "yarn dev" + }, + { + "name": "workdir", + "description": "Run empty", + "open": true, + "cwd": "./", + "command": "" + }, + { + "name": "cdn:dev", + "description": "Run Local CDN", + "open": true, + "cwd": "packages/services/cdn-worker", + "command": "yarn dev" + }, + { + "name": "billing:dev", + "description": "Run Billing Service", + "open": true, + "cwd": "packages/services/stripe-billing", + "command": "yarn dev" + }, + { + "name": "usage:dev", + "description": "Run Usage Service", + "open": true, + "cwd": "packages/services/usage", + "command": "yarn dev" + }, + { + "name": "usage-ingestor:dev", + "description": "Run Usage Ingestor", + "open": true, + "cwd": "packages/services/usage-ingestor", + "command": "yarn dev" + } + ] +} diff --git a/README.md b/README.md new file mode 100644 index 000000000..6bb66bf54 --- /dev/null +++ b/README.md @@ -0,0 +1,17 @@ +# GraphQL Hive + +## Project Stack + +- General: Auth0, TypeScript, GraphQL, GraphQL-Codegen +- Server: NodeJS, GraphQL-Modules +- App: React, NextJS, Tailwind, Twin.Macro +- CLI: Oclif +- Deployment: Pulumi, K8s, Nginx Proxy, Azure Cloud, CloudFlare Workers + KV Cache +- Monitoring: Promthues, 
Grafana (+LogzIo), Sentry +- DB: Postgres, Redis, ClickHouse + +## Docs + +- [Deployment](./docs/DEPLOYMENT.md) +- [Development](./docs/DEVELOPMENT.md) +- [Testing](./docs/TESTING.md) diff --git a/codegen.yml b/codegen.yml new file mode 100644 index 000000000..992980d4f --- /dev/null +++ b/codegen.yml @@ -0,0 +1,103 @@ +schema: ./packages/services/api/src/modules/*/module.graphql.ts +generates: + # API + ./packages/services/api/src/modules: + preset: graphql-modules + presetConfig: + baseTypesPath: ../__generated__/types.ts + filename: __generated__/types.ts + encapsulateModuleTypes: namespace + config: + immutableTypes: true + contextType: GraphQLModules.ModuleContext + enumValues: + OrganizationType: ../shared/entities#OrganizationType + ProjectType: ../shared/entities#ProjectType + TargetAccessScope: ../modules/auth/providers/target-access#TargetAccessScope + ProjectAccessScope: ../modules/auth/providers/project-access#ProjectAccessScope + OrganizationAccessScope: ../modules/auth/providers/organization-access#OrganizationAccessScope + scalars: + DateTime: string + SafeInt: number + mappers: + SchemaChangeConnection: ../shared/mappers#SchemaChangeConnection as SchemaChangeConnectionMapper + SchemaErrorConnection: ../shared/mappers#SchemaErrorConnection as SchemaErrorConnectionMapper + OrganizationConnection: ../shared/mappers#OrganizationConnection as OrganizationConnectionMapper + UserConnection: ../shared/mappers#UserConnection as UserConnectionMapper + ActivityConnection: ../shared/mappers#ActivityConnection as ActivityConnectionMapper + MemberConnection: ../shared/mappers#MemberConnection as MemberConnectionMapper + ProjectConnection: ../shared/mappers#ProjectConnection as ProjectConnectionMapper + TargetConnection: ../shared/mappers#TargetConnection as TargetConnectionMapper + SchemaConnection: ../shared/mappers#SchemaConnection as SchemaConnectionMapper + TokenConnection: ../shared/mappers#TokenConnection as TokenConnectionMapper + OperationStatsConnection: 
../shared/mappers#OperationStatsConnection as OperationStatsConnectionMapper + ClientStatsConnection: ../shared/mappers#ClientStatsConnection as ClientStatsConnectionMapper + OperationsStats: ../shared/mappers#OperationsStats as OperationsStatsMapper + DurationStats: ../shared/mappers#DurationStats as DurationStatsMapper + SchemaComparePayload: ../shared/mappers#SchemaComparePayload as SchemaComparePayloadMapper + SchemaCompareResult: ../shared/mappers#SchemaCompareResult as SchemaCompareResultMapper + SchemaVersionConnection: ../shared/mappers#SchemaVersionConnection as SchemaVersionConnectionMapper + SchemaVersion: ../shared/mappers#SchemaVersion as SchemaVersionMapper + Schema: ../shared/mappers#Schema as SchemaMapper + PersistedOperationConnection: ../shared/mappers#PersistedOperationConnection as PersistedOperationMapper + Organization: ../shared/entities#Organization as OrganizationMapper + Project: ../shared/entities#Project as ProjectMapper + Target: ../shared/entities#Target as TargetMapper + Member: ../shared/entities#Member as MemberMapper + Token: ../shared/entities#Token as TokenMapper + TokenInfo: ../shared/entities#Token as TokenInfoMapper + Activity: ../shared/entities#ActivityObject as ActivityMapper + AlertChannel: ../shared/entities#AlertChannel as AlertChannelMapper + AlertSlackChannel: AlertChannelMapper + AlertWebhookChannel: AlertChannelMapper + Alert: ../shared/entities#Alert as AlertMapper + AdminQuery: '{}' + AdminStats: '{ daysLimit?: number | null }' + AdminGeneralStats: '{ daysLimit?: number | null }' + AdminOrganizationStats: ../shared/entities#AdminOrganizationStats as AdminOrganizationStatsMapper + UsageEstimation: '../shared/mappers#TargetsEstimationFilter' + UsageEstimationScope: '../shared/mappers#TargetsEstimationDateFilter' + BillingPaymentMethod: 'StripeTypes.PaymentMethod.Card' + BillingDetails: 'StripeTypes.PaymentMethod.BillingDetails' + BillingInvoice: 'StripeTypes.Invoice' + plugins: + - add: + content: "import { 
StripeTypes } from '@hive/stripe-billing';" + - typescript + - typescript-resolvers + + # App + ./packages/web/app/src/graphql/index.ts: + documents: ./packages/web/app/src/graphql/*.graphql + config: + dedupeFragments: true + scalars: + DateTime: string + SafeInt: number + plugins: + - typescript + - typescript-operations + - typed-document-node + + ./packages/web/app/src/gql/: + documents: + - './packages/web/app/src/(components|lib)/**/*.ts(x)?' + + preset: gql-tag-operations-preset + presetConfig: + augmentedModuleName: '@urql/core' + + # CLI + packages/libraries/cli/src/sdk.ts: + documents: ./packages/libraries/cli/src/**/*.graphql + config: + flattenGeneratedTypes: true + plugins: + - typescript + - typescript-operations + - typescript-graphql-request + + # Integration tests + ./integration-tests/testkit/gql: + documents: ./integration-tests/**/*.ts + preset: gql-tag-operations-preset diff --git a/deployment/.gitignore b/deployment/.gitignore new file mode 100644 index 000000000..c6958891d --- /dev/null +++ b/deployment/.gitignore @@ -0,0 +1,2 @@ +/bin/ +/node_modules/ diff --git a/deployment/index.ts b/deployment/index.ts new file mode 100644 index 000000000..b080d74ee --- /dev/null +++ b/deployment/index.ts @@ -0,0 +1,207 @@ +import * as pulumi from '@pulumi/pulumi'; +import { DeploymentEnvironment } from './types'; +import { deployDbMigrations } from './services/db-migrations'; +import { deployTokens } from './services/tokens'; +import { deployWebhooks } from './services/webhooks'; +import { deploySchema } from './services/schema'; +import { deployUsage } from './services/usage'; +import { deployUsageIngestor } from './services/usage-ingestor'; +import { deployGraphQL } from './services/graphql'; +import { deployApp } from './services/app'; +import { deployLandingPage } from './services/landing-page'; +import { deployDocs } from './services/docs'; +import { deployRedis } from './services/redis'; +import { deployKafka } from './services/kafka'; +import { 
deployMetrics } from './services/observability'; +import { deployCloudflare } from './services/cloudflare'; +import { deployCloudflarePolice } from './services/police'; +import { deployBotKube } from './services/bot-kube'; +import { deployProxy } from './services/proxy'; +import { deployClickhouse } from './services/clickhouse'; +import { deployUsageEstimation } from './services/usage-estimation'; +import { createPackageHelper } from './utils/pack'; +import * as azure from '@pulumi/azure'; +import { optimizeAzureCluster } from './utils/azure-helpers'; +import { deployRateLimit } from './services/rate-limit'; +import { deployStripeBilling } from './services/billing'; + +const packageHelper = createPackageHelper(); + +optimizeAzureCluster(); + +const envName = pulumi.getStack(); +const commonConfig = new pulumi.Config('common'); +const appDns = 'app'; +const docsDns = 'docs'; +const rootDns = commonConfig.require('dnsZone'); +const appHostname = `${appDns}.${rootDns}`; +const docsHostname = `${docsDns}.${rootDns}`; + +const resourceGroup = new azure.core.ResourceGroup(`hive-${envName}-rg`, { + location: azure.Locations.EastUS, +}); + +const storageAccount = new azure.storage.Account(`hive${envName}`, { + resourceGroupName: resourceGroup.name, + accountReplicationType: 'LRS', + accountTier: 'Standard', + accountKind: 'StorageV2', + allowBlobPublicAccess: true, +}); + +const storageContainer = new azure.storage.Container('deploy-artifacts', { + storageAccountName: storageAccount.name, + containerAccessType: 'blob', +}); + +const deploymentEnv: DeploymentEnvironment = { + ENVIRONMENT: envName, + NODE_ENV: 'production', + DEPLOYED_DNS: appHostname, +}; + +deployBotKube({ envName }); +deployMetrics({ envName }); + +const cloudflare = deployCloudflare({ + envName, + rootDns, +}); + +deployCloudflarePolice({ envName, rootDns }); + +const redisApi = deployRedis({ deploymentEnv }); + +const kafkaApi = deployKafka(); + +const clickhouseApi = deployClickhouse(); + +const 
dbMigrations = deployDbMigrations({ + storageContainer, + packageHelper, + clickhouse: clickhouseApi, + kafka: kafkaApi, + deploymentEnv, +}); + +const tokensApi = deployTokens({ + packageHelper, + storageContainer, + deploymentEnv, + dbMigrations, +}); + +const webhooksApi = deployWebhooks({ + packageHelper, + storageContainer, + deploymentEnv, + redis: redisApi, +}); + +const usageEstimationApi = deployUsageEstimation({ + packageHelper, + storageContainer, + deploymentEnv, + clickhouse: clickhouseApi, + dbMigrations, +}); + +const billingApi = deployStripeBilling({ + packageHelper, + storageContainer, + deploymentEnv, + dbMigrations, + usageEstimator: usageEstimationApi, +}); + +const rateLimitApi = deployRateLimit({ + packageHelper, + storageContainer, + deploymentEnv, + dbMigrations, + usageEstimator: usageEstimationApi, +}); + +const usageApi = deployUsage({ + packageHelper, + storageContainer, + deploymentEnv, + tokens: tokensApi, + kafka: kafkaApi, + dbMigrations, + rateLimit: rateLimitApi, +}); + +const usageIngestorApi = deployUsageIngestor({ + clickhouse: clickhouseApi, + kafka: kafkaApi, + packageHelper, + storageContainer, + deploymentEnv, + dbMigrations, +}); + +const schemaApi = deploySchema({ + packageHelper, + storageContainer, + deploymentEnv, + redis: redisApi, +}); + +const graphqlApi = deployGraphQL({ + clickhouse: clickhouseApi, + packageHelper, + storageContainer, + deploymentEnv, + tokens: tokensApi, + webhooks: webhooksApi, + schema: schemaApi, + dbMigrations, + redis: redisApi, + usage: usageApi, + cloudflare, + usageEstimator: usageEstimationApi, + rateLimit: rateLimitApi, + billing: billingApi, +}); + +const app = deployApp({ + deploymentEnv, + graphql: graphqlApi, + dbMigrations, + packageHelper, + storageContainer, +}); + +const landingPage = deployLandingPage({ + rootDns, + packageHelper, + storageContainer, +}); + +const docs = deployDocs({ + rootDns, + packageHelper, + storageContainer, +}); + +const proxy = deployProxy({ + rootDns, 
+ appHostname, + docsHostname, + app, + landingPage, + docs, + graphql: graphqlApi, + usage: usageApi, +}); + +export const graphqlApiServiceId = graphqlApi.service.id; +export const usageApiServiceId = usageApi.service.id; +export const usageIngestorApiServiceId = usageIngestorApi.service.id; +export const tokensApiServiceId = tokensApi.service.id; +export const schemaApiServiceId = schemaApi.service.id; +export const webhooksApiServiceId = webhooksApi.service.id; + +export const appId = app.deployment.id; +export const publicIp = proxy!.status.loadBalancer.ingress[0].ip; diff --git a/deployment/package.json b/deployment/package.json new file mode 100644 index 000000000..a0f086c0d --- /dev/null +++ b/deployment/package.json @@ -0,0 +1,21 @@ +{ + "name": "@hive/deployment", + "scripts": { + "test": "jest" + }, + "devDependencies": { + "@types/mime-types": "2.1.1", + "@types/node": "17.0.17", + "typescript": "4.6.4" + }, + "dependencies": { + "@manypkg/get-packages": "1.1.3", + "@pulumi/azure": "4.37.0", + "@pulumi/azure-native": "1.56.0", + "@pulumi/cloudflare": "4.3.0", + "@pulumi/kubernetes": "3.15.2", + "@pulumi/kubernetesx": "0.1.6", + "@pulumi/pulumi": "3.24.1", + "@pulumi/random": "4.3.1" + } +} diff --git a/deployment/services/app.ts b/deployment/services/app.ts new file mode 100644 index 000000000..759af1211 --- /dev/null +++ b/deployment/services/app.ts @@ -0,0 +1,117 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { GraphQL } from './graphql'; +import { DbMigrations } from './db-migrations'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { serviceLocalEndpoint } from '../utils/local-endpoint'; +import { DeploymentEnvironment } from '../types'; +import { PackageHelper } from '../utils/pack'; + +const appConfig = new pulumi.Config('app'); +const commonConfig = new pulumi.Config('common'); +const githubAppConfig = new pulumi.Config('ghapp'); + +const appEnv = 
appConfig.requireObject>('env'); +const commonEnv = commonConfig.requireObject>('env'); + +export type App = ReturnType; + +export function deployApp({ + deploymentEnv, + graphql, + dbMigrations, + storageContainer, + packageHelper, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + graphql: GraphQL; + dbMigrations: DbMigrations; +}) { + const appRelease = packageHelper.currentReleaseId(); + + return new RemoteArtifactAsServiceDeployment( + 'app', + { + storageContainer, + packageInfo: packageHelper.npmPack('@hive/app'), + readinessProbe: '/api/health', + livenessProbe: '/api/health', + env: [ + { name: 'DEPLOYED_DNS', value: deploymentEnv.DEPLOYED_DNS }, + { name: 'NODE_ENV', value: 'production' }, + { name: 'ENVIRONMENT', value: deploymentEnv.ENVIRONMENT }, + { + name: 'NEXT_PUBLIC_ENVIRONMENT', + value: deploymentEnv.ENVIRONMENT, + }, + { + name: 'RELEASE', + value: appRelease, + }, + { + name: 'NEXT_PUBLIC_RELEASE', + value: appRelease, + }, + { name: 'AUTH0_DOMAIN', value: commonConfig.require('auth0Domain') }, + { + name: 'AUTH0_CLIENT_ID', + value: commonConfig.require('auth0ClientId'), + }, + { + name: 'AUTH0_CLIENT_SECRET', + value: commonConfig.requireSecret('auth0ClientSecret'), + }, + { + name: 'AUTH0_BASE_URL', + value: `https://${deploymentEnv.DEPLOYED_DNS}/`, + }, + { + name: 'AUTH0_AUDIENCE', + value: `https://${commonConfig.require('auth0Domain')}/api/v2/`, + }, + { + name: 'AUTH0_ISSUER_BASE_URL', + value: `https://${commonConfig.require('auth0Domain')}`, + }, + { name: 'AUTH0_CALLBACK', value: `/api/callback` }, + { + name: 'POST_LOGOUT_REDIRECT_URI', + value: `https://${deploymentEnv.DEPLOYED_DNS}/`, + }, + { + name: 'AUTH0_SECRET', + value: commonConfig.requireSecret('cookieSecret'), + }, + { name: 'AUTH0_SCOPE', value: 'openid profile offline_access' }, + { name: 'SENTRY_DSN', value: commonEnv.SENTRY_DSN }, + { name: 'NEXT_PUBLIC_SENTRY_DSN', value: commonEnv.SENTRY_DSN 
}, + { + name: 'GRAPHQL_ENDPOINT', + value: serviceLocalEndpoint(graphql.service).apply( + (s) => `${s}/graphql` + ), + }, + { + name: 'APP_BASE_URL', + value: `https://${deploymentEnv.DEPLOYED_DNS}/`, + }, + { + name: 'SLACK_CLIENT_ID', + value: appEnv.SLACK_CLIENT_ID, + }, + { + name: 'SLACK_CLIENT_SECRET', + value: appEnv.SLACK_CLIENT_SECRET, + }, + { + name: 'GITHUB_APP_NAME', + value: githubAppConfig.require('name'), + }, + ], + port: 3000, + }, + [graphql.service, graphql.deployment, dbMigrations] + ).deploy(); +} diff --git a/deployment/services/billing.ts b/deployment/services/billing.ts new file mode 100644 index 000000000..1ebaec7d2 --- /dev/null +++ b/deployment/services/billing.ts @@ -0,0 +1,53 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; +import { DeploymentEnvironment } from '../types'; +import { DbMigrations } from './db-migrations'; +import { UsageEstimator } from './usage-estimation'; +import { serviceLocalEndpoint } from '../utils/local-endpoint'; + +const billingConfig = new pulumi.Config('billing'); +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); +const apiConfig = new pulumi.Config('api'); + +export type StripeBillingService = ReturnType; + +export function deployStripeBilling({ + storageContainer, + packageHelper, + deploymentEnv, + dbMigrations, + usageEstimator, +}: { + usageEstimator: UsageEstimator; + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + dbMigrations: DbMigrations; +}) { + return new RemoteArtifactAsServiceDeployment( + 'stripe-billing', + { + storageContainer, + replicas: 1, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...deploymentEnv, + ...commonEnv, + RELEASE: packageHelper.currentReleaseId(), + 
USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service), + STRIPE_SECRET_KEY: billingConfig.requireSecret('stripePrivateKey'), + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + }, + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/stripe-billing'), + port: 4000, + }, + [dbMigrations, usageEstimator.service, usageEstimator.deployment] + ).deploy(); +} diff --git a/deployment/services/bot-kube.ts b/deployment/services/bot-kube.ts new file mode 100644 index 000000000..d80549c00 --- /dev/null +++ b/deployment/services/bot-kube.ts @@ -0,0 +1,23 @@ +import * as pulumi from '@pulumi/pulumi'; +import { BotKube } from '../utils/botkube'; + +const botkubeConfig = new pulumi.Config('botkube'); + +export function deployBotKube({ envName }: { envName: string }) { + if (!botkubeConfig.getBoolean('enabled')) { + return; + } + + if ( + botkubeConfig && + botkubeConfig.get('slackChannel') && + botkubeConfig.getSecret('slackToken') + ) { + new BotKube().deploy({ + clusterName: envName, + enableKubectl: true, + slackChannelName: botkubeConfig.require('slackChannel'), + slackToken: botkubeConfig.requireSecret('slackToken'), + }); + } +} diff --git a/deployment/services/clickhouse.ts b/deployment/services/clickhouse.ts new file mode 100644 index 000000000..074f3242b --- /dev/null +++ b/deployment/services/clickhouse.ts @@ -0,0 +1,61 @@ +import * as pulumi from '@pulumi/pulumi'; +import { serviceLocalHost } from '../utils/local-endpoint'; +import { Clickhouse as ClickhouseDeployment } from '../utils/clickhouse'; + +const clickhouseConfig = new pulumi.Config('clickhouse'); +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.getObject>('env')!; + +export type Clickhouse = ReturnType; + +type ClickhouseConfig = { + protocol: pulumi.Output | string; + host: pulumi.Output | string; + port: pulumi.Output | string; + username: pulumi.Output | string; + password: pulumi.Output; +}; + 
+function getRemoteClickhouseConfig(): ClickhouseConfig { + return { + host: clickhouseConfig.require('host'), + port: clickhouseConfig.require('port'), + username: clickhouseConfig.require('username'), + password: clickhouseConfig.requireSecret('password'), + protocol: clickhouseConfig.requireSecret('protocol'), + }; +} + +export function deployClickhouse() { + if (!clickhouseConfig.getBoolean('inCluster')) { + return { + config: getRemoteClickhouseConfig(), + deployment: null, + service: null, + }; + } + + const password = clickhouseConfig.requireSecret('password'); + const username = clickhouseConfig.requireSecret('username'); + const chApi = new ClickhouseDeployment('clickhouse', { + env: { + CLICKHOUSE_USER: username, + CLICKHOUSE_PASSWORD: password, + }, + sentryDsn: commonEnv.SENTRY_DSN, + }).deploy(); + + const config: ClickhouseConfig = { + protocol: 'http', + host: serviceLocalHost(chApi.service), + port: String(chApi.port), + password: password, + username, + }; + + return { + deployment: chApi.deployment, + service: chApi.service, + config, + }; +} diff --git a/deployment/services/cloudflare.ts b/deployment/services/cloudflare.ts new file mode 100644 index 000000000..d4b74523b --- /dev/null +++ b/deployment/services/cloudflare.ts @@ -0,0 +1,27 @@ +import * as pulumi from '@pulumi/pulumi'; +import { CloudflareCDN } from '../utils/cdn'; + +const commonConfig = new pulumi.Config('common'); +const cfConfig = new pulumi.Config('cloudflareCustom'); + +export type Cloudflare = ReturnType; + +export function deployCloudflare({ + rootDns, + envName, +}: { + rootDns: string; + envName: string; +}) { + const cdnAuthPrivateKey = commonConfig.requireSecret('cdnAuthPrivateKey'); + const cdn = new CloudflareCDN( + envName, + cfConfig.require('zoneId'), + // We can't use `cdn.staging.graphql-hive.com` for staging env, since CF certificate only covers + // one level of subdomains. 
See: https://community.cloudflare.com/t/ssl-handshake-error-cloudflare-proxy/175088 + // So for staging env, we are going to use `cdn-staging` instead of `cdn.staging`. + envName === 'staging' ? `cdn-${rootDns}` : `cdn.${rootDns}`, + cdnAuthPrivateKey + ); + return cdn.deploy(); +} diff --git a/deployment/services/db-migrations.ts b/deployment/services/db-migrations.ts new file mode 100644 index 000000000..68f2e8b6f --- /dev/null +++ b/deployment/services/db-migrations.ts @@ -0,0 +1,50 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { Clickhouse } from './clickhouse'; +import { Kafka } from './kafka'; +import { PackageHelper } from '../utils/pack'; +import { DeploymentEnvironment } from '../types'; +const apiConfig = new pulumi.Config('api'); + +export type DbMigrations = ReturnType; + +export function deployDbMigrations({ + storageContainer, + packageHelper, + deploymentEnv, + clickhouse, + kafka, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + clickhouse: Clickhouse; + kafka: Kafka; +}) { + const { job } = new RemoteArtifactAsServiceDeployment( + 'db-migrations', + { + env: { + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + MIGRATOR: 'up', + CLICKHOUSE_MIGRATOR: 'up', + CLICKHOUSE_HOST: clickhouse.config.host, + CLICKHOUSE_PORT: clickhouse.config.port, + CLICKHOUSE_USERNAME: clickhouse.config.username, + CLICKHOUSE_PASSWORD: clickhouse.config.password, + CLICKHOUSE_PROTOCOL: clickhouse.config.protocol, + KAFKA_BROKER: kafka.config.endpoint, + ...deploymentEnv, + }, + storageContainer, + packageInfo: packageHelper.npmPack('@hive/storage'), + }, + [clickhouse.deployment, clickhouse.service], + clickhouse.service + ).deployAsJob(); + + return job; +} diff --git a/deployment/services/docs.ts 
b/deployment/services/docs.ts new file mode 100644 index 000000000..0054b12ca --- /dev/null +++ b/deployment/services/docs.ts @@ -0,0 +1,28 @@ +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; + +export type Docs = ReturnType; + +export function deployDocs({ + rootDns, + storageContainer, + packageHelper, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + rootDns: string; +}) { + return new RemoteArtifactAsServiceDeployment('docs', { + storageContainer, + readinessProbe: '/api/health', + livenessProbe: '/api/health', + env: [ + { name: 'RELEASE', value: packageHelper.currentReleaseId() }, + { name: 'DEPLOYED_DNS', value: rootDns }, + { name: 'NODE_ENV', value: 'production' }, + ], + packageInfo: packageHelper.npmPack('@hive/docs'), + port: 3000, + }).deploy(); +} diff --git a/deployment/services/graphql.ts b/deployment/services/graphql.ts new file mode 100644 index 000000000..91d6466e1 --- /dev/null +++ b/deployment/services/graphql.ts @@ -0,0 +1,119 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { Cloudflare } from './cloudflare'; +import { Tokens } from './tokens'; +import { Webhooks } from './webhooks'; +import { Redis } from './redis'; +import { DbMigrations } from './db-migrations'; +import { Schema } from './schema'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { serviceLocalEndpoint } from '../utils/local-endpoint'; +import { DeploymentEnvironment } from '../types'; +import { Clickhouse } from './clickhouse'; +import { Usage } from './usage'; +import { PackageHelper } from '../utils/pack'; +import { UsageEstimator } from './usage-estimation'; +import { RateLimitService } from './rate-limit'; +import { StripeBillingService } from './billing'; + +const commonConfig = new pulumi.Config('common'); 
+const cloudflareConfig = new pulumi.Config('cloudflare'); +const apiConfig = new pulumi.Config('api'); +const githubAppConfig = new pulumi.Config('ghapp'); + +const commonEnv = commonConfig.requireObject>('env'); +const apiEnv = apiConfig.requireObject>('env'); + +export type GraphQL = ReturnType; + +export function deployGraphQL({ + clickhouse, + packageHelper, + storageContainer, + deploymentEnv, + tokens, + webhooks, + schema, + cloudflare, + redis, + usage, + usageEstimator, + dbMigrations, + rateLimit, + billing, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + clickhouse: Clickhouse; + deploymentEnv: DeploymentEnvironment; + tokens: Tokens; + webhooks: Webhooks; + schema: Schema; + redis: Redis; + cloudflare: Cloudflare; + usage: Usage; + usageEstimator: UsageEstimator; + dbMigrations: DbMigrations; + rateLimit: RateLimitService; + billing: StripeBillingService; +}) { + return new RemoteArtifactAsServiceDeployment( + 'graphql-api', + { + storageContainer, + replicas: 1, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...apiEnv, + ...deploymentEnv, + ...apiConfig.requireObject>('env'), + ...commonEnv, + CLICKHOUSE_PROTOCOL: clickhouse.config.protocol, + CLICKHOUSE_HOST: clickhouse.config.host, + CLICKHOUSE_PORT: clickhouse.config.port, + CLICKHOUSE_USERNAME: clickhouse.config.username, + CLICKHOUSE_PASSWORD: clickhouse.config.password, + REDIS_HOST: redis.config.host, + REDIS_PORT: String(redis.config.port), + REDIS_PASSWORD: redis.config.password, + RELEASE: packageHelper.currentReleaseId(), + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + AUTH0_DOMAIN: commonConfig.require('auth0Domain'), + AUTH0_CLIENT_ID: commonConfig.require('auth0ClientId'), + AUTH0_CLIENT_SECRET: commonConfig.requireSecret('auth0ClientSecret'), + BILLING_ENDPOINT: serviceLocalEndpoint(billing.service), + TOKENS_ENDPOINT: serviceLocalEndpoint(tokens.service), + WEBHOOKS_ENDPOINT: 
serviceLocalEndpoint(webhooks.service), + SCHEMA_ENDPOINT: serviceLocalEndpoint(schema.service), + CF_BASE_PATH: 'https://api.cloudflare.com/client/v4/accounts', + CF_ACCOUNT_ID: cloudflareConfig.require('accountId'), + CF_AUTH_TOKEN: cloudflareConfig.requireSecret('apiToken'), + CF_NAMESPACE_ID: cloudflare.cfStorageNamespaceId, + CDN_BASE_URL: cloudflare.workerBaseUrl, + CDN_AUTH_PRIVATE_KEY: cloudflare.authPrivateKey, + HIVE_USAGE_ENDPOINT: serviceLocalEndpoint(usage.service), + USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service), + HIVE_REPORTING_ENDPOINT: 'http://0.0.0.0:4000/graphql', + GITHUB_APP_PRIVATE_KEY: githubAppConfig.requireSecret('key'), + RATE_LIMIT_ENDPOINT: serviceLocalEndpoint(rateLimit.service), + GITHUB_APP_ID: githubAppConfig.require('id'), + ENCRYPTION_SECRET: commonConfig.requireSecret('encryptionSecret'), + }, + packageInfo: packageHelper.npmPack('@hive/server'), + exposesMetrics: true, + port: 4000, + }, + [ + dbMigrations, + redis.deployment, + redis.service, + clickhouse.deployment, + clickhouse.service, + rateLimit.deployment, + rateLimit.service, + ] + ).deploy(); +} diff --git a/deployment/services/kafka.ts b/deployment/services/kafka.ts new file mode 100644 index 000000000..8d0a43ff9 --- /dev/null +++ b/deployment/services/kafka.ts @@ -0,0 +1,18 @@ +import * as pulumi from '@pulumi/pulumi'; + +export type Kafka = ReturnType; + +export function deployKafka() { + const eventhubConfig = new pulumi.Config('eventhub'); + + return { + config: { + key: eventhubConfig.requireSecret('key'), + user: '$ConnectionString', + endpoint: eventhubConfig.require('endpoint'), + bufferSize: eventhubConfig.require('bufferSize'), + bufferInterval: eventhubConfig.require('bufferInterval'), + bufferDynamic: eventhubConfig.require('bufferDynamic'), + }, + }; +} diff --git a/deployment/services/landing-page.ts b/deployment/services/landing-page.ts new file mode 100644 index 000000000..86775a46f --- /dev/null +++ 
b/deployment/services/landing-page.ts @@ -0,0 +1,28 @@ +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; + +export type LandingPage = ReturnType; + +export function deployLandingPage({ + rootDns, + storageContainer, + packageHelper, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + rootDns: string; +}) { + return new RemoteArtifactAsServiceDeployment('landing-page', { + storageContainer, + readinessProbe: '/api/health', + livenessProbe: '/api/health', + env: [ + { name: 'RELEASE', value: packageHelper.currentReleaseId() }, + { name: 'DEPLOYED_DNS', value: rootDns }, + { name: 'NODE_ENV', value: 'production' }, + ], + packageInfo: packageHelper.npmPack('@hive/landing-page'), + port: 3000, + }).deploy(); +} diff --git a/deployment/services/observability.ts b/deployment/services/observability.ts new file mode 100644 index 000000000..403bafbe1 --- /dev/null +++ b/deployment/services/observability.ts @@ -0,0 +1,25 @@ +import * as pulumi from '@pulumi/pulumi'; +import { Observability } from '../utils/observability'; + +const observabilityConfig = new pulumi.Config('observability'); + +export function deployMetrics(config: { envName: string }) { + if (!observabilityConfig.getBoolean('enabled')) { + return; + } + + const observability = new Observability(config.envName, { + prom: { + endpoint: observabilityConfig.require('promEndpoint'), + username: observabilityConfig.require('promUsername'), + password: observabilityConfig.requireSecret('promPassword'), + }, + loki: { + endpoint: observabilityConfig.require('lokiEndpoint'), + username: observabilityConfig.require('lokiUsername'), + password: observabilityConfig.requireSecret('lokiPassword'), + }, + }); + // logging.deployMetrics(logzioConfig.requireSecret('metricsSecret')); + observability.deploy(); +} diff --git a/deployment/services/police.ts 
b/deployment/services/police.ts new file mode 100644 index 000000000..54ffc702e --- /dev/null +++ b/deployment/services/police.ts @@ -0,0 +1,21 @@ +import * as pulumi from '@pulumi/pulumi'; +import { HivePolice } from '../utils/police'; + +const cfCustomConfig = new pulumi.Config('cloudflareCustom'); + +export function deployCloudflarePolice({ + envName, + rootDns, +}: { + envName: string; + rootDns: string; +}) { + const police = new HivePolice( + envName, + cfCustomConfig.require('zoneId'), + cfCustomConfig.requireSecret('policeApiToken'), + rootDns + ); + + return police.deploy(); +} diff --git a/deployment/services/proxy.ts b/deployment/services/proxy.ts new file mode 100644 index 000000000..a2362cf3a --- /dev/null +++ b/deployment/services/proxy.ts @@ -0,0 +1,85 @@ +import * as pulumi from '@pulumi/pulumi'; +import { Proxy } from '../utils/reverse-proxy'; +import { CertManager } from '../utils/cert-manager'; +import { GraphQL } from './graphql'; +import { LandingPage } from './landing-page'; +import { App } from './app'; +import { Usage } from './usage'; +import { Docs } from './docs'; + +const commonConfig = new pulumi.Config('common'); + +export function deployProxy({ + appHostname, + docsHostname, + rootDns, + graphql, + app, + docs, + usage, + landingPage, +}: { + appHostname: string; + docsHostname: string; + rootDns: string; + graphql: GraphQL; + app: App; + usage: Usage; + docs: Docs; + landingPage: LandingPage; +}) { + const { tlsIssueName } = new CertManager().deployCertManagerAndIssuer(); + return new Proxy(tlsIssueName, { + address: commonConfig.get('staticIp'), + }) + .deployProxy({ replicas: 2 }) + .registerService({ record: rootDns, apex: true }, [ + { + name: 'landing-page', + path: '/', + service: landingPage.service, + }, + ]) + .registerService( + { + record: docsHostname, + }, + [ + { + name: 'docs', + path: '/', + service: docs.service, + }, + ] + ) + .registerService({ record: appHostname }, [ + { + name: 'app', + path: '/', + service: 
app.service, + }, + { + name: 'server', + path: '/server', + service: graphql.service, + }, + { + name: 'registry-api-health', + path: '/registry/_health', + customRewrite: '/_health', + service: graphql.service, + }, + { + name: 'registry-api', + path: '/registry', + customRewrite: '/graphql', + service: graphql.service, + }, + { + name: 'usage', + path: '/usage', + service: usage.service, + }, + ]) + .get(); +} diff --git a/deployment/services/rate-limit.ts b/deployment/services/rate-limit.ts new file mode 100644 index 000000000..9d6b2441c --- /dev/null +++ b/deployment/services/rate-limit.ts @@ -0,0 +1,54 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; +import { DeploymentEnvironment } from '../types'; +import { DbMigrations } from './db-migrations'; +import { UsageEstimator } from './usage-estimation'; +import { serviceLocalEndpoint } from '../utils/local-endpoint'; + +const rateLimitConfig = new pulumi.Config('rateLimit'); +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); +const apiConfig = new pulumi.Config('api'); + +export type RateLimitService = ReturnType; + +export function deployRateLimit({ + storageContainer, + packageHelper, + deploymentEnv, + dbMigrations, + usageEstimator, +}: { + usageEstimator: UsageEstimator; + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + dbMigrations: DbMigrations; +}) { + return new RemoteArtifactAsServiceDeployment( + 'rate-limiter', + { + storageContainer, + replicas: 1, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...deploymentEnv, + ...commonEnv, + LIMIT_CACHE_UPDATE_INTERVAL_MS: + rateLimitConfig.require('updateIntervalMs'), + RELEASE: packageHelper.currentReleaseId(), + USAGE_ESTIMATOR_ENDPOINT: 
serviceLocalEndpoint(usageEstimator.service), + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + }, + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/rate-limit'), + port: 4000, + }, + [dbMigrations, usageEstimator.service, usageEstimator.deployment] + ).deploy(); +} diff --git a/deployment/services/redis.ts b/deployment/services/redis.ts new file mode 100644 index 000000000..54620c5ec --- /dev/null +++ b/deployment/services/redis.ts @@ -0,0 +1,40 @@ +import * as pulumi from '@pulumi/pulumi'; +import { serviceLocalHost } from '../utils/local-endpoint'; +import { Redis as RedisStore } from '../utils/redis'; +import { isStaging } from '../utils/helpers'; +import { DeploymentEnvironment } from '../types'; + +const redisConfig = new pulumi.Config('redis'); + +export type Redis = ReturnType; + +export function deployRedis({ + deploymentEnv, +}: { + deploymentEnv: DeploymentEnvironment; +}) { + const redisPassword = redisConfig.require('password'); + const redisApi = new RedisStore({ + password: redisPassword, + }).deploy({ + limits: isStaging(deploymentEnv) + ? 
{ + memory: '80Mi', + cpu: '50m', + } + : { + memory: '800Mi', + cpu: '1000m', + }, + }); + + return { + deployment: redisApi.deployment, + service: redisApi.service, + config: { + host: serviceLocalHost(redisApi.service), + port: redisApi.port, + password: redisPassword, + }, + }; +} diff --git a/deployment/services/schema.ts b/deployment/services/schema.ts new file mode 100644 index 000000000..6b835d39e --- /dev/null +++ b/deployment/services/schema.ts @@ -0,0 +1,45 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { isProduction } from '../utils/helpers'; +import { DeploymentEnvironment } from '../types'; +import { Redis } from './redis'; +import { PackageHelper } from '../utils/pack'; + +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); + +export type Schema = ReturnType; + +export function deploySchema({ + deploymentEnv, + redis, + packageHelper, + storageContainer, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + redis: Redis; +}) { + return new RemoteArtifactAsServiceDeployment( + 'schema-service', + { + storageContainer, + env: { + ...deploymentEnv, + ...commonEnv, + RELEASE: packageHelper.currentReleaseId(), + REDIS_HOST: redis.config.host, + REDIS_PORT: String(redis.config.port), + REDIS_PASSWORD: redis.config.password, + }, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/schema'), + replicas: isProduction(deploymentEnv) ? 
2 : 1, + }, + [redis.deployment, redis.service] + ).deploy(); +} diff --git a/deployment/services/tokens.ts b/deployment/services/tokens.ts new file mode 100644 index 000000000..5b3a62883 --- /dev/null +++ b/deployment/services/tokens.ts @@ -0,0 +1,44 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { DbMigrations } from './db-migrations'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { DeploymentEnvironment } from '../types'; +import { PackageHelper } from '../utils/pack'; +const commonConfig = new pulumi.Config('common'); +const apiConfig = new pulumi.Config('api'); + +const commonEnv = commonConfig.requireObject>('env'); + +export type Tokens = ReturnType; + +export function deployTokens({ + deploymentEnv, + dbMigrations, + storageContainer, + packageHelper, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + dbMigrations: DbMigrations; +}) { + return new RemoteArtifactAsServiceDeployment( + 'tokens-service', + { + storageContainer, + env: { + ...deploymentEnv, + ...commonEnv, + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + RELEASE: packageHelper.currentReleaseId(), + }, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/tokens'), + }, + [dbMigrations] + ).deploy(); +} diff --git a/deployment/services/usage-estimation.ts b/deployment/services/usage-estimation.ts new file mode 100644 index 000000000..6e9026e75 --- /dev/null +++ b/deployment/services/usage-estimation.ts @@ -0,0 +1,54 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; +import { DeploymentEnvironment } from '../types'; +import { Clickhouse } 
from './clickhouse'; +import { DbMigrations } from './db-migrations'; + +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); +const apiConfig = new pulumi.Config('api'); + +export type UsageEstimator = ReturnType; + +export function deployUsageEstimation({ + storageContainer, + packageHelper, + deploymentEnv, + clickhouse, + dbMigrations, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + clickhouse: Clickhouse; + dbMigrations: DbMigrations; +}) { + return new RemoteArtifactAsServiceDeployment( + 'usage-estimator', + { + storageContainer, + replicas: 1, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...deploymentEnv, + ...commonEnv, + CLICKHOUSE_PROTOCOL: clickhouse.config.protocol, + CLICKHOUSE_HOST: clickhouse.config.host, + CLICKHOUSE_PORT: clickhouse.config.port, + CLICKHOUSE_USERNAME: clickhouse.config.username, + CLICKHOUSE_PASSWORD: clickhouse.config.password, + RELEASE: packageHelper.currentReleaseId(), + POSTGRES_CONNECTION_STRING: apiConfig.requireSecret( + 'postgresConnectionString' + ), + }, + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/usage-estimator'), + port: 4000, + }, + [dbMigrations] + ).deploy(); +} diff --git a/deployment/services/usage-ingestor.ts b/deployment/services/usage-ingestor.ts new file mode 100644 index 000000000..370376ef4 --- /dev/null +++ b/deployment/services/usage-ingestor.ts @@ -0,0 +1,65 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { DbMigrations } from './db-migrations'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } from '../utils/pack'; +import { DeploymentEnvironment } from '../types'; +import { Clickhouse } from './clickhouse'; +import { Kafka } from './kafka'; +import { isProduction } from '../utils/helpers'; + +const commonConfig = 
new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); + +export type UsageIngestor = ReturnType; + +export function deployUsageIngestor({ + storageContainer, + packageHelper, + deploymentEnv, + clickhouse, + kafka, + dbMigrations, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + clickhouse: Clickhouse; + kafka: Kafka; + dbMigrations: DbMigrations; +}) { + const numberOfPartitions = 4; + const replicas = isProduction(deploymentEnv) ? 2 : 1; + const partitionsConsumedConcurrently = Math.floor( + numberOfPartitions / replicas + ); + + return new RemoteArtifactAsServiceDeployment( + 'usage-ingestor-service', + { + storageContainer, + replicas, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...deploymentEnv, + ...commonEnv, + KAFKA_CONNECTION_MODE: 'hosted', + KAFKA_KEY: kafka.config.key, + KAFKA_USER: kafka.config.user, + KAFKA_BROKER: kafka.config.endpoint, + KAFKA_CONCURRENCY: `${partitionsConsumedConcurrently}`, + CLICKHOUSE_PROTOCOL: clickhouse.config.protocol, + CLICKHOUSE_HOST: clickhouse.config.host, + CLICKHOUSE_PORT: clickhouse.config.port, + CLICKHOUSE_USERNAME: clickhouse.config.username, + CLICKHOUSE_PASSWORD: clickhouse.config.password, + RELEASE: packageHelper.currentReleaseId(), + }, + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/usage-ingestor'), + port: 4000, + }, + [clickhouse.deployment, clickhouse.service, dbMigrations] + ).deploy(); +} diff --git a/deployment/services/usage.ts b/deployment/services/usage.ts new file mode 100644 index 000000000..0c50b28e8 --- /dev/null +++ b/deployment/services/usage.ts @@ -0,0 +1,67 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { Tokens } from './tokens'; +import { DbMigrations } from './db-migrations'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { PackageHelper } 
from '../utils/pack'; +import { serviceLocalEndpoint } from '../utils/local-endpoint'; +import { DeploymentEnvironment } from '../types'; +import { Kafka } from './kafka'; +import { RateLimitService } from './rate-limit'; + +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); + +export type Usage = ReturnType; + +export function deployUsage({ + storageContainer, + packageHelper, + deploymentEnv, + tokens, + kafka, + dbMigrations, + rateLimit, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + tokens: Tokens; + kafka: Kafka; + dbMigrations: DbMigrations; + rateLimit: RateLimitService; +}) { + return new RemoteArtifactAsServiceDeployment( + 'usage-service', + { + storageContainer, + replicas: 1, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + env: { + ...deploymentEnv, + ...commonEnv, + KAFKA_CONNECTION_MODE: 'hosted', + KAFKA_KEY: kafka.config.key, + KAFKA_USER: kafka.config.user, + KAFKA_BROKER: kafka.config.endpoint, + KAFKA_BUFFER_SIZE: kafka.config.bufferSize, + KAFKA_BUFFER_INTERVAL: kafka.config.bufferInterval, + KAFKA_BUFFER_DYNAMIC: kafka.config.bufferDynamic, + RELEASE: packageHelper.currentReleaseId(), + TOKENS_ENDPOINT: serviceLocalEndpoint(tokens.service), + RATE_LIMIT_ENDPOINT: serviceLocalEndpoint(rateLimit.service), + }, + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/usage'), + port: 4000, + }, + [ + dbMigrations, + tokens.deployment, + tokens.service, + rateLimit.deployment, + rateLimit.service, + ] + ).deploy(); +} diff --git a/deployment/services/webhooks.ts b/deployment/services/webhooks.ts new file mode 100644 index 000000000..fccccb159 --- /dev/null +++ b/deployment/services/webhooks.ts @@ -0,0 +1,45 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as azure from '@pulumi/azure'; +import { RemoteArtifactAsServiceDeployment } from '../utils/remote-artifact-as-service'; +import { 
DeploymentEnvironment } from '../types'; +import { Redis } from './redis'; +import { PackageHelper } from '../utils/pack'; + +const commonConfig = new pulumi.Config('common'); +const commonEnv = commonConfig.requireObject>('env'); + +export type Webhooks = ReturnType; + +export function deployWebhooks({ + storageContainer, + packageHelper, + deploymentEnv, + redis, +}: { + storageContainer: azure.storage.Container; + packageHelper: PackageHelper; + deploymentEnv: DeploymentEnvironment; + redis: Redis; +}) { + return new RemoteArtifactAsServiceDeployment( + 'webhooks-service', + { + storageContainer, + env: { + ...deploymentEnv, + ...commonEnv, + RELEASE: packageHelper.currentReleaseId(), + REDIS_HOST: redis.config.host, + REDIS_PORT: String(redis.config.port), + REDIS_PASSWORD: redis.config.password, + BULLMQ_COMMANDS_FROM_ROOT: 'true', + }, + readinessProbe: '/_readiness', + livenessProbe: '/_health', + exposesMetrics: true, + packageInfo: packageHelper.npmPack('@hive/webhooks'), + replicas: 1, + }, + [redis.deployment, redis.service] + ).deploy(); +} diff --git a/deployment/tsconfig.json b/deployment/tsconfig.json new file mode 100644 index 000000000..9469ac567 --- /dev/null +++ b/deployment/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "strict": true, + "outDir": "bin", + "target": "es2016", + "module": "commonjs", + "moduleResolution": "node", + "sourceMap": true, + "experimentalDecorators": true, + "pretty": true, + "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + "forceConsistentCasingInFileNames": true + }, + "files": ["index.ts"] +} diff --git a/deployment/types.ts b/deployment/types.ts new file mode 100644 index 000000000..c830c6310 --- /dev/null +++ b/deployment/types.ts @@ -0,0 +1,13 @@ +import * as pulumi from '@pulumi/pulumi'; + +export interface DeploymentEnvironment { + ENVIRONMENT: string; + NODE_ENV: string; + DEPLOYED_DNS: string; +} + +export interface RegistryConfig { + registry: string; + registryToken: 
pulumi.Output; + registryScope: string; +} diff --git a/deployment/utils/azure-function.ts b/deployment/utils/azure-function.ts new file mode 100644 index 000000000..ba72f2d1f --- /dev/null +++ b/deployment/utils/azure-function.ts @@ -0,0 +1,193 @@ +import * as pulumi from '@pulumi/pulumi'; +import * as resources from '@pulumi/azure-native/resources'; +import * as storage from '@pulumi/azure-native/storage'; +import * as web from '@pulumi/azure-native/web'; +import { tmpdir } from 'os'; +import { + mkdtempSync, + copyFileSync, + writeFileSync, + mkdirSync, + readFileSync, +} from 'fs'; +import { join } from 'path'; +import { createHash } from 'crypto'; + +function createFunctionFolder({ + name, + functionDefinition, + functionFile, +}: { + name: string; + functionDefinition: Record; + functionFile: string; +}) { + const hostDir = mkdtempSync( + join(tmpdir(), Math.random().toString(16).slice(2)) + ); + const fnDir = join(hostDir, name); + mkdirSync(fnDir); + + writeFileSync( + join(hostDir, 'host.json'), + JSON.stringify( + { + version: '2.0', + }, + null, + 2 + ) + ); + + copyFileSync(functionFile, join(fnDir, 'index.js')); + writeFileSync( + join(fnDir, 'function.json'), + JSON.stringify(functionDefinition, null, 2) + ); + + return { + checksum: createHash('sha256') + .update(readFileSync(functionFile, 'utf-8')) + .update(JSON.stringify(functionDefinition)) + .digest('hex'), + dir: hostDir, + }; +} + +export class AzureFunction { + constructor( + private config: { + name: string; + envName: string; + functionFile: string; + functionDefinition: Record; + env: Record; + } + ) {} + + deployAsJob() { + const resourceGroup = new resources.ResourceGroup( + `hive-${this.config.envName}-fn-rg` + ); + const storageAccount = new storage.StorageAccount( + `hive${this.config.envName}fn`, + { + resourceGroupName: resourceGroup.name, + sku: { + name: storage.SkuName.Standard_LRS, + }, + kind: storage.Kind.StorageV2, + } + ); + + const codeContainer = new 
storage.BlobContainer('functions', { + resourceGroupName: resourceGroup.name, + accountName: storageAccount.name, + }); + + const { dir, checksum } = createFunctionFolder({ + name: this.config.name, + functionDefinition: this.config.functionDefinition, + functionFile: this.config.functionFile, + }); + + const codeBlob = new storage.Blob(this.config.name, { + resourceGroupName: resourceGroup.name, + accountName: storageAccount.name, + containerName: codeContainer.name, + source: new pulumi.asset.FileArchive(dir), + }); + + const plan = new web.AppServicePlan('plan', { + resourceGroupName: resourceGroup.name, + sku: { + name: 'Y1', + tier: 'Dynamic', + }, + }); + + const storageConnectionString = getConnectionString( + resourceGroup.name, + storageAccount.name + ); + const codeBlobUrl = signedBlobReadUrl( + codeBlob, + codeContainer, + storageAccount, + resourceGroup + ); + + const app = new web.WebApp( + `${this.config.name}-${this.config.envName}-fn`, + { + resourceGroupName: resourceGroup.name, + serverFarmId: plan.id, + kind: 'functionapp', + siteConfig: { + appSettings: [ + { name: 'AzureWebJobsStorage', value: storageConnectionString }, + { name: 'FUNCTIONS_EXTENSION_VERSION', value: '~3' }, + { name: 'FUNCTIONS_WORKER_RUNTIME', value: 'node' }, + { name: 'WEBSITE_NODE_DEFAULT_VERSION', value: '~16' }, + { name: 'WEBSITE_RUN_FROM_PACKAGE', value: codeBlobUrl }, + { + name: 'FUNCTION_CHECKSUM', + value: checksum, + }, + ...Object.entries(this.config.env).map(([name, value]) => ({ + name, + value, + })), + ], + http20Enabled: true, + nodeVersion: '~16', + }, + }, + { + additionalSecretOutputs: [], + } + ); + + return { + endpoint: pulumi.interpolate`https://${app.defaultHostName}/api/index`, + }; + } +} + +function getConnectionString( + resourceGroupName: pulumi.Input, + accountName: pulumi.Input +): pulumi.Output { + // Retrieve the primary storage account key. 
+ const storageAccountKeys = storage.listStorageAccountKeysOutput({ + resourceGroupName, + accountName, + }); + const primaryStorageKey = storageAccountKeys.keys[0].value; + + // Build the connection string to the storage account. + return pulumi.interpolate`DefaultEndpointsProtocol=https;AccountName=${accountName};AccountKey=${primaryStorageKey}`; +} + +function signedBlobReadUrl( + blob: storage.Blob, + container: storage.BlobContainer, + account: storage.StorageAccount, + resourceGroup: resources.ResourceGroup +): pulumi.Output { + const blobSAS = storage.listStorageAccountServiceSASOutput({ + accountName: account.name, + protocols: storage.HttpProtocol.Https, + sharedAccessExpiryTime: '2030-01-01', + sharedAccessStartTime: '2021-01-01', + resourceGroupName: resourceGroup.name, + resource: storage.SignedResource.C, + permissions: storage.Permissions.R, + canonicalizedResource: pulumi.interpolate`/blob/${account.name}/${container.name}`, + contentType: 'application/json', + cacheControl: 'max-age=5', + contentDisposition: 'inline', + contentEncoding: 'deflate', + }); + return pulumi.interpolate`https://${account.name}.blob.core.windows.net/${container.name}/${blob.name}?${blobSAS.serviceSasToken}`; +} diff --git a/deployment/utils/azure-helpers.ts b/deployment/utils/azure-helpers.ts new file mode 100644 index 000000000..38622b0c2 --- /dev/null +++ b/deployment/utils/azure-helpers.ts @@ -0,0 +1,32 @@ +import * as k8s from '@pulumi/kubernetes'; + +export function optimizeAzureCluster() { + /** + * The following disabled Azure logging. We are not really using it. 
+ */ + new k8s.core.v1.ConfigMap('optimize-azure-cluster', { + metadata: { + name: 'container-azm-ms-agentconfig', + namespace: 'kube-system', + }, + data: { + 'schema-version': 'v1', + 'config-version': 'v1', + 'log-data-collection-settings': ` +[log_collection_settings] + [log_collection_settings.stdout] + enabled = false + + [log_collection_settings.stderr] + enabled = false + + [log_collection_settings.env_var] + enabled = false + [log_collection_settings.enrich_container_logs] + enabled = false + [log_collection_settings.collect_all_kube_events] + enabled = false +`, + }, + }); +} diff --git a/deployment/utils/botkube.ts b/deployment/utils/botkube.ts new file mode 100644 index 000000000..6018fd729 --- /dev/null +++ b/deployment/utils/botkube.ts @@ -0,0 +1,93 @@ +import * as k8s from '@pulumi/kubernetes'; +import { Output } from '@pulumi/pulumi'; + +export class BotKube { + deploy(config: { + slackChannelName: string; + slackToken: Output; + clusterName: string; + enableKubectl: boolean; + }) { + const ns = new k8s.core.v1.Namespace('botkube', { + metadata: { + name: 'botkube', + }, + }); + + new k8s.helm.v3.Chart( + 'botkube', + { + chart: 'botkube', + version: '0.12.4', + namespace: ns.metadata.name, + fetchOpts: { + repo: 'https://infracloudio.github.io/charts', + }, + values: { + communications: { + slack: { + enabled: true, + channel: config.slackChannelName, + token: config.slackToken, + notiftype: 'short', + }, + }, + config: { + resources: [ + { + name: 'apps/v1/deployments', + namespaces: { + include: ['default', 'ingress-nginx'], + }, + events: ['all'], + }, + { + name: 'v1/pods', + namespaces: { + include: ['default', 'ingress-nginx'], + }, + events: ['all'], + }, + ], + recommendations: true, + settings: { + clustername: config.clusterName, + kubectl: { + defaultNamespace: 'default', + restrictAccess: 'true', + enabled: String(config.enableKubectl), + commands: { + verbs: [ + 'cluster-info', + 'describe', + 'get', + 'logs', + 'top', + 'restart', + 
], + resources: [ + 'deployments', + 'pods', + 'namespaces', + 'services', + 'daemonsets', + 'httpproxy', + 'statefulsets', + 'nodes', + ], + }, + }, + }, + }, + image: { + repository: 'infracloudio/botkube', + tag: 'v0.12.4', + }, + }, + }, + { + dependsOn: [ns], + } + ); + } +} diff --git a/deployment/utils/cdn.ts b/deployment/utils/cdn.ts new file mode 100644 index 000000000..bd2db6260 --- /dev/null +++ b/deployment/utils/cdn.ts @@ -0,0 +1,57 @@ +import * as cf from '@pulumi/cloudflare'; +import * as pulumi from '@pulumi/pulumi'; +import { readFileSync } from 'fs'; +import { resolve } from 'path'; + +export class CloudflareCDN { + constructor( + private envName: string, + private zoneId: string, + private cdnDnsRecord: string, + private authPrivateKey: pulumi.Output + ) {} + + deploy() { + const kvStorage = new cf.WorkersKvNamespace('hive-ha-storage', { + title: `hive-ha-cdn-${this.envName}`, + }); + + const script = new cf.WorkerScript('hive-ha-worker', { + content: readFileSync( + resolve(__dirname, '../../packages/services/cdn-worker/dist/worker.js'), + 'utf-8' + ), + name: `hive-storage-cdn-${this.envName}`, + kvNamespaceBindings: [ + { + // HIVE_DATA is in use in cdn-script.js as well, its the name of the global variable + name: 'HIVE_DATA', + namespaceId: kvStorage.id, + }, + ], + secretTextBindings: [ + { + // KEY_DATA is in use in cdn-script.js as well, its the name of the global variable, + // basically it's the private key for the hmac key. 
+ name: 'KEY_DATA', + text: this.authPrivateKey, + }, + ], + }); + + const workerBase = this.cdnDnsRecord; + const workerUrl = `https://${workerBase}`; + + new cf.WorkerRoute('cf-hive-worker', { + scriptName: script.name, + pattern: `${workerBase}/*`, + zoneId: this.zoneId, + }); + + return { + authPrivateKey: this.authPrivateKey, + workerBaseUrl: workerUrl, + cfStorageNamespaceId: kvStorage.id, + }; + } +} diff --git a/deployment/utils/cert-manager.ts b/deployment/utils/cert-manager.ts new file mode 100644 index 000000000..627fe6ef4 --- /dev/null +++ b/deployment/utils/cert-manager.ts @@ -0,0 +1,47 @@ +import * as k8s from '@pulumi/kubernetes'; + +export class CertManager { + public deployCertManagerAndIssuer() { + const certManager = new k8s.yaml.ConfigFile('cert-manager', { + file: 'https://github.com/jetstack/cert-manager/releases/download/v1.8.0/cert-manager.yaml', + }); + + const issuerName = 'letsencrypt-prod'; + + new k8s.apiextensions.CustomResource( + 'cert-manager-issuer', + { + apiVersion: 'cert-manager.io/v1', + kind: 'ClusterIssuer', + metadata: { + name: issuerName, + }, + spec: { + acme: { + server: 'https://acme-v02.api.letsencrypt.org/directory', + email: 'contact@the-guild.dev', + privateKeySecretRef: { + name: issuerName, + }, + solvers: [ + { + http01: { + ingress: { + class: 'contour', + }, + }, + }, + ], + }, + }, + }, + { + dependsOn: [certManager], + } + ); + + return { + tlsIssueName: issuerName, + }; + } +} diff --git a/deployment/utils/clickhouse.ts b/deployment/utils/clickhouse.ts new file mode 100644 index 000000000..9a9afa811 --- /dev/null +++ b/deployment/utils/clickhouse.ts @@ -0,0 +1,114 @@ +import * as kx from '@pulumi/kubernetesx'; +import * as k8s from '@pulumi/kubernetes'; +import { PodBuilder } from './pod-builder'; + +export class Clickhouse { + constructor( + protected name: string, + protected options: { + env?: kx.types.Container['env']; + sentryDsn: string; + } + ) {} + + deploy() { + const image = 
'clickhouse/clickhouse-server:22.3.3.44-alpine'; + const port = 8123; + + const env: any[] = Array.isArray(this.options.env) + ? this.options.env + : Object.keys(this.options.env as kx.types.EnvMap).map((name) => ({ + name, + value: (this.options.env as kx.types.EnvMap)[name], + })); + + const cm = new kx.ConfigMap('clickhouse-config', { + data: { + 'config.xml': createConfig({ + sentryDsn: this.options.sentryDsn, + }), + }, + }); + + const pb = new PodBuilder({ + restartPolicy: 'Always', + containers: [ + { + name: this.name, + image, + env, + volumeMounts: [cm.mount('/etc/clickhouse-server/conf.d')], + ports: { + http: port, + }, + readinessProbe: { + initialDelaySeconds: 5, + periodSeconds: 20, + failureThreshold: 5, + timeoutSeconds: 5, + httpGet: { + path: '/ping', + port, + }, + }, + livenessProbe: { + initialDelaySeconds: 3, + periodSeconds: 20, + failureThreshold: 10, + timeoutSeconds: 5, + httpGet: { + path: '/ping', + port, + }, + }, + }, + ], + }); + + const metadata: k8s.types.input.meta.v1.ObjectMeta = { + annotations: {}, + }; + + const deployment = new kx.Deployment(this.name, { + spec: pb.asExtendedDeploymentSpec( + { + replicas: 1, + strategy: { + type: 'RollingUpdate', + }, + }, + { + annotations: metadata.annotations, + } + ), + }); + const service = deployment.createService({}); + + return { deployment, service, port }; + } +} + +const createConfig = ({ sentryDsn }: { sentryDsn: string }) => ` + :: + 0.0.0.0 + + true + false + ${sentryDsn} + + + + + 1 + + 16 + + 5000000 + + 1000 + + 0 + + + +`; diff --git a/deployment/utils/helpers.ts b/deployment/utils/helpers.ts new file mode 100644 index 000000000..4c328c522 --- /dev/null +++ b/deployment/utils/helpers.ts @@ -0,0 +1,29 @@ +import { DeploymentEnvironment } from '../types'; + +export function isProduction( + deploymentEnv: DeploymentEnvironment | string +): boolean { + return !isStaging(deploymentEnv); +} + +export function isStaging( + deploymentEnv: DeploymentEnvironment | string +): boolean 
{ + return isDeploymentEnvironment(deploymentEnv) + ? deploymentEnv.ENVIRONMENT === 'staging' + : deploymentEnv === 'staging'; +} + +export function isDeploymentEnvironment( + value: any +): value is DeploymentEnvironment { + return ( + value && + typeof value === 'object' && + typeof value['ENVIRONMENT'] === 'string' + ); +} + +export function isDefined(value: T | null | undefined): value is T { + return value !== null && value !== undefined; +} diff --git a/deployment/utils/local-endpoint.ts b/deployment/utils/local-endpoint.ts new file mode 100644 index 000000000..90c8c88c6 --- /dev/null +++ b/deployment/utils/local-endpoint.ts @@ -0,0 +1,33 @@ +import * as k8s from '@pulumi/kubernetes'; +import * as pulumi from '@pulumi/pulumi'; + +export function serviceLocalEndpoint(service: k8s.types.input.core.v1.Service) { + return pulumi + .all([service.metadata, service.spec]) + .apply(([metadata, spec]) => { + const defaultPort = (spec.ports || [])[0]; + const portText = defaultPort ? `:${defaultPort.port}` : ''; + + return `http://${metadata.name}.${ + metadata.namespace || 'default' + }.svc.cluster.local${portText}`; + }); +} + +export function serviceLocalHost(service: k8s.types.input.core.v1.Service) { + return pulumi.all([service.metadata]).apply(([metadata]) => { + return `${metadata.name}.${ + metadata.namespace || 'default' + }.svc.cluster.local`; + }); +} + +export function serviceLocalMetricsEndpoint( + service: k8s.types.input.core.v1.Service +) { + return pulumi.all([service.metadata]).apply(([metadata]) => { + return `${metadata.name}.${ + metadata.namespace || 'default' + }.svc.cluster.local:10254/metrics`; + }); +} diff --git a/deployment/utils/observability.ts b/deployment/utils/observability.ts new file mode 100644 index 000000000..6c00d3424 --- /dev/null +++ b/deployment/utils/observability.ts @@ -0,0 +1,363 @@ +import * as k8s from '@pulumi/kubernetes'; +import { Output, interpolate } from '@pulumi/pulumi'; + +export type ObservabilityConfig = { + 
  loki: {
    // NOTE(review): generic args reconstructed — extraction stripped `<…>`; verify.
    endpoint: Output<string> | string;
    username: Output<string> | string;
    password: Output<string>;
  };
  prom: {
    endpoint: Output<string> | string;
    username: Output<string> | string;
    password: Output<string>;
  };
};

/**
 * Cluster observability stack:
 *  - an OpenTelemetry Collector that scrapes Prometheus metrics from annotated
 *    pods and remote-writes them upstream over basic-auth HTTPS;
 *  - a Vector agent that ships pod logs from the `default` namespace to Loki.
 */
export class Observability {
  constructor(private envName: string, private config: ObservabilityConfig) {}

  deploy() {
    const ns = new k8s.core.v1.Namespace('observability', {
      metadata: {
        name: 'observability',
      },
    });

    // We are using otel-collector to scrape metrics from Pods
    // dotansimha: once Vector supports scraping K8s metrics based on Prom, we can drop this.
    new k8s.helm.v3.Chart('metrics', {
      chart: 'opentelemetry-collector',
      namespace: ns.metadata.name,
      version: '0.16.1',
      fetchOpts: {
        repo: 'https://open-telemetry.github.io/opentelemetry-helm-charts',
      },
      // https://github.com/open-telemetry/opentelemetry-helm-charts/blob/main/charts/opentelemetry-collector/values.yaml
      values: {
        agentCollector: {
          enabled: false,
        },
        // Single standalone collector instead of a per-node agent DaemonSet.
        standaloneCollector: {
          enabled: true,
          resources: {
            limits: {
              cpu: '256m',
              memory: '512Mi',
            },
          },
        },
        // RBAC so the collector can discover pods/services to scrape.
        clusterRole: {
          create: true,
          rules: [
            {
              apiGroups: [''],
              resources: [
                'events',
                'namespaces',
                'namespaces/status',
                'nodes',
                'nodes/spec',
                'pods',
                'pods/metrics',
                'nodes/metrics',
                'pods/status',
                'replicationcontrollers',
                'replicationcontrollers/status',
                'resourcequotas',
                'services',
                'endpoints',
              ],
              verbs: ['get', 'list', 'watch'],
            },
            {
              apiGroups: ['apps'],
              resources: [
                'daemonsets',
                'deployments',
                'replicasets',
                'statefulsets',
              ],
              verbs: ['get', 'list', 'watch'],
            },
            {
              apiGroups: ['extensions'],
              resources: ['daemonsets', 'deployments', 'replicasets'],
              verbs: ['get', 'list', 'watch'],
            },
            {
              apiGroups: ['batch'],
              resources: ['jobs', 'cronjobs'],
              verbs: ['get', 'list', 'watch'],
            },
            {
              apiGroups: ['autoscaling'],
              resources: ['horizontalpodautoscalers'],
              verbs: ['get', 'list', 'watch'],
            },
          ],
        },
        config: {
          exporters: {
            logging: {
              loglevel: 'info',
            },
            // Remote-write with credentials embedded in the URL (basic auth).
            prometheusremotewrite: {
              endpoint: interpolate`https://${this.config.prom.username}:${this.config.prom.password}@${this.config.prom.endpoint}`,
            },
          },
          extensions: {
            health_check: {},
          },
          processors: {
            batch: {},
            // Drop data before the 512Mi container limit is hit.
            memory_limiter: {
              check_interval: '5s',
              limit_mib: 409,
              spike_limit_mib: 128,
            },
          },
          receivers: {
            prometheus: {
              config: {
                global: {
                  evaluation_interval: '10s',
                  scrape_interval: '30s',
                  scrape_timeout: '10s',
                },
                scrape_configs: [
                  // (a commented-out `ingress-contour-endpoints` pod scrape job
                  //  was kept here in the original — re-enable from history if
                  //  contour metrics are needed again)
                  {
                    honor_labels: true,
                    honor_timestamps: true,
                    job_name: 'service-metrics',
                    // Scrape pods in `default` that opt in via the standard
                    // prometheus.io/* annotations.
                    kubernetes_sd_configs: [
                      {
                        role: 'pod',
                        namespaces: {
                          names: ['default'],
                        },
                      },
                    ],
                    metrics_path: '/metrics',
                    relabel_configs: [
                      {
                        source_labels: [
                          '__meta_kubernetes_pod_container_port_name',
                        ],
                        action: 'keep',
                        regex: 'metrics',
                      },
                      {
                        source_labels: [
                          '__meta_kubernetes_pod_annotation_prometheus_io_scrape',
                        ],
                        action: 'keep',
                        regex: true,
                      },
                      {
                        source_labels: [
                          '__meta_kubernetes_pod_annotation_prometheus_io_scheme',
                        ],
                        action: 'replace',
                        target_label: '__scheme__',
                        regex: '(https?)',
                      },
                      {
                        source_labels: [
                          '__meta_kubernetes_pod_annotation_prometheus_io_path',
                        ],
                        action: 'replace',
                        target_label: '__metrics_path__',
                        regex: '(.+)',
                      },
                      {
                        // Rewrite the scrape address to the annotated port.
                        // NOTE(review): this regex looks like it lost its
                        // backslashes ("\\d+") during extraction — verify.
                        source_labels: [
                          '__address__',
                          '__meta_kubernetes_pod_annotation_prometheus_io_port',
                        ],
                        action: 'replace',
                        regex: '([^:]+)(?::d+)?;(d+)',
                        replacement: '$1:$2',
                        target_label: '__address__',
                      },
                      {
                        action: 'labelmap',
                        regex: '__meta_kubernetes_service_label_(.+)',
                      },
                      {
                        action: 'replace',
                        source_labels: ['__meta_kubernetes_namespace'],
                        target_label: 'namespace',
                      },
                      {
                        action: 'replace',
                        source_labels: ['__meta_kubernetes_service_name'],
                        target_label: 'service',
                      },
                      {
                        action: 'replace',
                        source_labels: ['__meta_kubernetes_pod_name'],
                        target_label: 'pod',
                      },
                      {
                        action: 'replace',
                        source_labels: ['__meta_kubernetes_pod_node_name'],
                        target_label: 'kubernetes_node',
                      },
                    ],
                    scheme: 'http',
                  },
                  // (a commented-out `kubernetes-cadvisor` node scrape job was
                  //  kept here in the original — re-enable from history if
                  //  container-level cpu/mem metrics are needed)
                ],
              },
            },
          },
          service: {
            extensions: ['health_check'],
            pipelines: {
              metrics: {
                exporters: ['logging', 'prometheusremotewrite'],
                processors: ['memory_limiter', 'batch'],
                receivers: ['prometheus'],
              },
            },
          },
        },
      },
    });

    // We are using Vector to scrape logs from the K8s Pods, and send it to Grafana Cloud
    new k8s.helm.v3.Chart(
      'vector-logging',
      {
        chart: 'vector',
        version: '0.10.3',
        namespace: ns.metadata.name,
        fetchOpts: {
          repo: 'https://helm.vector.dev',
        },
        // https://vector.dev/docs/reference/configuration/
        values: {
          role: 'Agent',
          customConfig: {
            data_dir: '/vector-data-dir',
            api: {
              enabled: true,
              playground: false,
              address: '127.0.0.1:7676',
            },
            sources: {
              kubernetes_logs: {
                type: 'kubernetes_logs',
                // Only ship application logs, not system namespaces.
                extra_field_selector: 'metadata.namespace=default',
              },
            },
            sinks: {
              // enable if you need to debug the raw vector messages
              // stdout: {
              //   type: 'console',
              //   inputs: ['kubernetes_logs'],
              //   encoding: { codec: 'json' },
              // },
              grafana_lab: {
                type: 'loki',
                inputs: ['kubernetes_logs'],
                endpoint: interpolate`https://${this.config.loki.endpoint}`,
                auth: {
                  strategy: 'basic',
                  user: this.config.loki.username,
                  password: this.config.loki.password,
                },
                labels: {
                  // double-escaped so Helm leaves the Vector template intact
                  namespace: '{{`{{ kubernetes.pod_namespace }}`}}',
                  container_name: '{{`{{ kubernetes.container_name }}`}}',
                  env: this.envName,
                },
                encoding: {
                  codec: 'text',
                },
              },
            },
          },
        },
      },
      {
        dependsOn: [ns],
      }
    );
  }
}
new Error(`Failed to find package "${name}" in workspace!`); + } + + const distDir = resolve(dir, './dist/'); + const fileName = execSync('npm pack --pack-destination ../', { + cwd: distDir, + stdio: ['ignore', 'pipe', 'ignore'], + }) + .toString() + .trim() + .replace(/\r?\n|\r/g, ''); + + // TODO: maybe manypkg can give it to us? + const withoutOrg = name.split('/'); + const packName = withoutOrg.length === 2 ? withoutOrg[1] : withoutOrg[0]; + const binName = packName.split('@')[0]; + + return { + runtime: 'node', + name, + file: resolve(dir, fileName), + bin: binName, + }; + }, + }; +} + +export type PackageHelper = ReturnType; +export type PackageInfo = { + runtime: 'node' | 'rust'; + name: string; + file: string; + bin: string; +}; diff --git a/deployment/utils/pod-builder.ts b/deployment/utils/pod-builder.ts new file mode 100644 index 000000000..f507aa658 --- /dev/null +++ b/deployment/utils/pod-builder.ts @@ -0,0 +1,36 @@ +import * as kx from '@pulumi/kubernetesx'; +import * as k8s from '@pulumi/kubernetes'; +import * as pulumi from '@pulumi/pulumi'; + +export function normalizeEnv(env: kx.types.Container['env']): any[] { + return Array.isArray(env) + ? env + : Object.keys(env as kx.types.EnvMap).map((name) => ({ + name, + value: (env as kx.types.EnvMap)[name], + })); +} + +export class PodBuilder extends kx.PodBuilder { + public asExtendedDeploymentSpec( + args?: kx.types.PodBuilderDeploymentSpec, + metadata?: k8s.types.input.meta.v1.ObjectMeta + ): pulumi.Output { + const podName = this.podSpec.containers.apply((containers: any) => { + return pulumi.output(containers[0].name); + }); + const appLabels = { app: podName }; + + const _args = args || {}; + const deploymentSpec: k8s.types.input.apps.v1.DeploymentSpec = { + ..._args, + selector: { matchLabels: appLabels }, + replicas: _args.replicas ?? 
1, + template: { + metadata: { labels: appLabels, ...(metadata || {}) }, + spec: this.podSpec, + }, + }; + return pulumi.output(deploymentSpec); + } +} diff --git a/deployment/utils/police.ts b/deployment/utils/police.ts new file mode 100644 index 000000000..287201aa6 --- /dev/null +++ b/deployment/utils/police.ts @@ -0,0 +1,68 @@ +import * as cf from '@pulumi/cloudflare'; +import * as pulumi from '@pulumi/pulumi'; +import { readFileSync } from 'fs'; +import { resolve } from 'path'; + +export class HivePolice { + constructor( + private envName: string, + private zoneId: string, + private cfToken: pulumi.Output, + private rootDns: string + ) {} + + deploy() { + const kvStorage = new cf.WorkersKvNamespace('hive-police-kv', { + title: `hive-police-${this.envName}`, + }); + + const script = new cf.WorkerScript('hive-police-worker', { + content: readFileSync( + resolve( + __dirname, + '../../packages/services/police-worker/dist/worker.js' + ), + 'utf-8' + ), + name: `hive-police-${this.envName}`, + kvNamespaceBindings: [ + { + // HIVE_POLICE is in use in police-script js as well, its the name of the global variable + name: 'HIVE_POLICE', + namespaceId: kvStorage.id, + }, + ], + // + secretTextBindings: [ + { + name: 'CF_BEARER_TOKEN', + text: this.cfToken, + }, + { + name: 'ZONE_IDENTIFIER', + text: this.zoneId, + }, + { + name: 'HOSTNAMES', + text: `${this.rootDns},app.${this.rootDns},cdn.${this.rootDns}`, + }, + { + name: 'WAF_RULE_NAME', + text: `hive-police-rule-${this.envName}`, + }, + ], + }); + + new cf.WorkerCronTrigger('cf-police-trigger', { + scriptName: script.name, + // https://developers.cloudflare.com/workers/platform/cron-triggers/#examples + schedules: [ + '*/10 * * * *', // every 10 minutes + ], + }); + + return { + cfStorageNamespaceId: kvStorage.id, + }; + } +} diff --git a/deployment/utils/redis.ts b/deployment/utils/redis.ts new file mode 100644 index 000000000..f0f2fbca0 --- /dev/null +++ b/deployment/utils/redis.ts @@ -0,0 +1,119 @@ +import * as 
kx from '@pulumi/kubernetesx'; +import * as k8s from '@pulumi/kubernetes'; +import { normalizeEnv, PodBuilder } from './pod-builder'; + +const DEFAULT_IMAGE = 'bitnami/redis:6.2.6'; +const PORT = 6379; + +export class Redis { + constructor( + protected options: { + env?: kx.types.Container['env']; + password: string; + } + ) {} + + deploy({ + limits, + }: { + limits: k8s.types.input.core.v1.ResourceRequirements['limits']; + }) { + const name = 'redis-store'; + const image = DEFAULT_IMAGE; + + const env = normalizeEnv(this.options.env ?? {}).concat([ + { + name: 'REDIS_PASSWORD', + value: this.options.password, + }, + { + name: 'POD_NAME', + valueFrom: { + fieldRef: { + fieldPath: 'metadata.name', + }, + }, + }, + ]); + + const cm = new kx.ConfigMap('redis-scripts', { + data: { + 'readiness.sh': `#!/bin/bash +response=$(timeout -s SIGTERM 3 $1 redis-cli -h localhost -a ${this.options.password} -p ${PORT} ping) +if [ "$response" != "PONG" ]; then + echo "$response" + exit 1 +fi + `, + 'liveness.sh': `#!/bin/bash +response=$(timeout -s SIGTERM 3 $1 redis-cli -h localhost -a ${this.options.password} -p ${PORT} ping) +if [ "$response" != "PONG" ] && [ "$response" != "LOADING Redis is loading the dataset in memory" ]; then + echo "$response" + exit 1 +fi + `, + }, + }); + + const volumeMounts = [cm.mount('/scripts')]; + + const pb = new PodBuilder({ + restartPolicy: 'Always', + containers: [ + { + name, + image, + env, + volumeMounts, + ports: [{ containerPort: PORT, hostPort: PORT, protocol: 'TCP' }], + resources: { + limits, + }, + livenessProbe: { + initialDelaySeconds: 3, + periodSeconds: 10, + failureThreshold: 10, + timeoutSeconds: 3, + exec: { + command: ['/bin/sh', '/scripts/liveness.sh'], + }, + }, + readinessProbe: { + initialDelaySeconds: 5, + periodSeconds: 8, + failureThreshold: 5, + timeoutSeconds: 3, + exec: { + command: ['/bin/sh', '/scripts/readiness.sh'], + }, + }, + }, + ], + }); + + const metadata: k8s.types.input.meta.v1.ObjectMeta = { + annotations: 
{}, + }; + + const deployment = new kx.Deployment(name, { + spec: pb.asExtendedDeploymentSpec( + { + replicas: 1, + strategy: { + type: 'RollingUpdate', + rollingUpdate: { + maxSurge: 1, + maxUnavailable: 0, + }, + }, + }, + { + annotations: metadata.annotations, + } + ), + }); + const service = deployment.createService({}); + + return { deployment, service, port: PORT }; + } +} diff --git a/deployment/utils/remote-artifact-as-service.ts b/deployment/utils/remote-artifact-as-service.ts new file mode 100644 index 000000000..bc4dff2ab --- /dev/null +++ b/deployment/utils/remote-artifact-as-service.ts @@ -0,0 +1,212 @@ +import * as kx from '@pulumi/kubernetesx'; +import * as k8s from '@pulumi/kubernetes'; +import * as azure from '@pulumi/azure'; +import * as pulumi from '@pulumi/pulumi'; +import { PodBuilder, normalizeEnv } from './pod-builder'; +import { PackageInfo } from './pack'; +import { isDefined } from './helpers'; + +const DEFAULT_IMAGE = 'node:16.13.2-alpine3.15'; + +export class RemoteArtifactAsServiceDeployment { + constructor( + protected name: string, + protected options: { + storageContainer: azure.storage.Container; + env?: kx.types.Container['env']; + packageInfo: PackageInfo; + port?: number; + image?: string; + livenessProbe?: string; + readinessProbe?: string; + memoryLimit?: string; + cpuLimit?: string; + bin?: string; + /** + * Enables /metrics endpoint on port 10254 + */ + exposesMetrics?: boolean; + replicas?: number; + }, + protected dependencies?: Array, + protected parent?: pulumi.Resource | null + ) {} + + deployAsJob() { + const artifactUrl = this.makeArtifactUrl(); + const { pb } = this.createPod(artifactUrl, true); + + const job = new kx.Job( + this.name, + { + spec: pb.asJobSpec(), + }, + { dependsOn: this.dependencies?.filter(isDefined) } + ); + + return { job }; + } + + createPod(artifactUrl: pulumi.Output, asJob: boolean) { + const port = this.options.port || 3000; + const additionalEnv: any[] = normalizeEnv(this.options.env); + + 
let livenessProbe: k8s.types.input.core.v1.Probe | undefined = undefined; + let readinessProbe: k8s.types.input.core.v1.Probe | undefined = undefined; + + if (this.options.livenessProbe) { + livenessProbe = { + initialDelaySeconds: 3, + periodSeconds: 20, + failureThreshold: 10, + timeoutSeconds: 5, + httpGet: { + path: this.options.livenessProbe, + port, + }, + }; + } + + if (this.options.readinessProbe) { + readinessProbe = { + initialDelaySeconds: 5, + periodSeconds: 20, + failureThreshold: 5, + timeoutSeconds: 5, + httpGet: { + path: this.options.readinessProbe, + port, + }, + }; + } + + const image = this.options.image || DEFAULT_IMAGE; + const appVolume = { + mountPath: '/app', + name: 'app', + }; + + const volumeMounts = [appVolume]; + + if (this.options.exposesMetrics) { + additionalEnv.push({ name: 'METRICS_ENABLED', value: 'true' }); + } + + const pb = new PodBuilder({ + restartPolicy: asJob ? 'Never' : 'Always', + volumes: [ + { + name: appVolume.name, + emptyDir: {}, + }, + ], + initContainers: [ + { + name: `${this.name}-init`, + image, + workingDir: appVolume.mountPath, + volumeMounts, + command: + this.options.packageInfo.runtime === 'node' + ? ['/bin/sh', '-c', artifactUrl.apply((v) => `yarn add ${v}`)] + : this.options.packageInfo.runtime === 'rust' + ? ['/bin/sh', '-c', artifactUrl.apply((v) => `wget ${v}`)] + : ['echo missing script!'], + }, + ], + containers: [ + { + livenessProbe, + readinessProbe, + env: [ + { name: 'PORT', value: String(port) }, + { + name: 'POD_NAME', + valueFrom: { + fieldRef: { + fieldPath: 'metadata.name', + }, + }, + }, + ].concat(additionalEnv), + name: this.name, + image, + workingDir: appVolume.mountPath, + volumeMounts: [appVolume], + command: + this.options.packageInfo.runtime === 'node' + ? ['yarn', this.options.bin || this.options.packageInfo.bin] + : this.options.packageInfo.runtime === 'rust' + ? [this.options.packageInfo.bin] + : [], + ports: { + http: port, + ...(this.options.exposesMetrics + ? 
{ + metrics: 10254, + } + : {}), + }, + }, + ], + }); + + return { pb }; + } + + private makeArtifactUrl() { + const azureStaticFile = new azure.storage.Blob(`${this.name}-artifact`, { + storageAccountName: this.options.storageContainer.storageAccountName, + storageContainerName: this.options.storageContainer.name, + type: 'Block', + source: new pulumi.asset.FileAsset(this.options.packageInfo.file), + }); + + return azureStaticFile.url; + } + + deploy() { + const artifactUrl = this.makeArtifactUrl(); + const { pb } = this.createPod(artifactUrl, false); + + const metadata: k8s.types.input.meta.v1.ObjectMeta = { + annotations: {}, + }; + + if (this.options.exposesMetrics) { + metadata.annotations = { + 'prometheus.io/port': '10254', + 'prometheus.io/path': '/metrics', + 'prometheus.io/scrape': 'true', + }; + } + + const deployment = new kx.Deployment( + this.name, + { + spec: pb.asExtendedDeploymentSpec( + { + replicas: this.options.replicas ?? 1, + strategy: { + type: 'RollingUpdate', + rollingUpdate: { + maxSurge: this.options.replicas ?? 1, + maxUnavailable: 0, + }, + }, + }, + { + annotations: metadata.annotations, + } + ), + }, + { + dependsOn: this.dependencies?.filter(isDefined), + parent: this.parent ?? 
undefined, + } + ); + const service = deployment.createService({}); + + return { deployment, service }; + } +} diff --git a/deployment/utils/reverse-proxy.ts b/deployment/utils/reverse-proxy.ts new file mode 100644 index 000000000..8955e2c3e --- /dev/null +++ b/deployment/utils/reverse-proxy.ts @@ -0,0 +1,213 @@ +import * as k8s from '@pulumi/kubernetes'; +import { Output } from '@pulumi/pulumi'; + +export class Proxy { + private lbService: Output | null = null; + + constructor( + private tlsSecretName: string, + private staticIp?: { address?: string } + ) {} + + registerService( + dns: { record: string; apex?: boolean }, + routes: { + name: string; + path: string; + service: k8s.core.v1.Service; + customRewrite?: string; + virtualHost?: Output; + httpsUpstream?: boolean; + withWwwDomain?: boolean; + }[] + ) { + const cert = new k8s.apiextensions.CustomResource(`cert-${dns.record}`, { + apiVersion: 'cert-manager.io/v1', + kind: 'Certificate', + metadata: { + name: dns.record, + }, + spec: { + commonName: dns.record, + dnsNames: [dns.record], + issuerRef: { + name: this.tlsSecretName, + kind: 'ClusterIssuer', + }, + secretName: dns.record, + }, + }); + + new k8s.apiextensions.CustomResource( + `httpproxy-${dns.record}`, + { + apiVersion: 'projectcontour.io/v1', + kind: 'HTTPProxy', + metadata: { + annotations: { + 'ingress.kubernetes.io/force-ssl-redirect': 'true', + }, + name: `ingress-${dns.record}`, + }, + spec: { + virtualhost: { + fqdn: dns.record, + tls: { + secretName: dns.record, + }, + corsPolicy: { + allowOrigin: [ + 'https://app.graphql-hive.com', + 'https://graphql-hive.com', + ], + allowMethods: ['GET', 'POST', 'OPTIONS'], + allowHeaders: ['*'], + exposeHeaders: ['*'], + }, + }, + routes: routes.map((route) => ({ + conditions: [ + { + prefix: route.path, + }, + ], + services: [ + { + name: route.service.metadata.name, + port: route.service.spec.ports[0].port, + }, + ], + ...(route.path === '/' + ? 
{} + : { + pathRewritePolicy: { + replacePrefix: [ + { + replacement: route.customRewrite || '/', + }, + ], + }, + }), + })), + }, + }, + { + dependsOn: [cert, this.lbService!], + } + ); + + return this; + } + + deployProxy(options: { replicas?: number }) { + const ns = new k8s.core.v1.Namespace('contour', { + metadata: { + name: 'contour', + }, + }); + + const proxyController = new k8s.helm.v3.Chart('contour-proxy', { + chart: 'contour', + version: '7.8.0', + namespace: ns.metadata.name, + fetchOpts: { + repo: 'https://charts.bitnami.com/bitnami', + }, + // https://github.com/bitnami/charts/tree/master/bitnami/contour + values: { + commonLabels: { + 'vector.dev/exclude': 'true', + }, + configInline: { + // https://projectcontour.io/docs/main/configuration/ + 'accesslog-format': 'json', + // https://www.envoyproxy.io/docs/envoy/latest/configuration/observability/access_log/usage + 'json-fields': [ + '@timestamp', + 'bytes_received', + 'bytes_sent', + 'downstream_local_address', + 'duration', + 'method', + 'path', + 'request_id', + 'response_code', + 'response_flags', + 'upstream_cluster', + 'upstream_host', + 'upstream_service_time', + 'user_agent', + 'x_forwarded_for', + ], + }, + contour: { + podAnnotations: { + 'prometheus.io/scrape': 'true', + 'prometheus.io/port': '8000', + 'prometheus.io/scheme': 'http', + 'prometheus.io/path': '/metrics', + }, + podLabels: { + 'vector.dev/exclude': 'true', + }, + }, + envoy: { + service: { + loadBalancerIP: this.staticIp?.address, + }, + podAnnotations: { + 'prometheus.io/scrape': 'true', + 'prometheus.io/port': '8002', + 'prometheus.io/scheme': 'http', + 'prometheus.io/path': '/stats/prometheus', + }, + podLabels: { + 'vector.dev/exclude': 'true', + }, + autoscaling: + options?.replicas && options?.replicas > 1 + ? 
{ + enabled: true, + minReplicas: 1, + maxReplicas: options.replicas, + } + : {}, + }, + }, + }); + + this.lbService = proxyController.getResource( + 'v1/Service', + 'contour/contour-proxy-envoy' + ); + + new k8s.apiextensions.CustomResource( + 'secret-delegation', + { + apiVersion: 'projectcontour.io/v1', + kind: 'TLSCertificateDelegation', + metadata: { + name: this.tlsSecretName, + namespace: 'cert-manager', + }, + spec: { + delegations: [ + { + secretName: this.tlsSecretName, + targetNamespaces: ['*'], + }, + ], + }, + }, + { + dependsOn: [this.lbService], + } + ); + + return this; + } + + get() { + return this.lbService; + } +} diff --git a/deployment/yarn.lock b/deployment/yarn.lock new file mode 100644 index 000000000..d95970165 --- /dev/null +++ b/deployment/yarn.lock @@ -0,0 +1,1903 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@azure/abort-controller@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@azure/abort-controller/-/abort-controller-1.0.4.tgz#fd3c4d46c8ed67aace42498c8e2270960250eafd" + integrity sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw== + dependencies: + tslib "^2.0.0" + +"@azure/core-auth@^1.1.4": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.2.0.tgz#a5a181164e99f8446a3ccf9039345ddc9bb63bb9" + integrity sha512-KUl+Nwn/Sm6Lw5d3U90m1jZfNSL087SPcqHLxwn2T6PupNKmcgsEbDjHB25gDvHO4h7pBsTlrdJAY7dz+Qk8GA== + dependencies: + "@azure/abort-controller" "^1.0.0" + tslib "^2.0.0" + +"@azure/functions@=1.2.2": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@azure/functions/-/functions-1.2.2.tgz#8fcb6aa3a879d3be0dc3d68919f969b054bbe3f3" + integrity sha512-p/dDHq1sG/iAib+eDY4NxskWHoHW1WFzD85s0SfWxc2wVjJbxB0xz/zBF4s7ymjVgTu+0ceipeBk+tmpnt98oA== + +"@azure/ms-rest-azure-env@^2.0.0": + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/@azure/ms-rest-azure-env/-/ms-rest-azure-env-2.0.0.tgz#45809f89763a480924e21d3c620cd40866771625" + integrity sha512-dG76W7ElfLi+fbTjnZVGj+M9e0BIEJmRxU6fHaUQ12bZBe8EJKYb2GV50YWNaP2uJiVQ5+7nXEVj1VN1UQtaEw== + +"@azure/ms-rest-azure-js@^2.0.1": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@azure/ms-rest-azure-js/-/ms-rest-azure-js-2.1.0.tgz#8c90b31468aeca3146b06c7144b386fd4827f64c" + integrity sha512-CjZjB8apvXl5h97Ck6SbeeCmU0sk56YPozPtTyGudPp1RGoHXNjFNtoOvwOG76EdpmMpxbK10DqcygI16Lu60Q== + dependencies: + "@azure/core-auth" "^1.1.4" + "@azure/ms-rest-js" "^2.2.0" + tslib "^1.10.0" + +"@azure/ms-rest-js@^2.0.4", "@azure/ms-rest-js@^2.2.0": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@azure/ms-rest-js/-/ms-rest-js-2.2.3.tgz#8f0085f7768c69f17b3cdb20ce95728b452dc304" + integrity sha512-sXOhOu/37Tr8428f32Jwuwga975Xw64pYg1UeUwOBMhkNgtn5vUuNRa3fhmem+I6f8EKoi6hOsYDFlaHeZ52jA== + dependencies: + "@azure/core-auth" "^1.1.4" + "@types/node-fetch" "^2.3.7" + "@types/tunnel" "0.0.1" + abort-controller "^3.0.0" + form-data "^2.5.0" + node-fetch "^2.6.0" + tough-cookie "^3.0.1" + tslib "^1.10.0" + tunnel "0.0.6" + uuid "^3.3.2" + xml2js "^0.4.19" + +"@azure/ms-rest-nodeauth@^3.0.0": + version "3.0.7" + resolved "https://registry.yarnpkg.com/@azure/ms-rest-nodeauth/-/ms-rest-nodeauth-3.0.7.tgz#73c399b0aef45c75104324b6617aa4e0a6c27875" + integrity sha512-7Q1MyMB+eqUQy8JO+virSIzAjqR2UbKXE/YQZe+53gC8yakm8WOQ5OzGfPP+eyHqeRs6bQESyw2IC5feLWlT2A== + dependencies: + "@azure/ms-rest-azure-env" "^2.0.0" + "@azure/ms-rest-js" "^2.0.4" + adal-node "^0.1.28" + +"@babel/runtime@^7.5.5": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.16.7.tgz#03ff99f64106588c9c403c6ecb8c3bafbbdff1fa" + integrity sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ== + dependencies: + regenerator-runtime "^0.13.4" + +"@changesets/types@^4.0.1": + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/@changesets/types/-/types-4.0.2.tgz#d20e1e45bdc96a97cc509c655e708b53a9292465" + integrity sha512-OeDaB7D+WVy/ErymPzFm58IeGvz4DOl+oedyZETfnkfMezF/Uhrm1Ub6MHrO5LcAaQTW+ptDmr0fmaVyoTxgHw== + +"@grpc/grpc-js@^1.2.7": + version "1.2.10" + resolved "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.2.10.tgz#f316d29a45fcc324e923d593cb849d292b1ed598" + integrity sha512-wj6GkNiorWYaPiIZ767xImmw7avMMVUweTvPFg4mJWOxz2180DKwfuxhJJZ7rpc1+7D3mX/v8vJdxTuIo71Ieg== + dependencies: + "@types/node" ">=12.12.47" + google-auth-library "^6.1.1" + semver "^6.2.0" + +"@grpc/grpc-js@~1.3.8": + version "1.3.8" + resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.3.8.tgz#0d7dce9de7aeb20702a28f0704e61b0bf34061c1" + integrity sha512-4qJqqn+CU/nBydz9ePJP+oa8dz0U42Ut/GejlbyaQ1xTkynCc+ndNHHnISlNeHawDsv4MOAyP3mV/EnDNUw2zA== + dependencies: + "@types/node" ">=12.12.47" + +"@logdna/tail-file@^2.0.6": + version "2.0.6" + resolved "https://registry.npmjs.org/@logdna/tail-file/-/tail-file-2.0.6.tgz#083bf0aceca3c128be8d7d92290c90cc038b75eb" + integrity sha512-RSwA1ZWaDFdDjCqOdk5ZtCuf3bmyai8in9UQmIU57P2ZVcUbkQZ3HevC+WZ1QYyHwph3RgJQmC4IvAAi5yHIig== + +"@manypkg/find-root@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@manypkg/find-root/-/find-root-1.1.0.tgz#a62d8ed1cd7e7d4c11d9d52a8397460b5d4ad29f" + integrity sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA== + dependencies: + "@babel/runtime" "^7.5.5" + "@types/node" "^12.7.1" + find-up "^4.1.0" + fs-extra "^8.1.0" + +"@manypkg/get-packages@1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@manypkg/get-packages/-/get-packages-1.1.3.tgz#e184db9bba792fa4693de4658cfb1463ac2c9c47" + integrity sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A== + dependencies: + "@babel/runtime" "^7.5.5" + "@changesets/types" "^4.0.1" + "@manypkg/find-root" "^1.1.0" + fs-extra "^8.1.0" + globby "^11.0.0" 
+ read-yaml-file "^1.1.0" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" + integrity sha1-m4sMxmPWaafY9vXQiToU00jzD78= + +"@protobufjs/base64@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" + integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== + +"@protobufjs/codegen@^2.0.4": + version "2.0.4" + resolved "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" + integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== + +"@protobufjs/eventemitter@^1.1.0": + version "1.1.0" + resolved 
"https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" + integrity sha1-NVy8mLr61ZePntCV85diHx0Ga3A= + +"@protobufjs/fetch@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" + integrity sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU= + dependencies: + "@protobufjs/aspromise" "^1.1.1" + "@protobufjs/inquire" "^1.1.0" + +"@protobufjs/float@^1.0.2": + version "1.0.2" + resolved "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" + integrity sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E= + +"@protobufjs/inquire@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" + integrity sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik= + +"@protobufjs/path@^1.1.2": + version "1.1.2" + resolved "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" + integrity sha1-bMKyDFya1q0NzP0hynZz2Nf79o0= + +"@protobufjs/pool@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" + integrity sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q= + +"@protobufjs/utf8@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" + integrity sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA= + +"@pulumi/azure-native@1.56.0": + version "1.56.0" + resolved "https://registry.yarnpkg.com/@pulumi/azure-native/-/azure-native-1.56.0.tgz#d72fad1e6358a7b33d69ca586e247bbbaf536bad" + integrity sha512-KXH/ORxEuCtcjJNRNPphzRytHJ4HWppjeeUV09SBm9ySlmKEGkHkMmOSzN22JIfECbXzF4IAYKCP36IAePTBDg== + dependencies: + "@pulumi/pulumi" "^3.0.0" + +"@pulumi/azure@4.37.0": + version "4.37.0" + resolved 
"https://registry.yarnpkg.com/@pulumi/azure/-/azure-4.37.0.tgz#455ce5707b26e625ac286e4feb3003fa7163686f" + integrity sha512-R3UdFDxKBiKgVGnzlnaE7MbmAt4yxADyw8r5+7y/Ndvf5H0MULXgfY/5/w2/TimvzQGiLpeNu0k5B6pTMdje3w== + dependencies: + "@azure/functions" "=1.2.2" + "@azure/ms-rest-azure-js" "^2.0.1" + "@azure/ms-rest-nodeauth" "^3.0.0" + "@pulumi/pulumi" "^3.0.0" + azure-eventgrid "^1.6.0" + azure-functions-ts-essentials "^1.3.2" + moment "2.24.0" + node-fetch "^2.3.0" + +"@pulumi/cloudflare@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@pulumi/cloudflare/-/cloudflare-4.3.0.tgz#12e603290353ea300bbb919d6a81980217eb706d" + integrity sha512-hPnv9O+xEiFsfho75ILAWEdfpewhGyqF+V3ZpQzLH/J/yra0hUtSEnh188bVukZ/My7QBzmdN7MAf+8vc/XMcA== + dependencies: + "@pulumi/pulumi" "^3.0.0" + +"@pulumi/kubernetes@3.15.2": + version "3.15.2" + resolved "https://registry.yarnpkg.com/@pulumi/kubernetes/-/kubernetes-3.15.2.tgz#4ca769ea34fe9ac3e87d2cfa629905472ef7d9ea" + integrity sha512-TwhJdWbKoQqg7Mh8jG2e+lf4+fGq3BL6szdtarINovittbax86cpoxJC4nSGklXeAckGz1WXsk1ssHiSny9x2Q== + dependencies: + "@pulumi/pulumi" "^3.0.0" + "@types/glob" "^5.0.35" + "@types/node-fetch" "^2.1.4" + "@types/tmp" "^0.0.33" + glob "^7.1.2" + node-fetch "^2.3.0" + shell-quote "^1.6.1" + tmp "^0.0.33" + +"@pulumi/kubernetesx@0.1.6": + version "0.1.6" + resolved "https://registry.npmjs.org/@pulumi/kubernetesx/-/kubernetesx-0.1.6.tgz#61518ae6a7d37c17998561b8d5290546815bad1d" + integrity sha512-9VL4Yi4b4aLC/obBarJuNkm86kABByUZICYPSTdV396MGZtOc066o2brsB+kWVQcVfkYVXTPrpjIkAwBXXnzGw== + +"@pulumi/pulumi@3.24.1": + version "3.24.1" + resolved "https://registry.yarnpkg.com/@pulumi/pulumi/-/pulumi-3.24.1.tgz#e1e00d8af0516ee2356f73f491378a1d947d4255" + integrity sha512-tRbf4JgaLnUeImQ3Hco76YJSalp6YNY3CcV//u309iequ7Zyiap/A/acKXkJ3BeDjN7aCJ2wUYNWBjet2zArZA== + dependencies: + "@grpc/grpc-js" "~1.3.8" + "@logdna/tail-file" "^2.0.6" + "@pulumi/query" "^0.3.0" + google-protobuf "^3.5.0" + js-yaml "^3.14.0" + minimist 
"^1.2.0" + normalize-package-data "^2.4.0" + protobufjs "^6.8.6" + read-package-tree "^5.3.1" + require-from-string "^2.0.1" + semver "^6.1.0" + source-map-support "^0.4.16" + ts-node "^7.0.1" + typescript "~3.7.3" + upath "^1.1.0" + +"@pulumi/pulumi@^3.0.0": + version "3.0.0" + resolved "https://registry.npmjs.org/@pulumi/pulumi/-/pulumi-3.0.0.tgz#b1a1a845718b21d3b4975740627f5b25f2c04fcb" + integrity sha512-s9pwbdFrMU8vt4F5aIf8cpnDmHSM5Pn5V2Y7T7m0R14pfxTjCqt5ZAuEdKys0SgL+DxDp5L4Kz/53SXC6MFEEw== + dependencies: + "@grpc/grpc-js" "^1.2.7" + "@logdna/tail-file" "^2.0.6" + "@pulumi/query" "^0.3.0" + google-protobuf "^3.5.0" + js-yaml "^3.14.0" + minimist "^1.2.0" + normalize-package-data "^2.4.0" + protobufjs "^6.8.6" + read-package-tree "^5.3.1" + require-from-string "^2.0.1" + semver "^6.1.0" + source-map-support "^0.4.16" + split2 "^3.2.2" + ts-node "^7.0.1" + typescript "~3.7.3" + upath "^1.1.0" + +"@pulumi/query@^0.3.0": + version "0.3.0" + resolved "https://registry.npmjs.org/@pulumi/query/-/query-0.3.0.tgz#f496608e86a18c3dd31b6c533408e2441c29071d" + integrity sha512-xfo+yLRM2zVjVEA4p23IjQWzyWl1ZhWOGobsBqRpIarzLvwNH/RAGaoehdxlhx4X92302DrpdIFgTICMN4P38w== + +"@pulumi/random@4.3.1": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@pulumi/random/-/random-4.3.1.tgz#f035adadf44ca0f3f97b5b4e5cd20839cf9480c0" + integrity sha512-QdNUEOSPhCdIVW1L2/8dsMdJCY0GRBDednDP1Q4qzNruq7eC483S7lkB4l/1dgI+jq7WMQU7boJcMquqIY7j0A== + dependencies: + "@pulumi/pulumi" "^3.0.0" + +"@types/events@*": + version "3.0.0" + resolved "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" + integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== + +"@types/glob@^5.0.35": + version "5.0.36" + resolved "https://registry.npmjs.org/@types/glob/-/glob-5.0.36.tgz#0c80a9c8664fc7d19781de229f287077fd622cb2" + integrity 
sha512-KEzSKuP2+3oOjYYjujue6Z3Yqis5HKA1BsIC+jZ1v3lrRNdsqyNNtX0rQf6LSuI4DJJ2z5UV//zBZCcvM0xikg== + dependencies: + "@types/events" "*" + "@types/minimatch" "*" + "@types/node" "*" + +"@types/long@^4.0.1": + version "4.0.1" + resolved "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9" + integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== + +"@types/mime-types@2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@types/mime-types/-/mime-types-2.1.1.tgz#d9ba43490fa3a3df958759adf69396c3532cf2c1" + integrity sha512-vXOTGVSLR2jMw440moWTC7H19iUyLtP3Z1YTj7cSsubOICinjMxFeb/V57v9QdyyPGbbWolUFSSmSiRSn94tFw== + +"@types/minimatch@*": + version "3.0.3" + resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" + integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== + +"@types/node-fetch@^2.1.4", "@types/node-fetch@^2.3.7": + version "2.5.8" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.8.tgz#e199c835d234c7eb0846f6618012e558544ee2fb" + integrity sha512-fbjI6ja0N5ZA8TV53RUqzsKNkl9fv8Oj3T7zxW7FGv1GSH7gwJaNF8dzCjrqKaxKeUpTz4yT1DaJFq/omNpGfw== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + +"@types/node@*", "@types/node@>=12.12.47": + version "14.14.32" + resolved "https://registry.npmjs.org/@types/node/-/node-14.14.32.tgz#90c5c4a8d72bbbfe53033f122341343249183448" + integrity sha512-/Ctrftx/zp4m8JOujM5ZhwzlWLx22nbQJiVqz8/zE15gOeEW+uly3FSX4fGFpcfEvFzXcMCJwq9lGVWgyARXhg== + +"@types/node@17.0.17": + version "17.0.17" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.17.tgz#a8ddf6e0c2341718d74ee3dc413a13a042c45a0c" + integrity sha512-e8PUNQy1HgJGV3iU/Bp2+D/DXh3PYeyli8LgIwsQcs1Ar1LoaWHSIT6Rw+H2rNJmiq6SNWiDytfx8+gYj7wDHw== + +"@types/node@^12.7.1": + version "12.20.42" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-12.20.42.tgz#2f021733232c2130c26f9eabbdd3bfd881774733" + integrity sha512-aI3/oo5DzyiI5R/xAhxxRzfZlWlsbbqdgxfTPkqu/Zt+23GXiJvMCyPJT4+xKSXOnLqoL8jJYMLTwvK2M3a5hw== + +"@types/node@^13.7.0": + version "13.13.45" + resolved "https://registry.npmjs.org/@types/node/-/node-13.13.45.tgz#e6676bcca092bae5751d015f074a234d5a82eb63" + integrity sha512-703YTEp8AwQeapI0PTXDOj+Bs/mtdV/k9VcTP7z/de+lx6XjFMKdB+JhKnK+6PZ5za7omgZ3V6qm/dNkMj/Zow== + +"@types/node@^8.0.47": + version "8.10.66" + resolved "https://registry.yarnpkg.com/@types/node/-/node-8.10.66.tgz#dd035d409df322acc83dff62a602f12a5783bbb3" + integrity sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw== + +"@types/tmp@^0.0.33": + version "0.0.33" + resolved "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz#1073c4bc824754ae3d10cfab88ab0237ba964e4d" + integrity sha1-EHPEvIJHVK49EM+riKsCN7qWTk0= + +"@types/tunnel@0.0.1": + version "0.0.1" + resolved "https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.1.tgz#0d72774768b73df26f25df9184273a42da72b19c" + integrity sha512-AOqu6bQu5MSWwYvehMXLukFHnupHrpZ8nvgae5Ggie9UwzDR1CCwoXgSSWNZJuyOlCdfdsWMA5F2LlmvyoTv8A== + dependencies: + "@types/node" "*" + +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + +adal-node@^0.1.28: + version "0.1.28" + resolved "https://registry.yarnpkg.com/adal-node/-/adal-node-0.1.28.tgz#468c4bb3ebbd96b1270669f4b9cba4e0065ea485" + integrity sha1-RoxLs+u9lrEnBmn0ucuk4AZepIU= + dependencies: + "@types/node" "^8.0.47" + async ">=0.6.0" + date-utils "*" + jws "3.x.x" + request ">= 2.52.0" + underscore ">= 1.3.1" + uuid "^3.1.0" + xmldom ">= 0.1.x" + xpath.js "~1.1.0" + +agent-base@6: + version 
"6.0.2" + resolved "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv@^6.12.3: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +arrify@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= + +arrify@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + +asap@^2.0.0: + version "2.0.6" + resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +asn1@~0.2.3: + version "0.2.4" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" + integrity 
sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= + +async@2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.0.tgz#61a29abb6fcc026fea77e56d1c6ec53a795951f4" + integrity sha512-xAfGg1/NTLBBKlHFmnd7PlmUW9KhVQIUuSrYem9xzFUZy13ScvtyGGejaae9iAVRiRq9+Cx7DPFaAAhCpyxyPw== + dependencies: + lodash "^4.14.0" + +async@>=0.6.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.0.tgz#b3a2685c5ebb641d3de02d161002c60fc9f85720" + integrity sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + +aws4@^1.8.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + +azure-eventgrid@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/azure-eventgrid/-/azure-eventgrid-1.6.0.tgz#c9b75bbdb3bb4a57963c329e9301b6129c2fa160" + integrity sha512-MZ84z3NEbRCdqNY0d2C+7PVq6mzNfCw38XLIqRdSNSn6V/rV+HstlgrqvN4vZzxXVhhlzUhYAvl13U5jvvu7LQ== + dependencies: + ms-rest "^2.3.3" + ms-rest-azure "^2.5.5" + +azure-functions-ts-essentials@^1.3.2: + version "1.3.2" + resolved 
"https://registry.yarnpkg.com/azure-functions-ts-essentials/-/azure-functions-ts-essentials-1.3.2.tgz#a0917842ef700125411289e0c5c2f04e0d961d7c" + integrity sha512-DdeXyzAbx632bwNs/+xp6meXWHvKpHCC2647Trfzs48QtEgsxEUB+0Jz4W5J4SNs4k640lWE6431ITOuPsSgpQ== + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base64-js@^1.3.0: + version "1.5.1" + resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + dependencies: + tweetnacl "^0.14.3" + +bignumber.js@^9.0.0: + version "9.0.1" + resolved "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz#8d7ba124c882bfd8e43260c67475518d0689e4e5" + integrity sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved 
"https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk= + +buffer-from@^1.0.0, buffer-from@^1.1.0: + version "1.1.1" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= + +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +core-util-is@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + 
dependencies: + assert-plus "^1.0.0" + +date-utils@*: + version "1.2.21" + resolved "https://registry.yarnpkg.com/date-utils/-/date-utils-1.2.21.tgz#61fb16cdc1274b3c9acaaffe9fc69df8720a2b64" + integrity sha1-YfsWzcEnSzyayq/+n8ad+HIKK2Q= + +debug@4: + version "4.3.1" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms "2.1.2" + +debuglog@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" + integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +dezalgo@^1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz#7f742de066fc748bc8db820569dddce49bf0d456" + integrity sha1-f3Qt4Gb8dIvI24IFad3c5Jvw1FY= + dependencies: + asap "^2.0.0" + wrappy "1" + +diff@^3.1.0: + version "3.5.0" + resolved "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + 
path-type "^4.0.0" + +duplexer@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11: + version "1.0.11" + resolved "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +es-abstract@^1.18.0-next.2: + version "1.18.0" + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0.tgz#ab80b359eecb7ede4c298000390bc5ac3ec7b5a4" + integrity sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + get-intrinsic "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.2" + is-callable "^1.2.3" + is-negative-zero "^2.0.1" + is-regex "^1.1.2" + is-string "^1.0.5" + object-inspect "^1.9.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + string.prototype.trimend "^1.0.4" + string.prototype.trimstart "^1.0.4" + unbox-primitive "^1.0.0" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +esprima@^4.0.0: + version "4.0.1" + resolved 
"https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + +extend@^3.0.2, extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-text-encoding@^1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53" + integrity sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig== + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + +form-data@^2.5.0: + version "2.5.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" + integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types 
"^2.1.12" + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +fs-extra@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" + integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +gaxios@^4.0.0: + version "4.2.0" + resolved "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz#33bdc4fc241fc33b8915a4b8c07cfb368b932e46" + integrity sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g== + dependencies: + abort-controller "^3.0.0" + extend "^3.0.2" + https-proxy-agent "^5.0.0" + is-stream "^2.0.0" + node-fetch "^2.3.0" + +gcp-metadata@^4.2.0: + version "4.2.1" + resolved 
"https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz#31849fbcf9025ef34c2297c32a89a1e7e9f2cd62" + integrity sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw== + dependencies: + gaxios "^4.0.0" + json-bigint "^1.0.0" + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + dependencies: + assert-plus "^1.0.0" + +glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@^7.1.1, glob@^7.1.2: + version "7.1.6" + resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globby@^11.0.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +google-auth-library@^6.1.1: 
+ version "6.1.6" + resolved "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz#deacdcdb883d9ed6bac78bb5d79a078877fdf572" + integrity sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ== + dependencies: + arrify "^2.0.0" + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + fast-text-encoding "^1.0.0" + gaxios "^4.0.0" + gcp-metadata "^4.2.0" + gtoken "^5.0.4" + jws "^4.0.0" + lru-cache "^6.0.0" + +google-p12-pem@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz#673ac3a75d3903a87f05878f3c75e06fc151669e" + integrity sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA== + dependencies: + node-forge "^0.10.0" + +google-protobuf@^3.5.0: + version "3.15.5" + resolved "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.5.tgz#d011f334231e79b472e42d4e8ec7409cd402a747" + integrity sha512-6bLpAI4nMIQODlegR7OevgkCoyOj5frLVDArUpeuBWad7XWUNWMGP0v5lz1/aeUI6Yf3cG9XA6acZkPxom4SEw== + +graceful-fs@^4.1.2: + version "4.2.6" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" + integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== + +graceful-fs@^4.1.5, graceful-fs@^4.1.6, graceful-fs@^4.2.0: + version "4.2.9" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" + integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== + +gtoken@^5.0.4: + version "5.2.1" + resolved "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz#4dae1fea17270f457954b4a45234bba5fc796d16" + integrity sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw== + dependencies: + gaxios "^4.0.0" + google-p12-pem "^3.0.3" + jws "^4.0.0" + +har-schema@^2.0.0: + version 
"2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +has-bigints@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" + integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + +has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" + integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hosted-git-info@^2.1.4: + version "2.8.8" + resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488" + integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg== + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +https-proxy-agent@^5.0.0: + version "5.0.0" + resolved 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + agent-base "6" + debug "4" + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.3: + version "2.0.4" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ip-regex@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" + integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= + +is-bigint@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.1.tgz#6923051dfcbc764278540b9ce0e6b3213aa5ebc2" + integrity sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg== + +is-boolean-object@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.0.tgz#e2aaad3a3a8fca34c28f6eee135b156ed2587ff0" + integrity sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA== + dependencies: + call-bind "^1.0.0" + +is-buffer@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity 
sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-callable@^1.1.4, is-callable@^1.2.3: + version "1.2.3" + resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" + integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== + +is-core-module@^2.2.0: + version "2.2.0" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" + integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-glob@^4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" + integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== + +is-number-object@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz#36ac95e741cf18b283fc1ddf5e83da798e3ec197" + integrity 
sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw== + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-regex@^1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251" + integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg== + dependencies: + call-bind "^1.0.2" + has-symbols "^1.0.1" + +is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + +is-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" + integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== + +is-string@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" + integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + dependencies: + has-symbols "^1.0.1" + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +isstream@~0.1.2: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + +js-yaml@^3.14.0, js-yaml@^3.6.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + +json-bigint@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1" + integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ== + dependencies: + bignumber.js "^9.0.0" + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity 
sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + optionalDependencies: + graceful-fs "^4.1.6" + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + +jwa@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz#a7e9c3f29dae94027ebcaf49975c9345593410fc" + integrity sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + +jws@3.x.x: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +jws@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz#2d4e8cf6a318ffaa12615e9dec7e86e6c97310f4" + integrity sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg== + dependencies: + jwa "^2.0.0" + safe-buffer "^5.0.1" + +locate-path@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash@^4.14.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +long@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" + integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" + integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + dependencies: + braces "^3.0.1" + picomatch "^2.2.3" + +mime-db@1.46.0: + version "1.46.0" + resolved 
"https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz#6267748a7f799594de3cbc8cde91def349661cee" + integrity sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ== + +mime-types@^2.1.12, mime-types@~2.1.19: + version "2.1.29" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz#1d4ab77da64b91f5f72489df29236563754bb1b2" + integrity sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ== + dependencies: + mime-db "1.46.0" + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +mkdirp@^0.5.1: + version "0.5.5" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + dependencies: + minimist "^1.2.5" + +moment@2.24.0: + version "2.24.0" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b" + integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg== + +moment@^2.21.0, moment@^2.22.2: + version "2.29.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" + integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== + +ms-rest-azure@^2.5.5: + version "2.6.0" + resolved 
"https://registry.yarnpkg.com/ms-rest-azure/-/ms-rest-azure-2.6.0.tgz#2098efec529eecfa0c6e215b69143abcaba12140" + integrity sha512-J6386a9krZ4VtU7CRt+Ypgo9RGf8+d3gjMBkH7zbkM4zzkhbbMOYiPRaZ+bHZcfihkKLlktTgA6rjshTjF329A== + dependencies: + adal-node "^0.1.28" + async "2.6.0" + moment "^2.22.2" + ms-rest "^2.3.2" + request "^2.88.0" + uuid "^3.2.1" + +ms-rest@^2.3.2, ms-rest@^2.3.3: + version "2.5.4" + resolved "https://registry.yarnpkg.com/ms-rest/-/ms-rest-2.5.4.tgz#57b42299cf302e45d5e1a734220bf7d4a110167a" + integrity sha512-VeqCbawxRM6nhw0RKNfj7TWL7SL8PB6MypqwgylXCi+u412uvYoyY/kSmO8n06wyd8nIcnTbYToCmSKFMI1mCg== + dependencies: + duplexer "^0.1.1" + is-buffer "^1.1.6" + is-stream "^1.1.0" + moment "^2.21.0" + request "^2.88.0" + through "^2.3.8" + tunnel "0.0.5" + uuid "^3.2.1" + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +node-fetch@^2.3.0, node-fetch@^2.6.0: + version "2.6.1" + resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" + integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== + +node-forge@^0.10.0: + version "0.10.0" + resolved "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" + integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== + +normalize-package-data@^2.0.0, normalize-package-data@^2.4.0: + version "2.5.0" + resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + 
validate-npm-package-license "^3.0.1" + +npm-normalize-package-bin@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2" + integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA== + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-inspect@^1.9.0: + version "1.9.0" + resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a" + integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.getownpropertydescriptors@^2.0.3: + version "2.1.2" + resolved "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz#1bd63aeacf0d5d2d2f31b5e393b03a7c601a23f7" + integrity sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.2" + +once@^1.3.0: 
+ version "1.4.0" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +path-type@^4.0.0: + 
version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +picomatch@^2.2.3: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +protobufjs@^6.8.6: + version "6.10.2" + resolved "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz#b9cb6bd8ec8f87514592ba3fdfd28e93f33a469b" + integrity sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/long" "^4.0.1" + "@types/node" "^13.7.0" + long "^4.0.0" + +psl@^1.1.28: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +qs@~6.5.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +read-package-json@^2.0.0: + version "2.1.2" + resolved "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz#6992b2b66c7177259feb8eaac73c3acd28b9222a" + integrity sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA== + dependencies: + glob "^7.1.1" + json-parse-even-better-errors "^2.3.0" + normalize-package-data "^2.0.0" + npm-normalize-package-bin "^1.0.0" + +read-package-tree@^5.3.1: + version "5.3.1" + resolved "https://registry.npmjs.org/read-package-tree/-/read-package-tree-5.3.1.tgz#a32cb64c7f31eb8a6f31ef06f9cedf74068fe636" + integrity sha512-mLUDsD5JVtlZxjSlPPx1RETkNjjvQYuweKwNVt1Sn8kP5Jh44pvYuUHCp6xSVDZWbNxVxG5lyZJ921aJH61sTw== + dependencies: + read-package-json "^2.0.0" + readdir-scoped-modules "^1.0.0" + util-promisify "^2.1.0" + +read-yaml-file@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-yaml-file/-/read-yaml-file-1.1.0.tgz#9362bbcbdc77007cc8ea4519fe1c0b821a7ce0d8" + integrity sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA== + dependencies: + graceful-fs "^4.1.5" + js-yaml "^3.6.1" + pify "^4.0.1" + strip-bom "^3.0.0" + +readable-stream@^3.0.0: + version "3.6.0" + resolved 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdir-scoped-modules@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz#8d45407b4f870a0dcaebc0e28670d18e74514309" + integrity sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw== + dependencies: + debuglog "^1.0.1" + dezalgo "^1.0.0" + graceful-fs "^4.1.2" + once "^1.3.0" + +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +"request@>= 2.52.0", request@^2.88.0: + version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +require-from-string@^2.0.1: + version "2.0.2" + resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity 
sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +resolve@^1.10.0: + version "1.20.0" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" + integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@>=0.6.0: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +"semver@2 || 3 || 4 || 5": + version "5.7.1" + resolved 
"https://registry.npmjs.org/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@^6.1.0, semver@^6.2.0: + version "6.3.0" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +shell-quote@^1.6.1: + version "1.7.2" + resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" + integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map-support@^0.4.16: + version "0.4.18" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" + integrity sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== + dependencies: + source-map "^0.5.6" + +source-map-support@^0.5.6: + version "0.5.19" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" + integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0: + version "0.6.1" + resolved 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +spdx-correct@^3.0.0: + version "3.1.1" + resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" + integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.3.0" + resolved "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.7" + resolved "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz#e9c18a410e5ed7e12442a549fbd8afa767038d65" + integrity sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ== + +split2@^3.2.2: + version "3.2.2" + resolved "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" + integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== + dependencies: + readable-stream "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +sshpk@^1.7.0: + 
version "1.16.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" + integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +string.prototype.trimend@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" + integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string.prototype.trimstart@^1.0.4: + version "1.0.4" + resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" + integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +through@^2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +tmp@^0.0.33: + version "0.0.33" + resolved 
"https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tough-cookie@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2" + integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg== + dependencies: + ip-regex "^2.1.0" + psl "^1.1.28" + punycode "^2.1.1" + +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +ts-node@^7.0.1: + version "7.0.1" + resolved "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf" + integrity sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw== + dependencies: + arrify "^1.0.0" + buffer-from "^1.1.0" + diff "^3.1.0" + make-error "^1.1.1" + minimist "^1.2.0" + mkdirp "^0.5.1" + source-map-support "^0.5.6" + yn "^2.0.0" + +tslib@^1.10.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" + integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + dependencies: + safe-buffer "^5.0.1" + +tunnel@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.5.tgz#d1532254749ed36620fcd1010865495a1fa9d0ae" + integrity sha512-gj5sdqherx4VZKMcBA4vewER7zdK25Td+z1npBqpbDys4eJrLx+SlYjJvq1bDXs2irkuJM5pf8ktaEQVipkrbA== + +tunnel@0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" + integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + +typescript@4.5.5: + version "4.5.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3" + integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== + +typescript@~3.7.3: + version "3.7.7" + resolved "https://registry.npmjs.org/typescript/-/typescript-3.7.7.tgz#c931733e2ec10dda56b855b379cc488a72a81199" + integrity sha512-MmQdgo/XenfZPvVLtKZOq9jQQvzaUAUpcKW8Z43x9B2fOm4S5g//tPtMweZUIP+SoBqrVPEIm+dJeQ9dfO0QdA== + +unbox-primitive@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.0.tgz#eeacbc4affa28e9b3d36b5eaeccc50b3251b1d3f" + integrity sha512-P/51NX+JXyxK/aigg1/ZgyccdAxm5K1+n8+tvqSntjOivPt19gvm1VC49RWYetsiub8WViUchdxl/KWHHB0kzA== + dependencies: 
+ function-bind "^1.1.1" + has-bigints "^1.0.0" + has-symbols "^1.0.0" + which-boxed-primitive "^1.0.1" + +"underscore@>= 1.3.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.12.1.tgz#7bb8cc9b3d397e201cf8553336d262544ead829e" + integrity sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw== + +universalify@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +upath@^1.1.0: + version "1.2.0" + resolved "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +util-deprecate@^1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util-promisify@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/util-promisify/-/util-promisify-2.1.0.tgz#3c2236476c4d32c5ff3c47002add7c13b9a82a53" + integrity sha1-PCI2R2xNMsX/PEcAKt18E7moKlM= + dependencies: + object.getownpropertydescriptors "^2.0.3" + +uuid@^3.1.0, uuid@^3.2.1, uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +validate-npm-package-license@^3.0.1: + version "3.0.4" 
+ resolved "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +which-boxed-primitive@^1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +wrappy@1: + version "1.0.2" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +xml2js@^0.4.19: + version "0.4.23" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66" + integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + +xmlbuilder@~11.0.0: + version "11.0.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + +"xmldom@>= 0.1.x": + version "0.5.0" + resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.5.0.tgz#193cb96b84aa3486127ea6272c4596354cb4962e" + integrity 
sha512-Foaj5FXVzgn7xFzsKeNIde9g6aFBxTPi37iwsno8QvApmtg7KYrr+OPyRHcJF7dud2a5nGRBXK3n0dL62Gf7PA== + +xpath.js@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/xpath.js/-/xpath.js-1.1.0.tgz#3816a44ed4bb352091083d002a383dd5104a5ff1" + integrity sha512-jg+qkfS4K8E7965sqaUl8mRngXiKb3WZGfONgE18pr03FUQiuSV6G+Ej4tS55B+rIQSFEIw3phdVAQ4pPqNWfQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yn@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" + integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo= diff --git a/docs/DEPLOYMENT.md b/docs/DEPLOYMENT.md new file mode 100644 index 000000000..a7a7510f8 --- /dev/null +++ b/docs/DEPLOYMENT.md @@ -0,0 +1,15 @@ +## Deployment + +Deployment is based on NPM packages. That means we are bundling (as much as possible) each service or package, and publishing it to the private GitHub Packages artifactory. + +Doing that allows us to have simple, super-fast deployments, because we don't need to deal with Docker images (which are heavy). + +We create an executable package (with `bin` entrypoint) and then use `npx PACKAGE_NAME@PACKAGE_VERSION` as command for a base Docker image of NodeJS. So instead of building a Docker image for each change, we build an NPM package, and the Docker image we are using in prod is the same. + +Think of it as Lambda (bundled JS, runtime is predefined) without all the crap (weird cache, weird pricing, cold start and so on). + +### How to deploy? + +We are using Pulumi (infrastructure as code) to describe and run our deployment. It's managed as a GitHub Actions workflow that runs on every bump release by Changesets.
+ +So changes are aggregated in a Changesets PR, and when merged, it updates the deployment manifest `package.json`, leading to a deployment of only the updated packages to production. diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md new file mode 100644 index 000000000..5b2a70bd4 --- /dev/null +++ b/docs/DEVELOPMENT.md @@ -0,0 +1,55 @@ +# Development + +## Setup Instructions + +- Clone the repository locally +- Make sure to install the recommended VSCode extensions (defined in `.vscode/extensions.json`) +- In the root of the repo, run `nvm use` to use the node version specified in `.nvmrc` +- Run `yarn` at the root to install all the dependencies and run the hooks + +- Run `yarn setup` to create and apply migrations on the PostgreSQL database +- Run `yarn generate` to generate the typings from the graphql files (use `yarn graphql:generate` if you only need to run GraphQL Codegen) +- Run `yarn build` to build all services +- Click on `Start Hive` in the bottom bar of VSCode +- Open the UI (`http://localhost:3000` by default) and sign in with any of the identity providers +- If you are not added to the list of guest users, request access from The Guild maintainers +- Once this is done, you should be able to log in and use the project +- Once you generate the token against your organization/personal account in hive, the same can be added locally to `hive.json` within `packages/libraries/cli` which can be used to interact via the hive cli with the registry + +## Development Seed + +We have a script to feed your local instance of Hive. + +1. Use `Start Hive` to run your local Hive instance. +2. Make sure `usage` and `usage-ingestor` are running as well (with `yarn dev`) +3. Open Hive app, create a project and a target, then create a token. +4. Run the seed script: `TOKEN="MY_TOKEN_HERE" yarn seed` +5. This should report a dummy schema and some dummy usage data to your local instance of Hive, allowing you to test features e2e.
+ +> Note: You can set `STAGING=1` in order to target staging env and seed a target there. + +> To send more operations and test heavy load on Hive instance, you can also set `OPERATIONS` (amount of operations in each interval round, default is `1`) and `INTERVAL` (frequency of sending operations, default: `1000`ms). For example, using `INTERVAL=1000 OPERATIONS=1000` will send 1000 requests per second. + +## Publish your first schema (manually) + +1. Start Hive locally +1. Create a project and a target +1. Create a token from that target +1. Go to `packages/libraries/cli` and run `yarn build` +1. Inside `packages/libraries/cli`, run: `yarn start schema:publish --token "YOUR_TOKEN_HERE" --registry "http://localhost:4000/graphql" examples/single.graphql` + +### Setting up Slack App for developing + +1. [Download](https://loophole.cloud/download) Loophole CLI (same as ngrok but supports non-random urls) +2. Log in to Loophole `$ loophole account login` +3. Start the proxy by running `$ loophole http 3000 --hostname hive-` (@kamilkisiela I use `hive-kamil`). It creates `https://hive-.loophole.site` endpoint. +4. Message @kamilkisiela and send him the url (He will update the list of accepted redirect urls in both Auth0 and Slack App). +5. Update `APP_BASE_URL` and `AUTH0_BASE_URL` in [`packages/web/app/.env`](./packages/web/app/.env) +6. Run `packages/web/app` and open `https://hive-.loophole.site`. + +> We have a special slack channel called `#hive-tests` to not spam people :) + +### Setting up GitHub App for developing + +1. Follow the steps above for Slack App. +2. Update `Setup URL` in [GraphQL Hive Development](https://github.com/organizations/the-guild-org/settings/apps/graphql-hive-development) app and set it to `https://hive-.loophole.site/api/github/setup-callback`. diff --git a/docs/TESTING.md b/docs/TESTING.md new file mode 100644 index 000000000..f810fe1b8 --- /dev/null +++ b/docs/TESTING.md @@ -0,0 +1,23 @@ +# Testing + +## Unit tests + +We are using Jest. 
Simply run `yarn test` to run all the tests. + +## Integration Tests + +We are using Dockest to test the following concerns: + +1. Main application flows and integration of different services +2. Build and pack process of all packages +3. Containerize execution of all services +4. Cross-service network calls + +To run integration tests locally, follow: + +1. Make sure you have Docker installed. If you are having issues, try to run `docker system prune` to clean the Docker caches. +2. Install all deps: `yarn install` +3. Generate types: `yarn graphql:generate` +4. Build and pack all services: `yarn workspace integration-tests run build-and-pack` +5. Pull the images: `docker-compose -f integration-tests/docker-compose.yml pull` +6. Run the tests: `yarn workspace integration-tests run dockest` diff --git a/integration-tests/.env.local b/integration-tests/.env.local new file mode 100644 index 000000000..63040da9d --- /dev/null +++ b/integration-tests/.env.local @@ -0,0 +1,12 @@ +AUTH0_DOMAIN="" +AUTH0_CLIENT_ID="" +AUTH0_CLIENT_SECRET="" +AUTH0_USER_PASSWORD="" +AUTH0_USER_MAIN_EMAIL="" +AUTH0_USER_EXTRA_EMAIL="" +AUTH0_SECRET="" +AUTH0_AUDIENCE="" +AUTH0_CONNECTION="" + +STRIPE_SECRET_KEY="" + diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore new file mode 100644 index 000000000..125646da7 --- /dev/null +++ b/integration-tests/.gitignore @@ -0,0 +1,4 @@ +docker-compose.dockest-generated.yml +db-clickhouse +tarballs +volumes \ No newline at end of file diff --git a/integration-tests/docker-compose.yml b/integration-tests/docker-compose.yml new file mode 100644 index 000000000..90043a086 --- /dev/null +++ b/integration-tests/docker-compose.yml @@ -0,0 +1,494 @@ +version: '3.8' +services: + db: + image: postgres:13.4-alpine + ports: + - '5432:5432' + environment: + POSTGRES_DB: registry + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + PGDATA: /var/lib/postgresql/data + healthcheck: + test: ['CMD-SHELL', 'pg_isready'] + interval: 5s + 
timeout: 5s + retries: 6 + networks: + - 'stack' + + clickhouse: + image: clickhouse/clickhouse-server:22.3.5.5-alpine + volumes: + - ../packages/services/storage/configs/clickhouse:/etc/clickhouse-server/conf.d + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:8123/ping'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 10s + environment: + CLICKHOUSE_USER: test + CLICKHOUSE_PASSWORD: test + KAFKA_BROKER: broker:29092 + ports: + - '8123:8123' + networks: + - 'stack' + + zookeeper: + image: confluentinc/cp-zookeeper:6.2.2-3-ubi8 + hostname: zookeeper + networks: + - 'stack' + ports: + - '2181:2181' + ulimits: + nofile: + soft: 20000 + hard: 40000 + healthcheck: + test: ['CMD', 'cub', 'zk-ready', '127.0.0.1:2181', '10'] + interval: 5s + timeout: 10s + retries: 6 + start_period: 15s + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + + broker: + image: confluentinc/cp-kafka:6.2.2-3-ubi8 + hostname: broker + depends_on: + zookeeper: + condition: service_healthy + networks: + - 'stack' + ports: + - '29092:29092' + - '9092:9092' + ulimits: + nofile: + soft: 20000 + hard: 40000 + healthcheck: + test: + [ + 'CMD', + 'cub', + 'kafka-ready', + '1', + '5', + '-b', + '127.0.0.1:9092', + '-c', + '/etc/kafka/kafka.properties', + ] + interval: 15s + timeout: 10s + retries: 6 + start_period: 15s + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + + redis: + image: bitnami/redis:6.2 + networks: + - 'stack' + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] + interval: 5s + timeout: 10s + retries: 6 + start_period: 
5s + ports: + - '6379:6379' + environment: + - REDIS_PASSWORD=test + - REDIS_DISABLE_COMMANDS=FLUSHDB,FLUSHALL + + migrations: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-migrations.sh' + networks: + - 'stack' + depends_on: + clickhouse: + condition: service_healthy + db: + condition: service_healthy + broker: + condition: service_healthy + environment: + MIGRATOR: 'up' + CLICKHOUSE_MIGRATOR: 'up' + POSTGRES_CONNECTION_STRING: 'postgresql://postgres:postgres@db:5432/registry' + CLICKHOUSE_PROTOCOL: 'http' + CLICKHOUSE_HOST: 'clickhouse' + CLICKHOUSE_PORT: '8123' + CLICKHOUSE_USERNAME: 'test' + CLICKHOUSE_PASSWORD: 'test' + KAFKA_BROKER: 'broker:29092' + volumes: + - './tarballs/storage.tgz:/storage.tgz' + - './run-migrations.sh:/run-migrations.sh' + + server: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-server.sh' + networks: + - 'stack' + depends_on: + redis: + condition: service_healthy + clickhouse: + condition: service_healthy + migrations: + condition: service_completed_successfully + tokens: + condition: service_healthy + webhooks: + condition: service_healthy + schema: + condition: service_healthy + usage_estimator: + condition: service_healthy + rate_limit: + condition: service_healthy + stripe_billing: + condition: service_healthy + local_cdn: + condition: service_healthy + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3001/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3001:3001' + volumes: + - './tarballs/server.tgz:/server.tgz' + - './run-server.sh:/run-server.sh' + environment: + POSTGRES_HOST: db + POSTGRES_PORT: 5432 + POSTGRES_DB: registry + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + CLICKHOUSE_PROTOCOL: 'http' + CLICKHOUSE_HOST: clickhouse + CLICKHOUSE_PORT: 8123 + CLICKHOUSE_USERNAME: test + CLICKHOUSE_PASSWORD: test + REDIS_HOST: redis + REDIS_PORT: 6379 + REDIS_PASSWORD: test + TOKENS_ENDPOINT: http://tokens:3003 + 
WEBHOOKS_ENDPOINT: http://webhooks:3005 + SCHEMA_ENDPOINT: http://schema:3002 + USAGE_ESTIMATOR_ENDPOINT: http://usage_estimator:3008 + RATE_LIMIT_ENDPOINT: http://rate_limit:3009 + BILLING_ENDPOINT: http://stripe_billing:3010 + CF_BASE_PATH: http://local_cdn:3004 + CF_ACCOUNT_ID: 103df45224310d669213971ce28b5b70 + CF_AUTH_TOKEN: 85e20c26c03759603c0f45884824a1c3 + CF_NAMESPACE_ID: 33b1e3bbb4a4707d05ea0307cbb55c79 + CDN_AUTH_PRIVATE_KEY: 1e1064ef9cda8bf38936b77317e90dc3 + CDN_BASE_URL: http://localhost:3004 + GITHUB_APP_ID: 123123 + GITHUB_APP_PRIVATE_KEY: 5f938d51a065476c4dc1b04aeba13afb + ENCRYPTION_SECRET: 8ebe95cf24c1fbe306e9fa32c8c33148 + FEEDBACK_SLACK_TOKEN: '' + FEEDBACK_SLACK_CHANNEL: '#hive' + AUTH0_SECRET: ${AUTH0_SECRET} + AUTH0_DOMAIN: ${AUTH0_DOMAIN} + AUTH0_CLIENT_ID: ${AUTH0_CLIENT_ID} + AUTH0_CLIENT_SECRET: ${AUTH0_CLIENT_SECRET} + AUTH0_SCOPE: 'openid profile offline_access' + AUTH0_AUDIENCE: ${AUTH0_AUDIENCE} + AUTH0_CONNECTION: ${AUTH0_CONNECTION} + PORT: 3001 + + schema: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-schema.sh' + networks: + - 'stack' + depends_on: + redis: + condition: service_healthy + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3002/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3002:3002' + volumes: + - './tarballs/schema.tgz:/schema.tgz' + - './run-schema.sh:/run-schema.sh' + environment: + PORT: 3002 + REDIS_HOST: redis + REDIS_PORT: 6379 + REDIS_PASSWORD: test + + tokens: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-tokens.sh' + networks: + - 'stack' + depends_on: + migrations: + condition: service_completed_successfully + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3003/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3003:3003' + volumes: + - './tarballs/tokens.tgz:/tokens.tgz' + - './run-tokens.sh:/run-tokens.sh' + environment: + POSTGRES_HOST: db 
+ POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_PORT: 5432 + POSTGRES_DB: registry + PORT: 3003 + + local_cdn: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-local-cdn.sh' + networks: + - 'stack' + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3004/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3004:3004' + volumes: + - '../packages/services/cdn-worker/dist/dev.js:/cdn.js' + - './run-local-cdn.sh:/run-local-cdn.sh' + environment: + PORT: 3004 + + webhooks: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-webhooks.sh' + networks: + - 'stack' + depends_on: + redis: + condition: service_healthy + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3005/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3005:3005' + volumes: + - './tarballs/webhooks.tgz:/webhooks.tgz' + - './run-webhooks.sh:/run-webhooks.sh' + environment: + BULLMQ_COMMANDS_FROM_ROOT: 'true' + PORT: 3005 + REDIS_HOST: redis + REDIS_PORT: 6379 + REDIS_PASSWORD: test + + usage: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-usage.sh' + networks: + - 'stack' + ports: + - '3006:3006' + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3006/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + volumes: + - './tarballs/usage.tgz:/usage.tgz' + - './run-usage.sh:/run-usage.sh' + depends_on: + broker: + condition: service_healthy + rate_limit: + condition: service_healthy + tokens: + condition: service_healthy + environment: + TOKENS_ENDPOINT: http://tokens:3003 + RATE_LIMIT_ENDPOINT: http://rate_limit:3009 + KAFKA_CONNECTION_MODE: 'docker' + KAFKA_BROKER: broker:29092 + KAFKA_BUFFER_SIZE: 350 + KAFKA_BUFFER_INTERVAL: 1000 + KAFKA_BUFFER_DYNAMIC: 'true' + PORT: 3006 + + usage_ingestor: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-usage-ingestor.sh' + 
networks: + - 'stack' + depends_on: + broker: + condition: service_healthy + clickhouse: + condition: service_healthy + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3007/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - '3007:3007' + volumes: + - './tarballs/usage-ingestor.tgz:/usage-ingestor.tgz' + - './run-usage-ingestor.sh:/run-usage-ingestor.sh' + environment: + KAFKA_CONNECTION_MODE: 'docker' + KAFKA_BROKER: broker:29092 + KAFKA_CONCURRENCY: 1 + CLICKHOUSE_PROTOCOL: 'http' + CLICKHOUSE_HOST: clickhouse + CLICKHOUSE_PORT: 8123 + CLICKHOUSE_USERNAME: test + CLICKHOUSE_PASSWORD: test + PORT: 3007 + + usage_estimator: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-usage-estimator.sh' + networks: + - 'stack' + ports: + - '3008:3008' + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3008/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + volumes: + - './tarballs/usage-estimator.tgz:/usage-estimator.tgz' + - './run-usage-estimator.sh:/run-usage-estimator.sh' + depends_on: + clickhouse: + condition: service_healthy + migrations: + condition: service_completed_successfully + environment: + POSTGRES_CONNECTION_STRING: 'postgresql://postgres:postgres@db:5432/registry' + CLICKHOUSE_PROTOCOL: 'http' + CLICKHOUSE_HOST: 'clickhouse' + CLICKHOUSE_PORT: '8123' + CLICKHOUSE_USERNAME: 'test' + CLICKHOUSE_PASSWORD: 'test' + PORT: 3008 + + rate_limit: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-rate-limit.sh' + networks: + - 'stack' + ports: + - '3009:3009' + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3009/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + volumes: + - './tarballs/rate-limit.tgz:/rate-limit.tgz' + - './run-rate-limit.sh:/run-rate-limit.sh' + depends_on: + clickhouse: + condition: service_healthy + migrations: + condition: service_completed_successfully + 
usage_estimator: + condition: service_healthy + environment: + POSTGRES_CONNECTION_STRING: 'postgresql://postgres:postgres@db:5432/registry' + USAGE_ESTIMATOR_ENDPOINT: http://usage_estimator:3008 + PORT: 3009 + + stripe_billing: + image: node:16.13.2-alpine3.14 + entrypoint: + - '/bin/sh' + - '/run-stripe-billing.sh' + networks: + - 'stack' + ports: + - '3010:3010' + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:3010/_readiness'] + interval: 5s + timeout: 5s + retries: 6 + start_period: 5s + volumes: + - './tarballs/stripe-billing.tgz:/stripe-billing.tgz' + - './run-stripe-billing.sh:/run-stripe-billing.sh' + depends_on: + clickhouse: + condition: service_healthy + migrations: + condition: service_completed_successfully + usage_estimator: + condition: service_healthy + environment: + STRIPE_SECRET_KEY: ${STRIPE_SECRET_KEY} + POSTGRES_CONNECTION_STRING: 'postgresql://postgres:postgres@db:5432/registry' + USAGE_ESTIMATOR_ENDPOINT: http://usage_estimator:3008 + PORT: 3010 + +networks: + stack: {} diff --git a/integration-tests/dockest.ts b/integration-tests/dockest.ts new file mode 100644 index 000000000..1bb28fbbf --- /dev/null +++ b/integration-tests/dockest.ts @@ -0,0 +1,33 @@ +import { Dockest, logLevel } from 'dockest'; +import { cleanDockerContainers, createServices } from './testkit/dockest'; +import dotenv from 'dotenv'; + +async function main() { + dotenv.config(); + + const dockest = new Dockest({ + logLevel: logLevel.DEBUG, + jestOpts: { + runInBand: true, + config: JSON.stringify({ + roots: ['/tests'], + transform: { + '^.+\\.ts$': 'ts-jest', + }, + testTimeout: 45_000, + maxConcurrency: 1, + setupFiles: ['dotenv/config'], + setupFilesAfterEnv: ['./jest-setup.ts'], + }), + }, + }); + + cleanDockerContainers(); + + return dockest.run(createServices()); +} + +await main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/integration-tests/fixtures/breaking-schema.graphql 
b/integration-tests/fixtures/breaking-schema.graphql new file mode 100644 index 000000000..cf9f433f2 --- /dev/null +++ b/integration-tests/fixtures/breaking-schema.graphql @@ -0,0 +1,8 @@ +type Query { + users: [User!] +} + +type User { + id: ID! + name: String! +} diff --git a/integration-tests/fixtures/init-schema.graphql b/integration-tests/fixtures/init-schema.graphql new file mode 100644 index 000000000..6995150b5 --- /dev/null +++ b/integration-tests/fixtures/init-schema.graphql @@ -0,0 +1,9 @@ +type Query { + users: [User!] +} + +type User { + id: ID! + name: String! + email: String! +} diff --git a/integration-tests/fixtures/nonbreaking-schema.graphql b/integration-tests/fixtures/nonbreaking-schema.graphql new file mode 100644 index 000000000..9b324ac89 --- /dev/null +++ b/integration-tests/fixtures/nonbreaking-schema.graphql @@ -0,0 +1,10 @@ +type Query { + users: [User!] +} + +type User { + id: ID! + name: String! + email: String! + nickname: String +} diff --git a/integration-tests/jest-setup.ts b/integration-tests/jest-setup.ts new file mode 100644 index 000000000..b5bfed5fd --- /dev/null +++ b/integration-tests/jest-setup.ts @@ -0,0 +1,20 @@ +import { createPool } from 'slonik'; +import * as utils from 'dockest/test-helper'; +import { resetDb } from './testkit/db'; +import { resetClickHouse } from './testkit/clickhouse'; +import { resetRedis } from './testkit/redis'; + +const dbAddress = utils.getServiceAddress('db', 5432); +const redisAddress = utils.getServiceAddress('redis', 6379); + +const pool = createPool(`postgresql://postgres:postgres@${dbAddress}/registry`); + +beforeEach(() => resetDb(pool)); +beforeEach(() => resetClickHouse()); +beforeEach(() => + resetRedis({ + host: redisAddress.replace(':6379', ''), + port: 6379, + password: 'test', + }) +); diff --git a/integration-tests/package.json b/integration-tests/package.json new file mode 100644 index 000000000..56bbcfa0b --- /dev/null +++ b/integration-tests/package.json @@ -0,0 +1,23 @@ +{ + 
"name": "integration-tests", + "type": "module", + "private": true, + "version": "0.0.0", + "dependencies": { + "@app/gql": "link:./testkit/gql", + "@graphql-typed-document-node/core": "3.1.1", + "auth0": "2.36.2", + "axios": "0.27.2", + "dotenv": "10.0.0", + "date-fns": "2.25.0", + "dependency-graph": "0.11.0", + "dockest": "npm:@n1ru4l/dockest@2.1.0-rc.6", + "slonik": "23.9.0", + "tsup": "5.12.7", + "yaml": "2.1.0" + }, + "scripts": { + "build-and-pack": "(cd ../ && yarn build:services && yarn build:libraries && yarn build:local-cdn) && node ./scripts/pack.mjs", + "dockest": "tsup-node dockest.ts --format esm --target node16 --onSuccess 'node dist/dockest.js'" + } +} diff --git a/integration-tests/run-local-cdn.sh b/integration-tests/run-local-cdn.sh new file mode 100755 index 000000000..c90b1b501 --- /dev/null +++ b/integration-tests/run-local-cdn.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +set -e + +node cdn.js diff --git a/integration-tests/run-migrations.sh b/integration-tests/run-migrations.sh new file mode 100755 index 000000000..f11bede05 --- /dev/null +++ b/integration-tests/run-migrations.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:storage.tgz +storage diff --git a/integration-tests/run-rate-limit.sh b/integration-tests/run-rate-limit.sh new file mode 100755 index 000000000..0127ee89f --- /dev/null +++ b/integration-tests/run-rate-limit.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:rate-limit.tgz +rate-limit diff --git a/integration-tests/run-schema.sh b/integration-tests/run-schema.sh new file mode 100755 index 000000000..7b3bbfee0 --- /dev/null +++ b/integration-tests/run-schema.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:schema.tgz +schema diff --git a/integration-tests/run-server.sh b/integration-tests/run-server.sh new file mode 100755 index 000000000..c103ac022 --- /dev/null +++ b/integration-tests/run-server.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:server.tgz +server diff --git 
a/integration-tests/run-stripe-billing.sh b/integration-tests/run-stripe-billing.sh new file mode 100755 index 000000000..ee445ecef --- /dev/null +++ b/integration-tests/run-stripe-billing.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:stripe-billing.tgz +stripe-billing diff --git a/integration-tests/run-tokens.sh b/integration-tests/run-tokens.sh new file mode 100755 index 000000000..8994f2034 --- /dev/null +++ b/integration-tests/run-tokens.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:tokens.tgz +tokens diff --git a/integration-tests/run-usage-estimator.sh b/integration-tests/run-usage-estimator.sh new file mode 100755 index 000000000..b326f81ea --- /dev/null +++ b/integration-tests/run-usage-estimator.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:usage-estimator.tgz +usage-estimator diff --git a/integration-tests/run-usage-ingestor.sh b/integration-tests/run-usage-ingestor.sh new file mode 100644 index 000000000..e69c943cd --- /dev/null +++ b/integration-tests/run-usage-ingestor.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:usage-ingestor.tgz +usage-ingestor diff --git a/integration-tests/run-usage.sh b/integration-tests/run-usage.sh new file mode 100755 index 000000000..0c5013e5a --- /dev/null +++ b/integration-tests/run-usage.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:usage.tgz +usage diff --git a/integration-tests/run-webhooks.sh b/integration-tests/run-webhooks.sh new file mode 100755 index 000000000..ff26be16a --- /dev/null +++ b/integration-tests/run-webhooks.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +npm install -g file:webhooks.tgz +webhooks diff --git a/integration-tests/scripts/pack.mjs b/integration-tests/scripts/pack.mjs new file mode 100644 index 000000000..3f3332462 --- /dev/null +++ b/integration-tests/scripts/pack.mjs @@ -0,0 +1,103 @@ +/** + * !! Node !! + * + * Gets all the packages from the manifest and packs them. 
+ * As a result, we get a tarball for each package in the integration-tests/tarballs directory. + * + * Naming convention: + * @hive/tokens -> tokens.tgz + */ + +import { exec } from 'child_process'; +import path from 'path'; +import fs from 'fs'; +import fsExtra from 'fs-extra'; +import glob from 'glob'; +import rimraf from 'rimraf'; +import { fileURLToPath } from 'url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const cwd = path.resolve(__dirname, '../..'); +const tarballDir = path.resolve(cwd, 'integration-tests/tarballs'); + +async function main() { + rimraf.sync(`${tarballDir}`, {}); + fsExtra.mkdirSync(tarballDir, { recursive: true }); + + function isBackendPackage(manifestPath) { + return JSON.parse( + fs.readFileSync(manifestPath, 'utf-8') + ).buildOptions?.tags.includes('backend'); + } + + function listBackendPackages() { + const manifestPathCollection = glob.sync( + 'packages/services/*/package.json', + { + cwd, + absolute: true, + ignore: ['**/node_modules/**', '**/dist/**'], + } + ); + + return manifestPathCollection + .filter(isBackendPackage) + .map((filepath) => path.relative(cwd, path.dirname(filepath))); + } + + async function pack(location) { + const { version, name } = JSON.parse( + await fsExtra.readFile(path.join(cwd, location, 'package.json'), 'utf-8') + ); + const stdout = await new Promise((resolve, reject) => { + exec( + `npm pack ${path.join(cwd, location, 'dist')}`, + { + cwd, + encoding: 'utf8', + }, + (err, stdout, stderr) => { + console.log(stderr); + if (err) { + reject(err); + } else { + resolve(stdout); + } + } + ); + }); + + const lines = stdout.split('\n'); + const org_filename = path.resolve(cwd, lines[lines.length - 2]); + let filename = org_filename + .replace(cwd, tarballDir) + .replace('hive-', '') + .replace(`-${version}`, ''); + + if (/-\d+\.\d+\.\d+\.tgz$/.test(filename)) { + throw new Error(`Build ${name} package first!`); + } + + await fsExtra.rename(org_filename, filename); + + return filename; 
+ } + + const locations = listBackendPackages(); + + await Promise.all( + locations.map(async (loc) => { + try { + const filename = await pack(loc); + + console.log('[pack] Done', path.resolve(cwd, filename)); + } catch (error) { + console.error(`[pack] Failed to pack ${loc}: ${error}`); + console.error('[pack] Maybe you forgot to build the packages first?'); + process.exit(1); + } + }) + ); +} + +await main(); diff --git a/integration-tests/testkit/auth.ts b/integration-tests/testkit/auth.ts new file mode 100644 index 000000000..41a93d06f --- /dev/null +++ b/integration-tests/testkit/auth.ts @@ -0,0 +1,35 @@ +import { AuthenticationClient, TokenResponse } from 'auth0'; +import { ensureEnv } from './env'; + +const authenticationApi = new AuthenticationClient({ + domain: ensureEnv('AUTH0_DOMAIN'), + clientId: ensureEnv('AUTH0_CLIENT_ID'), + clientSecret: ensureEnv('AUTH0_CLIENT_SECRET'), +}); + +type UserID = 'main' | 'extra'; +const password = ensureEnv('AUTH0_USER_PASSWORD'); + +const userEmails: Record = { + main: ensureEnv('AUTH0_USER_MAIN_EMAIL'), + extra: ensureEnv('AUTH0_USER_EXTRA_EMAIL'), +}; + +const tokenResponsePromise: Record | null> = { + main: null, + extra: null, +}; + +export function authenticate(userId: UserID) { + if (!tokenResponsePromise[userId]) { + tokenResponsePromise[userId] = authenticationApi.passwordGrant({ + username: userEmails[userId], + password, + audience: `https://${ensureEnv('AUTH0_DOMAIN')}/api/v2/`, + scope: 'openid profile email offline_access', + realm: 'Username-Password-Authentication', + }); + } + + return tokenResponsePromise[userId]!; +} diff --git a/integration-tests/testkit/cli.ts b/integration-tests/testkit/cli.ts new file mode 100644 index 000000000..5f687a8aa --- /dev/null +++ b/integration-tests/testkit/cli.ts @@ -0,0 +1,22 @@ +import * as utils from 'dockest/test-helper'; +import { run } from '../../packages/libraries/cli/src/index'; + +const registryAddress = utils.getServiceAddress('server', 3001); + +export 
async function schemaPublish(args: string[]) { + return run([ + 'schema:publish', + `--registry`, + `http://${registryAddress}/graphql`, + ...args, + ]); +} + +export async function schemaCheck(args: string[]) { + return run([ + 'schema:check', + `--registry`, + `http://${registryAddress}/graphql`, + ...args, + ]); +} diff --git a/integration-tests/testkit/clickhouse.ts b/integration-tests/testkit/clickhouse.ts new file mode 100644 index 000000000..348b1e71a --- /dev/null +++ b/integration-tests/testkit/clickhouse.ts @@ -0,0 +1,43 @@ +import * as utils from 'dockest/test-helper'; +import axios from 'axios'; + +const clickhouseAddress = utils.getServiceAddress('clickhouse', 8123); +const endpoint = `http://${clickhouseAddress}/?default_format=JSON`; + +export async function resetClickHouse() { + const queries = [ + `operations_registry`, + `operations_new_hourly_mv`, + `operations_new`, + `schema_coordinates_daily`, + `client_names_daily`, + ].map((table) => `TRUNCATE TABLE default.${table}`); + + for await (const query of queries) { + await axios.post(endpoint, query, { + method: 'POST', + timeout: 10_000, + headers: { + 'Accept-Encoding': 'gzip', + Accept: 'application/json', + Authorization: `Basic ${Buffer.from('test:test').toString('base64')}`, + }, + }); + } +} + +export async function clickHouseQuery(query: string) { + const res = await axios.post<{ + data: T[]; + rows: number; + }>(endpoint, query, { + timeout: 10_000, + headers: { + 'Accept-Encoding': 'gzip', + Authorization: `Basic ${Buffer.from('test:test').toString('base64')}`, + }, + responseType: 'json', + }); + + return res.data; +} diff --git a/integration-tests/testkit/db.ts b/integration-tests/testkit/db.ts new file mode 100644 index 000000000..3aa340f32 --- /dev/null +++ b/integration-tests/testkit/db.ts @@ -0,0 +1,27 @@ +import { sql, DatabasePoolConnectionType } from 'slonik'; + +export const resetDb = async (conn: DatabasePoolConnectionType) => { + const migrationTables = ['migrations']; + + 
const result = await conn.many<{ tablename: string }>(sql` + SELECT "tablename" + FROM "pg_tables" + WHERE "schemaname" = 'public'; + `); + + const tablenames = result + .map(({ tablename }) => tablename) + .filter((tablename) => !migrationTables.includes(tablename)); + + if (tablenames.length) { + await conn.query(sql` + TRUNCATE TABLE + ${sql.join( + tablenames.map((name) => sql.identifier([name])), + sql`,` + )} + RESTART IDENTITY + ; + `); + } +}; diff --git a/integration-tests/testkit/dockest.ts b/integration-tests/testkit/dockest.ts new file mode 100644 index 000000000..72243518e --- /dev/null +++ b/integration-tests/testkit/dockest.ts @@ -0,0 +1,91 @@ +import { DockestService, execa } from 'dockest'; +import { + containerIsHealthyReadinessCheck, + zeroExitCodeReadinessCheck, +} from 'dockest/dist/readiness-check/index.js'; +import { DepGraph } from 'dependency-graph'; +import { readFileSync } from 'fs'; +import { join } from 'path'; +import { parse } from 'yaml'; + +export function createServices() { + const dockerComposeFile: { + services: { + [key: string]: { + depends_on?: { [key: string]: unknown }; + healthcheck?: any; + }; + }; + } = parse(readFileSync(join(process.cwd(), 'docker-compose.yml'), 'utf8')); + + const serviceNameCollection = Object.keys(dockerComposeFile.services); + + const graph = new DepGraph(); + + // First, add all services to the graph + for (const serviceName of serviceNameCollection) { + const service = dockerComposeFile.services[serviceName]; + graph.addNode(serviceName, { + serviceName, + dependsOn: [], + readinessCheck: service.healthcheck + ? 
containerIsHealthyReadinessCheck + : zeroExitCodeReadinessCheck, + }); + } + + // Now, create dependencies between them + for (const serviceName of serviceNameCollection) { + const dockerService = dockerComposeFile.services[serviceName]; + if (dockerService.depends_on) { + const dependsOn = Object.keys(dockerService.depends_on); + + for (const depName of dependsOn) { + graph.addDependency(serviceName, depName); + } + } + } + + // Next, sort the graph + const allServices = graph.overallOrder(); + + // Finally, create the services + const registry: { + [key: string]: DockestService; + } = {}; + for (const serviceName of allServices) { + const service = graph.getNodeData(serviceName); + + registry[serviceName] = { + ...service, + dependsOn: graph + .directDependenciesOf(serviceName) + .map((dep) => graph.getNodeData(dep)), + }; + } + + // And return a list of services + return allServices.map((serviceName) => graph.getNodeData(serviceName)); +} + +export function cleanDockerContainers() { + const output = execa( + `docker ps --all --filter "name=integration-tests" --format={{.ID}}:{{.Status}}` + ); + + if (output.stdout.length) { + const runningContainers = output.stdout.split('\n'); + for (const line of runningContainers) { + const [containerId, containerStatus] = line.split(':'); + const containerRunning = containerStatus?.toLowerCase().includes('up'); + if (containerRunning) { + console.log(`Stopping container ${containerId}`); + execa(`docker stop ${containerId}`); + } + console.log(`Removing container ${containerId} with its volumes`); + execa(`docker rm -v -f ${containerId}`); + } + + console.log('Stopped and removed all containers'); + } +} diff --git a/integration-tests/testkit/env.ts b/integration-tests/testkit/env.ts new file mode 100644 index 000000000..3f1e8ffab --- /dev/null +++ b/integration-tests/testkit/env.ts @@ -0,0 +1,52 @@ +type ValueType = 'string' | 'number' | 'boolean'; + +const prefix = 'Invariant failed'; + +// Throw an error if the condition 
fails +// > Not providing an inline default argument for message as the result is smaller +export function invariant( + condition: any, + // Can provide a string, or a function that returns a string for cases where + // the message takes a fair amount of effort to compute + message?: string | (() => string) +): asserts condition { + if (condition) { + return; + } + // Condition not passed + + // When not in production we allow the message to pass through + // *This block will be removed in production builds* + + const provided: string | undefined = + typeof message === 'function' ? message() : message; + + // Options: + // 1. message provided: `${prefix}: ${provided}` + // 2. message not provided: prefix + const value: string = provided ? `${prefix}: ${provided}` : prefix; + throw new Error(value); +} + +export function ensureEnv(key: string): string; +export function ensureEnv(key: string, valueType: 'string'): string; +export function ensureEnv(key: string, valueType: 'number'): number; +export function ensureEnv(key: string, valueType: 'boolean'): boolean; +export function ensureEnv(key: string, valueType?: ValueType) { + let value = process.env[key]; + + if (value === '') { + value = undefined; + } + + invariant(typeof value === 'string', `Missing "${key}" environment variable`); + + switch (valueType) { + case 'number': + return parseInt(value, 10); + case 'boolean': + return value === 'true'; + default: + return value; + } +} diff --git a/integration-tests/testkit/flow.ts b/integration-tests/testkit/flow.ts new file mode 100644 index 000000000..3f9b5ad43 --- /dev/null +++ b/integration-tests/testkit/flow.ts @@ -0,0 +1,600 @@ +import { gql } from '@app/gql'; +import axios from 'axios'; + +import type { + CreateOrganizationInput, + SchemaPublishInput, + CreateProjectInput, + CreateTokenInput, + OrganizationMemberAccessInput, + SchemaCheckInput, + PublishPersistedOperationInput, + SetTargetValidationInput, + UpdateTargetValidationSettingsInput, + 
OperationsStatsSelectorInput, + UpdateBaseSchemaInput, + SchemaVersionsInput, + CreateTargetInput, + SchemaVersionUpdateInput, + TargetSelectorInput, + SchemaSyncCdnInput, +} from './gql/graphql'; +import { execute } from './graphql'; + +export function waitFor(ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export function createOrganization( + input: CreateOrganizationInput, + authToken: string +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation createOrganization($input: CreateOrganizationInput!) { + createOrganization(input: $input) { + organization { + id + name + cleanId + inviteCode + owner { + id + organizationAccessScopes + projectAccessScopes + targetAccessScopes + } + } + } + } + `), + authToken, + variables: { + input, + }, + }); +} + +export function joinOrganization(code: string, authToken: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation joinOrganization($code: String!) { + joinOrganization(code: $code) { + __typename + ... on OrganizationPayload { + organization { + id + name + cleanId + me { + id + organizationAccessScopes + projectAccessScopes + targetAccessScopes + } + } + } + ... on OrganizationInvitationError { + message + } + } + } + `), + authToken, + variables: { + code, + }, + }); +} + +export function createProject(input: CreateProjectInput, authToken: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation createProject($input: CreateProjectInput!) { + createProject(input: $input) { + createdProject { + id + cleanId + } + createdTarget { + id + cleanId + } + } + } + `), + authToken, + variables: { + input, + }, + }); +} +export function createTarget(input: CreateTargetInput, authToken: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation createTarget($input: CreateTargetInput!) 
{ + createTarget(input: $input) { + createdTarget { + id + cleanId + } + } + } + `), + authToken, + variables: { + input, + }, + }); +} +export function createToken(input: CreateTokenInput, authToken: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation createToken($input: CreateTokenInput!) { + createToken(input: $input) { + secret + } + } + `), + authToken, + variables: { + input, + }, + }); +} + +export function updateMemberAccess( + input: OrganizationMemberAccessInput, + authToken: string +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation updateOrganizationMemberAccess( + $input: OrganizationMemberAccessInput! + ) { + updateOrganizationMemberAccess(input: $input) { + organization { + cleanId + members { + nodes { + id + organizationAccessScopes + projectAccessScopes + targetAccessScopes + } + } + me { + id + } + } + } + } + `), + authToken, + variables: { + input, + }, + }); +} + +export function publishSchema(input: SchemaPublishInput, token: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation schemaPublish($input: SchemaPublishInput!) { + schemaPublish(input: $input) { + __typename + ... on SchemaPublishSuccess { + initial + valid + message + changes { + nodes { + message + criticality + } + total + } + } + ... on SchemaPublishError { + valid + changes { + nodes { + message + criticality + } + total + } + errors { + nodes { + message + } + total + } + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function checkSchema(input: SchemaCheckInput, token: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation schemaCheck($input: SchemaCheckInput!) { + schemaCheck(input: $input) { + ... on SchemaCheckSuccess { + __typename + valid + changes { + nodes { + message + criticality + } + total + } + } + ... 
on SchemaCheckError { + __typename + valid + changes { + nodes { + message + criticality + } + total + } + errors { + nodes { + message + } + total + } + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function setTargetValidation( + input: SetTargetValidationInput, + access: + | { + token: string; + } + | { + authToken: string; + } +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation setTargetValidation($input: SetTargetValidationInput!) { + setTargetValidation(input: $input) { + enabled + period + percentage + } + } + `), + ...access, + variables: { + input, + }, + }); +} + +export function updateTargetValidationSettings( + input: UpdateTargetValidationSettingsInput, + access: + | { + token: string; + } + | { + authToken: string; + } +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation updateTargetValidationSettings( + $input: UpdateTargetValidationSettingsInput! + ) { + updateTargetValidationSettings(input: $input) { + enabled + period + percentage + targets { + id + } + } + } + `), + ...access, + variables: { + input, + }, + }); +} + +export function updateBaseSchema(input: UpdateBaseSchemaInput, token: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation updateBaseSchema($input: UpdateBaseSchemaInput!) { + updateBaseSchema(input: $input) { + __typename + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function readOperationsStats( + input: OperationsStatsSelectorInput, + token: string +) { + return execute({ + document: gql(/* GraphQL */ ` + query readOperationsStats($input: OperationsStatsSelectorInput!) 
{ + operationsStats(selector: $input) { + totalOperations + operations { + nodes { + id + document + operationHash + kind + name + count + percentage + duration { + p75 + p90 + p95 + p99 + } + } + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function fetchLatestSchema(token: string) { + return execute({ + document: gql(/* GraphQL */ ` + query latestVersion { + latestVersion { + baseSchema + schemas { + nodes { + source + commit + } + total + } + } + } + `), + token, + }); +} + +export function fetchLatestValidSchema(token: string) { + return execute({ + document: gql(/* GraphQL */ ` + query latestValidVersion { + latestValidVersion { + id + baseSchema + schemas { + nodes { + source + commit + } + total + } + } + } + `), + token, + }); +} + +export function fetchVersions( + selector: SchemaVersionsInput, + limit: number, + token: string +) { + return execute({ + document: gql(/* GraphQL */ ` + query schemaVersions($limit: Int!, $selector: SchemaVersionsInput!) { + schemaVersions(selector: $selector, limit: $limit) { + nodes { + id + valid + date + commit { + source + commit + } + baseSchema + schemas { + nodes { + source + commit + } + } + } + } + } + `), + token, + variables: { + selector, + limit, + }, + }); +} + +export function publishPersistedOperations( + input: PublishPersistedOperationInput[], + token: string +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation publishPersistedOperations( + $input: [PublishPersistedOperationInput!]! + ) { + publishPersistedOperations(input: $input) { + summary { + total + unchanged + } + operations { + id + operationHash + content + name + kind + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function updateSchemaVersionStatus( + input: SchemaVersionUpdateInput, + token: string +) { + return execute({ + document: gql(/* GraphQL */ ` + mutation updateSchemaVersionStatus($input: SchemaVersionUpdateInput!) 
{ + updateSchemaVersionStatus(input: $input) { + id + date + valid + commit { + id + commit + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function schemaSyncCDN(input: SchemaSyncCdnInput, token: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation schemaSyncCDN($input: SchemaSyncCDNInput!) { + schemaSyncCDN(input: $input) { + __typename + ... on SchemaSyncCDNSuccess { + message + } + ... on SchemaSyncCDNError { + message + } + } + } + `), + token, + variables: { + input, + }, + }); +} + +export function createCdnAccess(selector: TargetSelectorInput, token: string) { + return execute({ + document: gql(/* GraphQL */ ` + mutation createCdnToken($selector: TargetSelectorInput!) { + createCdnToken(selector: $selector) { + url + token + } + } + `), + token, + variables: { + selector, + }, + }); +} + +export async function fetchSchemaFromCDN( + selector: TargetSelectorInput, + token: string +) { + const cdnAccessResult = await createCdnAccess(selector, token); + + if (cdnAccessResult.body.errors) { + throw new Error(cdnAccessResult.body.errors[0].message); + } + + const cdn = cdnAccessResult.body.data!.createCdnToken; + + const res = await axios.get<{ sdl: string }>(`${cdn.url}/schema`, { + headers: { + 'Content-Type': 'application/json', + 'X-Hive-CDN-Key': cdn.token, + }, + responseType: 'json', + }); + + return { + body: res.data, + status: res.status, + }; +} + +export async function fetchMetadataFromCDN( + selector: TargetSelectorInput, + token: string +) { + const cdnAccessResult = await createCdnAccess(selector, token); + + if (cdnAccessResult.body.errors) { + throw new Error(cdnAccessResult.body.errors[0].message); + } + + const cdn = cdnAccessResult.body.data!.createCdnToken; + + const res = await axios.get(`${cdn.url}/metadata`, { + headers: { + 'Content-Type': 'application/json', + 'X-Hive-CDN-Key': cdn.token, + }, + responseType: 'json', + }); + + return { + body: res.data, + status: res.status, + }; +} diff 
--git a/integration-tests/testkit/graphql.ts b/integration-tests/testkit/graphql.ts new file mode 100644 index 000000000..8d80a85b2 --- /dev/null +++ b/integration-tests/testkit/graphql.ts @@ -0,0 +1,44 @@ +import * as utils from 'dockest/test-helper'; +import axios from 'axios'; +import type { ExecutionResult } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; + +const registryAddress = utils.getServiceAddress('server', 3001); + +export async function execute(params: { + document: TypedDocumentNode; + operationName?: string; + variables?: V; + authToken?: string; + token?: string; +}) { + const res = await axios.post>( + `http://${registryAddress}/graphql`, + { + query: params.document, + operationName: params.operationName, + variables: params.variables, + }, + { + headers: { + 'Content-Type': 'application/json', + ...(params.authToken + ? { + Authorization: `Bearer ${params.authToken}`, + } + : {}), + ...(params.token + ? { + 'X-API-Token': params.token, + } + : {}), + }, + responseType: 'json', + } + ); + + return { + body: res.data, + status: res.status, + }; +} diff --git a/integration-tests/testkit/redis.ts b/integration-tests/testkit/redis.ts new file mode 100644 index 000000000..51f6340f5 --- /dev/null +++ b/integration-tests/testkit/redis.ts @@ -0,0 +1,22 @@ +/* eslint-disable import/no-extraneous-dependencies */ +import Redis from 'ioredis'; + +export const resetRedis = async (conn: { + host: string; + port: number; + password: string; +}) => { + const redis = new Redis({ + host: conn.host, + port: conn.port, + password: conn.password, + db: 0, + maxRetriesPerRequest: 5, + enableReadyCheck: true, + }); + + const keys = await redis.keys('*'); + if (keys?.length) { + await redis.del(keys); + } +}; diff --git a/integration-tests/testkit/usage.ts b/integration-tests/testkit/usage.ts new file mode 100644 index 000000000..f6e5ffb61 --- /dev/null +++ b/integration-tests/testkit/usage.ts @@ -0,0 +1,38 @@ +import * as utils 
from 'dockest/test-helper'; +import axios from 'axios'; + +const usageAddress = utils.getServiceAddress('usage', 3006); + +export interface CollectedOperation { + timestamp?: number; + operation: string; + operationName?: string; + fields: string[]; + execution: { + ok: boolean; + duration: number; + errorsTotal: number; + }; + metadata?: { + client?: { + name?: string; + version?: string; + }; + }; +} + +export async function collect(params: { + operations: CollectedOperation[]; + token: string; +}) { + const res = await axios.post(`http://${usageAddress}`, params.operations, { + headers: { + 'Content-Type': 'application/json', + 'X-API-Token': params.token, + }, + }); + + return { + status: res.status, + }; +} diff --git a/integration-tests/tests/api/organization/members.spec.ts b/integration-tests/tests/api/organization/members.spec.ts new file mode 100644 index 000000000..3fcd30f63 --- /dev/null +++ b/integration-tests/tests/api/organization/members.spec.ts @@ -0,0 +1,141 @@ +import { + OrganizationAccessScope, + ProjectAccessScope, + TargetAccessScope, +} from '@app/gql/graphql'; +import { + createOrganization, + joinOrganization, + updateMemberAccess, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; + +test('owner of an organization should have all scopes', async () => { + const { access_token } = await authenticate('main'); + const result = await createOrganization( + { + name: 'foo', + }, + access_token + ); + + expect(result.body.errors).not.toBeDefined(); + + const owner = result.body.data!.createOrganization.organization.owner; + + Object.values(OrganizationAccessScope).forEach((scope) => { + expect(owner.organizationAccessScopes).toContain(scope); + }); + + Object.values(ProjectAccessScope).forEach((scope) => { + expect(owner.projectAccessScopes).toContain(scope); + }); + + Object.values(TargetAccessScope).forEach((scope) => { + expect(owner.targetAccessScopes).toContain(scope); + }); +}); + +test('regular member 
of an organization should have basic scopes', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const code = orgResult.body.data!.createOrganization.organization.inviteCode; + const joinResult = await joinOrganization(code, member_access_token); + + expect(joinResult.body.errors).not.toBeDefined(); + expect(joinResult.body.data?.joinOrganization.__typename).toBe( + 'OrganizationPayload' + ); + + if ( + joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload' + ) { + throw new Error('Join failed'); + } + + const member = joinResult.body.data!.joinOrganization.organization.me; + + // Should have only organization:read access + expect(member.organizationAccessScopes).toContainEqual( + OrganizationAccessScope.Read + ); + // Nothing more + expect(member.organizationAccessScopes).toHaveLength(1); + + // Should have only project:read and project:operations-store:read access + expect(member.projectAccessScopes).toContainEqual(ProjectAccessScope.Read); + expect(member.projectAccessScopes).toContainEqual( + ProjectAccessScope.OperationsStoreRead + ); + // Nothing more + expect(member.projectAccessScopes).toHaveLength(2); + + // Should have only target:read and target:registry:read access + expect(member.targetAccessScopes).toContainEqual(TargetAccessScope.Read); + expect(member.targetAccessScopes).toContainEqual( + TargetAccessScope.RegistryRead + ); + // Nothing more + expect(member.targetAccessScopes).toHaveLength(2); +}); + +test('cannot grant an access scope to another user if user has no access to that scope', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: 
member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + const joinResult = await joinOrganization(code, member_access_token); + + if ( + joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload' + ) { + throw new Error( + `Join failed: ${joinResult.body.data!.joinOrganization.message}` + ); + } + + const member = joinResult.body.data!.joinOrganization.organization.me; + + // Grant organization:members access + await updateMemberAccess( + { + organization: org.cleanId, + organizationScopes: [OrganizationAccessScope.Members], + projectScopes: [], + targetScopes: [], + user: member.id, + }, + owner_access_token + ); + + // Grant access to target:tokens:write + const accessResult = await updateMemberAccess( + { + organization: org.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [TargetAccessScope.TokensWrite], + user: member.id, + }, + member_access_token + ); + + expect(accessResult.body.errors).toHaveLength(1); + expect(accessResult.body.errors![0].message).toMatch('target:tokens:write'); +}); diff --git a/integration-tests/tests/api/persisted-operations/publish.spec.ts b/integration-tests/tests/api/persisted-operations/publish.spec.ts new file mode 100644 index 000000000..2358477a5 --- /dev/null +++ b/integration-tests/tests/api/persisted-operations/publish.spec.ts @@ -0,0 +1,212 @@ +import { ProjectType, ProjectAccessScope } from '@app/gql/graphql'; +import { + createOrganization, + publishPersistedOperations, + createProject, + createToken, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; + +test('can publish persisted operations only with project:operations-store:write', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = 
orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with no rights + const noAccessTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [], + }, + owner_access_token + ); + expect(noAccessTokenResult.body.errors).not.toBeDefined(); + + // Create a token with read rights + const readTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [ProjectAccessScope.OperationsStoreRead], + targetScopes: [], + }, + owner_access_token + ); + expect(readTokenResult.body.errors).not.toBeDefined(); + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [ + ProjectAccessScope.OperationsStoreRead, + ProjectAccessScope.OperationsStoreWrite, + ], + targetScopes: [], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + + const writeToken = writeTokenResult.body.data!.createToken.secret; + const readToken = readTokenResult.body.data!.createToken.secret; + const noAccessToken = noAccessTokenResult.body.data!.createToken.secret; + + const operations = [ + { + content: `query Me { me { id } }`, + operationHash: 'meme', + }, + { + content: `query user($id: ID!) 
{ user(id: $id) { id } }`, + }, + ]; + + // Cannot persist operations with no read and write rights + let result = await publishPersistedOperations(operations, noAccessToken); + expect(result.body.errors).toHaveLength(1); + expect(result.body.errors![0].message).toMatch( + 'project:operations-store:write' + ); + + // Cannot persist operations with read rights + result = await publishPersistedOperations(operations, readToken); + expect(result.body.errors).toHaveLength(1); + expect(result.body.errors![0].message).toMatch( + 'project:operations-store:write' + ); + + // Persist operations with write rights + result = await publishPersistedOperations(operations, writeToken); + expect(result.body.errors).not.toBeDefined(); + + const persisted = result.body.data!.publishPersistedOperations; + + // Check the result + expect(persisted.summary.total).toEqual(2); + expect(persisted.summary.unchanged).toEqual(0); + expect(persisted.operations).toHaveLength(2); + expect(persisted.operations[0].operationHash).toEqual( + operations[0].operationHash + ); + expect(persisted.operations[1].operationHash).toBeDefined(); +}); + +test('should skip on already persisted operations', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [ + 
ProjectAccessScope.OperationsStoreRead, + ProjectAccessScope.OperationsStoreWrite, + ], + targetScopes: [], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + + const writeToken = writeTokenResult.body.data!.createToken.secret; + + const operations = [ + { + content: `query Me { me { id } }`, + operationHash: 'meme', + }, + { + content: `query user($id: ID!) { user(id: $id) { id } }`, + }, + ]; + + // Persist operations + let result = await publishPersistedOperations(operations, writeToken); + expect(result.body.errors).not.toBeDefined(); + + let persisted = result.body.data!.publishPersistedOperations; + + // Check the result + expect(persisted.summary.total).toEqual(2); + expect(persisted.summary.unchanged).toEqual(0); + expect(persisted.operations).toHaveLength(2); + expect(persisted.operations[0].operationHash).toEqual( + operations[0].operationHash + ); + expect(persisted.operations[1].operationHash).toBeDefined(); + + // Persist operations with read rights + operations[1].operationHash = 'useruser'; + result = await publishPersistedOperations(operations, writeToken); + expect(result.body.errors).not.toBeDefined(); + + persisted = result.body.data!.publishPersistedOperations; + + // Check the result + expect(persisted.summary.total).toEqual(2); + expect(persisted.summary.unchanged).toEqual(1); + expect(persisted.operations).toHaveLength(2); + + const meOperation = persisted.operations.find( + (op) => op.operationHash === operations[0].operationHash + ); + const userOperation = persisted.operations.find( + (op) => op.operationHash === operations[1].operationHash + ); + + expect(meOperation?.operationHash).toEqual(operations[0].operationHash); + expect(userOperation?.operationHash).toEqual(operations[1].operationHash); +}); diff --git a/integration-tests/tests/api/schema/check.spec.ts b/integration-tests/tests/api/schema/check.spec.ts new file mode 100644 index 000000000..4eed2f5ce --- /dev/null +++ 
b/integration-tests/tests/api/schema/check.spec.ts @@ -0,0 +1,242 @@ +import { TargetAccessScope, ProjectType } from '@app/gql/graphql'; +import { + createOrganization, + joinOrganization, + publishSchema, + checkSchema, + createProject, + createToken, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; + +test('can check a schema with target:registry:read access', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); 
+ + // Create a token with no rights + const noAccessTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [], + }, + owner_access_token + ); + expect(noAccessTokenResult.body.errors).not.toBeDefined(); + + // Create a token with read rights + const readTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [TargetAccessScope.RegistryRead], + }, + owner_access_token + ); + expect(readTokenResult.body.errors).not.toBeDefined(); + + const readToken = readTokenResult.body.data!.createToken.secret; + const noAccessToken = noAccessTokenResult.body.data!.createToken.secret; + + // Check schema with no read and write rights + let checkResult = await checkSchema( + { + sdl: `type Query { ping: String foo: String }`, + }, + noAccessToken + ); + expect(checkResult.body.errors).toHaveLength(1); + expect(checkResult.body.errors![0].message).toMatch('target:registry:read'); + + // Check schema with read rights + checkResult = await checkSchema( + { + sdl: `type Query { ping: String foo: String }`, + }, + readToken + ); + expect(checkResult.body.errors).not.toBeDefined(); + expect(checkResult.body.data!.schemaCheck.__typename).toBe( + 'SchemaCheckSuccess' + ); +}); + +test('should match indentation of previous description', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + 
organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: ` + type Query { + " ping-ping " + ping: String + "pong-pong" + pong: String + } + `, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + // Create a token with read rights + const readTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [TargetAccessScope.RegistryRead], + }, + owner_access_token + ); + expect(readTokenResult.body.errors).not.toBeDefined(); + + const readToken = readTokenResult.body.data!.createToken.secret; + + // Check schema with read rights + const checkResult = await checkSchema( + { + sdl: ` + type Query { + """ + ping-ping + """ + ping: String + " pong-pong " + pong: String + } + `, + }, + readToken + ); + expect(checkResult.body.errors).not.toBeDefined(); + + const check = checkResult.body.data!.schemaCheck; + + if (check.__typename !== 'SchemaCheckSuccess') { + throw new 
Error(`Expected SchemaCheckSuccess, got ${check.__typename}`); + } + + expect(check.__typename).toBe('SchemaCheckSuccess'); + expect(check.changes!.total).toBe(0); +}); diff --git a/integration-tests/tests/api/schema/publish.spec.ts b/integration-tests/tests/api/schema/publish.spec.ts new file mode 100644 index 000000000..3ac326527 --- /dev/null +++ b/integration-tests/tests/api/schema/publish.spec.ts @@ -0,0 +1,1006 @@ +import { TargetAccessScope, ProjectType } from '@app/gql/graphql'; +import { + createOrganization, + joinOrganization, + publishSchema, + createProject, + createToken, + updateBaseSchema, + fetchVersions, + fetchLatestSchema, + fetchLatestValidSchema, + updateSchemaVersionStatus, + fetchSchemaFromCDN, + createTarget, + fetchMetadataFromCDN, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; + +test('cannot publish a schema without target:registry:write access', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [TargetAccessScope.RegistryRead], + }, + owner_access_token + ); + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = 
tokenResult.body.data!.createToken.secret; + const result = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + token + ); + + expect(result.body.errors).toHaveLength(1); + expect(result.body.errors![0].message).toMatch('target:registry:write'); +}); + +test('can publish a schema with target:registry:write access', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + let result = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + expect(result.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + result = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String pong: String }`, + }, + token + ); + + 
expect(result.body.errors).not.toBeDefined(); + expect(result.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 3, + token + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(2); +}); + +test('base schema should not affect the output schema persisted in db', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + let publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const updateBaseResult = await updateBaseSchema( + { + newBase: ` + 
directive @auth on OBJECT | FIELD_DEFINITION + `, + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + writeToken + ); + expect(updateBaseResult.body.errors).not.toBeDefined(); + + // Check schema with no read and write rights + publishResult = await publishSchema( + { + sdl: `type Query { ping: String @auth pong: String }`, + author: 'Kamil', + commit: 'abc234', + }, + writeToken + ); + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 5, + writeToken + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(2); + + const latestResult = await fetchLatestSchema(writeToken); + expect(latestResult.body.errors).not.toBeDefined(); + expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe( + 'abc234' + ); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch( + 'type Query { ping: String @auth pong: String }' + ); + expect( + latestResult.body.data!.latestVersion.schemas.nodes[0].source + ).not.toMatch('directive'); + expect(latestResult.body.data!.latestVersion.baseSchema).toMatch( + 'directive @auth on OBJECT | FIELD_DEFINITION' + ); +}); + +test('directives should not be removed (federation)', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Federation, + name: 'foo', + }, + owner_access_token + ); + + const 
project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { me: User } type User @key(fields: "id") { id: ID! name: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 5, + writeToken + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(1); + + const latestResult = await fetchLatestSchema(writeToken); + expect(latestResult.body.errors).not.toBeDefined(); + expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe( + 'abc123' + ); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch( + `type Query { me: User } type User @key(fields: "id") { id: ID! 
name: String }` + ); +}); + +test('directives should not be removed (stitching)', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Stitching, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { me: User } type User @key(selectionSet: "{ id }") { id: ID! 
name: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 5, + writeToken + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(1); + + const latestResult = await fetchLatestSchema(writeToken); + expect(latestResult.body.errors).not.toBeDefined(); + expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe( + 'abc123' + ); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch( + `type Query { me: User } type User @key(selectionSet: "{ id }") { id: ID! name: String }` + ); +}); + +test('directives should not be removed (single)', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + 
expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `directive @auth on FIELD_DEFINITION type Query { me: User @auth } type User { id: ID! name: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 5, + writeToken + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(1); + + const latestResult = await fetchLatestSchema(writeToken); + expect(latestResult.body.errors).not.toBeDefined(); + expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe( + 'abc123' + ); + expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch( + `directive @auth on FIELD_DEFINITION type Query { me: User @auth } type User { id: ID! 
name: String }` + ); +}); + +test('share publication of schema using redis', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + // Publish schema with write rights + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + writeToken + ); + + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const [publishResult1, publishResult2] = await Promise.all([ + publishSchema( + { + sdl: `type Query { ping: String pong: String }`, + author: 'Kamil', + commit: 'abc234', + }, + writeToken + ), + publishSchema( + { + sdl: `type Query { ping: String pong: String }`, + author: 'Kamil', + commit: 'abc234', + }, + writeToken + ), + ]); + expect(publishResult1.body.errors).not.toBeDefined(); + expect(publishResult2.body.errors).not.toBeDefined(); + 
expect(publishResult1.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + expect(publishResult2.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); +}); + +test("Two targets with the same commit id shouldn't return an error", async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + const org = orgResult.body.data!.createOrganization.organization; + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + const publishResult = await publishSchema( + { + author: 'gilad', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + writeToken + ); + const createTargetResult = await createTarget( + { + organization: org.cleanId, + project: project.cleanId, + name: 'target2', + }, + owner_access_token + ); + const target2 = createTargetResult.body!.data!.createTarget.createdTarget; + const writeTokenResult2 = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target2.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + const writeToken2 = 
writeTokenResult2.body.data!.createToken.secret; + const publishResult2 = await publishSchema( + { + author: 'gilad', + commit: 'abc123', + sdl: `type Query { ping: String }`, + }, + writeToken2 + ); + // Schema publish should be successful + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + expect(publishResult2.body.errors).not.toBeDefined(); + expect(publishResult2.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); +}); + +test('marking versions as valid', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + // Initial schema + let result = await publishSchema( + { + author: 'Kamil', + commit: 'c0', + sdl: `type Query { ping: String }`, + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + 
expect(result.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + // Second version with a forced breaking change + result = await publishSchema( + { + author: 'Kamil', + commit: 'c1', + sdl: `type Query { pong: String }`, + force: true, + metadata: JSON.stringify({ c1: true }), + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + + // third version with another forced breaking change + result = await publishSchema( + { + author: 'Kamil', + commit: 'c2', + sdl: `type Query { tennis: String }`, + force: true, + metadata: JSON.stringify({ c2: true }), + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + + const versionsResult = await fetchVersions( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + 3, + token + ); + + expect(versionsResult.body.errors).not.toBeDefined(); + expect(versionsResult.body.data!.schemaVersions.nodes).toHaveLength(3); + + // the initial version should be the latest valid version + let latestValidSchemaResult = await fetchLatestValidSchema(token); + expect(latestValidSchemaResult.body.errors).not.toBeDefined(); + expect( + latestValidSchemaResult.body.data!.latestValidVersion.schemas.total + ).toEqual(1); + expect( + latestValidSchemaResult.body.data!.latestValidVersion.schemas.nodes[0] + .commit + ).toEqual('c0'); + + const versionId = (commit: string) => + versionsResult.body.data!.schemaVersions.nodes.find( + (node) => node.commit.commit === commit + )!.id; + + // marking the third version as valid should promote it to be the latest valid version + let versionStatusUpdateResult = await updateSchemaVersionStatus( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + valid: true, + version: versionId('c2'), + }, + token + ); + + expect(versionStatusUpdateResult.body.errors).not.toBeDefined(); + expect( + versionStatusUpdateResult.body.data!.updateSchemaVersionStatus.id + ).toEqual(versionId('c2')); + + 
latestValidSchemaResult = await fetchLatestValidSchema(token); + expect(latestValidSchemaResult.body.errors).not.toBeDefined(); + expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual( + versionId('c2') + ); + + // marking the second (not the most recent) version as valid should NOT promote it to be the latest valid version + versionStatusUpdateResult = await updateSchemaVersionStatus( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + valid: true, + version: versionId('c1'), + }, + token + ); + expect(versionStatusUpdateResult.body.errors).not.toBeDefined(); + + latestValidSchemaResult = await fetchLatestValidSchema(token); + expect(latestValidSchemaResult.body.errors).not.toBeDefined(); + expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual( + versionId('c2') + ); +}); + +test('marking only the most recent version as valid result in an update of CDN', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + 
expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + // Initial schema + let result = await publishSchema( + { + author: 'Kamil', + commit: 'c0', + sdl: `type Query { ping: String }`, + metadata: JSON.stringify({ c0: 1 }), + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + expect(result.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + // Second version with a forced breaking change + result = await publishSchema( + { + author: 'Kamil', + commit: 'c1', + sdl: `type Query { pong: String }`, + force: true, + metadata: JSON.stringify({ c1: 1 }), + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + + // third version with another forced breaking change + result = await publishSchema( + { + author: 'Kamil', + commit: 'c2', + sdl: `type Query { tennis: String }`, + force: true, + metadata: JSON.stringify({ c2: 1 }), + }, + token + ); + + expect(result.body.errors).not.toBeDefined(); + + const targetSelector = { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }; + + // the initial version should available on CDN + let cdnResult = await fetchSchemaFromCDN(targetSelector, token); + expect(cdnResult.body.sdl).toContain('ping'); + + let cdnMetadataResult = await fetchMetadataFromCDN(targetSelector, token); + expect(cdnMetadataResult.status).toEqual(200); + expect(cdnMetadataResult.body).toEqual({ c0: 1 }); + + const versionsResult = await fetchVersions(targetSelector, 3, token); + + const versionId = (commit: string) => + versionsResult.body.data!.schemaVersions.nodes.find( + (node) => node.commit.commit === commit + )!.id; + + // marking the third version as valid should promote it to be the latest valid version and publish it to CDN + await updateSchemaVersionStatus( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + valid: true, + version: versionId('c2'), + }, + token + ); + 
+ cdnResult = await fetchSchemaFromCDN(targetSelector, token); + expect(cdnResult.body.sdl).toContain('tennis'); + + cdnMetadataResult = await fetchMetadataFromCDN(targetSelector, token); + expect(cdnMetadataResult.status).toEqual(200); + expect(cdnMetadataResult.body).toEqual({ c2: 1 }); + + // marking the second (not the most recent) version as valid should NOT promote it to be the latest valid version + // const updateSchemaVersionStatusResult = + await updateSchemaVersionStatus( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + valid: true, + version: versionId('c1'), + }, + token + ); + // console.log(JSON.stringify(updateSchemaVersionStatusResult)); + + cdnResult = await fetchSchemaFromCDN(targetSelector, token); + expect(cdnResult.body.sdl).toContain('tennis'); + + cdnMetadataResult = await fetchMetadataFromCDN(targetSelector, token); + expect(cdnMetadataResult.status).toEqual(200); + expect(cdnMetadataResult.body).toEqual({ c2: 1 }); +}); diff --git a/integration-tests/tests/api/schema/sync.spec.ts b/integration-tests/tests/api/schema/sync.spec.ts new file mode 100644 index 000000000..362788c13 --- /dev/null +++ b/integration-tests/tests/api/schema/sync.spec.ts @@ -0,0 +1,103 @@ +import { TargetAccessScope, ProjectType } from '@app/gql/graphql'; +import { + createOrganization, + joinOrganization, + publishSchema, + createProject, + createToken, + fetchSchemaFromCDN, + schemaSyncCDN, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; + +test('marking only the most recent version as valid result in an update of CDN', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + 
await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + // Initial schema + const publishResult = await publishSchema( + { + author: 'Kamil', + commit: 'c0', + sdl: `type Query { ping: String }`, + }, + token + ); + + expect(publishResult.body.errors).not.toBeDefined(); + expect(publishResult.body.data!.schemaPublish.__typename).toBe( + 'SchemaPublishSuccess' + ); + + const targetSelector = { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }; + + // the initial version should available on CDN + let cdnResult = await fetchSchemaFromCDN(targetSelector, token); + expect(cdnResult.body.sdl).toContain('ping'); + + // Force a re-upload of the schema to CDN + const syncResult = await schemaSyncCDN( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + token + ); + + expect(syncResult.body.errors).not.toBeDefined(); + expect(syncResult.body.data!.schemaSyncCDN.__typename).toBe( + 'SchemaSyncCDNSuccess' + ); + + // the initial version should available on CDN + cdnResult = await fetchSchemaFromCDN(targetSelector, token); + expect(cdnResult.body.sdl).toContain('ping'); +}); diff --git a/integration-tests/tests/api/sign-up.spec.ts b/integration-tests/tests/api/sign-up.spec.ts new file 
mode 100644 index 000000000..ec0df863c --- /dev/null +++ b/integration-tests/tests/api/sign-up.spec.ts @@ -0,0 +1,62 @@ +import { gql } from '@app/gql'; +import { execute } from '../../testkit/graphql'; +import { authenticate } from '../../testkit/auth'; + +test('should auto-create an organization for freshly signed-up user', async () => { + const { access_token } = await authenticate('main'); + const result = await execute({ + document: gql(/* GraphQL */ ` + query organizations { + organizations { + total + nodes { + id + name + } + } + } + `), + authToken: access_token, + }); + + expect(result.body.errors).not.toBeDefined(); + expect(result.body.data?.organizations.total).toBe(1); +}); + +test('should auto-create an organization for freshly signed-up user with no race-conditions', async () => { + const { access_token } = await authenticate('main'); + const query1 = execute({ + document: gql(/* GraphQL */ ` + query organizations { + organizations { + total + nodes { + id + name + } + } + } + `), + authToken: access_token, + }); + const query2 = execute({ + document: gql(/* GraphQL */ ` + query organizations { + organizations { + total + nodes { + id + name + } + } + } + `), + authToken: access_token, + }); + const [result1, result2] = await Promise.all([query1, query2]); + + expect(result1.body.errors).not.toBeDefined(); + expect(result1.body.data?.organizations.total).toBe(1); + expect(result2.body.errors).not.toBeDefined(); + expect(result2.body.data?.organizations.total).toBe(1); +}); diff --git a/integration-tests/tests/api/target/tokens.spec.ts b/integration-tests/tests/api/target/tokens.spec.ts new file mode 100644 index 000000000..b86725ce8 --- /dev/null +++ b/integration-tests/tests/api/target/tokens.spec.ts @@ -0,0 +1,80 @@ +import { TargetAccessScope, ProjectType } from '@app/gql/graphql'; +import { + createOrganization, + joinOrganization, + createProject, + createToken, + updateMemberAccess, +} from '../../../testkit/flow'; +import { authenticate } 
from '../../../testkit/auth'; + +test('cannot set a scope on a token if user has no access to that scope', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + const joinResult = await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + if ( + joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload' + ) { + throw new Error( + `Join failed: ${joinResult.body.data!.joinOrganization.message}` + ); + } + + const member = joinResult.body.data!.joinOrganization.organization.me; + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Give access to tokens + await updateMemberAccess( + { + organization: org.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.TokensRead, + TargetAccessScope.TokensWrite, + ], + user: member.id, + }, + owner_access_token + ); + + // member should not have access to target:registry:write + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [TargetAccessScope.RegistryWrite], + }, + member_access_token + ); + + expect(tokenResult.body.errors).toHaveLength(1); + expect(tokenResult.body.errors![0].message).toMatch('target:registry:write'); +}); diff --git a/integration-tests/tests/api/target/usage.spec.ts 
b/integration-tests/tests/api/target/usage.spec.ts new file mode 100644 index 000000000..93dfa35e8 --- /dev/null +++ b/integration-tests/tests/api/target/usage.spec.ts @@ -0,0 +1,845 @@ +import { + TargetAccessScope, + ProjectType, + ProjectAccessScope, + OrganizationAccessScope, +} from '@app/gql/graphql'; +import formatISO from 'date-fns/formatISO'; +import subHours from 'date-fns/subHours'; +import { + createOrganization, + createProject, + createTarget, + createToken, + publishSchema, + checkSchema, + setTargetValidation, + updateTargetValidationSettings, + readOperationsStats, + waitFor, +} from '../../../testkit/flow'; +import { authenticate } from '../../../testkit/auth'; +import { collect, CollectedOperation } from '../../../testkit/usage'; +import { clickHouseQuery } from '../../../testkit/clickhouse'; +// eslint-disable-next-line import/no-extraneous-dependencies +import { normalizeOperation } from '@graphql-hive/core'; +// eslint-disable-next-line import/no-extraneous-dependencies +import { parse, print } from 'graphql'; + +function sendBatch( + amount: number, + operation: CollectedOperation, + token: string +) { + return Promise.all( + new Array(amount).fill(null).map(() => + collect({ + operations: [operation], + token, + }) + ) + ); +} + +test('collect operation', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const settingsTokenResult = await createToken( + { + name: 'test-settings', + organization: org.cleanId, + project: project.cleanId, + 
target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [TargetAccessScope.Read, TargetAccessScope.Settings], + }, + owner_access_token + ); + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(settingsTokenResult.body.errors).not.toBeDefined(); + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + const tokenForSettings = settingsTokenResult.body.data!.createToken.secret; + + const schemaPublishResult = await publishSchema( + { + author: 'Kamil', + commit: 'abc123', + sdl: `type Query { ping: String me: String }`, + }, + token + ); + + expect(schemaPublishResult.body.errors).not.toBeDefined(); + expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual( + true + ); + + const targetValidationResult = await setTargetValidation( + { + enabled: true, + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + { + token: tokenForSettings, + } + ); + + expect(targetValidationResult.body.errors).not.toBeDefined(); + expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual( + true + ); + expect( + targetValidationResult.body.data!.setTargetValidation.percentage + ).toEqual(0); + expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual( + 30 + ); + + // should not be breaking because the field is unused + const unusedCheckResult = await checkSchema( + { + sdl: `type Query { me: String }`, + }, + token + ); + expect(unusedCheckResult.body.errors).not.toBeDefined(); + 
expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual( + 'SchemaCheckSuccess' + ); + + const collectResult = await collect({ + operations: [ + { + operation: 'query ping { ping }', + operationName: 'ping', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + }, + ], + token, + }); + + expect(collectResult.status).toEqual(200); + + await waitFor(5_000); + + // should be breaking because the field is used now + const usedCheckResult = await checkSchema( + { + sdl: `type Query { me: String }`, + }, + token + ); + + if ( + usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckError' + ) { + throw new Error( + `Expected SchemaCheckError, got ${ + usedCheckResult.body.data!.schemaCheck.__typename + }` + ); + } + + expect(usedCheckResult.body.data!.schemaCheck.valid).toEqual(false); + + const from = formatISO(subHours(Date.now(), 6)); + const to = formatISO(Date.now()); + const operationStatsResult = await readOperationsStats( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + period: { + from, + to, + }, + }, + token + ); + + expect(operationStatsResult.body.errors).not.toBeDefined(); + + const operationsStats = operationStatsResult.body.data!.operationsStats; + + expect(operationsStats.operations.nodes).toHaveLength(1); + + const op = operationsStats.operations.nodes[0]; + + expect(op.count).toEqual(1); + expect(op.document).toMatch('ping'); + expect(op.operationHash).toBeDefined(); + expect(op.duration.p75).toEqual(200); + expect(op.duration.p90).toEqual(200); + expect(op.duration.p95).toEqual(200); + expect(op.duration.p99).toEqual(200); + expect(op.kind).toEqual('query'); + expect(op.name).toMatch('ping'); + expect(op.percentage).toBeGreaterThan(99); +}); + +test('normalize and collect operation without breaking its syntax', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await 
createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const settingsTokenResult = await createToken( + { + name: 'test-settings', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [TargetAccessScope.Read, TargetAccessScope.Settings], + }, + owner_access_token + ); + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(settingsTokenResult.body.errors).not.toBeDefined(); + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + const raw_document = ` + query outfit { + recommendations( + input: { + strategies: [{ name: "asd" }] + articleId: "asd" + customerId: "asd" + phoenixEnabled: true + sessionId: "asd" + } + ) { + ... 
on RecommendationResponse { + frequentlyBoughtTogether { + recommendedProducts { + id + } + strategyMessage + } + outfit { + strategyMessage + } + outfit { + recommendedProducts { + articleId + id + imageUrl + name + productUrl + rating + tCode + } + strategyMessage + } + similar { + recommendedProducts { + articleId + id + imageUrl + name + productUrl + rating + tCode + } + strategyMessage + } + visualSearch { + strategyMessage + } + } + } + } + `; + + const normalized_document = normalizeOperation({ + document: parse(raw_document), + operationName: 'outfit', + hideLiterals: true, + removeAliases: true, + }); + + const collectResult = await collect({ + operations: [ + { + operation: normalizeOperation({ + document: parse(raw_document), + operationName: 'outfit', + hideLiterals: true, + removeAliases: true, + }), + operationName: 'outfit', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + }, + ], + token, + }); + + expect(collectResult.status).toEqual(200); + + await waitFor(5_000); + + const from = formatISO(subHours(Date.now(), 6)); + const to = formatISO(Date.now()); + const operationStatsResult = await readOperationsStats( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + period: { + from, + to, + }, + }, + token + ); + + expect(operationStatsResult.body.errors).not.toBeDefined(); + + const operationsStats = operationStatsResult.body.data!.operationsStats; + + expect(operationsStats.operations.nodes).toHaveLength(1); + + const op = operationsStats.operations.nodes[0]; + + expect(op.count).toEqual(1); + expect(() => { + parse(op.document); + }).not.toThrow(); + expect(print(parse(op.document))).toEqual(print(parse(normalized_document))); + expect(op.operationHash).toBeDefined(); + expect(op.duration.p75).toEqual(200); + expect(op.duration.p90).toEqual(200); + expect(op.duration.p95).toEqual(200); + expect(op.duration.p99).toEqual(200); + 
expect(op.kind).toEqual('query'); + expect(op.name).toMatch('outfit'); + expect(op.percentage).toBeGreaterThan(99); +}); + +test('number of produced and collected operations should match', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + const batchSize = 10; + const totalAmount = 10_000; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for await (const _ of new Array(totalAmount / batchSize)) { + await sendBatch( + batchSize, + { + operation: 'query ping { ping }', + operationName: 'ping', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + }, + token + ); + } + + await waitFor(5_000); + + const from = formatISO(subHours(Date.now(), 6)); + const to = formatISO(Date.now()); + const operationStatsResult = await readOperationsStats( + { + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + period: { + from, + to, + }, + }, + token + ); + + 
expect(operationStatsResult.body.errors).not.toBeDefined(); + + const operationsStats = operationStatsResult.body.data!.operationsStats; + + // We sent a single operation (multiple times) + expect(operationsStats.operations.nodes).toHaveLength(1); + + const op = operationsStats.operations.nodes[0]; + + expect(op.count).toEqual(totalAmount); + expect(op.document).toMatch('ping'); + expect(op.operationHash).toBeDefined(); + expect(op.duration.p75).toEqual(200); + expect(op.duration.p90).toEqual(200); + expect(op.duration.p95).toEqual(200); + expect(op.duration.p99).toEqual(200); + expect(op.kind).toEqual('query'); + expect(op.name).toMatch('ping'); + expect(op.percentage).toBeGreaterThan(99); +}); + +test('check usage from two selected targets', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const staging = projectResult.body.data!.createProject.createdTarget; + + const productionTargetResult = await createTarget( + { + name: 'production', + organization: org.cleanId, + project: project.cleanId, + }, + owner_access_token + ); + + const production = + productionTargetResult.body.data!.createTarget.createdTarget; + + const stagingTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: staging.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + const productionTokenResult 
= await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: production.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(stagingTokenResult.body.errors).not.toBeDefined(); + expect(productionTokenResult.body.errors).not.toBeDefined(); + + const tokenForStaging = stagingTokenResult.body.data!.createToken.secret; + const tokenForProduction = + productionTokenResult.body.data!.createToken.secret; + + const schemaPublishResult = await publishSchema( + { + author: 'Kamil', + commit: 'usage-check-2', + sdl: `type Query { ping: String me: String }`, + }, + tokenForStaging + ); + + expect(schemaPublishResult.body.errors).not.toBeDefined(); + expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual( + true + ); + + const targetValidationResult = await setTargetValidation( + { + enabled: true, + organization: org.cleanId, + project: project.cleanId, + target: staging.cleanId, + }, + { + authToken: owner_access_token, + } + ); + + expect(targetValidationResult.body.errors).not.toBeDefined(); + expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual( + true + ); + expect( + targetValidationResult.body.data!.setTargetValidation.percentage + ).toEqual(0); + expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual( + 30 + ); + + const collectResult = await collect({ + operations: [ + { + timestamp: Date.now(), + operation: 'query ping { ping }', + operationName: 'ping', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + metadata: {}, + }, + { + timestamp: Date.now(), + operation: 'query me { me }', + operationName: 'me', + fields: ['Query', 'Query.me'], + execution: { + ok: true, + duration: 200000000, 
+ errorsTotal: 0, + }, + }, + { + timestamp: Date.now(), + operation: 'query me { me }', + operationName: 'me', + fields: ['Query', 'Query.me'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + }, + ], + token: tokenForProduction, // put collected operation in production + }); + + expect(collectResult.status).toEqual(200); + + await waitFor(22_000); + + // should not be breaking because the field is unused on staging + const unusedCheckResult = await checkSchema( + { + sdl: `type Query { me: String }`, // ping is used but on production + }, + tokenForStaging + ); + expect(unusedCheckResult.body.errors).not.toBeDefined(); + expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual( + 'SchemaCheckSuccess' + ); + + // Now switch to using checking both staging and production + + const updateValidationResult = await updateTargetValidationSettings( + { + organization: org.cleanId, + project: project.cleanId, + target: staging.cleanId, + percentage: 50, // Out of 3 requests, 1 is for Query.me, 2 are done for Query.me so it's 1/3 = 33.3% + period: 30, + targets: [production.id, staging.id], + }, + { + authToken: owner_access_token, + } + ); + + expect(updateValidationResult.body.errors).not.toBeDefined(); + expect( + updateValidationResult.body.data!.updateTargetValidationSettings.percentage + ).toEqual(50); + expect( + updateValidationResult.body.data!.updateTargetValidationSettings.period + ).toEqual(30); + expect( + updateValidationResult.body.data!.updateTargetValidationSettings.targets + ).toHaveLength(2); + + // should be non-breaking because the field is used in production and we are checking staging and production now + // and it used in less than 50% of traffic + const usedCheckResult = await checkSchema( + { + sdl: `type Query { me: String }`, // ping is used on production and we do check production now + }, + tokenForStaging + ); + + if ( + usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckSuccess' + ) { + 
throw new Error( + `Expected SchemaCheckSuccess, got ${ + usedCheckResult.body.data!.schemaCheck.__typename + }` + ); + } + + expect(usedCheckResult.body.data!.schemaCheck.valid).toEqual(true); + expect(usedCheckResult.body.errors).not.toBeDefined(); +}); + +test('number of produced and collected operations should match', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); + + const org = orgResult.body.data!.createOrganization.organization; + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + const tokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [OrganizationAccessScope.Read], + projectScopes: [ProjectAccessScope.Read], + targetScopes: [ + TargetAccessScope.Read, + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + + expect(tokenResult.body.errors).not.toBeDefined(); + + const token = tokenResult.body.data!.createToken.secret; + + const batchSize = 10; + const totalAmount = 10_000; + for await (const i of new Array(totalAmount / batchSize) + .fill(null) + .map((_, i) => i)) { + await sendBatch( + batchSize, + i % 2 === 0 + ? 
{ + operation: 'query ping { ping }', + operationName: 'ping', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + } + : { + operation: 'query ping { ping }', + operationName: 'ping', + fields: ['Query', 'Query.ping'], + execution: { + ok: true, + duration: 200000000, + errorsTotal: 0, + }, + metadata: { + client: { + name: 'web', + version: '1.2.3', + }, + }, + }, + token + ); + } + + await waitFor(5_000); + + const result = await clickHouseQuery<{ + target: string; + client_name: string | null; + hash: string; + total: number; + }>(` + SELECT + target, client_name, hash, sum(total) as total + FROM client_names_daily + WHERE + timestamp >= subtractDays(now(), 30) + AND timestamp <= now() + GROUP BY target, client_name, hash + `); + + expect(result.rows).toEqual(2); + expect(result.data).toContainEqual( + expect.objectContaining({ + target: target.id, + client_name: 'web', + hash: expect.any(String), + total: expect.stringMatching('5000'), + }) + ); + expect(result.data).toContainEqual( + expect.objectContaining({ + target: target.id, + client_name: '', + hash: expect.any(String), + total: expect.stringMatching('5000'), + }) + ); +}); diff --git a/integration-tests/tests/cli/schema.spec.ts b/integration-tests/tests/cli/schema.spec.ts new file mode 100644 index 000000000..cb587ca5b --- /dev/null +++ b/integration-tests/tests/cli/schema.spec.ts @@ -0,0 +1,76 @@ +import { TargetAccessScope, ProjectType } from '@app/gql/graphql'; +import { schemaPublish, schemaCheck } from '../../testkit/cli'; +import { authenticate } from '../../testkit/auth'; +import { + createOrganization, + joinOrganization, + createProject, + createToken, +} from '../../testkit/flow'; + +test('can publish and check a schema with target:registry:read access', async () => { + const { access_token: owner_access_token } = await authenticate('main'); + const orgResult = await createOrganization( + { + name: 'foo', + }, + owner_access_token + ); 
+ const org = orgResult.body.data!.createOrganization.organization; + const code = org.inviteCode; + + // Join + const { access_token: member_access_token } = await authenticate('extra'); + await joinOrganization(code, member_access_token); + + const projectResult = await createProject( + { + organization: org.cleanId, + type: ProjectType.Single, + name: 'foo', + }, + owner_access_token + ); + + const project = projectResult.body.data!.createProject.createdProject; + const target = projectResult.body.data!.createProject.createdTarget; + + // Create a token with write rights + const writeTokenResult = await createToken( + { + name: 'test', + organization: org.cleanId, + project: project.cleanId, + target: target.cleanId, + organizationScopes: [], + projectScopes: [], + targetScopes: [ + TargetAccessScope.RegistryRead, + TargetAccessScope.RegistryWrite, + ], + }, + owner_access_token + ); + expect(writeTokenResult.body.errors).not.toBeDefined(); + const writeToken = writeTokenResult.body.data!.createToken.secret; + + await schemaPublish([ + '--token', + writeToken, + '--author', + 'Kamil', + '--commit', + 'abc123', + 'fixtures/init-schema.graphql', + ]); + + await schemaCheck([ + '--token', + writeToken, + 'fixtures/nonbreaking-schema.graphql', + ]); + + await expect( + schemaCheck(['--token', writeToken, 'fixtures/breaking-schema.graphql']) + ).rejects.toThrowError('EXIT: 1'); +}); diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 000000000..3ff168210 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,33 @@ +import { resolve, dirname } from 'path'; +import { readFileSync } from 'fs'; +import { fileURLToPath } from 'url'; +import { pathsToModuleNameMapper } from 'ts-jest'; + +const ROOT_DIR = dirname(fileURLToPath(import.meta.url)); +const TSCONFIG = resolve(ROOT_DIR, 'tsconfig.json'); +const tsconfig = JSON.parse(readFileSync(TSCONFIG, 'utf-8')); + +export default { + transform: { '^.+\\.tsx?$': 'ts-jest' }, + extensionsToTreatAsEsm: ['.ts', 
'.tsx'], + testEnvironment: 'node', + rootDir: ROOT_DIR, + globals: { + 'ts-jest': { + diagnostics: false, + tsconfig: TSCONFIG, + useESM: true, + }, + }, + restoreMocks: true, + reporters: ['default'], + modulePathIgnorePatterns: ['dist', 'integration-tests', 'tmp'], + moduleNameMapper: { + ...pathsToModuleNameMapper(tsconfig.compilerOptions.paths, { + prefix: `${ROOT_DIR}/`, + }), + '^(\\.{1,2}/.*)\\.js$': '$1', + }, + setupFiles: [], + collectCoverage: false, +}; diff --git a/package.json b/package.json new file mode 100644 index 000000000..7021b33ba --- /dev/null +++ b/package.json @@ -0,0 +1,108 @@ +{ + "name": "graphql-hive", + "type": "module", + "private": true, + "repository": { + "type": "git", + "url": "kamilkisiela/graphql-hive" + }, + "author": { + "email": "contact@the-guild.dev", + "name": "The Guild", + "url": "https://the-guild.dev" + }, + "license": "MIT", + "scripts": { + "seed": "node scripts/seed-local-env.js", + "postinstall": "husky install && patch-package && node ./scripts/patch-manifests.js && node ./scripts/sync-env-files.js && node ./scripts/turborepo-cleanup.js && node ./scripts/turborepo-setup.js", + "pre-commit": "lint-staged", + "prerelease": "yarn build:libraries", + "release": "changeset publish", + "test": "jest", + "lint": "eslint --ignore-path .gitignore \"packages/**/*.{ts,tsx}\"", + "format": "prettier --write .", + "setup": "yarn workspace @hive/storage run setup", + "generate": "yarn workspace @hive/storage run db:generate && yarn graphql:generate", + "graphql:generate": "graphql-codegen", + "typecheck": "yarn turbo typecheck --color", + "build": "yarn graphql:generate && yarn turbo build --color", + "build:libraries": "yarn graphql:generate && yarn turbo build --filter=./packages/libraries/* --color", + "build:web": "yarn graphql:generate && yarn turbo build --filter=./packages/web/* --color", + "build:services": "yarn graphql:generate && yarn turbo build --filter=./packages/services/* --color", + "build:local-cdn": "yarn 
graphql:generate && yarn turbo build-local --color", + "env:sync": "node ./scripts/sync-env-files.js", + "turbo": "env-cmd turbo run" + }, + "devDependencies": { + "@changesets/cli": "2.17.0", + "@graphql-codegen/add": "3.1.1", + "@graphql-codegen/cli": "2.6.2", + "@graphql-codegen/gql-tag-operations-preset": "1.4.0", + "@graphql-codegen/graphql-modules-preset": "2.3.11", + "@graphql-codegen/typed-document-node": "2.2.11", + "@graphql-codegen/typescript": "2.4.11", + "@graphql-codegen/typescript-graphql-request": "4.4.8", + "@graphql-codegen/typescript-operations": "2.4.0", + "@graphql-codegen/typescript-resolvers": "2.6.4", + "@swc/core": "1.2.185", + "@types/jest": "27.5.1", + "@types/lru-cache": "7.6.1", + "@types/node": "16.11.22", + "@typescript-eslint/eslint-plugin": "5.23.0", + "@typescript-eslint/parser": "5.23.0", + "bob-the-bundler": "1.7.2", + "env-cmd": "10.1.0", + "eslint": "8.15.0", + "eslint-plugin-import": "2.26.0", + "fs-extra": "10.1.0", + "graphql": "16.5.0", + "husky": "7.0.4", + "jest": "28.1.0", + "lint-staged": "11.2.6", + "patch-package": "6.4.7", + "prettier": "2.4.1", + "pretty-quick": "3.1.3", + "rimraf": "3.0.2", + "ts-jest": "28.0.2", + "tsup": "5.12.7", + "turbo": "1.2.6", + "typescript": "4.6.4" + }, + "workspaces": { + "packages": [ + "packages/services/*", + "packages/web/*", + "packages/libraries/*", + "integration-tests/" + ] + }, + "resolutions": { + "@types/react": "17.0.45", + "@urql/core": "2.3.6", + "graphql": "16.5.0", + "typescript": "4.6.4" + }, + "prettier": { + "trailingComma": "es5", + "semi": true, + "singleQuote": true + }, + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "**/*": [ + "pretty-quick --staged" + ], + "**/*.ts": [ + "eslint" + ] + }, + "engines": { + "node": ">=16" + }, + "packageManager": "yarn@1.22.18", + "version": "0.0.0" +} diff --git a/packages/libraries/cli/.gitignore b/packages/libraries/cli/.gitignore new file mode 100644 index 000000000..b5decdf66 --- 
/dev/null +++ b/packages/libraries/cli/.gitignore @@ -0,0 +1,10 @@ +*-debug.log +*-error.log +/.nyc_output +/dist +/lib +/package-lock.json +/tmp +node_modules +src/sdk.ts +schema.graphql \ No newline at end of file diff --git a/packages/libraries/cli/.npmignore b/packages/libraries/cli/.npmignore new file mode 100644 index 000000000..9458ec818 --- /dev/null +++ b/packages/libraries/cli/.npmignore @@ -0,0 +1,2 @@ +src +examples diff --git a/packages/libraries/cli/CHANGELOG.md b/packages/libraries/cli/CHANGELOG.md new file mode 100644 index 000000000..12d0dadca --- /dev/null +++ b/packages/libraries/cli/CHANGELOG.md @@ -0,0 +1,302 @@ +# @graphql-hive/cli + +## 0.14.3 + +### Patch Changes + +- 9e487129: Bump + +## 0.14.2 + +### Patch Changes + +- c87df3ad: Bump + +## 0.14.1 + +### Patch Changes + +- 11958e9d: Add update command to self-update Hive CLI + +## 0.14.0 + +### Minor Changes + +- 6290ec23: Introduce operations:check to validate GraphQL Operations against latest schema +- 6290ec23: Rename operation:publish command to operations:publish + +## 0.13.0 + +### Minor Changes + +- d5db6070: Support URLs + +## 0.12.0 + +### Minor Changes + +- d9fbd878: Add --github flag to schema:publish command + +## 0.11.0 + +### Minor Changes + +- ac9b868c: Support GraphQL v16 +- e03185a7: GitHub Application + +### Patch Changes + +- Updated dependencies [ac9b868c] + - @graphql-hive/core@0.2.0 + +## 0.10.0 + +### Minor Changes + +- c5bfa4c9: Add a new `metadata` flag for publishing schema metadata (JSON) to Hive. + + The `--metadata` can contain anything you wish to have attached to your GraphQL schema, and can support your runtime needs. + + You can either specify a path to a file: `--metadata my-file.json`, or an inline JSON object: `--metadata '{"test": 1}'`. + + Metadata published to Hive will be available as part of Hive CDN, under `/metadata` route. 
+ +## 0.9.6 + +### Patch Changes + +- 903edf84: Bump + +## 0.9.5 + +### Patch Changes + +- ccb93298: Remove content-encoding header and improve error logs + +## 0.9.4 + +### Patch Changes + +- 28bc8af3: Fix version header + +## 0.9.3 + +### Patch Changes + +- 3a435baa: Show one value of x-request-id + +## 0.9.2 + +### Patch Changes + +- 79d4b4c2: fix(deps): update envelop monorepo + +## 0.9.1 + +### Patch Changes + +- 016dd92c: handle missing service name argument for federation and stitching projects + +## 0.9.0 + +### Minor Changes + +- 7eca7f0: Display access to commands + +## 0.8.1 + +### Patch Changes + +- 273f096: show registry url + +## 0.8.0 + +### Minor Changes + +- 91a6957: Allow to update url of a service + +## 0.7.0 + +### Minor Changes + +- 6f204be: Display token info + +## 0.6.4 + +### Patch Changes + +- 52ab1f2: Find .git directory when CLI is installed globally + +## 0.6.3 + +### Patch Changes + +- 73a840d: Warn about missing git and make git optional + +## 0.6.2 + +### Patch Changes + +- df6c501: Do not exit with 0 when forceSafe + +## 0.6.1 + +### Patch Changes + +- aff0857: Throw on empty schema and use x-request-id as reference + +## 0.6.0 + +### Minor Changes + +- 4647d25: Add --forceSafe flag to mark the check as non-breaking regardless of breaking changes + +## 0.5.0 + +### Minor Changes + +- 0e712c7: Update normalization logic + +### Patch Changes + +- 0e712c7: Support --url + +## 0.4.9 + +### Patch Changes + +- Updated dependencies [d7348a3] +- Updated dependencies [d7348a3] + - @graphql-hive/core@0.1.0 + +## 0.4.8 + +### Patch Changes + +- 6214042: Fix auto-update error related to oclif + +## 0.4.7 + +### Patch Changes + +- bda322c: Add --require flag and normalize schema printing + +## 0.4.6 + +### Patch Changes + +- 5aa5e93: Bump + +## 0.4.5 + +### Patch Changes + +- 968614d: Fix persisting the same query twice +- 968614d: Add auto-updates and new-version warnings + +## 0.4.4 + +### Patch Changes + +- 1a16360: Send GraphQL Client name 
and version + +## 0.4.3 + +### Patch Changes + +- 41a9117: Fix an issue when publishing a schema for the first time + +## 0.4.2 + +### Patch Changes + +- c6ef3d2: Bob update +- 4224cb9: Support HIVE\_\* env variables +- Updated dependencies [c6ef3d2] + - @graphql-hive/core@0.0.5 + +## 0.4.1 + +### Patch Changes + +- aa12cdc: Use process.cwd() +- aa12cdc: Use HIVE_SPACE instead of REGISTRY_KEY env var + +## 0.4.0 + +### Minor Changes + +- e8dc8c6: Move file to be an argument, fix config + +## 0.3.2 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes + +## 0.3.1 + +### Patch Changes + +- Updated dependencies [4a7c569] + - @graphql-hive/core@0.0.4 + +## 0.3.0 + +### Minor Changes + +- 34cff78: Added support for specifying multiple configs in hive json file + +## 0.2.1 + +### Patch Changes + +- e257a0d: Support relay-like outputs of persisted operations +- bb5b3c4: Preparations for persisted operations in Lance + +## 0.2.0 + +### Minor Changes + +- acab74b: Added support for persisted operations - Changes made in API, APP, CLI, Server, Storage + +## 0.1.1 + +### Patch Changes + +- 79fe734: Set default registry url + +## 0.1.0 + +### Minor Changes + +- 078e758: Token per Target +- 1dd9cdb: --file flag is now replaced with a positional arg at the end, comments in graphql sdl files are now converted to descriptions, docs are updated to mention wildcard file uploads + +### Patch Changes + +- 60cd35d: Use default endpoint + +## 0.0.5 + +### Patch Changes + +- d433269: Fixes + +## 0.0.4 + +### Patch Changes + +- d64a3c5: Target 2017 + +## 0.0.3 + +### Patch Changes + +- 7e88e71: bump + +## 0.0.2 + +### Patch Changes + +- b2d686e: bump diff --git a/packages/libraries/cli/README.md b/packages/libraries/cli/README.md new file mode 100644 index 000000000..6e7d185a3 --- /dev/null +++ b/packages/libraries/cli/README.md @@ -0,0 +1,301 @@ +# GraphQL Hive CLI + +A CLI util to manage and control your GraphQL Hive + 
+[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io) +[![Version](https://img.shields.io/npm/v/@graphql-hive/cli.svg)](https://npmjs.org/package/@graphql-hive/cli) +[![Downloads/week](https://img.shields.io/npm/dw/@graphql-hive/cli.svg)](https://npmjs.org/package/@graphql-hive/cli) + + + +- [GraphQL Hive CLI](#graphql-hive-cli) +- [Usage](#usage) +- [Commands](#commands) +- [Config](#config) + + +# Usage + + + +```sh-session +$ npm install -g @graphql-hive/cli +$ hive COMMAND +running command... +$ hive (--version) +@graphql-hive/cli/0.14.0 darwin-arm64 node-v16.15.0 +$ hive --help [COMMAND] +USAGE + $ hive COMMAND +... +``` + + + +# Commands + + + +- [`hive config:delete KEY`](#hive-configdelete-key) +- [`hive config:get KEY`](#hive-configget-key) +- [`hive config:reset`](#hive-configreset) +- [`hive config:set KEY VALUE`](#hive-configset-key-value) +- [`hive help [COMMAND]`](#hive-help-command) +- [`hive operations:check FILE`](#hive-operationscheck-file) +- [`hive operations:publish FILE`](#hive-operationspublish-file) +- [`hive schema:check FILE`](#hive-schemacheck-file) +- [`hive schema:publish FILE`](#hive-schemapublish-file) +- [`hive update [CHANNEL]`](#hive-update-channel) +- [`hive whoami`](#hive-whoami) + +## `hive config:delete KEY` + +deletes specific cli configuration + +``` +USAGE + $ hive config:delete [KEY] + +ARGUMENTS + KEY config key + +DESCRIPTION + deletes specific cli configuration +``` + +## `hive config:get KEY` + +prints specific cli configuration + +``` +USAGE + $ hive config:get [KEY] + +ARGUMENTS + KEY config key + +DESCRIPTION + prints specific cli configuration +``` + +## `hive config:reset` + +resets local cli configuration + +``` +USAGE + $ hive config:reset + +DESCRIPTION + resets local cli configuration +``` + +## `hive config:set KEY VALUE` + +updates specific cli configuration + +``` +USAGE + $ hive config:set [KEY] [VALUE] + +ARGUMENTS + KEY config key + VALUE config value + +DESCRIPTION + 
updates specific cli configuration +``` + +## `hive help [COMMAND]` + +Display help for hive. + +``` +USAGE + $ hive help [COMMAND] [-n] + +ARGUMENTS + COMMAND Command to show help for. + +FLAGS + -n, --nested-commands Include all nested commands in the output. + +DESCRIPTION + Display help for hive. +``` + +_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v5.1.12/src/commands/help.ts)_ + +## `hive operations:check FILE` + +checks operations against a published schema + +``` +USAGE + $ hive operations:check [FILE] [--registry ] [--token ] [--require ] + +ARGUMENTS + FILE Glob pattern to find the operations + +FLAGS + --registry= registry address + --require=... [default: ] Loads specific require.extensions before running the command + --token= api token + +DESCRIPTION + checks operations against a published schema +``` + +## `hive operations:publish FILE` + +saves operations to the store + +``` +USAGE + $ hive operations:publish [FILE] [--registry ] [--token ] [--require ] + +ARGUMENTS + FILE Glob pattern to find the operations + +FLAGS + --registry= registry address + --require=... [default: ] Loads specific require.extensions before running the codegen and reading the + configuration + --token= api token + +DESCRIPTION + saves operations to the store +``` + +## `hive schema:check FILE` + +checks schema + +``` +USAGE + $ hive schema:check [FILE] [--service ] [--registry ] [--token ] [--forceSafe] [--github] + [--require ] + +ARGUMENTS + FILE Path to the schema file(s) + +FLAGS + --forceSafe mark the check as safe, breaking changes are expected + --github Connect with GitHub Application + --registry= registry address + --require=... 
[default: ] Loads specific require.extensions before running the codegen and reading the + configuration + --service= service name (only for distributed schemas) + --token= api token + +DESCRIPTION + checks schema +``` + +## `hive schema:publish FILE` + +publishes schema + +``` +USAGE + $ hive schema:publish [FILE] [--service ] [--url ] [--metadata ] [--registry ] + [--token ] [--author ] [--commit ] [--github] [--force] [--require ] + +ARGUMENTS + FILE Path to the schema file(s) + +FLAGS + --author= author of the change + --commit= associated commit sha + --force force publish even on breaking changes + --github Connect with GitHub Application + --metadata= additional metadata to attach to the GraphQL schema. This can be a string with a valid JSON, or + a path to a file containing a valid JSON + --registry= registry address + --require=... [default: ] Loads specific require.extensions before running the codegen and reading the + configuration + --service= service name (only for distributed schemas) + --token= api token + --url= service url (only for distributed schemas) + +DESCRIPTION + publishes schema +``` + +## `hive update [CHANNEL]` + +update the hive CLI + +``` +USAGE + $ hive update [CHANNEL] [-a] [-v | -i] [--force] + +FLAGS + -a, --available Install a specific version. + -i, --interactive Interactively select version to install. This is ignored if a channel is provided. + -v, --version= Install a specific version. + --force Force a re-download of the requested version. 
+ +DESCRIPTION + update the hive CLI + +EXAMPLES + Update to the stable channel: + + $ hive update stable + + Update to a specific version: + + $ hive update --version 1.0.0 + + Interactively select version: + + $ hive update --interactive + + See available versions: + + $ hive update --available +``` + +_See code: [@oclif/plugin-update](https://github.com/oclif/plugin-update/blob/v3.0.0/src/commands/update.ts)_ + +## `hive whoami` + +checks schema + +``` +USAGE + $ hive whoami [--registry ] [--token ] + +FLAGS + --registry= registry address + --token= api token + +DESCRIPTION + checks schema +``` + + + + + +# Config + +In addition to using the CLI args, you can also define your configuration in a JSON file which the CLI would pick up when it runs. + +You can use the `HIVE_CONFIG` environment variable to define the path to the JSON file as follows: + +`HIVE_CONFIG=/path/to/hive.json hive schema:publish --author Vignesh path/to/file.gql` + +Note that the CLI args will override the values in config if both are specified. 
+ +This is how the structure of the config file should look like: + +```json +{ + "registry": "", + "token": "" +} +``` + + diff --git a/packages/libraries/cli/bin/dev b/packages/libraries/cli/bin/dev new file mode 100755 index 000000000..02b50d62a --- /dev/null +++ b/packages/libraries/cli/bin/dev @@ -0,0 +1,17 @@ +#!/usr/bin/env node + +const oclif = require('@oclif/core'); + +const path = require('path'); +const project = path.join(__dirname, '..', 'tsconfig.json'); + +// In dev mode -> use ts-node and dev plugins +process.env.NODE_ENV = 'development'; + +require('ts-node').register({ project }); + +// In dev mode, always show stack traces +oclif.settings.debug = true; + +// Start the CLI +oclif.run().then(oclif.flush).catch(oclif.Errors.handle); diff --git a/packages/libraries/cli/bin/dev.cmd b/packages/libraries/cli/bin/dev.cmd new file mode 100644 index 000000000..077b57ae7 --- /dev/null +++ b/packages/libraries/cli/bin/dev.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\dev" %* \ No newline at end of file diff --git a/packages/libraries/cli/bin/run b/packages/libraries/cli/bin/run new file mode 100755 index 000000000..e0a1f504d --- /dev/null +++ b/packages/libraries/cli/bin/run @@ -0,0 +1,6 @@ +#!/usr/bin/env node + +require('@oclif/core') + .run() + .then(require('@oclif/core/flush')) + .catch(require('@oclif/core/handle')); diff --git a/packages/libraries/cli/bin/run.cmd b/packages/libraries/cli/bin/run.cmd new file mode 100644 index 000000000..968fc3075 --- /dev/null +++ b/packages/libraries/cli/bin/run.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\run" %* diff --git a/packages/libraries/cli/examples/aws.graphql b/packages/libraries/cli/examples/aws.graphql new file mode 100644 index 000000000..629515488 --- /dev/null +++ b/packages/libraries/cli/examples/aws.graphql @@ -0,0 +1,9 @@ +type Query { + test: Post! +} + +type Post @model { + id: ID! # id: ID! is a required attribute. + title: String! + tags: [String!]! 
+} diff --git a/packages/libraries/cli/examples/federation.products.graphql b/packages/libraries/cli/examples/federation.products.graphql new file mode 100644 index 000000000..37e2b7152 --- /dev/null +++ b/packages/libraries/cli/examples/federation.products.graphql @@ -0,0 +1,60 @@ +enum CURRENCY_CODE { + USD +} + +type Department { + category: ProductCategory + url: String +} + +type Money { + amount: Float + currencyCode: CURRENCY_CODE +} + +""" +Here are some helpful details about your type +""" +type Price { + cost: Money + + """ + A number between 0 and 1 signifying the % discount + """ + deal: Float + dealSavings: Money +} + +""" +This is an Entity, docs:https://www.apollographql.com/docs/federation/entities/ +You will need to define a __resolveReference resolver for the type you define, docs: https://www.apollographql.com/docs/federation/entities/#resolving +""" +type Product @key(fields: "id") { + id: ID! + title: String + url: String + description: String + price: Price + salesRank(category: ProductCategory = ALL): Int + salesRankOverall: Int + salesRankInCategory: Int + category: ProductCategory + images(size: Int = 1000): [String] + primaryImage(size: Int = 1000): String +} + +enum ProductCategory { + ALL + GIFT_CARDS + ELECTRONICS + CAMERA_N_PHOTO + VIDEO_GAMES + BOOKS + CLOTHING +} + +extend type Query { + bestSellers(category: ProductCategory = ALL): [Product] + categories: [Department] + product(id: ID!): Product +} diff --git a/packages/libraries/cli/examples/federation.reviews.graphql b/packages/libraries/cli/examples/federation.reviews.graphql new file mode 100644 index 000000000..043ce5981 --- /dev/null +++ b/packages/libraries/cli/examples/federation.reviews.graphql @@ -0,0 +1,20 @@ +extend type Product @key(fields: "id") { + id: ID! 
@external + reviews: [Review] + reviewSummary: ReviewSummary +} + +""" +This is an Entity, docs:https://www.apollographql.com/docs/federation/entities/ +You will need to define a __resolveReference resolver for the type you define, docs: https://www.apollographql.com/docs/federation/entities/#resolving +""" +type Review @key(fields: "id") { + id: ID! + rating: Float + content: String +} + +type ReviewSummary { + totalReviews: Int + averageRating: Float +} diff --git a/packages/libraries/cli/examples/mesh/jsonSchemaBundle.json b/packages/libraries/cli/examples/mesh/jsonSchemaBundle.json new file mode 100644 index 000000000..e9d803ba2 --- /dev/null +++ b/packages/libraries/cli/examples/mesh/jsonSchemaBundle.json @@ -0,0 +1,76 @@ +{ + "name": "ExampleService", + "baseUrl": "http://ip-api.com/", + "operations": [ + { + "type": "query", + "field": "geoData", + "path": "/json", + "responseSample": "./exampleResponse.json" + } + ], + "referencedSchema": { + "$ref": "#/definitions/_schema", + "definitions": { + "query_geoData_lat": { "type": "number", "title": "query_geoData_lat" }, + "query_geoData": { + "type": "object", + "properties": { + "query": { "type": "string" }, + "status": { "type": "string" }, + "country": { "type": "string" }, + "countryCode": { "type": "string" }, + "region": { "type": "string" }, + "regionName": { "type": "string" }, + "city": { "type": "string" }, + "zip": { "type": "string" }, + "lat": { "$ref": "#/definitions/query_geoData_lat" }, + "lon": { "$ref": "#/definitions/query_geoData_lat" }, + "timezone": { "type": "string" }, + "isp": { "type": "string" }, + "org": { "type": "string" }, + "as": { "type": "string" } + }, + "additionalProperties": false, + "title": "query_geoData", + "examples": [ + { + "query": "24.48.0.1", + "status": "success", + "country": "Canada", + "countryCode": "CA", + "region": "QC", + "regionName": "Quebec", + "city": "Montreal", + "zip": "H3G", + "lat": 45.4995, + "lon": -73.5848, + "timezone": "America/Toronto", + 
"isp": "Le Groupe Videotron Ltee", + "org": "Videotron Ltee", + "as": "AS5769 Videotron Telecom Ltee" + } + ] + }, + "Query": { + "type": "object", + "title": "Query", + "properties": { "geoData": { "$ref": "#/definitions/query_geoData" } } + }, + "QueryInput": { + "type": "object", + "title": "QueryInput", + "properties": {} + }, + "_schema": { + "type": "object", + "title": "_schema", + "properties": { + "query": { "$ref": "#/definitions/Query" }, + "queryInput": { "$ref": "#/definitions/QueryInput" } + }, + "required": ["query"] + } + } + } +} diff --git a/packages/libraries/cli/examples/operation.graphql b/packages/libraries/cli/examples/operation.graphql new file mode 100644 index 000000000..ef733321d --- /dev/null +++ b/packages/libraries/cli/examples/operation.graphql @@ -0,0 +1,3 @@ +query getFoo { + fooa +} diff --git a/packages/libraries/cli/examples/single.graphql b/packages/libraries/cli/examples/single.graphql new file mode 100644 index 000000000..d72f4898c --- /dev/null +++ b/packages/libraries/cli/examples/single.graphql @@ -0,0 +1,4 @@ +type Query { + foo: Int! + bar: String +} diff --git a/packages/libraries/cli/examples/stitching.graphql b/packages/libraries/cli/examples/stitching.graphql new file mode 100644 index 000000000..2832c02c6 --- /dev/null +++ b/packages/libraries/cli/examples/stitching.graphql @@ -0,0 +1,15 @@ +type Post { + id: ID! + message: String! + author: User +} + +type User { + id: ID! + posts: [Post] +} + +type Query { + post(id: ID!): Post + users(ids: [ID!]!): [User]! 
@merge(keyField: "id") +} diff --git a/packages/libraries/cli/hive.json b/packages/libraries/cli/hive.json new file mode 100644 index 000000000..072102ee4 --- /dev/null +++ b/packages/libraries/cli/hive.json @@ -0,0 +1,4 @@ +{ + "registry": "http://localhost:4000/graphql", + "token": "" +} diff --git a/packages/libraries/cli/package.json b/packages/libraries/cli/package.json new file mode 100644 index 000000000..c0ffaa0d8 --- /dev/null +++ b/packages/libraries/cli/package.json @@ -0,0 +1,90 @@ +{ + "name": "@graphql-hive/cli", + "description": "A CLI util to manage and control your GraphQL Hive", + "version": "0.14.3", + "author": { + "email": "contact@the-guild.dev", + "name": "The Guild", + "url": "https://the-guild.dev" + }, + "repository": { + "type": "git", + "url": "kamilkisiela/graphql-hive", + "directory": "packages/libraries/cli" + }, + "license": "MIT", + "keywords": [ + "graphql" + ], + "bin": { + "hive": "./bin/run" + }, + "files": [ + "/bin", + "/dist", + "/npm-shrinkwrap.json", + "/oclif.manifest.json" + ], + "scripts": { + "start": "./bin/dev", + "build": "tsc", + "postpack": "rm -f oclif.manifest.json", + "prepack": "rimraf lib && tsc -b && oclif manifest && oclif readme", + "version": "oclif readme && git add README.md", + "schema:check:single": "yarn start schema:check examples/single.graphql", + "schema:check:stitching": "yarn start schema:check examples/stitching.graphql --service posts", + "schema:check:federation": "yarn start schema:check examples/federation.graphql --service reviews", + "schema:publish:federation": "yarn start schema:publish --service reviews examples/federation.reviews.graphql" + }, + "dependencies": { + "@graphql-hive/core": "0.2.0", + "@graphql-inspector/core": "~3.1.2", + "@graphql-tools/code-file-loader": "~7.2.14", + "@graphql-tools/graphql-file-loader": "~7.3.11", + "@graphql-tools/json-file-loader": "~7.3.11", + "@graphql-tools/url-loader": "~7.9.15", + "@graphql-tools/load": "~7.5.10", + "@graphql-tools/utils": 
"8.2.4", + "@oclif/core": "^1.7.0", + "@oclif/plugin-help": "5.1.12", + "@oclif/plugin-update": "3.0.0", + "colors": "1.4.0", + "env-ci": "7.1.0", + "git-parse": "2.1.1", + "graphql-request": "4.1.0", + "graphql-tag": "2.12.6", + "log-symbols": "4.1.0", + "mkdirp": "1.0.4", + "tslib": "2.3.1", + "ts-node": "10.7.0" + }, + "peerDependencies": { + "graphql": "^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "devDependencies": { + "oclif": "^3.0.1", + "@types/env-ci": "3.1.1", + "@types/git-parse": "2.1.2", + "@types/mkdirp": "1.0.2" + }, + "oclif": { + "commands": "./dist/commands", + "bin": "hive", + "plugins": [ + "@oclif/plugin-help", + "@oclif/plugin-update" + ], + "update": { + "s3": { + "bucket": "graphql-hive-cli-test" + } + } + }, + "publishConfig": { + "registry": "https://registry.npmjs.org", + "access": "public" + }, + "engines": { + "node": ">=14.0.0" + } +} diff --git a/packages/libraries/cli/src/base-command.ts b/packages/libraries/cli/src/base-command.ts new file mode 100644 index 000000000..a06b9f948 --- /dev/null +++ b/packages/libraries/cli/src/base-command.ts @@ -0,0 +1,154 @@ +import { Command, Config as OclifConfig, Errors } from '@oclif/core'; +import colors from 'colors'; +import symbols from 'log-symbols'; +import { GraphQLClient } from 'graphql-request'; +import { Config } from './helpers/config'; +import { getSdk } from './sdk'; + +export interface UserConfig { + [key: string]: any; + endpoint: string | null; + project: string | null; +} + +export default abstract class extends Command { + protected _userConfig: Config; + + constructor(argv: string[], config: OclifConfig) { + super(argv, config); + + this._userConfig = new Config({ + filepath: process.env.HIVE_CONFIG, + rootDir: process.cwd(), + }); + } + + success(...args: any[]) { + this.log(colors.green(symbols.success), ...args); + } + + fail(...args: any[]) { + this.log(colors.red(symbols.error), ...args); + } + + info(...args: any[]) { + this.log(colors.yellow(symbols.info), ...args); 
+ } + + bolderize(msg: string) { + const findSingleQuotes = /'([^']+)'/gim; + const findDoubleQuotes = /"([^"]+)"/gim; + + return msg + .replace(findSingleQuotes, (_: string, value: string) => + colors.bold(value) + ) + .replace(findDoubleQuotes, (_: string, value: string) => + colors.bold(value) + ); + } + + /** + * Get a value from arguments or flags first, then fallback to config. + * Throw when no there's no value. + * + * @param key + * @param args all arguments or flags + * @param defaultValue default value + * @param message custom error message in case of no value + */ + ensure< + TArgs extends { + [key: string]: any; + }, + TKey extends keyof TArgs + >({ + key, + args, + defaultValue, + message, + env, + }: { + key: TKey; + args: TArgs; + defaultValue?: TArgs[TKey] | null; + message?: string; + env?: string; + }): NonNullable | never { + if (args[key]) { + return args[key]; + } + + if (env && process.env[env]) { + return process.env[env] as TArgs[TKey]; + } + + if (this._userConfig.has(key as string)) { + return this._userConfig.get(key as string); + } + + if (defaultValue) { + return defaultValue; + } + + if (message) { + throw new Errors.CLIError(message); + } + + throw new Errors.CLIError(`Missing "${key}"`); + } + + /** + * Get a value from arguments or flags first, then fallback to config. + * Do NOT throw when there's no value. + * + * @param key + * @param args all arguments or flags + */ + maybe< + TArgs extends { + [key: string]: any; + }, + TKey extends keyof TArgs + >(key: TKey, args: TArgs): TArgs[TKey] | undefined { + if (args[key]) { + return args[key]; + } + + if (this._userConfig.has(key as string)) { + return this._userConfig.get(key as string); + } + } + + cleanRequestId(requestId?: string | null) { + return requestId ? 
requestId.split(',')[0].trim() : undefined; + } + + registryApi(registry: string, token: string) { + return getSdk( + new GraphQLClient(registry, { + headers: { + 'User-Agent': `HiveCLI@${this.config.version}`, + 'X-API-Token': token, + 'graphql-client-name': 'Hive CLI', + 'graphql-client-version': this.config.version, + }, + }) + ); + } + + async require< + TFlags extends { + require: string[]; + [key: string]: any; + } + >(flags: TFlags) { + if (flags.require && flags.require.length > 0) { + await Promise.all( + flags.require.map( + (mod) => import(require.resolve(mod, { paths: [process.cwd()] })) + ) + ); + } + } +} diff --git a/packages/libraries/cli/src/commands/config/delete.ts b/packages/libraries/cli/src/commands/config/delete.ts new file mode 100644 index 000000000..922f7f35d --- /dev/null +++ b/packages/libraries/cli/src/commands/config/delete.ts @@ -0,0 +1,20 @@ +import Command from '../../base-command'; + +export default class DeleteConfig extends Command { + static description = 'deletes specific cli configuration'; + static args = [ + { + name: 'key', + required: true, + description: 'config key', + }, + ]; + + async run() { + const { args } = await this.parse(DeleteConfig); + this._userConfig.set(args.key, args.value); + this.success( + this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`) + ); + } +} diff --git a/packages/libraries/cli/src/commands/config/get.ts b/packages/libraries/cli/src/commands/config/get.ts new file mode 100644 index 000000000..b78124188 --- /dev/null +++ b/packages/libraries/cli/src/commands/config/get.ts @@ -0,0 +1,17 @@ +import Command from '../../base-command'; + +export default class GetConfig extends Command { + static description = 'prints specific cli configuration'; + static args = [ + { + name: 'key', + required: true, + description: 'config key', + }, + ]; + + async run() { + const { args } = await this.parse(GetConfig); + console.dir(this._userConfig.get(args.key)); + } +} diff --git 
a/packages/libraries/cli/src/commands/config/reset.ts b/packages/libraries/cli/src/commands/config/reset.ts new file mode 100644 index 000000000..19ceb3d8c --- /dev/null +++ b/packages/libraries/cli/src/commands/config/reset.ts @@ -0,0 +1,10 @@ +import Command from '../../base-command'; + +export default class ResetConfig extends Command { + static description = 'resets local cli configuration'; + + async run() { + this._userConfig.clear(); + this.success('Config cleared.'); + } +} diff --git a/packages/libraries/cli/src/commands/config/set.ts b/packages/libraries/cli/src/commands/config/set.ts new file mode 100644 index 000000000..49984233d --- /dev/null +++ b/packages/libraries/cli/src/commands/config/set.ts @@ -0,0 +1,25 @@ +import Command from '../../base-command'; + +export default class SetConfig extends Command { + static description = 'updates specific cli configuration'; + static args = [ + { + name: 'key', + required: true, + description: 'config key', + }, + { + name: 'value', + required: true, + description: 'config value', + }, + ]; + + async run() { + const { args } = await this.parse(SetConfig); + this._userConfig.set(args.key, args.value); + this.success( + this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`) + ); + } +} diff --git a/packages/libraries/cli/src/commands/operations/check.graphql b/packages/libraries/cli/src/commands/operations/check.graphql new file mode 100644 index 000000000..b2d1e5677 --- /dev/null +++ b/packages/libraries/cli/src/commands/operations/check.graphql @@ -0,0 +1,5 @@ +query fetchLatestVersion { + latestVersion { + sdl + } +} diff --git a/packages/libraries/cli/src/commands/operations/check.ts b/packages/libraries/cli/src/commands/operations/check.ts new file mode 100644 index 000000000..6bfff2edd --- /dev/null +++ b/packages/libraries/cli/src/commands/operations/check.ts @@ -0,0 +1,142 @@ +import { Flags, Errors } from '@oclif/core'; +import { buildSchema, Source, GraphQLError } from 'graphql'; 
+import { validate, InvalidDocument } from '@graphql-inspector/core'; +import Command from '../../base-command'; +import { loadOperations } from '../../helpers/operations'; + +export default class OperationsCheck extends Command { + static description = 'checks operations against a published schema'; + static flags = { + registry: Flags.string({ + description: 'registry address', + }), + token: Flags.string({ + description: 'api token', + }), + require: Flags.string({ + description: + 'Loads specific require.extensions before running the command', + default: [], + multiple: true, + }), + }; + + static args = [ + { + name: 'file', + required: true, + description: 'Glob pattern to find the operations', + hidden: false, + }, + ]; + + async run() { + try { + const { flags, args } = await this.parse(OperationsCheck); + + await this.require(flags); + + const registry = this.ensure({ + key: 'registry', + args: flags, + defaultValue: 'https://app.graphql-hive.com/registry', + env: 'HIVE_REGISTRY', + }); + const file: string = args.file; + const token = this.ensure({ + key: 'token', + args: flags, + env: 'HIVE_TOKEN', + }); + + const operations = await loadOperations(file, { + normalize: false, + }); + + if (operations.length === 0) { + this.info('No operations found'); + this.exit(0); + return; + } + + const result = await this.registryApi( + registry, + token + ).fetchLatestVersion(); + + const sdl = result.latestVersion.sdl; + + if (!sdl) { + this.error('No schema found'); + } + + const schema = buildSchema(sdl, { + assumeValidSDL: true, + assumeValid: true, + }); + + const invalidOperations = validate( + schema, + operations.map((s) => new Source(s.content, s.location)) + ); + + if (invalidOperations.length === 0) { + this.success('All operations are valid'); + this.exit(0); + return; + } + + this.fail('Some operations are invalid'); + + this.log( + [ + '', + `Total: ${operations.length}`, + `Invalid: ${invalidOperations.length}`, + '', + ].join('\n') + ); + + 
this.printInvalidDocuments(invalidOperations, 'errors'); + } catch (error) { + if (error instanceof Errors.ExitError) { + throw error; + } else { + const parsedError: Error & { response?: any } = + error instanceof Error ? error : new Error(error as string); + this.fail('Failed to validate operations'); + + if ('response' in parsedError) { + this.error(parsedError.response.errors[0].message, { + ref: this.cleanRequestId( + parsedError.response?.headers?.get('x-request-id') + ), + }); + } else { + this.error(parsedError); + } + } + } + } + + private printInvalidDocuments( + invalidDocuments: InvalidDocument[], + listKey: 'errors' | 'deprecated' + ): void { + invalidDocuments.forEach((doc) => { + if (doc.errors.length) { + this.renderErrors(doc.source.name, doc[listKey]).forEach((line) => { + this.log(line); + }); + } + }); + } + + private renderErrors(sourceName: string, errors: GraphQLError[]): string[] { + const errorsAsString = errors + .map((e) => ` - ${this.bolderize(e.message)}`) + .join('\n'); + + return [`ERROR in ${sourceName}:\n`, errorsAsString, '\n\n']; + } +} diff --git a/packages/libraries/cli/src/commands/operations/compare.graphql b/packages/libraries/cli/src/commands/operations/compare.graphql new file mode 100644 index 000000000..ae14652ac --- /dev/null +++ b/packages/libraries/cli/src/commands/operations/compare.graphql @@ -0,0 +1,3 @@ +query comparePersistedOperations($hashes: [String!]!) { + comparePersistedOperations(hashes: $hashes) +} diff --git a/packages/libraries/cli/src/commands/operations/publish.graphql b/packages/libraries/cli/src/commands/operations/publish.graphql new file mode 100644 index 000000000..7d7346bfa --- /dev/null +++ b/packages/libraries/cli/src/commands/operations/publish.graphql @@ -0,0 +1,13 @@ +mutation publishPersistedOperations( + $input: [PublishPersistedOperationInput!]! 
+) { + publishPersistedOperations(input: $input) { + summary { + total + unchanged + } + operations { + operationHash + } + } +} diff --git a/packages/libraries/cli/src/commands/operations/publish.ts b/packages/libraries/cli/src/commands/operations/publish.ts new file mode 100644 index 000000000..be0636469 --- /dev/null +++ b/packages/libraries/cli/src/commands/operations/publish.ts @@ -0,0 +1,126 @@ +import { Flags, Errors } from '@oclif/core'; +import Command from '../../base-command'; +import { loadOperations } from '../../helpers/operations'; + +export default class OperationsPublish extends Command { + static description = 'saves operations to the store'; + static flags = { + registry: Flags.string({ + description: 'registry address', + }), + token: Flags.string({ + description: 'api token', + }), + require: Flags.string({ + description: + 'Loads specific require.extensions before running the codegen and reading the configuration', + default: [], + multiple: true, + }), + }; + + static args = [ + { + name: 'file', + required: true, + description: 'Glob pattern to find the operations', + hidden: false, + }, + ]; + + async run() { + try { + const { flags, args } = await this.parse(OperationsPublish); + + await this.require(flags); + + const registry = this.ensure({ + key: 'registry', + args: flags, + defaultValue: 'https://app.graphql-hive.com/registry', + env: 'HIVE_REGISTRY', + }); + const file: string = args.file; + const token = this.ensure({ + key: 'token', + args: flags, + env: 'HIVE_TOKEN', + }); + + let operations = await loadOperations(file, { + normalize: true, + }); + const collectedOperationsTotal = operations.length; + const noMissingHashes = operations.some((op) => !!op.operationHash); + + if (noMissingHashes) { + const comparisonResult = await this.registryApi( + registry, + token + ).comparePersistedOperations({ + hashes: operations.map((op) => op.operationHash!), + }); + + const operationsToPublish = comparisonResult.comparePersistedOperations; 
+ + operations = operations.filter((op) => + operationsToPublish.includes(op.operationHash!) + ); + } + + const unchangedTotal = collectedOperationsTotal - operations.length; + + if (!operations.length) { + return this.success( + [ + `Nothing to publish`, + '', + ` Total: ${collectedOperationsTotal}`, + ` Unchanged: ${unchangedTotal}`, + '', + ].join('\n') + ); + } + + const result = await this.registryApi( + registry, + token + ).publishPersistedOperations({ + input: operations, + }); + + if (result.publishPersistedOperations) { + const summary = result.publishPersistedOperations.summary; + this.success( + [ + 'Operations successfully published!', + '', + ` Total: ${summary.total}`, + ` Unchanged: ${summary.unchanged}`, + '', + ].join('\n') + ); + } else { + this.error('OOPS! An error occurred in publishing the operation(s)'); + } + } catch (error) { + if (error instanceof Errors.ExitError) { + throw error; + } else { + const parsedError: Error & { response?: any } = + error instanceof Error ? error : new Error(error as string); + this.fail('Failed to publish operations'); + + if ('response' in parsedError) { + this.error(parsedError.response.errors[0].message, { + ref: this.cleanRequestId( + parsedError.response?.headers?.get('x-request-id') + ), + }); + } else { + this.error(parsedError); + } + } + } + } +} diff --git a/packages/libraries/cli/src/commands/schema/check.graphql b/packages/libraries/cli/src/commands/schema/check.graphql new file mode 100644 index 000000000..dc413e440 --- /dev/null +++ b/packages/libraries/cli/src/commands/schema/check.graphql @@ -0,0 +1,37 @@ +mutation schemaCheck($input: SchemaCheckInput!, $usesGitHubApp: Boolean!) { + schemaCheck(input: $input) { + __typename + ... on SchemaCheckSuccess @skip(if: $usesGitHubApp) { + valid + changes { + nodes { + message + criticality + } + total + } + } + ... 
on SchemaCheckError @skip(if: $usesGitHubApp) { + valid + changes { + nodes { + message + criticality + } + total + } + errors { + nodes { + message + } + total + } + } + ... on GitHubSchemaCheckSuccess @include(if: $usesGitHubApp) { + message + } + ... on GitHubSchemaCheckError @include(if: $usesGitHubApp) { + message + } + } +} diff --git a/packages/libraries/cli/src/commands/schema/check.ts b/packages/libraries/cli/src/commands/schema/check.ts new file mode 100644 index 000000000..8183f93d1 --- /dev/null +++ b/packages/libraries/cli/src/commands/schema/check.ts @@ -0,0 +1,149 @@ +import { Flags, Errors } from '@oclif/core'; +import { + loadSchema, + renderChanges, + renderErrors, + minifySchema, +} from '../../helpers/schema'; +import { invariant } from '../../helpers/validation'; +import { gitInfo } from '../../helpers/git'; +import Command from '../../base-command'; + +export default class SchemaCheck extends Command { + static description = 'checks schema'; + static flags = { + service: Flags.string({ + description: 'service name (only for distributed schemas)', + }), + registry: Flags.string({ + description: 'registry address', + }), + token: Flags.string({ + description: 'api token', + }), + forceSafe: Flags.boolean({ + description: 'mark the check as safe, breaking changes are expected', + }), + github: Flags.boolean({ + description: 'Connect with GitHub Application', + default: false, + }), + require: Flags.string({ + description: + 'Loads specific require.extensions before running the codegen and reading the configuration', + default: [], + multiple: true, + }), + }; + + static args = [ + { + name: 'file', + required: true, + description: 'Path to the schema file(s)', + hidden: false, + }, + ]; + + async run() { + try { + const { flags, args } = await this.parse(SchemaCheck); + + await this.require(flags); + + const service = this.maybe('service', flags); + const forceSafe = this.maybe('forceSafe', flags); + const usesGitHubApp = this.maybe('github', 
flags) === true; + const registry = this.ensure({ + key: 'registry', + args: flags, + defaultValue: 'https://app.graphql-hive.com/registry', + env: 'HIVE_REGISTRY', + }); + const file = args.file; + const token = this.ensure({ + key: 'token', + args: flags, + env: 'HIVE_TOKEN', + }); + const sdl = await loadSchema(file); + const git = await gitInfo(() => { + // noop + }); + const commit = git.commit; + + invariant( + typeof sdl === 'string' && sdl.length > 0, + 'Schema seems empty' + ); + + if (usesGitHubApp) { + invariant( + typeof commit === 'string', + `Couldn't resolve commit sha required for GitHub Application` + ); + } + + const result = await this.registryApi(registry, token).schemaCheck({ + input: { + service, + sdl: minifySchema(sdl), + github: usesGitHubApp + ? { + commit: commit!, + } + : null, + }, + usesGitHubApp, + }); + + if (result.schemaCheck.__typename === 'SchemaCheckSuccess') { + const changes = result.schemaCheck.changes; + if (!changes?.total) { + this.success('No changes'); + } else { + renderChanges.call(this, changes); + this.log(''); + } + } else if (result.schemaCheck.__typename === 'SchemaCheckError') { + const changes = result.schemaCheck.changes; + const errors = result.schemaCheck.errors; + renderErrors.call(this, errors); + + if (changes && changes.total) { + this.log(''); + renderChanges.call(this, changes); + } + this.log(''); + + if (forceSafe) { + this.success('Breaking changes were expected (forced)'); + } else { + this.exit(1); + } + } else if (result.schemaCheck.__typename === 'GitHubSchemaCheckSuccess') { + this.success(result.schemaCheck.message); + } else { + this.error(result.schemaCheck.message); + } + } catch (error) { + if (error instanceof Errors.ExitError) { + throw error; + } else { + const parsedError: Error & { response?: any } = + error instanceof Error ? 
error : new Error(error as string); + + this.fail('Failed to check schema'); + if ('response' in parsedError) { + this.error(parsedError.response.errors[0].message, { + ref: this.cleanRequestId( + parsedError.response?.headers?.get('x-request-id') + ), + }); + } else { + this.error(parsedError); + } + } + } + } +} diff --git a/packages/libraries/cli/src/commands/schema/publish.graphql b/packages/libraries/cli/src/commands/schema/publish.graphql new file mode 100644 index 000000000..1d59509e5 --- /dev/null +++ b/packages/libraries/cli/src/commands/schema/publish.graphql @@ -0,0 +1,42 @@ +mutation schemaPublish($input: SchemaPublishInput!, $usesGitHubApp: Boolean!) { + schemaPublish(input: $input) { + __typename + ... on SchemaPublishSuccess @skip(if: $usesGitHubApp) { + initial + valid + successMessage: message + changes { + nodes { + message + criticality + } + total + } + } + ... on SchemaPublishError @skip(if: $usesGitHubApp) { + valid + changes { + nodes { + message + criticality + } + total + } + errors { + nodes { + message + } + total + } + } + ... on SchemaPublishMissingServiceError @skip(if: $usesGitHubApp) { + missingServiceError: message + } + ... on GitHubSchemaPublishSuccess @include(if: $usesGitHubApp) { + message + } + ... 
on GitHubSchemaPublishError @include(if: $usesGitHubApp) { + message + } + } +} diff --git a/packages/libraries/cli/src/commands/schema/publish.ts b/packages/libraries/cli/src/commands/schema/publish.ts new file mode 100644 index 000000000..64c52468a --- /dev/null +++ b/packages/libraries/cli/src/commands/schema/publish.ts @@ -0,0 +1,237 @@ +import { transformCommentsToDescriptions } from '@graphql-tools/utils'; +import { Flags, Errors } from '@oclif/core'; +import { print } from 'graphql'; +import Command from '../../base-command'; +import { gitInfo } from '../../helpers/git'; +import { invariant } from '../../helpers/validation'; +import { + loadSchema, + minifySchema, + renderChanges, + renderErrors, +} from '../../helpers/schema'; +import { existsSync, readFileSync } from 'fs'; + +export default class SchemaPublish extends Command { + static description = 'publishes schema'; + static flags = { + service: Flags.string({ + description: 'service name (only for distributed schemas)', + }), + url: Flags.string({ + description: 'service url (only for distributed schemas)', + }), + metadata: Flags.string({ + description: + 'additional metadata to attach to the GraphQL schema. 
This can be a string with a valid JSON, or a path to a file containing a valid JSON', + }), + registry: Flags.string({ + description: 'registry address', + }), + token: Flags.string({ + description: 'api token', + }), + author: Flags.string({ + description: 'author of the change', + }), + commit: Flags.string({ + description: 'associated commit sha', + }), + github: Flags.boolean({ + description: 'Connect with GitHub Application', + default: false, + }), + force: Flags.boolean({ + description: 'force publish even on breaking changes', + default: false, + }), + require: Flags.string({ + description: + 'Loads specific require.extensions before running the codegen and reading the configuration', + default: [], + multiple: true, + }), + }; + + static args = [ + { + name: 'file', + required: true, + description: 'Path to the schema file(s)', + hidden: false, + }, + ]; + + resolveMetadata(metadata: string | undefined): string | undefined { + if (!metadata) { + return; + } + + try { + JSON.parse(metadata); + // If we are able to parse it, it means it's a valid JSON, let's use it as-is + + return metadata; + } catch (e) { + // If we can't parse it, we can try to load it from FS + const exists = existsSync(metadata); + + if (!exists) { + throw new Error( + `Failed to load metadata from "${metadata}": Please specify a path to an existing file, or a string with valid JSON.` + ); + } + + try { + const fileContent = readFileSync(metadata, 'utf-8'); + JSON.parse(fileContent); + + return fileContent; + } catch (e) { + throw new Error( + `Failed to load metadata from file "${metadata}": Please make sure the file is readable and contains a valid JSON` + ); + } + } + } + + async run() { + try { + const { flags, args } = await this.parse(SchemaPublish); + + await this.require(flags); + + const registry = this.ensure({ + key: 'registry', + args: flags, + defaultValue: 'https://app.graphql-hive.com/registry', + env: 'HIVE_REGISTRY', + }); + const service = this.maybe('service', flags); 
+ const url = this.maybe('url', flags); + const file = args.file; + const token = this.ensure({ + key: 'token', + args: flags, + env: 'HIVE_TOKEN', + }); + const force = this.maybe('force', flags); + const metadata = this.resolveMetadata(this.maybe('metadata', flags)); + const usesGitHubApp = this.maybe('github', flags) === true; + + let commit: string | undefined | null = flags.commit; + let author: string | undefined | null = flags.author; + + if (!commit || !author) { + const git = await gitInfo(() => { + this.warn( + `No git information found. Couldn't resolve author and commit.` + ); + }); + + if (!commit) { + commit = git.commit; + } + + if (!author) { + author = git.author; + } + } + + if (!author) { + throw new Errors.CLIError(`Missing "author"`); + } + + if (!commit) { + throw new Errors.CLIError(`Missing "commit"`); + } + + const sdl = await loadSchema(file); + + invariant( + typeof sdl === 'string' && sdl.length > 0, + 'Schema seems empty' + ); + + const transformedSDL = print(transformCommentsToDescriptions(sdl)); + const minifiedSDL = minifySchema(transformedSDL); + + const result = await this.registryApi(registry, token).schemaPublish({ + input: { + service, + url, + author, + commit, + sdl: minifiedSDL, + force, + metadata, + github: usesGitHubApp, + }, + usesGitHubApp, + }); + + if (result.schemaPublish.__typename === 'SchemaPublishSuccess') { + const changes = result.schemaPublish.changes; + + if (result.schemaPublish.initial) { + this.success('Published initial schema.'); + } else if (result.schemaPublish.successMessage) { + this.success(result.schemaPublish.successMessage); + } else if (!changes?.total) { + this.success('No changes. 
Skipping.'); + } else { + renderChanges.call(this, changes); + this.success('Schema published'); + } + } else if ( + result.schemaPublish.__typename === 'SchemaPublishMissingServiceError' + ) { + this.fail( + `${result.schemaPublish.missingServiceError} Please use the '--service ' parameter.` + ); + this.exit(1); + } else if (result.schemaPublish.__typename === 'SchemaPublishError') { + const changes = result.schemaPublish.changes; + const errors = result.schemaPublish.errors; + renderErrors.call(this, errors); + + if (changes && changes.total) { + this.log(''); + renderChanges.call(this, changes); + } + this.log(''); + + if (!force) { + this.fail('Failed to publish schema'); + this.exit(1); + } else { + this.success('Schema published (forced)'); + } + } else if ( + result.schemaPublish.__typename === 'GitHubSchemaPublishSuccess' + ) { + this.success(result.schemaPublish.message); + } else { + this.error(result.schemaPublish.message); + } + } catch (error) { + if (error instanceof Errors.ExitError) { + throw error; + } else { + const parsedError: Error & { response?: any } = + error instanceof Error ? error : new Error(error as string); + + this.fail('Failed to publish schema'); + if ('response' in parsedError) { + this.error(parsedError.response.errors[0].message, { + ref: this.cleanRequestId( + parsedError.response?.headers?.get('x-request-id') + ), + }); + } else { + this.error(parsedError); + } + } + } + } +} diff --git a/packages/libraries/cli/src/commands/whoami.graphql b/packages/libraries/cli/src/commands/whoami.graphql new file mode 100644 index 000000000..473b7f0a3 --- /dev/null +++ b/packages/libraries/cli/src/commands/whoami.graphql @@ -0,0 +1,29 @@ +query myTokenInfo { + tokenInfo { + __typename + ... 
on TokenInfo { + token { + name + } + organization { + name + cleanId + } + project { + name + type + cleanId + } + target { + name + cleanId + } + canPublishSchema: hasTargetScope(scope: REGISTRY_WRITE) + canCheckSchema: hasTargetScope(scope: REGISTRY_READ) + canPublishOperations: hasProjectScope(scope: OPERATIONS_STORE_WRITE) + } + ... on TokenNotFoundError { + message + } + } +} diff --git a/packages/libraries/cli/src/commands/whoami.ts b/packages/libraries/cli/src/commands/whoami.ts new file mode 100644 index 000000000..982fe2c32 --- /dev/null +++ b/packages/libraries/cli/src/commands/whoami.ts @@ -0,0 +1,114 @@ +import { Flags } from '@oclif/core'; +import colors from 'colors'; +import Command from '../base-command'; + +export default class WhoAmI extends Command { + static description = 'checks schema'; + static flags = { + registry: Flags.string({ + description: 'registry address', + }), + token: Flags.string({ + description: 'api token', + }), + }; + + async run() { + const { flags } = await this.parse(WhoAmI); + + const registry = this.ensure({ + key: 'registry', + args: flags, + defaultValue: 'https://app.graphql-hive.com/registry', + env: 'HIVE_REGISTRY', + }); + const token = this.ensure({ + key: 'token', + args: flags, + env: 'HIVE_TOKEN', + }); + + const result = await this.registryApi(registry, token) + .myTokenInfo() + .catch((error: Error & { response?: any }) => { + if ('response' in error) { + this.error(error.response.errors[0].message, { + ref: this.cleanRequestId( + error.response?.headers?.get('x-request-id') + ), + }); + } else { + this.error(error); + } + }); + + if (result.tokenInfo.__typename === 'TokenInfo') { + const { tokenInfo } = result; + const { organization, project, target } = tokenInfo; + + const organizationUrl = `https://app.graphql-hive.com/${organization.cleanId}`; + const projectUrl = `${organizationUrl}/${project.cleanId}`; + const targetUrl = `${projectUrl}/${target.cleanId}`; + + const access = { + yes: 
colors.green('Yes'), + not: colors.red('No access'), + }; + + const print = createPrinter({ + 'Token name:': [colors.bold(tokenInfo.token.name)], + ' ': [''], + 'Organization:': [ + colors.bold(organization.name), + colors.dim(organizationUrl), + ], + 'Project:': [colors.bold(project.name), colors.dim(projectUrl)], + 'Target:': [colors.bold(target.name), colors.dim(targetUrl)], + ' ': [''], + 'Access to schema:publish': [ + tokenInfo.canPublishSchema ? access.yes : access.not, + ], + 'Access to schema:check': [ + tokenInfo.canCheckSchema ? access.yes : access.not, + ], + 'Access to operation:publish': [ + tokenInfo.canPublishOperations ? access.yes : access.not, + ], + }); + + this.log(print()); + } else if (result.tokenInfo.__typename === 'TokenNotFoundError') { + this.error(`Token not found. Reason: ${result.tokenInfo.message}`, { + exit: 0, + suggestions: [ + `How to create a token? https://docs.graphql-hive.com/features/tokens`, + ], + }); + } + } +} + +function createPrinter(records: { + [label: string]: [value: string, extra?: string]; +}) { + const labels = Object.keys(records); + const values = Object.values(records).map((v) => v[0]); + const maxLabelsLen = Math.max(...labels.map((v) => v.length)) + 4; + const maxValuesLen = Math.max(...values.map((v) => v.length)) + 4; + + return () => { + const lines: string[] = []; + + for (const label in records) { + const [value, extra] = records[label]; + + lines.push( + label.padEnd(maxLabelsLen, ' ') + + value.padEnd(maxValuesLen, ' ') + + (extra || '') + ); + } + + return lines.join('\n'); + }; +} diff --git a/packages/libraries/cli/src/helpers/config.ts b/packages/libraries/cli/src/helpers/config.ts new file mode 100644 index 000000000..4c4456a91 --- /dev/null +++ b/packages/libraries/cli/src/helpers/config.ts @@ -0,0 +1,87 @@ +import fs from 'fs'; +import mkdirp from 'mkdirp'; +import path from 'path'; + +export class Config { + private cache?: Record; + private filepath: string; + + constructor({ filepath, 
rootDir }: { filepath?: string; rootDir: string }) { + if (filepath) { + this.filepath = filepath; + } else { + this.filepath = path.join(rootDir, 'hive.json'); + } + } + + has(key: string) { + const map = this.read(); + + return typeof map[key] !== 'undefined' && map[key] !== null; + } + + get(key: string) { + const map = this.read(); + + return map[key]; + } + + set(key: string, value: TValue) { + const map = this.read(); + + map[key] = value; + + this.write(map); + } + + delete(key: string) { + if (this.has(key)) { + const map = this.read(); + delete map[key]; + this.write(map); + } + } + + clear(): void { + try { + mkdirp.sync(path.dirname(this.filepath)); + } catch (e) {} + fs.writeFileSync(this.filepath, JSON.stringify({})); + } + + private readSpace(content: Record) { + const space = process.env.HIVE_SPACE; + + if (space) { + return content[space]; + } + + if ('default' in content) { + return content['default']; + } + + return content; + } + + private read() { + try { + if (!this.cache) { + this.cache = this.readSpace( + JSON.parse(fs.readFileSync(this.filepath, 'utf-8')) + ); + } + } catch (error) { + this.cache = {}; + } + + return this.cache!; + } + + private write(map: Record) { + this.cache = map; + try { + mkdirp.sync(path.dirname(this.filepath)); + } catch (e) {} + fs.writeFileSync(this.filepath, JSON.stringify(this.cache)); + } +} diff --git a/packages/libraries/cli/src/helpers/git.ts b/packages/libraries/cli/src/helpers/git.ts new file mode 100644 index 000000000..e23699718 --- /dev/null +++ b/packages/libraries/cli/src/helpers/git.ts @@ -0,0 +1,115 @@ +import { gitToJs } from 'git-parse'; +import ci from 'env-ci'; +import { readFileSync, existsSync } from 'fs'; +import { join } from 'path'; + +function splitPath(path: string) { + const parts = path.split(/(\/|\\)/); + if (!parts.length) { + return parts; + } + + // when path starts with a slash, the first part is empty string + return !parts[0].length ? 
parts.slice(1) : parts; +} + +function findParentDir(currentFullPath: string, clue: string) { + function testDir(parts: string[]): null | string { + if (parts.length === 0) { + return null; + } + + const p = parts.join(''); + + const itdoes = existsSync(join(p, clue)); + return itdoes ? p : testDir(parts.slice(0, -1)); + } + + return testDir(splitPath(currentFullPath)); +} + +interface CIRunner { + detect(): boolean; + env(): { commit: string | undefined | null }; +} + +function useGitHubAction(): CIRunner { + return { + detect() { + return Boolean(process.env.GITHUB_ACTIONS); + }, + env() { + const isPr = + process.env.GITHUB_EVENT_NAME === 'pull_request' || + process.env.GITHUB_EVENT_NAME === 'pull_request_target'; + + if (isPr) { + try { + const event = process.env.GITHUB_EVENT_PATH + ? JSON.parse(readFileSync(process.env.GITHUB_EVENT_PATH, 'utf-8')) + : undefined; + + if (event?.pull_request) { + return { + commit: event.pull_request.head.sha as string, + }; + } + } catch { + // Noop + } + } + + return { commit: undefined }; + }, + }; +} + +export async function gitInfo(noGit: () => void) { + let commit: string | null = null; + let author: string | null = null; + + const env = ci(); + + const githubAction = useGitHubAction(); + + if (githubAction.detect()) { + commit = githubAction.env().commit ?? null; + } + + if (!commit) { + commit = env.commit ?? null; + } + + if (!commit || !author) { + const rootFromEnv = 'root' in env ? env.root : null; + const git = + rootFromEnv ?? + findParentDir(__dirname, '.git') ?? + findParentDir(process.cwd(), '.git'); + + if (git) { + const commits = await gitToJs(git); + + if (commits && commits.length) { + const lastCommit = commits[0]; + + if (!commit) { + commit = lastCommit.hash; + } + + if (!author) { + author = `${lastCommit.authorName || ''} ${ + lastCommit.authorEmail ? 
`<${lastCommit.authorEmail}>` : '' + }`.trim(); + } + } + } else { + noGit(); + } + } + + return { + commit, + author, + }; +} diff --git a/packages/libraries/cli/src/helpers/operations.ts b/packages/libraries/cli/src/helpers/operations.ts new file mode 100644 index 000000000..00666c21e --- /dev/null +++ b/packages/libraries/cli/src/helpers/operations.ts @@ -0,0 +1,69 @@ +import { normalizeOperation } from '@graphql-hive/core'; +import { promises as fs } from 'fs'; +import { relative } from 'path'; +import { parse } from 'graphql'; +import { loadDocuments } from '@graphql-tools/load'; +import { CodeFileLoader } from '@graphql-tools/code-file-loader'; +import { GraphQLFileLoader } from '@graphql-tools/graphql-file-loader'; + +export async function loadOperations( + file: string, + options?: { + normalize?: boolean; + } +): Promise< + Array<{ + operationHash?: string; + content: string; + location?: string; + }> +> { + const shouldNormalize = options?.normalize ?? true; + + if (file.toLowerCase().endsWith('.json')) { + const output: Record = JSON.parse( + await fs.readFile(file, { + encoding: 'utf-8', + }) + ); + + const operations: Array<{ + operationHash: string; + content: string; + location?: string; + }> = []; + + for (const operationHash in output) { + const content = output[operationHash]; + const doc = parse(content); + + operations.push({ + operationHash, + content: shouldNormalize + ? normalizeOperation({ + document: doc, + hideLiterals: true, + removeAliases: true, + }) + : content, + }); + } + + return operations; + } + + const cwd = process.cwd(); + const sources = await loadDocuments(file, { + cwd, + loaders: [new CodeFileLoader(), new GraphQLFileLoader()], + }); + + return sources.map((source) => ({ + content: normalizeOperation({ + document: source.document!, + hideLiterals: false, + removeAliases: false, + }), + location: source.location ? 
relative(cwd, source.location) : undefined, + })); +} diff --git a/packages/libraries/cli/src/helpers/schema.ts b/packages/libraries/cli/src/helpers/schema.ts new file mode 100644 index 000000000..0c6f79e83 --- /dev/null +++ b/packages/libraries/cli/src/helpers/schema.ts @@ -0,0 +1,64 @@ +import colors from 'colors'; +import { print, concatAST } from 'graphql'; +import { loadTypedefs } from '@graphql-tools/load'; +import { CodeFileLoader } from '@graphql-tools/code-file-loader'; +import { GraphQLFileLoader } from '@graphql-tools/graphql-file-loader'; +import { JsonFileLoader } from '@graphql-tools/json-file-loader'; +import { UrlLoader } from '@graphql-tools/url-loader'; +import { + CriticalityLevel, + SchemaChangeConnection, + SchemaErrorConnection, +} from '../sdk'; +import baseCommand from '../base-command'; + +const indent = ' '; + +const criticalityMap: Record = { + [CriticalityLevel.Breaking]: colors.red('-'), + [CriticalityLevel.Safe]: colors.green('-'), + [CriticalityLevel.Dangerous]: colors.yellow('-'), +}; + +export function renderErrors(this: baseCommand, errors: SchemaErrorConnection) { + this.fail(`Detected ${errors.total} error${errors.total > 1 ? 's' : ''}`); + this.log(''); + + errors.nodes.forEach((error) => { + this.log(`${indent}`, colors.red('-'), this.bolderize(error.message)); + }); +} + +export function renderChanges( + this: baseCommand, + changes: SchemaChangeConnection +) { + this.info(`Detected ${changes.total} change${changes.total > 1 ? 
's' : ''}`); + this.log(''); + + changes.nodes.forEach((change) => { + this.log( + indent, + criticalityMap[change.criticality], + this.bolderize(change.message) + ); + }); +} + +export async function loadSchema(file: string) { + const sources = await loadTypedefs(file, { + cwd: process.cwd(), + loaders: [ + new CodeFileLoader(), + new GraphQLFileLoader(), + new JsonFileLoader(), + new UrlLoader(), + ], + }); + + return print(concatAST(sources.map((s) => s.document!))); +} + +export function minifySchema(schema: string): string { + return schema.replace(/\s+/g, ' ').trim(); +} diff --git a/packages/libraries/cli/src/helpers/validation.ts b/packages/libraries/cli/src/helpers/validation.ts new file mode 100644 index 000000000..5e4081a04 --- /dev/null +++ b/packages/libraries/cli/src/helpers/validation.ts @@ -0,0 +1,5 @@ +export function invariant(condition: boolean, message?: string) { + if (!condition) { + throw new Error(message); + } +} diff --git a/packages/libraries/cli/src/index.ts b/packages/libraries/cli/src/index.ts new file mode 100644 index 000000000..d620e709a --- /dev/null +++ b/packages/libraries/cli/src/index.ts @@ -0,0 +1 @@ +export { run } from '@oclif/core'; diff --git a/packages/libraries/cli/tsconfig.json b/packages/libraries/cli/tsconfig.json new file mode 100644 index 000000000..15f378f62 --- /dev/null +++ b/packages/libraries/cli/tsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "module": "commonjs", + "target": "es2017", + "lib": ["es2017"], + + "baseUrl": ".", + "outDir": "dist", + "rootDir": "src", + + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "skipLibCheck": true + }, + "include": ["src"] +} diff --git a/packages/libraries/client/.gitignore b/packages/libraries/client/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/libraries/client/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git 
a/packages/libraries/client/CHANGELOG.md b/packages/libraries/client/CHANGELOG.md new file mode 100644 index 000000000..b24a57189 --- /dev/null +++ b/packages/libraries/client/CHANGELOG.md @@ -0,0 +1,324 @@ +# @graphql-hive/client + +## 0.15.2 + +### Patch Changes + +- a33cdcef: Update link to documentation + +## 0.15.1 + +### Patch Changes + +- cd998fab: add readme + +## 0.15.0 + +### Minor Changes + +- ac9b868c: Support GraphQL v16 + +### Patch Changes + +- Updated dependencies [ac9b868c] + - @graphql-hive/core@0.2.0 + +## 0.14.2 + +### Patch Changes + +- 903edf84: Bump + +## 0.14.1 + +### Patch Changes + +- ff82bd75: Improve scheduling +- ccb93298: Remove content-encoding header and improve error logs + +## 0.14.0 + +### Minor Changes + +- fe2b5dbc: Introduce new reporting format and set maxSize to 1500 + +## 0.13.0 + +### Minor Changes + +- 607a4fe2: Support new Apollo Server Plugin V3 next to V0 + +### Patch Changes + +- 79d4b4c2: fix(deps): update envelop monorepo + +## 0.12.0 + +### Minor Changes + +- b5966ab: Replace undici with axios + +## 0.11.1 + +### Patch Changes + +- 02b00f0: Update undici, sentry, bullmq + +## 0.11.0 + +### Minor Changes + +- 7eca7f0: Display access to actions + +## 0.10.0 + +### Minor Changes + +- d67d3e8: Add schema and services fetchers for gateways other than Apollo Gateway + +## 0.9.1 + +### Patch Changes + +- f9b545f: Send version of Hive client + +## 0.9.0 + +### Minor Changes + +- 6f204be: Display token info + +## 0.8.0 + +### Minor Changes + +- 0527e3c: Support Envelop 1.0 + +### Patch Changes + +- 0527e3c: Update undici + +## 0.7.0 + +### Minor Changes + +- 0e712c7: Normalize operations and remove literals before sending them to Hive + +## 0.6.3 + +### Patch Changes + +- e09f95a: Bump version + +## 0.6.2 + +### Patch Changes + +- 074c052: Fix supergraph fetcher not being a function + +## 0.6.1 + +### Patch Changes + +- 38bfd02: Export createSupergraphSDLFetcher + +## 0.6.0 + +### Minor Changes + +- 23636de: Support 
Federation Gateway (polling and supergraph) + +### Patch Changes + +- 23636de: Support federated services when reporting schema +- 23636de: Fix missing directives, service name and service url when reporting the schema +- 23636de: Compress with gzip + +## 0.5.3 + +### Patch Changes + +- aa4e661: Bump Undici + +## 0.5.2 + +### Patch Changes + +- e0a47fb: Use Undici instead of Got and Agentkeepalive + +## 0.5.1 + +### Patch Changes + +- 8a9fdd7: The has method returns true on staled values - tiny-lru + +## 0.5.0 + +### Minor Changes + +- d7348a3: Collect timestamps + +## 0.4.5 + +### Patch Changes + +- ee6b82b: Bump undici to stable v4 + +## 0.4.4 + +### Patch Changes + +- a73e5cb: Warn about missing token + +## 0.4.3 + +### Patch Changes + +- 5aa5e93: Bump + +## 0.4.2 + +### Patch Changes + +- 968614d: Much more explanatory messages in Agent + +## 0.4.1 + +### Patch Changes + +- 1a16360: Send GraphQL Client name and version + +## 0.4.0 + +### Minor Changes + +- 4224cb9: Move author and commit under reporting and token to top level of options + +### Patch Changes + +- c6ef3d2: Bob update + +## 0.3.3 + +### Patch Changes + +- 148b294: Fix issues with undici headers timeout + +## 0.3.2 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes + +## 0.3.1 + +### Patch Changes + +- a19fef4: Fix missing document in Apollo + +## 0.3.0 + +### Minor Changes + +- 1fe62bb: Apollo Plugin + +## 0.2.2 + +### Patch Changes + +- 4a7c569: Move operation hashing to Usage service + +## 0.2.1 + +### Patch Changes + +- 5ca6a06: Move debug to top level +- f96cfc9: Add hash to usage collector and allow for custom logic + +## 0.2.0 + +### Minor Changes + +- 30da7e7: When disabled, run everything in dry mode (only http agent is disabled). This should help to catch errors in development. 
+ +### Patch Changes + +- bb5b3c4: Preparations for persisted operations in Lance + +## 0.1.3 + +### Patch Changes + +- Updated dependencies [6b74355] + - @graphql-hive/core@0.0.3 + +## 0.1.2 + +### Patch Changes + +- e1f9e1e: Use normalization +- 02322e7: Collect execution info +- 8585fd8: Collect error path +- Updated dependencies [094c861] + - @graphql-hive/core@0.0.2 + +## 0.1.1 + +### Patch Changes + +- 5135307: Collect client info + +## 0.1.0 + +### Minor Changes + +- 078e758: Token per Target + +### Patch Changes + +- 7113a0e: Custom logger +- 7113a0e: Add dispose method +- 65cc5b5: Collect arguments + +## 0.0.8 + +### Patch Changes + +- fd38851: Add try/catch on top of report/usage +- 32f198b: Enabled flag + +## 0.0.7 + +### Patch Changes + +- eedbad6: Make ttl and max optional + +## 0.0.6 + +### Patch Changes + +- ab5c204: Collect more with Sentry + +## 0.0.5 + +### Patch Changes + +- 2269c61: No extra calls to Auth0 + +## 0.0.4 + +### Patch Changes + +- d64a3c5: Target 2017 + +## 0.0.3 + +### Patch Changes + +- 7e88e71: bump + +## 0.0.2 + +### Patch Changes + +- b2d686e: bump diff --git a/packages/libraries/client/LICENSE b/packages/libraries/client/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/libraries/client/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/libraries/client/README.md b/packages/libraries/client/README.md new file mode 100644 index 000000000..1665a27ba --- /dev/null +++ b/packages/libraries/client/README.md @@ -0,0 +1,182 @@ +# GraphQL Hive Client + +GraphQL Hive is a GraphQL schemas registry where you can host, manage and collaborate on all your GraphQL schemas and operations, compatible with all architecture: schema stitching, federation, or just a good old monolith. + +GraphQL Hive is currently available as a hosted service to be used by all. +We take care of the heavy lifting behind the scenes be managing the registry, scaling it for your needs, to free your time to focus on the most important things at hand. + +### Installation + +``` +npm install @graphql-hive/client +``` + +### Usage + +Hive Client comes with generic client and plugins for [Envelop](https://envelop.dev) and [Apollo Server](https://github.com/apollographql/apollo-server) + +#### With Envelop + +If you're not familiar with Envelop - in "short" it's a lightweight JavaScript library for wrapping GraphQL execution layer and flow, allowing developers to develop, share and collaborate on GraphQL-related plugins, while filling the missing pieces in GraphQL implementations. + +Here's [more](https://github.com/dotansimha/envelop#envelop) on that topic. 
+ +```ts +import { envelop } from '@envelop/core'; +import { useHive } from '@graphql-hive/client'; + +const envelopProxy = envelop({ + plugins: [ + useHive({ + enabled: true, // Enable/Disable Hive Client + debug: true, // Debugging mode + token: 'YOUR-TOKEN', + // Schema reporting + reporting: { + // feel free to set dummy values here + author: 'Author of the schema version', + commit: 'git sha or any identifier', + }, + usage: true, // Collects schema usage based on operations + }), + ], +}); +``` + +#### With Apollo Server + +Thanks to the plugin system it's a matter of adding hiveApollo plugin to ApolloServer instance: + +```ts +import { ApolloServer } from 'apollo-server'; +import { hiveApollo } from '@graphql-hive/client'; + +const server = new ApolloServer({ + typeDefs, + resolvers, + plugins: [ + hiveApollo({ + enabled: true, // Enable/Disable Hive Client + debug: true, // Debugging mode + token: 'YOUR-TOKEN', + reporting: { + // feel free to set dummy values here + author: 'Author of the latest change', + commit: 'git sha or any identifier', + }, + usage: true, // Collects schema usage based on operations + }), + ], +}); +``` + +#### With Other Servers + +First you need to instantiate the Hive Client. + +The `collectUsage` method accepts the same arguments as execute function of graphql-js and returns a function that expects the execution result object. + +- `collectUsage(args)` - should be called when a GraphQL execution starts. +- `finish(result)` (function returned by `collectUsage(args)`) - has to be invoked right after execution finishes. 
+ +```ts +import express from 'express'; +import { graphqlHTTP } from 'express-graphql'; +import { createHive } from '@graphql-hive/client'; + +const app = express(); +const hive = createHive({ + enabled: true, // Enable/Disable Hive Client + debug: true, // Debugging mode + token: 'YOUR-TOKEN', + reporting: { + // feel free to set dummy values here + author: 'Author of the latest change', + commit: 'git sha or any identifier', + }, + usage: true, // Collects schema usage based operations + }, +}); + +// Report Schema +hive.reportSchema({ schema: yourSchema }); + +app.post("/graphql", graphqlHTTP({ + schema: yourSchema, + async customExecuteFn(args) { + // Collecting usage + const finish = hive.collectUsage(args); + const result = await execute(args); + finish(result); + return result; + } +})); +``` + +#### Using the registry when Stitching + +Stitching could be done in many ways, that's why `@graphql-hive/client` provide generic functions, not something dedicated for stitching. Unfortunately the implementation of gateway + polling is up to you. + +Prerequisites: + +- `HIVE_CDN_ENDPOINT` - the endpoint Hive generated for you in the previous step +- `HIVE_CDN_KEY` - the access key + +The `createServicesFetcher` factory function returns another function that is responsible for fetching a list of services from Hive's high-availability endpoint. + +```ts +import { createServicesFetcher } from '@graphql-hive/client'; + +const fetchServices = createServicesFetcher({ + endpoint: process.env.HIVE_CDN_ENDPOINT, + key: process.env.HIVE_CDN_KEY, +}); + +// This is your GraphQL gateway with built-in polling mechanism, in which the `stitchServices` method is called every 10 seconds. 
+startMyGraphQLGateway({ + // a function that resolves a list of services to stitch them together + async stitchServices() { + const services = await fetchServices(); + + return services.map((service) => { + return { + sdl: service.sdl, + url: service.url, + checksum: service.id, // to check if service's schema was modified + }; + }); + }, + pollingInSec: 10, // every 10s +}); +``` + +#### Using the registry with Apollo Gateway + +You can connect your Apollo Gateway with Hive client. + +The `experimental_pollInterval` value is up to you. Apollo Gateway uses 10s (10_000 ms) by default but we think it's better to fetch a supergraph more often. + +- `HIVE_CDN_ENDPOINT` - the endpoint Hive generated for you in the previous step +- `HIVE_CDN_KEY` - the access + +```ts +import { createSupergraphSDLFetcher } from '@graphql-hive/client'; +import { ApolloGateway } from '@apollo/gateway'; +import { ApolloServer } from 'apollo-server'; + +const gateway = new ApolloGateway({ + experimental_pollInterval: 10_000, // define the poll interval (in ms) + experimental_updateSupergraphSdl: createSupergraphFetcher({ + endpoint: HIVE_CDN_ENDPOINT, + key: HIVE_CDN_KEY, + }), +}); + +const server = new ApolloServer({ + gateway, +}); + +server.listen().then(({ url }) => { + console.log(`🚀 Server ready at ${url}`); +}); +``` diff --git a/packages/libraries/client/package.json b/packages/libraries/client/package.json new file mode 100644 index 000000000..682398f16 --- /dev/null +++ b/packages/libraries/client/package.json @@ -0,0 +1,59 @@ +{ + "name": "@graphql-hive/client", + "description": "A NodeJS client for GraphQL Hive", + "version": "0.15.2", + "author": { + "email": "contact@the-guild.dev", + "name": "The Guild", + "url": "https://the-guild.dev" + }, + "repository": { + "type": "git", + "url": "kamilkisiela/graphql-hive", + "directory": "packages/libraries/client" + }, + "license": "MIT", + "main": "dist/index.js", + "module": "dist/index.mjs", + "typings": "dist/index.d.ts", + 
"typescript": { + "definition": "dist/index.d.ts" + }, + "exports": { + ".": { + "require": "./dist/index.js", + "import": "./dist/index.mjs" + }, + "./*": { + "require": "./dist/*.js", + "import": "./dist/*.mjs" + } + }, + "scripts": { + "build": "node scripts/update-version.mjs && bob build --single" + }, + "peerDependencies": { + "graphql": "^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "dependencies": { + "@graphql-hive/core": "^0.2.0", + "@graphql-tools/utils": "^8.0.0", + "async-retry": "1.3.3", + "axios": "^0.27.0", + "tiny-lru": "8.0.2" + }, + "devDependencies": { + "@apollo/federation": "0.36.1", + "@envelop/types": "2.2.0", + "@graphql-yoga/node": "2.5.0", + "@types/async-retry": "1.4.3", + "apollo-server-core": "3.7.0", + "apollo-server-plugin-base": "3.5.3", + "nock": "13.2.4" + }, + "publishConfig": { + "registry": "https://registry.npmjs.org", + "access": "public", + "directory": "dist" + } +} diff --git a/packages/libraries/client/scripts/update-version.mjs b/packages/libraries/client/scripts/update-version.mjs new file mode 100644 index 000000000..83972b122 --- /dev/null +++ b/packages/libraries/client/scripts/update-version.mjs @@ -0,0 +1,11 @@ +import fs from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const pkg = JSON.parse( + fs.readFileSync(join(__dirname, '../package.json'), 'utf-8') +); +const code = `export const version = '${pkg.version}';\n`; + +fs.writeFileSync(join(__dirname, '../src/version.ts'), code); diff --git a/packages/libraries/client/src/apollo.ts b/packages/libraries/client/src/apollo.ts new file mode 100644 index 000000000..73b59f81c --- /dev/null +++ b/packages/libraries/client/src/apollo.ts @@ -0,0 +1,112 @@ +import type { ApolloServerPlugin } from 'apollo-server-plugin-base'; +import type { DocumentNode } from 'graphql'; +import type { + HiveClient, + HivePluginOptions, + SupergraphSDLFetcherOptions, +} from 
'./internal/types'; +import { createHash } from 'crypto'; +import axios from 'axios'; +import { createHive } from './client'; +import { isHiveClient } from './internal/utils'; + +export function createSupergraphSDLFetcher({ + endpoint, + key, +}: SupergraphSDLFetcherOptions) { + return function supergraphSDLFetcher() { + return axios + .get(endpoint + '/supergraph', { + headers: { + 'X-Hive-CDN-Key': key, + }, + }) + .then((response) => { + if (response.status >= 200 && response.status < 300) { + return response.data; + } + + return Promise.reject( + new Error(`Failed to fetch supergraph [${response.status}]`) + ); + }) + .then((supergraphSdl) => ({ + id: createHash('sha256').update(supergraphSdl).digest('base64'), + supergraphSdl, + })); + }; +} + +export function hiveApollo( + clientOrOptions: HiveClient | HivePluginOptions +): ApolloServerPlugin { + const hive = isHiveClient(clientOrOptions) + ? clientOrOptions + : createHive({ + ...clientOrOptions, + agent: { + name: 'HiveApollo', + ...(clientOrOptions.agent ?? 
{}), + }, + }); + + hive.info(); + + return { + requestDidStart(context) { + // `overallCachePolicy` does not exist in v0 + const isLegacyV0 = !('overallCachePolicy' in context); + + let doc: DocumentNode; + const complete = hive.collectUsage({ + schema: context.schema, + get document() { + return doc; + }, + operationName: context.operationName, + contextValue: context.context, + }); + + if (isLegacyV0) { + return { + willSendResponse(ctx: any) { + doc = ctx.document; + complete(ctx.response); + }, + } as any; + } + + return Promise.resolve({ + async willSendResponse(ctx) { + doc = ctx.document!; + complete(ctx.response); + }, + }); + }, + serverWillStart(ctx) { + // `engine` does not exist in v3 + const isLegacyV0 = 'engine' in ctx; + + hive.reportSchema({ schema: ctx.schema }); + + if (isLegacyV0) { + return { + async serverWillStop() { + await hive.dispose(); + }, + } as any; + } + + return Promise.resolve({ + async serverWillStop() { + await hive.dispose(); + }, + schemaDidLoadOrUpdate(schemaContext) { + if (ctx.schema !== schemaContext.apiSchema) { + hive.reportSchema({ schema: schemaContext.apiSchema }); + } + }, + }); + }, + }; +} diff --git a/packages/libraries/client/src/client.ts b/packages/libraries/client/src/client.ts new file mode 100644 index 000000000..21fb791b1 --- /dev/null +++ b/packages/libraries/client/src/client.ts @@ -0,0 +1,176 @@ +import { GraphQLSchema, ExecutionArgs, ExecutionResult } from 'graphql'; +import axios from 'axios'; +import type { HivePluginOptions, HiveClient } from './internal/types'; +import { createUsage } from './internal/usage'; +import { createReporting } from './internal/reporting'; +import { createOperationsStore } from './internal/operations-store'; + +export function createHive(options: HivePluginOptions): HiveClient { + const logger = options?.agent?.logger ?? 
console; + + if (!options.token && options.enabled) { + options.enabled = false; + logger.info('[hive] Missing token, disabling.'); + } + + const usage = createUsage(options); + const schemaReporter = createReporting(options); + const operationsStore = createOperationsStore(options); + + function reportSchema({ schema }: { schema: GraphQLSchema }) { + schemaReporter.report({ schema }); + } + + function collectUsage(args: ExecutionArgs) { + return usage.collect(args); + } + + async function dispose() { + await Promise.all([schemaReporter.dispose(), usage.dispose()]); + } + + async function info() { + if (options.enabled !== true) { + return; + } + + try { + let endpoint = 'https://app.graphql-hive.com/registry'; + + if (options.reporting && options.reporting.endpoint) { + endpoint = options.reporting.endpoint; + } + + const query = /* GraphQL */ ` + query myTokenInfo { + tokenInfo { + __typename + ... on TokenInfo { + token { + name + } + organization { + name + cleanId + } + project { + name + type + cleanId + } + target { + name + cleanId + } + canReportSchema: hasTargetScope(scope: REGISTRY_WRITE) + canCollectUsage: hasTargetScope(scope: REGISTRY_WRITE) + canReadOperations: hasProjectScope(scope: OPERATIONS_STORE_READ) + } + ... 
on TokenNotFoundError { + message + } + } + } + `; + + const response = await axios.post( + endpoint, + JSON.stringify({ + query, + operationName: 'myTokenInfo', + }), + { + headers: { + 'content-type': 'application/json', + 'x-api-token': options.token, + }, + timeout: 30_000, + decompress: true, + responseType: 'json', + } + ); + + if (response.status >= 200 && response.status < 300) { + const result: ExecutionResult = await response.data; + + if (result.data?.tokenInfo.__typename === 'TokenInfo') { + const { tokenInfo } = result.data; + + const { + organization, + project, + target, + canReportSchema, + canCollectUsage, + canReadOperations, + } = tokenInfo; + const print = createPrinter([ + tokenInfo.token.name, + organization.name, + project.name, + target.name, + ]); + + const organizationUrl = `https://app.graphql-hive.com/${organization.cleanId}`; + const projectUrl = `${organizationUrl}/${project.cleanId}`; + const targetUrl = `${projectUrl}/${target.cleanId}`; + + logger.info( + [ + '[hive][info] Token details', + '', + `Token name: ${print(tokenInfo.token.name)}`, + `Organization: ${print( + organization.name, + organizationUrl + )}`, + `Project: ${print(project.name, projectUrl)}`, + `Target: ${print(target.name, targetUrl)}`, + '', + `Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`, + `Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`, + `Can read operations? ${print( + canReadOperations ? 'Yes' : 'No' + )}`, + '', + ].join('\n') + ); + } else if (result.data?.tokenInfo.message) { + logger.error( + `[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}` + ); + logger.info( + `[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens` + ); + } else { + logger.error(`[hive][info] ${result.errors![0].message}`); + logger.info( + `[hive][info] How to create a token? 
https://docs.graphql-hive.com/features/tokens` + ); + } + } else { + logger.error( + `[hive][info] Error ${response.status}: ${response.statusText}` + ); + } + } catch (error: any) { + logger.error(`[hive][info] Error ${error.message}`); + } + } + + return { + info, + reportSchema, + collectUsage, + operationsStore, + dispose, + }; +} + +function createPrinter(values: string[]) { + const maxLen = Math.max(...values.map((v) => v.length)) + 4; + + return (base: string, extra?: string) => { + return base.padEnd(maxLen, ' ') + (extra || ''); + }; +} diff --git a/packages/libraries/client/src/envelop.ts b/packages/libraries/client/src/envelop.ts new file mode 100644 index 000000000..3b565ae76 --- /dev/null +++ b/packages/libraries/client/src/envelop.ts @@ -0,0 +1,37 @@ +import type { Plugin } from '@envelop/types'; +import type { HiveClient, HivePluginOptions } from './internal/types'; +import { createHive } from './client'; +import { isHiveClient } from './internal/utils'; + +export function useHive(clientOrOptions: HiveClient): Plugin; +export function useHive(clientOrOptions: HivePluginOptions): Plugin; +export function useHive( + clientOrOptions: HiveClient | HivePluginOptions +): Plugin { + const hive = isHiveClient(clientOrOptions) + ? clientOrOptions + : createHive({ + ...clientOrOptions, + agent: { + name: 'HiveEnvelop', + ...(clientOrOptions.agent ?? 
{}), + }, + }); + + hive.info(); + + return { + onSchemaChange({ schema }) { + hive.reportSchema({ schema }); + }, + onExecute({ args }) { + const complete = hive.collectUsage(args); + + return { + onExecuteDone({ result }) { + complete(result); + }, + }; + }, + }; +} diff --git a/packages/libraries/client/src/gateways.ts b/packages/libraries/client/src/gateways.ts new file mode 100644 index 000000000..784d4235c --- /dev/null +++ b/packages/libraries/client/src/gateways.ts @@ -0,0 +1,72 @@ +import axios from 'axios'; +import { createHash } from 'crypto'; +import type { + SchemaFetcherOptions, + ServicesFetcherOptions, +} from './internal/types'; + +interface Schema { + sdl: string; + url: string; + name: string; +} + +function createFetcher({ + endpoint, + key, +}: SchemaFetcherOptions & ServicesFetcherOptions) { + return function fetcher(): Promise { + return axios + .get(endpoint + '/schema', { + headers: { + 'X-Hive-CDN-Key': key, + accept: 'application/json', + }, + responseType: 'json', + }) + .then((response) => { + if (response.status >= 200 && response.status < 300) { + return response.data; + } + + return Promise.reject( + new Error(`Failed to fetch [${response.status}]`) + ); + }); + }; +} + +export function createSchemaFetcher({ endpoint, key }: SchemaFetcherOptions) { + const fetcher = createFetcher({ endpoint, key }); + + return function schemaFetcher() { + return fetcher().then((schema) => ({ + id: createHash('sha256') + .update(schema.sdl) + .update(schema.url) + .update(schema.name) + .digest('base64'), + ...schema, + })); + }; +} + +export function createServicesFetcher({ + endpoint, + key, +}: ServicesFetcherOptions) { + const fetcher = createFetcher({ endpoint, key }); + + return function schemaFetcher() { + return fetcher().then((services) => + services.map((service) => ({ + id: createHash('sha256') + .update(service.sdl) + .update(service.url) + .update(service.name) + .digest('base64'), + ...service, + })) + ); + }; +} diff --git 
a/packages/libraries/client/src/index.ts b/packages/libraries/client/src/index.ts new file mode 100644 index 000000000..9b43c13ba --- /dev/null +++ b/packages/libraries/client/src/index.ts @@ -0,0 +1,5 @@ +export type { HivePluginOptions, HiveClient } from './internal/types'; +export { useHive } from './envelop'; +export { hiveApollo, createSupergraphSDLFetcher } from './apollo'; +export { createSchemaFetcher, createServicesFetcher } from './gateways'; +export { createHive } from './client'; diff --git a/packages/libraries/client/src/internal/agent.ts b/packages/libraries/client/src/internal/agent.ts new file mode 100644 index 000000000..aa1b8ab3c --- /dev/null +++ b/packages/libraries/client/src/internal/agent.ts @@ -0,0 +1,210 @@ +import retry from 'async-retry'; +import axios from 'axios'; +import { version } from '../version'; +import type { Logger } from './types'; + +export interface AgentOptions { + enabled?: boolean; + name?: string; + /** + * Hive endpoint or proxy + */ + endpoint: string; + /** + * API Token + */ + token: string; + /** + * 30s by default + */ + timeout?: number; + /** + * false by default + */ + debug?: boolean; + /** + * 5 by default + */ + maxRetries?: number; + /** + * 200 by default + */ + minTimeout?: number; + /** + * Send report after each GraphQL operation + */ + sendImmediately?: boolean; + /** + * Send reports in interval (defaults to 10_000ms) + */ + sendInterval?: number; + /** + * Max number of traces to send at once (defaults to 25) + */ + maxSize?: number; + /** + * Custom logger (defaults to console) + */ + logger?: Logger; +} + +export function createAgent( + pluginOptions: AgentOptions, + { + prefix, + data, + body, + headers = () => ({}), + }: { + prefix: string; + data: { + clear(): void; + set(data: T): void; + size(): number; + }; + body(): Buffer | string | Promise; + headers?(): Record; + } +) { + const options: Required = { + timeout: 30_000, + debug: false, + enabled: true, + minTimeout: 200, + maxRetries: 3, + 
sendImmediately: false, + sendInterval: 10_000, + maxSize: 25, + logger: console, + name: 'Hive', + ...pluginOptions, + }; + + const enabled = options.enabled !== false; + + let timeoutID: any = null; + + function schedule() { + if (timeoutID) { + clearTimeout(timeoutID); + } + + timeoutID = setTimeout(() => { + send(); + }, options.sendInterval); + } + + if (!options.sendImmediately) { + schedule(); + } + + function debugLog(msg: string) { + if (options.debug) { + options.logger.info( + `[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}` + ); + } + } + + function capture(event: T) { + data.set(event); + + if (options.sendImmediately || data.size() >= options.maxSize) { + debugLog('Sending immediately'); + setImmediate(() => { + send({ + runOnce: true, + }); + }); + } + } + + async function send(sendOptions?: { runOnce?: boolean }) { + const runOnce = sendOptions?.runOnce ?? false; + + if (!data.size()) { + if (!runOnce) { + schedule(); + } + return; + } + + try { + const buffer = await body(); + const dataToSend = data.size(); + + data.clear(); + + const sendReport: retry.RetryFunction = async (_bail, attempt) => { + debugLog(`Sending (queue ${dataToSend}) (attempt ${attempt})`); + + if (!enabled) { + return { + statusCode: 200, + }; + } + + const response = await axios + .post(options.endpoint, buffer, { + headers: { + 'content-type': 'application/json', + 'x-api-token': options.token, + 'User-Agent': `${options.name}@${version}`, + ...headers(), + }, + responseType: 'json', + timeout: options.timeout, + }) + .catch((error) => { + debugLog(`Attempt ${attempt} failed: ${error.message}`); + return Promise.reject(error); + }); + + if (response.status >= 200 && response.status < 300) { + return response; + } + + debugLog(`Attempt ${attempt} failed: ${response.status}`); + throw new Error(`${response.status}: ${response.statusText}`); + }; + + const response = await retry(sendReport, { + retries: options.maxRetries, + minTimeout: options.minTimeout, + factor: 2, 
+ }); + + if (response.statusCode < 200 || response.statusCode >= 300) { + throw new Error( + `[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}` + ); + } + + debugLog(`Sent!`); + } catch (error: any) { + options.logger.error( + `[hive][${prefix}] Failed to send data: ${error.message}` + ); + } + + if (!runOnce) { + schedule(); + } + } + + async function dispose() { + debugLog('Disposing'); + if (timeoutID) { + clearTimeout(timeoutID); + } + + await send({ + runOnce: true, + }); + } + + return { + capture, + dispose, + }; +} diff --git a/packages/libraries/client/src/internal/operations-store.ts b/packages/libraries/client/src/internal/operations-store.ts new file mode 100644 index 000000000..5abf587b0 --- /dev/null +++ b/packages/libraries/client/src/internal/operations-store.ts @@ -0,0 +1,97 @@ +import type { DocumentNode } from 'graphql'; +import { stripIgnoredCharacters, parse } from 'graphql'; +import axios from 'axios'; +import type { HivePluginOptions } from './types'; + +export interface OperationsStore { + canHandle(key: string): boolean; + get(key: string): DocumentNode | null; + load(): Promise; + reload(): Promise; +} + +export function createOperationsStore( + pluginOptions: HivePluginOptions +): OperationsStore { + const operationsStoreOptions = pluginOptions.operationsStore; + const token = pluginOptions.token; + + if (!operationsStoreOptions || pluginOptions.enabled === false) { + return { + canHandle() { + return false; + }, + get() { + return null; + }, + async load() {}, + async reload() {}, + }; + } + + const store = new Map(); + + const canHandle: OperationsStore['canHandle'] = (key) => { + return typeof key === 'string' && !key.includes('{'); + }; + + const get: OperationsStore['get'] = (key) => { + return store.get(key)!; + }; + + const load: OperationsStore['load'] = async () => { + const response = await axios.post( + operationsStoreOptions.endpoint ?? 
+ 'https://app.graphql-hive.com/registry', + { + query, + operationName: 'loadStoredOperations', + }, + { + responseType: 'json', + headers: { + 'content-type': 'application/json', + 'x-api-token': token, + }, + } + ); + + const parsedData: { + data: { + storedOperations: Array<{ + key: string; + document: string; + }>; + }; + } = await response.data; + + store.clear(); + + parsedData.data.storedOperations.forEach(({ key, document }) => { + store.set( + key, + parse(document, { + noLocation: true, + }) + ); + }); + }; + + const reload: OperationsStore['reload'] = load; + + return { + canHandle, + get, + load, + reload, + }; +} + +const query = stripIgnoredCharacters(/* GraphQL */ ` + query loadStoredOperations { + storedOperations { + key: operationHash + document: content + } + } +`); diff --git a/packages/libraries/client/src/internal/reporting.ts b/packages/libraries/client/src/internal/reporting.ts new file mode 100644 index 000000000..106403442 --- /dev/null +++ b/packages/libraries/client/src/internal/reporting.ts @@ -0,0 +1,173 @@ +import { GraphQLSchema, stripIgnoredCharacters, print, Kind } from 'graphql'; +import { getDocumentNodeFromSchema } from '@graphql-tools/utils'; +import { createAgent } from './agent'; +import { version } from '../version'; +import type { HivePluginOptions } from './types'; + +export interface SchemaReporter { + report(args: { schema: GraphQLSchema }): void; + dispose(): Promise; +} + +export function createReporting( + pluginOptions: HivePluginOptions +): SchemaReporter { + if (!pluginOptions.reporting) { + return { + report() {}, + async dispose() {}, + }; + } + + const token = pluginOptions.token; + const reportingOptions = pluginOptions.reporting; + + const logger = pluginOptions.agent?.logger ?? console; + let currentSchema: GraphQLSchema | null = null; + const agent = createAgent( + { + logger, + ...(pluginOptions.agent ?? {}), + endpoint: + reportingOptions.endpoint ?? 
'https://app.graphql-hive.com/registry', + token: token, + enabled: pluginOptions.enabled, + debug: pluginOptions.debug, + sendImmediately: true, + }, + { + prefix: 'reporting', + data: { + set(incomingSchema) { + currentSchema = incomingSchema; + }, + size() { + return currentSchema ? 1 : 0; + }, + clear() { + currentSchema = null; + }, + }, + headers() { + return { + 'Content-Type': 'application/json', + 'graphql-client-name': 'Hive Client', + 'graphql-client-version': version, + }; + }, + async body() { + return JSON.stringify({ + query, + operationName: 'schemaPublish', + variables: { + input: { + sdl: await printToSDL(currentSchema!), + author: reportingOptions.author, + commit: reportingOptions.commit, + service: reportingOptions.serviceName ?? null, + url: reportingOptions.serviceUrl ?? null, + force: true, + }, + }, + }); + }, + } + ); + + return { + report({ schema }) { + try { + agent.capture(schema); + } catch (error) { + logger.error(`Failed to report schema`, error); + } + }, + dispose: agent.dispose, + }; +} + +const query = stripIgnoredCharacters(/* GraphQL */ ` + mutation schemaPublish($input: SchemaPublishInput!) { + schemaPublish(input: $input) { + __typename + } + } +`); + +/** + * It's a bit tricky to detect if a schema is federated or not. + * For now, we just check if the schema has a _service that resolves to `_Service!` (as described in federation spec). + * This may lead to issues if the schema is not a federated schema but something made by the user (I don't think we will hit that issue soon). 
+ */ +function isFederatedSchema(schema: GraphQLSchema): boolean { + const queryType = schema.getQueryType(); + + if (queryType) { + const fields = queryType.getFields(); + + if (fields._service && fields._service.type.toString() === `_Service!`) { + return true; + } + } + + return false; +} + +/** + * Extracts the SDL of a federated service from a GraphQLSchema object + * We do it to not send federated schema to the registry but only the original schema provided by user + */ +async function extractFederationServiceSDL( + schema: GraphQLSchema +): Promise { + const queryType = schema.getQueryType()!; + const serviceField = queryType.getFields()._service; + const resolved = await ( + serviceField.resolve as () => Promise<{ sdl: string }> + )(); + return resolved.sdl; +} + +function isSchemaOfCommonNames(schema: GraphQLSchema): boolean { + const queryType = schema.getQueryType(); + if (queryType && queryType.name !== 'Query') { + return false; + } + + const mutationType = schema.getMutationType(); + if (mutationType && mutationType.name !== 'Mutation') { + return false; + } + + const subscriptionType = schema.getSubscriptionType(); + if (subscriptionType && subscriptionType.name !== 'Subscription') { + return false; + } + + return true; +} + +function printSchemaWithDirectives(schema: GraphQLSchema) { + const doc = getDocumentNodeFromSchema(schema); + + if (schema.description == null && isSchemaOfCommonNames(schema)) { + // remove the schema definition if it's the default one + // We do it to avoid sending schema definition to the registry, which may be unwanted by federated services or something + return print({ + kind: Kind.DOCUMENT, + definitions: doc.definitions.filter( + (def) => def.kind !== Kind.SCHEMA_DEFINITION + ), + }); + } + + return print(doc); +} + +async function printToSDL(schema: GraphQLSchema) { + return stripIgnoredCharacters( + isFederatedSchema(schema) + ? 
await extractFederationServiceSDL(schema) + : printSchemaWithDirectives(schema) + ); +} diff --git a/packages/libraries/client/src/internal/sampling.ts b/packages/libraries/client/src/internal/sampling.ts new file mode 100644 index 000000000..be495b11b --- /dev/null +++ b/packages/libraries/client/src/internal/sampling.ts @@ -0,0 +1,11 @@ +export function randomSampling(sampleRate: number) { + if (sampleRate > 1 || sampleRate < 0) { + throw new Error( + `Expected usage.sampleRate to be 0 <= x <= 1, received ${sampleRate}` + ); + } + + return function shouldInclude(): boolean { + return Math.random() <= sampleRate; + }; +} diff --git a/packages/libraries/client/src/internal/types.ts b/packages/libraries/client/src/internal/types.ts new file mode 100644 index 000000000..3a625dbb9 --- /dev/null +++ b/packages/libraries/client/src/internal/types.ts @@ -0,0 +1,164 @@ +import type { ExecutionArgs } from 'graphql'; +import type { AgentOptions } from './agent'; +import type { SchemaReporter } from './reporting'; +import type { OperationsStore } from './operations-store'; + +export interface HiveClient { + info(): Promise; + reportSchema: SchemaReporter['report']; + collectUsage(args: ExecutionArgs): CollectUsageCallback; + operationsStore: OperationsStore; + dispose(): Promise; +} + +export type AsyncIterableIteratorOrValue = AsyncIterableIterator | T; + +export type CollectUsageCallback = ( + result: AsyncIterableIteratorOrValue +) => void; +export interface ClientInfo { + name: string; + version: string; +} + +export interface Logger { + info(msg: string): void; + error(error: any, ...data: any[]): void; +} + +export interface HiveUsagePluginOptions { + /** + * Custom endpoint to collect schema usage + * + * Points to Hive by default + */ + endpoint?: string; + /** + * Extract client info from GraphQL Context + */ + clientInfo?(context: any): null | undefined | ClientInfo; + /** + * Generate hash of an operation (useful for persisted operations) + */ + 
operationHash?(args: ExecutionArgs): string | null | undefined; + /** + * Hive uses LRU cache to store info about operations. + * This option represents the maximum size of the cache. + * + * Default: 1000 + */ + max?: number; + /** + * Hive uses LRU cache to store info about operations. + * This option represents the time-to-live of every cached operation. + * + * Default: no ttl + */ + ttl?: number; + /** + * A list of operations (by name) to be ignored by Hive. + */ + exclude?: string[]; + /** + * Sample rate to determine sampling. + * 0.0 = 0% chance of being sent + * 1.0 = 100% chance of being sent. + * + * Default: 1.0 + */ + sampleRate?: number; +} + +export interface HiveReportingPluginOptions { + /** + * Custom endpoint to collect schema reports + * + * Points to Hive by default + */ + endpoint?: string; + /** + * Author of current version of the schema + */ + author: string; + /** + * Commit SHA hash (or any identifier) related to the schema version + */ + commit: string; + /** + * URL to the service (use only for distributed schemas) + */ + serviceUrl?: string; + /** + * Name of the service (use only for distributed schemas) + */ + serviceName?: string; +} + +export interface HiveOperationsStorePluginOptions { + /** + * Custom endpoint to fetch stored operations + * + * Points to Hive by default + */ + endpoint?: string; +} + +export interface HivePluginOptions { + /** + * Enable/Disable Hive + * + * Default: true + */ + enabled?: boolean; + /** + * Debugging mode + * + * Default: false + */ + debug?: boolean; + /** + * Access Token + */ + token: string; + agent?: Omit; + /** + * Collects schema usage based on operations + * + * Disabled by default + */ + usage?: HiveUsagePluginOptions | boolean; + /** + * Schema reporting + * + * Disabled by default + */ + reporting?: HiveReportingPluginOptions | false; + /** + * Operations Store + */ + operationsStore?: HiveOperationsStorePluginOptions; +} + +export type Maybe = null | undefined | T; + +export 
interface GraphQLErrorsResult { + errors?: ReadonlyArray<{ + message: string; + path?: Maybe>; + }>; +} + +export interface SupergraphSDLFetcherOptions { + endpoint: string; + key: string; +} + +export interface SchemaFetcherOptions { + endpoint: string; + key: string; +} + +export interface ServicesFetcherOptions { + endpoint: string; + key: string; +} diff --git a/packages/libraries/client/src/internal/usage.ts b/packages/libraries/client/src/internal/usage.ts new file mode 100644 index 000000000..6034a60df --- /dev/null +++ b/packages/libraries/client/src/internal/usage.ts @@ -0,0 +1,430 @@ +import { + ArgumentNode, + DocumentNode, + ExecutionArgs, + GraphQLInputObjectType, + GraphQLInputType, + GraphQLInterfaceType, + GraphQLNamedType, + GraphQLObjectType, + GraphQLOutputType, + GraphQLSchema, + GraphQLType, + GraphQLUnionType, + isEnumType, + isListType, + isNonNullType, + isScalarType, + Kind, + ObjectFieldNode, + OperationDefinitionNode, + TypeInfo, + visit, + visitWithTypeInfo, +} from 'graphql'; +import LRU from 'tiny-lru'; +import { normalizeOperation } from '@graphql-hive/core'; +import { createAgent } from './agent'; +import { randomSampling } from './sampling'; +import { version } from '../version'; +import { + cache, + cacheDocumentKey, + measureDuration, + memo, + isAsyncIterableIterator, +} from './utils'; +import type { + HivePluginOptions, + HiveUsagePluginOptions, + CollectUsageCallback, + ClientInfo, +} from './types'; + +interface UsageCollector { + collect(args: ExecutionArgs): CollectUsageCallback; + dispose(): Promise; +} + +export function createUsage(pluginOptions: HivePluginOptions): UsageCollector { + if (!pluginOptions.usage) { + return { + collect() { + return () => {}; + }, + async dispose() {}, + }; + } + + let report: Report = { + size: 0, + map: {}, + operations: [], + }; + const options = + typeof pluginOptions.usage === 'boolean' + ? 
({} as HiveUsagePluginOptions) + : pluginOptions.usage; + const logger = pluginOptions.agent?.logger ?? console; + const collector = memo(createCollector, (arg) => arg.schema); + const excludeSet = new Set(options.exclude ?? []); + const agent = createAgent( + { + logger, + ...(pluginOptions.agent ?? { + maxSize: 1500, + }), + endpoint: options.endpoint ?? 'https://app.graphql-hive.com/usage', + token: pluginOptions.token, + enabled: pluginOptions.enabled, + debug: pluginOptions.debug, + }, + { + prefix: 'usage', + data: { + set(operation) { + report.operations.push({ + operationMapKey: operation.key, + timestamp: operation.timestamp, + execution: { + ok: operation.execution.ok, + duration: operation.execution.duration, + errorsTotal: operation.execution.errorsTotal, + errors: operation.execution.errors, + }, + metadata: { + client: operation.client, + }, + }); + + report.size += 1; + + if (!report.map[operation.key]) { + report.map[operation.key] = { + operation: operation.operation, + operationName: operation.operationName, + fields: operation.fields, + }; + } + }, + size() { + return report.size; + }, + clear() { + report = { + size: 0, + map: {}, + operations: [], + }; + }, + }, + headers() { + return { + 'Content-Type': 'application/json', + 'graphql-client-name': 'Hive Client', + 'graphql-client-version': version, + }; + }, + body() { + return JSON.stringify(report); + }, + } + ); + + const shouldInclude = randomSampling(options.sampleRate ?? 
1.0); + + return { + dispose: agent.dispose, + collect(args) { + const finish = measureDuration(); + + return function complete(result) { + try { + if (isAsyncIterableIterator(result)) { + logger.info('@stream @defer is not supported'); + finish(); + return; + } + + const rootOperation = args.document.definitions.find( + (o) => o.kind === Kind.OPERATION_DEFINITION + ) as OperationDefinitionNode; + const document = args.document; + const operationName = + args.operationName || rootOperation.name?.value || 'anonymous'; + const duration = finish(); + + if (!excludeSet.has(operationName) && shouldInclude()) { + const errors = + result.errors?.map((error) => ({ + message: error.message, + path: error.path?.join('.'), + })) ?? []; + const collect = collector({ + schema: args.schema, + max: options.max ?? 1000, + ttl: options.ttl, + }); + const { key, value: info } = collect(document); + + agent.capture({ + key, + timestamp: Date.now(), + operationName, + operation: info.document, + fields: info.fields, + execution: { + ok: errors.length === 0, + duration, + errorsTotal: errors.length, + errors, + }, + // TODO: operationHash is ready to accept hashes of persisted operations + client: + typeof args.contextValue !== 'undefined' && + typeof options.clientInfo !== 'undefined' + ? 
options.clientInfo(args.contextValue) + : null, + }); + } + } catch (error) { + logger.error(`Failed to collect operation`, error); + } + }; + }, + }; +} + +interface CacheResult { + document: string; + fields: string[]; +} + +export function createCollector({ + schema, + max, + ttl, +}: { + schema: GraphQLSchema; + max?: number; + ttl?: number; +}) { + const typeInfo = new TypeInfo(schema); + + function collect(doc: DocumentNode): CacheResult { + const entries = new Set(); + + function markAsUsed(id: string) { + if (!entries.has(id)) { + entries.add(id); + } + } + + function makeId(...names: string[]): string { + return names.join('.'); + } + + const collectedInputTypes: Record< + string, + { + all: boolean; + fields: Set; + } + > = {}; + + function collectInputType(inputType: string, fieldName?: string) { + if (!collectedInputTypes[inputType]) { + collectedInputTypes[inputType] = { + all: false, + fields: new Set(), + }; + } + + if (fieldName) { + collectedInputTypes[inputType].fields.add(fieldName); + } else { + collectedInputTypes[inputType].all = true; + } + } + + function collectNode(node: ObjectFieldNode | ArgumentNode) { + const inputType = typeInfo.getInputType()!; + const inputTypeName = resolveTypeName(inputType); + + if (node.value.kind === Kind.ENUM) { + // Collect only a specific enum value + collectInputType(inputTypeName, node.value.value); + } else if ( + node.value.kind !== Kind.OBJECT && + node.value.kind !== Kind.LIST + ) { + collectInputType(inputTypeName); + } + } + + function markEntireTypeAsUsed(type: GraphQLInputType): void { + const namedType = unwrapType(type); + + if (isScalarType(namedType)) { + markAsUsed(makeId(namedType.name)); + return; + } + + if (isEnumType(namedType)) { + namedType.getValues().forEach((value) => { + markAsUsed(makeId(namedType.name, value.name)); + }); + return; + } + + const fieldsMap = namedType.getFields(); + + for (const fieldName in fieldsMap) { + const field = fieldsMap[fieldName]; + + 
markAsUsed(makeId(namedType.name, field.name)); + markEntireTypeAsUsed(field.type); + } + } + + visit( + doc, + visitWithTypeInfo(typeInfo, { + Field() { + const parent = typeInfo.getParentType()!; + const field = typeInfo.getFieldDef()!; + + markAsUsed(makeId(parent.name, field.name)); + }, + VariableDefinition() { + const inputType = typeInfo.getInputType()!; + collectInputType(resolveTypeName(inputType)); + }, + Argument(node) { + const parent = typeInfo.getParentType()!; + const field = typeInfo.getFieldDef()!; + const arg = typeInfo.getArgument()!; + + markAsUsed(makeId(parent.name, field.name, arg.name)); + collectNode(node); + }, + ListValue(node) { + const inputType = typeInfo.getInputType()!; + const inputTypeName = resolveTypeName(inputType); + + node.values.forEach((value) => { + if (value.kind !== Kind.OBJECT) { + // if a value is not an object we need to collect all fields + collectInputType(inputTypeName); + } + }); + }, + ObjectField(node) { + const parentInputType = typeInfo.getParentInputType()!; + const parentInputTypeName = resolveTypeName(parentInputType); + + collectNode(node); + collectInputType(parentInputTypeName, node.name.value); + }, + }) + ); + + for (const inputTypeName in collectedInputTypes) { + const { fields, all } = collectedInputTypes[inputTypeName]; + + if (all) { + markEntireTypeAsUsed(schema.getType(inputTypeName) as any); + } else { + fields.forEach((field) => { + markAsUsed(makeId(inputTypeName, field)); + }); + } + } + + return { + document: normalizeOperation({ + document: doc, + hideLiterals: true, + removeAliases: true, + }), + fields: Array.from(entries), + }; + } + + return cache(collect, cacheDocumentKey, LRU(max, ttl)); +} + +function resolveTypeName(inputType: GraphQLType): string { + return unwrapType(inputType).name; +} + +function unwrapType(type: GraphQLInputType): GraphQLNamedInputType; +function unwrapType(type: GraphQLOutputType): GraphQLNamedOutputType; +function unwrapType(type: GraphQLType): 
GraphQLNamedType; +function unwrapType(type: GraphQLType): GraphQLNamedType { + if (isNonNullType(type) || isListType(type)) { + return unwrapType(type.ofType); + } + + return type; +} + +type GraphQLNamedInputType = Exclude< + GraphQLNamedType, + GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType +>; +type GraphQLNamedOutputType = Exclude; + +export interface Report { + size: number; + map: OperationMap; + operations: Operation[]; +} + +interface CollectedOperation { + key: string; + timestamp: number; + operation: string; + operationName?: string | null; + fields: string[]; + execution: { + ok: boolean; + duration: number; + errorsTotal: number; + errors?: Array<{ + message: string; + path?: string; + }>; + }; + client?: ClientInfo | null; +} + +interface Operation { + operationMapKey: string; + timestamp: number; + execution: { + ok: boolean; + duration: number; + errorsTotal: number; + errors?: Array<{ + message: string; + path?: string; + }>; + }; + metadata?: { + client?: { + name?: string; + version?: string; + } | null; + }; +} + +interface OperationMapRecord { + operation: string; + operationName?: string | null; + fields: string[]; +} + +interface OperationMap { + [key: string]: OperationMapRecord; +} diff --git a/packages/libraries/client/src/internal/utils.ts b/packages/libraries/client/src/internal/utils.ts new file mode 100644 index 000000000..d40e7b07f --- /dev/null +++ b/packages/libraries/client/src/internal/utils.ts @@ -0,0 +1,122 @@ +import { createHash } from 'crypto'; +import type { + HiveClient, + HivePluginOptions, + AsyncIterableIteratorOrValue, +} from './types'; + +export function isAsyncIterableIterator( + value: AsyncIterableIteratorOrValue +): value is AsyncIterableIterator { + return typeof (value as any)?.[Symbol.asyncIterator] === 'function'; +} + +export function memo( + fn: (arg: A) => R, + cacheKeyFn: (arg: A) => K +): (arg: A) => R { + let memoizedResult: R | null = null; + let memoizedKey: K | null = null; + + return 
(arg: A) => { + const currentKey = cacheKeyFn(arg); + if (memoizedKey === currentKey) { + return memoizedResult!; + } + + memoizedKey = currentKey; + memoizedResult = fn(arg); + + return memoizedResult; + }; +} + +export function cache( + fn: (arg: A) => R, + cacheKeyFn: (arg: A) => K, + cacheMap: { + has(key: K): boolean; + set(key: K, value: R): void; + get(key: K): R | undefined; + } +) { + return (arg: A) => { + const key = cacheKeyFn(arg); + const cachedValue = cacheMap.get(key); + + if (cachedValue !== null && typeof cachedValue !== 'undefined') { + return { + key, + value: cachedValue, + }; + } + + const value = fn(arg); + cacheMap.set(key, value); + + return { + key, + value, + }; + }; +} + +export function cacheDocumentKey(doc: object) { + return createHash('md5').update(JSON.stringify(doc)).digest('hex'); +} + +const HR_TO_NS = 1e9; +const NS_TO_MS = 1e6; + +function deltaFrom(hrtime: [number, number]): { ms: number; ns: number } { + const delta = process.hrtime(hrtime); + const ns = delta[0] * HR_TO_NS + delta[1]; + + return { + ns, + get ms() { + return ns / NS_TO_MS; + }, + }; +} + +export function measureDuration() { + const startAt = process.hrtime(); + + return function end() { + return deltaFrom(startAt).ns; + }; +} + +export function addProperty( + key: K, + value: undefined | null, + obj: T +): T; +export function addProperty( + key: K, + value: V, + obj: T +): T & { + [k in K]: V; +}; +export function addProperty( + key: K, + value: V | undefined | null, + obj: T +): any { + if (value === null || typeof value === 'undefined') { + return obj; + } + + return { + ...obj, + [key]: value, + }; +} + +export function isHiveClient( + clientOrOptions: HiveClient | HivePluginOptions +): clientOrOptions is HiveClient { + return 'operationsStore' in clientOrOptions; +} diff --git a/packages/libraries/client/src/version.ts b/packages/libraries/client/src/version.ts new file mode 100644 index 000000000..1ddca76fb --- /dev/null +++ 
b/packages/libraries/client/src/version.ts @@ -0,0 +1 @@ +export const version = '0.15.2'; diff --git a/packages/libraries/client/tests/apollo.spec.ts b/packages/libraries/client/tests/apollo.spec.ts new file mode 100644 index 000000000..6f0e41030 --- /dev/null +++ b/packages/libraries/client/tests/apollo.spec.ts @@ -0,0 +1,23 @@ +// eslint-disable-next-line import/no-extraneous-dependencies +import nock from 'nock'; +import { createSupergraphSDLFetcher } from '../src/apollo'; + +test('createSupergraphSDLFetcher', async () => { + const supergraphSdl = 'type SuperQuery { sdl: String }'; + const key = 'secret-key'; + nock('http://localhost') + .get('/supergraph') + .once() + .matchHeader('X-Hive-CDN-Key', key) + .reply(() => [200, supergraphSdl]); + + const fetcher = createSupergraphSDLFetcher({ + endpoint: 'http://localhost', + key, + }); + + const result = await fetcher(); + + expect(result.id).toBeDefined(); + expect(result.supergraphSdl).toEqual(supergraphSdl); +}); diff --git a/packages/libraries/client/tests/gateways.spec.ts b/packages/libraries/client/tests/gateways.spec.ts new file mode 100644 index 000000000..3e4714b8d --- /dev/null +++ b/packages/libraries/client/tests/gateways.spec.ts @@ -0,0 +1,62 @@ +// eslint-disable-next-line import/no-extraneous-dependencies +import nock from 'nock'; +import { createServicesFetcher, createSchemaFetcher } from '../src/gateways'; + +afterEach(() => { + nock.cleanAll(); +}); + +test('createServicesFetcher', async () => { + const schema = { + sdl: 'type Query { noop: String }', + url: 'service-url', + name: 'service-name', + }; + const key = 'secret-key'; + nock('http://localhost') + .get('/schema') + .once() + .matchHeader('X-Hive-CDN-Key', key) + .matchHeader('accept', 'application/json') + .reply(() => [200, [schema]]); + + const fetcher = createServicesFetcher({ + endpoint: 'http://localhost', + key, + }); + + const result = await fetcher(); + + expect(result).toHaveLength(1); + expect(result[0].id).toBeDefined(); + 
expect(result[0].name).toEqual(schema.name); + expect(result[0].sdl).toEqual(schema.sdl); + expect(result[0].url).toEqual(schema.url); +}); + +test('createSchemaFetcher', async () => { + const schema = { + sdl: 'type Query { noop: String }', + url: 'service-url', + name: 'service-name', + }; + const key = 'secret-key'; + nock('http://localhost') + .get('/schema') + .once() + .matchHeader('X-Hive-CDN-Key', key) + .matchHeader('accept', 'application/json') + .reply(() => [200, schema]); + + const fetcher = createSchemaFetcher({ + endpoint: 'http://localhost', + key, + }); + + const result = await fetcher(); + + expect(result.id).toBeDefined(); + expect(result.name).toEqual(schema.name); + expect(result.sdl).toEqual(schema.sdl); + expect(result.url).toEqual(schema.url); +}); diff --git a/packages/libraries/client/tests/info.spec.ts b/packages/libraries/client/tests/info.spec.ts new file mode 100644 index 000000000..778dd8ed0 --- /dev/null +++ b/packages/libraries/client/tests/info.spec.ts @@ -0,0 +1,32 @@ +import { createHive } from '../src/client'; + +test('should not leak the exception', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + logger, + }, + token: 'Token', + reporting: { + endpoint: 'http://empty.localhost', + author: 'jest', + commit: 'random', + }, + }); + + const result = await hive + .info() + .then(() => 'OK') + .catch(() => 'ERROR'); + + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining(`[hive][info] Error`) + ); + expect(result).toBe('OK'); +}); diff --git a/packages/libraries/client/tests/integration.spec.ts b/packages/libraries/client/tests/integration.spec.ts new file mode 100644 index 000000000..5917ceb89 --- /dev/null +++ b/packages/libraries/client/tests/integration.spec.ts @@ -0,0 +1,159 @@ +/* eslint-disable-next-line import/no-extraneous-dependencies */ +import { createServer } from '@graphql-yoga/node'; +/* 
eslint-disable-next-line import/no-extraneous-dependencies */ +import { ApolloServerBase } from 'apollo-server-core'; +import axios from 'axios'; +import { createHive, useHive, hiveApollo } from '../src'; +import { waitFor } from './test-utils'; + +const typeDefs = /* GraphQL */ ` + type Query { + hello: String + } +`; + +const resolvers = { + Query: { + hello() { + return 'Hello world'; + }, + }, +}; + +function handleProcess() { + function fail(error: any) { + throw error; + } + + process.once('uncaughtException', fail); + process.once('unhandledRejection', fail); + + return () => { + process.removeListener('uncaughtException', fail); + process.removeListener('unhandledRejection', fail); + }; +} + +test('GraphQL Yoga - should not interrupt the process', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + const clean = handleProcess(); + const hive = createHive({ + enabled: true, + debug: true, + token: 'my-token', + agent: { + maxRetries: 2, + sendInterval: 1000, + timeout: 1000, + logger, + }, + reporting: { + endpoint: 'http://404.localhost/registry', + author: 'jest', + commit: 'js', + }, + usage: { + endpoint: 'http://404.localhost/usage', + }, + }); + + const server = createServer({ + port: 3000, + schema: { + typeDefs, + resolvers, + }, + plugins: [useHive(hive) as any], + logging: false, + }); + + async function stop() { + await server.stop(); + await hive.dispose(); + } + + await server.start(); + + await axios + .post('http://localhost:3000/graphql', { + query: /* GraphQL */ ` + { + hello + } + `, + }) + .catch(async (error) => { + await stop(); + return Promise.reject(error); + }); + + await waitFor(5_000); + await stop(); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][info]') + ); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][usage]') + ); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][reporting]') + ); + clean(); +}, 10_000); + 
+test('Apollo Server - should not interrupt the process', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + const clean = handleProcess(); + const apollo = new ApolloServerBase({ + typeDefs, + resolvers, + plugins: [ + hiveApollo({ + enabled: true, + debug: true, + token: 'my-token', + agent: { + maxRetries: 2, + sendInterval: 1000, + timeout: 1000, + logger, + }, + reporting: { + endpoint: 'http://404.localhost/registry', + author: 'jest', + commit: 'js', + }, + usage: { + endpoint: 'http://404.localhost/usage', + }, + }), + ], + }); + + await apollo.start(); + await apollo.executeOperation({ + query: /* GraphQL */ ` + { + hello + } + `, + }); + await waitFor(5_000); + await apollo.stop(); + clean(); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][info]') + ); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][usage]') + ); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('[hive][reporting]') + ); +}, 10_000); diff --git a/packages/libraries/client/tests/reporting.spec.ts b/packages/libraries/client/tests/reporting.spec.ts new file mode 100644 index 000000000..c6008a9b8 --- /dev/null +++ b/packages/libraries/client/tests/reporting.spec.ts @@ -0,0 +1,278 @@ +import { buildSchema, parse } from 'graphql'; +// eslint-disable-next-line import/no-extraneous-dependencies +import nock from 'nock'; +// eslint-disable-next-line import/no-extraneous-dependencies +import { buildSubgraphSchema } from '@apollo/federation'; +import { createHive } from '../src/client'; +import { version } from '../src/version'; +import { waitFor } from './test-utils'; + +afterEach(() => { + nock.cleanAll(); +}); + +const headers = { + 'Content-Type': 'application/json', + 'graphql-client-name': 'Hive Client', + 'graphql-client-version': version, +}; + +test('should not leak the exception', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const hive = 
createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 1, + logger, + }, + token: 'Token', + reporting: { + author: 'Test', + commit: 'Commit', + endpoint: 'http://404.localhost', + }, + }); + + hive.reportSchema({ + schema: buildSchema(/* GraphQL */ ` + type Query { + foo: String + } + `), + }); + + await waitFor(2000); + await hive.dispose(); + + expect(logger.info).toHaveBeenCalledWith( + '[hive][reporting] Sending (queue 1) (attempt 1)' + ); + expect(logger.info).toHaveBeenCalledWith( + expect.stringContaining('[hive][reporting] Attempt 1 failed:') + ); + expect(logger.info).toHaveBeenCalledWith( + '[hive][reporting] Sending (queue 1) (attempt 2)' + ); + expect(logger.error).toHaveBeenCalledTimes(1); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining(`[hive][reporting] Failed to send data`) + ); +}); + +test('should send data to Hive', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const author = 'Test'; + const commit = 'Commit'; + const token = 'Token'; + const serviceUrl = 'https://api.com'; + const serviceName = 'my-api'; + + let body: any = {}; + const http = nock('http://localhost') + .post('/200') + .matchHeader('x-api-token', token) + .matchHeader('Content-Type', headers['Content-Type']) + .matchHeader('graphql-client-name', headers['graphql-client-name']) + .matchHeader('graphql-client-version', headers['graphql-client-version']) + .once() + .reply((_, _body) => { + body = _body; + return [200]; + }); + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 1, + logger, + }, + token, + reporting: { + author, + commit, + endpoint: 'http://localhost/200', + serviceUrl, + serviceName, + }, + }); + + hive.reportSchema({ + schema: buildSchema(/* GraphQL */ ` + type Query { + foo: String + } + `), + }); + + await waitFor(2000); + await hive.dispose(); + http.done(); + + expect(logger.error).not.toHaveBeenCalled(); + 
expect(logger.info).toHaveBeenCalledWith( + '[hive][reporting] Sending (queue 1) (attempt 1)' + ); + expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`); + + expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); + expect(body.variables.input.author).toBe(author); + expect(body.variables.input.commit).toBe(commit); + expect(body.variables.input.service).toBe(serviceName); + expect(body.variables.input.url).toBe(serviceUrl); + expect(body.variables.input.force).toBe(true); +}); + +test.only('should send data to Hive immediately', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const author = 'Test'; + const commit = 'Commit'; + const token = 'Token'; + const serviceUrl = 'https://api.com'; + const serviceName = 'my-api'; + + let body: any = {}; + const http = nock('http://localhost') + .post('/200') + .matchHeader('x-api-token', token) + .matchHeader('Content-Type', headers['Content-Type']) + .matchHeader('graphql-client-name', headers['graphql-client-name']) + .matchHeader('graphql-client-version', headers['graphql-client-version']) + .once() + .reply((_, _body) => { + body = _body; + return [200]; + }); + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 1, + logger, + sendInterval: 200, + }, + token, + reporting: { + author, + commit, + endpoint: 'http://localhost/200', + serviceUrl, + serviceName, + }, + }); + + hive.reportSchema({ + schema: buildSchema(/* GraphQL */ ` + type Query { + foo: String + } + `), + }); + + expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + '[hive][reporting] Sending immediately' + ); + expect(logger.info).toHaveBeenCalledTimes(1); + await waitFor(50); + expect(logger.info).toHaveBeenCalledWith( + '[hive][reporting] Sending (queue 1) (attempt 1)' + ); + expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`); + 
expect(logger.info).toHaveBeenCalledTimes(3); + + expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); + expect(body.variables.input.author).toBe(author); + expect(body.variables.input.commit).toBe(commit); + expect(body.variables.input.service).toBe(serviceName); + expect(body.variables.input.url).toBe(serviceUrl); + expect(body.variables.input.force).toBe(true); + + await waitFor(400); + expect(logger.info).toHaveBeenCalledTimes(3); + + await hive.dispose(); + http.done(); +}); + +test('should send original schema of a federated service', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const author = 'Test'; + const commit = 'Commit'; + const token = 'Token'; + const serviceUrl = 'https://api.com'; + const serviceName = 'my-api'; + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 1, + logger, + }, + token, + reporting: { + author, + commit, + endpoint: 'http://localhost/200', + serviceUrl, + serviceName, + }, + }); + + let body: any = {}; + const http = nock('http://localhost') + .post('/200') + .matchHeader('x-api-token', token) + .matchHeader('Content-Type', headers['Content-Type']) + .matchHeader('graphql-client-name', headers['graphql-client-name']) + .matchHeader('graphql-client-version', headers['graphql-client-version']) + .once() + .reply((_, _body) => { + body = _body; + return [200]; + }); + + hive.reportSchema({ + schema: buildSubgraphSchema( + parse(/* GraphQL */ ` + type Query { + bar: String + } + `) + ), + }); + + await hive.dispose(); + http.done(); + + expect(body.variables.input.sdl).toBe(`type Query{bar:String}`); + expect(body.variables.input.author).toBe(author); + expect(body.variables.input.commit).toBe(commit); + expect(body.variables.input.service).toBe(serviceName); + expect(body.variables.input.url).toBe(serviceUrl); + expect(body.variables.input.force).toBe(true); +}); diff --git a/packages/libraries/client/tests/test-utils.ts 
b/packages/libraries/client/tests/test-utils.ts new file mode 100644 index 000000000..b60275810 --- /dev/null +++ b/packages/libraries/client/tests/test-utils.ts @@ -0,0 +1,5 @@ +export function waitFor(ms: number) { + return new Promise((resolve) => { + setTimeout(resolve, ms); + }); +} diff --git a/packages/libraries/client/tests/usage-collector.spec.ts b/packages/libraries/client/tests/usage-collector.spec.ts new file mode 100644 index 000000000..8478a83c4 --- /dev/null +++ b/packages/libraries/client/tests/usage-collector.spec.ts @@ -0,0 +1,223 @@ +import { parse, buildSchema } from 'graphql'; +import { createCollector } from '../src/internal/usage'; + +const schema = buildSchema(/* GraphQL */ ` + type Query { + project(selector: ProjectSelectorInput!): Project + projectsByType(type: ProjectType!): [Project!]! + projects(filter: FilterInput): [Project!]! + } + + type Mutation { + deleteProject(selector: ProjectSelectorInput!): DeleteProjectPayload! + } + + input ProjectSelectorInput { + organization: ID! + project: ID! + } + + input FilterInput { + type: ProjectType + pagination: PaginationInput + } + + input PaginationInput { + limit: Int + offset: Int + } + + type ProjectSelector { + organization: ID! + project: ID! + } + + type DeleteProjectPayload { + selector: ProjectSelector! + deletedProject: Project! + } + + type Project { + id: ID! + cleanId: ID! + name: String! + type: ProjectType! + buildUrl: String + validationUrl: String + } + + enum ProjectType { + FEDERATION + STITCHING + SINGLE + CUSTOM + } +`); + +const op = parse(/* GraphQL */ ` + mutation deleteProject($selector: ProjectSelectorInput!) 
{ + deleteProject(selector: $selector) { + selector { + organization + project + } + deletedProject { + ...ProjectFields + } + } + } + + fragment ProjectFields on Project { + id + cleanId + name + type + } +`); + +test('collect fields', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect(op).value; + + expect(info.fields).toContain(`Mutation.deleteProject`); + expect(info.fields).toContain(`Project.id`); +}); + +test('collect input object types', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect(op).value; + + expect(info.fields).toContain(`ProjectSelectorInput.organization`); + expect(info.fields).toContain(`ProjectSelectorInput.project`); +}); + +test('collect enums and scalars as inputs', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects($limit: Int!, $type: ProjectType!) { + projects(filter: { pagination: { limit: $limit }, type: $type }) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`Int`); + expect(info.fields).toContain(`ProjectType.FEDERATION`); + expect(info.fields).toContain(`ProjectType.STITCHING`); + expect(info.fields).toContain(`ProjectType.SINGLE`); + expect(info.fields).toContain(`ProjectType.CUSTOM`); +}); + +test('collect enum values from object fields', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects($limit: Int!) 
{ + projects(filter: { pagination: { limit: $limit }, type: FEDERATION }) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`Int`); + expect(info.fields).toContain(`ProjectType.FEDERATION`); + expect(info.fields).not.toContain(`ProjectType.STITCHING`); + expect(info.fields).not.toContain(`ProjectType.SINGLE`); + expect(info.fields).not.toContain(`ProjectType.CUSTOM`); +}); + +test('collect enum values from arguments', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects { + projectsByType(type: FEDERATION) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`ProjectType.FEDERATION`); + expect(info.fields).not.toContain(`ProjectType.STITCHING`); + expect(info.fields).not.toContain(`ProjectType.SINGLE`); + expect(info.fields).not.toContain(`ProjectType.CUSTOM`); +}); + +test('collect arguments', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects($limit: Int!, $type: ProjectType!) { + projects(filter: { pagination: { limit: $limit }, type: $type }) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`Query.projects.filter`); +}); + +test('collect used-only input fields', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects($limit: Int!, $type: ProjectType!) 
{ + projects(filter: { pagination: { limit: $limit }, type: $type }) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`FilterInput.pagination`); + expect(info.fields).toContain(`FilterInput.type`); + expect(info.fields).toContain(`PaginationInput.limit`); + expect(info.fields).not.toContain(`PaginationInput.offset`); +}); + +test('collect all input fields when it is impossible to pick only those used', async () => { + const collect = createCollector({ + schema, + max: 1, + }); + const info = collect( + parse(/* GraphQL */ ` + query getProjects($pagination: PaginationInput!, $type: ProjectType!) { + projects(filter: { pagination: $pagination, type: $type }) { + id + } + } + `) + ).value; + + expect(info.fields).toContain(`FilterInput.pagination`); + expect(info.fields).toContain(`FilterInput.type`); + expect(info.fields).toContain(`PaginationInput.limit`); + expect(info.fields).toContain(`PaginationInput.offset`); +}); diff --git a/packages/libraries/client/tests/usage.spec.ts b/packages/libraries/client/tests/usage.spec.ts new file mode 100644 index 000000000..1a70264e7 --- /dev/null +++ b/packages/libraries/client/tests/usage.spec.ts @@ -0,0 +1,326 @@ +import { parse, buildSchema } from 'graphql'; +// eslint-disable-next-line import/no-extraneous-dependencies +import nock from 'nock'; +import { createHive } from '../src/client'; +import { version } from '../src/version'; +import { waitFor } from './test-utils'; +import type { Report } from '../src/internal/usage'; + +const headers = { + 'Content-Type': 'application/json', + 'graphql-client-name': 'Hive Client', + 'graphql-client-version': version, +}; + +const schema = buildSchema(/* GraphQL */ ` + type Query { + project(selector: ProjectSelectorInput!): Project + projectsByType(type: ProjectType!): [Project!]! + projects(filter: FilterInput): [Project!]! + } + + type Mutation { + deleteProject(selector: ProjectSelectorInput!): DeleteProjectPayload! 
+ } + + input ProjectSelectorInput { + organization: ID! + project: ID! + } + + input FilterInput { + type: ProjectType + pagination: PaginationInput + } + + input PaginationInput { + limit: Int + offset: Int + } + + type ProjectSelector { + organization: ID! + project: ID! + } + + type DeleteProjectPayload { + selector: ProjectSelector! + deletedProject: Project! + } + + type Project { + id: ID! + cleanId: ID! + name: String! + type: ProjectType! + buildUrl: String + validationUrl: String + } + + enum ProjectType { + FEDERATION + STITCHING + SINGLE + CUSTOM + } +`); + +const op = parse(/* GraphQL */ ` + mutation deleteProject($selector: ProjectSelectorInput!) { + deleteProject(selector: $selector) { + selector { + organization + project + } + deletedProject { + ...ProjectFields + } + } + } + + fragment ProjectFields on Project { + id + cleanId + name + type + } +`); + +beforeEach(() => { + jest.restoreAllMocks(); +}); + +afterEach(() => { + nock.cleanAll(); +}); + +test('should send data to Hive', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const token = 'Token'; + + let report: Report = { + size: 0, + map: {}, + operations: [], + }; + const http = nock('http://localhost') + .post('/200') + .matchHeader('x-api-token', token) + .matchHeader('Content-Type', headers['Content-Type']) + .matchHeader('graphql-client-name', headers['graphql-client-name']) + .matchHeader('graphql-client-version', headers['graphql-client-version']) + .once() + .reply((_, _body) => { + report = _body as any; + return [200]; + }); + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 0, + logger, + }, + token, + usage: { + endpoint: 'http://localhost/200', + }, + }); + + const collect = hive.collectUsage({ + schema, + document: op, + operationName: 'deleteProject', + }); + + await waitFor(2000); + collect({}); + await hive.dispose(); + await waitFor(1000); + http.done(); + + 
expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 1)` + ); + expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`); + + // Map + expect(report.size).toEqual(1); + expect(Object.keys(report.map)).toHaveLength(1); + + const key = Object.keys(report.map)[0]; + const record = report.map[key]; + + // operation + expect(record.operation).toMatch('mutation deleteProject'); + expect(record.operationName).toMatch('deleteProject'); + // fields + expect(record.fields).toHaveLength(13); + expect(record.fields).toContainEqual('Mutation.deleteProject'); + expect(record.fields).toContainEqual('Mutation.deleteProject.selector'); + expect(record.fields).toContainEqual('DeleteProjectPayload.selector'); + expect(record.fields).toContainEqual('ProjectSelector.organization'); + expect(record.fields).toContainEqual('ProjectSelector.project'); + expect(record.fields).toContainEqual('DeleteProjectPayload.deletedProject'); + expect(record.fields).toContainEqual('Project.id'); + expect(record.fields).toContainEqual('Project.cleanId'); + expect(record.fields).toContainEqual('Project.name'); + expect(record.fields).toContainEqual('Project.type'); + expect(record.fields).toContainEqual('ProjectSelectorInput.organization'); + expect(record.fields).toContainEqual('ID'); + expect(record.fields).toContainEqual('ProjectSelectorInput.project'); + + // Operations + const operations = report.operations; + expect(operations).toHaveLength(1); // one operation + const operation = operations[0]; + + expect(operation.operationMapKey).toEqual(key); + expect(operation.timestamp).toEqual(expect.any(Number)); + // execution + expect(operation.execution.duration).toBeGreaterThanOrEqual(2000 * 1_000_000); // >=2000ms in microseconds + expect(operation.execution.duration).toBeLessThan(3000 * 1_000_000); // <3000ms + expect(operation.execution.errorsTotal).toBe(0); + expect(operation.execution.errors).toHaveLength(0); + 
expect(operation.execution.ok).toBe(true); +}); + +test('should not leak the exception', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 1, + logger, + }, + token: 'Token', + usage: { + endpoint: 'http://404.localhost', + }, + }); + + hive.collectUsage({ + schema, + document: op, + operationName: 'deleteProject', + })({}); + + await waitFor(1000); + await hive.dispose(); + + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 1)` + ); + expect(logger.info).toHaveBeenCalledWith( + expect.stringContaining(`[hive][usage] Attempt 1 failed:`) + ); + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 2)` + ); + expect(logger.error).toHaveBeenCalledTimes(1); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining(`[hive][usage] Failed to send data`) + ); +}); + +test('sendImmediately should not stop the schedule', async () => { + const logger = { + error: jest.fn(), + info: jest.fn(), + }; + + const token = 'Token'; + + const http = nock('http://localhost') + .post('/200') + .matchHeader('x-api-token', token) + .matchHeader('Content-Type', headers['Content-Type']) + .matchHeader('graphql-client-name', headers['graphql-client-name']) + .matchHeader('graphql-client-version', headers['graphql-client-version']) + .times(3) + .reply((_, _body) => { + return [200]; + }); + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 0, + maxSize: 2, + logger, + sendInterval: 100, + }, + token, + usage: { + endpoint: 'http://localhost/200', + }, + }); + + const collect = hive.collectUsage({ + schema, + document: op, + operationName: 'deleteProject', + }); + + expect(logger.info).toHaveBeenCalledTimes(0); + + collect({}); + await waitFor(200); + // Because maxSize is 2 and sendInterval is 100ms + // the scheduled send task 
should be done by now + expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 1)` + ); + expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`); + expect(logger.info).not.toHaveBeenCalledWith( + `[hive][usage] Sending immediately` + ); + expect(logger.info).toHaveBeenCalledTimes(2); + + // Now we will check the maxSize + // We run collect three times + collect({}); + collect({}); + expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 1)` + ); + expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending immediately`); + await waitFor(1); // we run setImmediate under the hood + // It should be sent already + expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`); + expect(logger.info).toHaveBeenCalledTimes(4); + + await waitFor(50); + expect(logger.info).toHaveBeenCalledTimes(5); + + // Let's check if the scheduled send task is still running + collect({}); + await waitFor(200); + expect(logger.error).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + `[hive][usage] Sending (queue 1) (attempt 1)` + ); + expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`); + expect(logger.info).toHaveBeenCalledTimes(7); + + await hive.dispose(); + await waitFor(1000); + http.done(); +}); diff --git a/packages/libraries/client/tsconfig.json b/packages/libraries/client/tsconfig.json new file mode 100644 index 000000000..02c9a0e67 --- /dev/null +++ b/packages/libraries/client/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.json", + "include": ["src"], + "compilerOptions": { + "baseUrl": ".", + "outDir": "dist", + "rootDir": "src", + "target": "es2017", + "module": "esnext" + } +} diff --git a/packages/libraries/core/.gitignore b/packages/libraries/core/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ 
b/packages/libraries/core/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/libraries/core/CHANGELOG.md b/packages/libraries/core/CHANGELOG.md new file mode 100644 index 000000000..548d5a23c --- /dev/null +++ b/packages/libraries/core/CHANGELOG.md @@ -0,0 +1,41 @@ +# @graphql-hive/core + +## 0.2.0 + +### Minor Changes + +- ac9b868c: Support GraphQL v16 + +## 0.1.0 + +### Minor Changes + +- d7348a3: Hide literals and remove aliases + +### Patch Changes + +- d7348a3: Pick operation name from DocumentNode + +## 0.0.5 + +### Patch Changes + +- c6ef3d2: Bob update + +## 0.0.4 + +### Patch Changes + +- 4a7c569: Share operation hashing + +## 0.0.3 + +### Patch Changes + +- 6b74355: Fix sorting + +## 0.0.2 + +### Patch Changes + +- 094c861: Normalization of operations diff --git a/packages/libraries/core/LICENSE b/packages/libraries/core/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/libraries/core/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/libraries/core/package.json b/packages/libraries/core/package.json new file mode 100644 index 000000000..bf485508d --- /dev/null +++ b/packages/libraries/core/package.json @@ -0,0 +1,48 @@ +{ + "name": "@graphql-hive/core", + "version": "0.2.0", + "author": { + "email": "contact@the-guild.dev", + "name": "The Guild", + "url": "https://the-guild.dev" + }, + "repository": { + "type": "git", + "url": "kamilkisiela/graphql-hive", + "directory": "packages/libraries/core" + }, + "license": "MIT", + "main": "dist/index.js", + "module": "dist/index.mjs", + "typings": "dist/index.d.ts", + "typescript": { + "definition": "dist/index.d.ts" + }, + "exports": { + ".": { + "require": "./dist/index.js", + "import": "./dist/index.mjs" + }, + "./*": { + "require": "./dist/*.js", + "import": "./dist/*.mjs" + } + }, + "publishConfig": { + "registry": "https://registry.npmjs.org", + "access": "public", + "directory": "dist" + }, + "scripts": { + "build": "bob build --single" + }, + "peerDependencies": { + "graphql": "^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "dependencies": { + "lodash.sortby": "4.7.0" + }, + "devDependencies": { + "@types/lodash.sortby": "4.7.7" + } +} diff --git a/packages/libraries/core/src/hash.ts b/packages/libraries/core/src/hash.ts new file mode 100644 index 000000000..724f05059 --- /dev/null +++ b/packages/libraries/core/src/hash.ts @@ -0,0 +1,5 @@ +import { createHash } from 'crypto'; + +export function hashOperation(operation: string) { + return createHash('md5').update(operation, 'utf8').digest('hex'); +} diff --git a/packages/libraries/core/src/index.ts b/packages/libraries/core/src/index.ts new file mode 100644 index 000000000..8911e75d7 --- 
/dev/null +++ b/packages/libraries/core/src/index.ts @@ -0,0 +1,2 @@ +export * from './normalize/operation'; +export * from './hash'; diff --git a/packages/libraries/core/src/normalize/operation.ts b/packages/libraries/core/src/normalize/operation.ts new file mode 100644 index 000000000..b3ce2714a --- /dev/null +++ b/packages/libraries/core/src/normalize/operation.ts @@ -0,0 +1,168 @@ +import { + visit, + print, + stripIgnoredCharacters, + separateOperations, + Kind, + DocumentNode, + DefinitionNode, + OperationDefinitionNode, + ArgumentNode, + VariableDefinitionNode, + SelectionNode, + DirectiveNode, +} from 'graphql'; +import sortBy from 'lodash.sortby'; + +export function normalizeOperation({ + document, + operationName, + hideLiterals = true, + removeAliases = true, +}: { + document: DocumentNode; + hideLiterals?: boolean; + removeAliases?: boolean; + operationName?: string; +}): string { + return stripIgnoredCharacters( + print( + visit( + dropUnusedDefinitions( + document, + operationName ?? + document.definitions.find(isOperationDef)?.name?.value + ), + { + // hide literals + IntValue(node) { + return hideLiterals ? { ...node, value: '0' } : node; + }, + FloatValue(node) { + return hideLiterals ? { ...node, value: '0' } : node; + }, + StringValue(node) { + return hideLiterals ? { ...node, value: '', block: false } : node; + }, + Field(node) { + return { + ...node, + // remove aliases + alias: removeAliases ? 
undefined : node.alias, + // sort arguments + arguments: sortNodes(node.arguments), + }; + }, + Document(node) { + return { + ...node, + definitions: sortNodes(node.definitions), + }; + }, + OperationDefinition(node) { + return { + ...node, + variableDefinitions: sortNodes(node.variableDefinitions), + }; + }, + SelectionSet(node) { + return { + ...node, + selections: sortNodes(node.selections), + }; + }, + FragmentSpread(node) { + return { + ...node, + directives: sortNodes(node.directives), + }; + }, + InlineFragment(node) { + return { + ...node, + directives: sortNodes(node.directives), + }; + }, + FragmentDefinition(node) { + return { + ...node, + directives: sortNodes(node.directives), + variableDefinitions: sortNodes(node.variableDefinitions), + }; + }, + Directive(node) { + return { ...node, arguments: sortNodes(node.arguments) }; + }, + } + ) + ) + ); +} + +function sortNodes(nodes: readonly DefinitionNode[]): readonly DefinitionNode[]; +function sortNodes(nodes: readonly SelectionNode[]): readonly SelectionNode[]; +function sortNodes( + nodes: readonly ArgumentNode[] | undefined +): readonly ArgumentNode[] | undefined; +function sortNodes( + nodes: readonly VariableDefinitionNode[] | undefined +): readonly VariableDefinitionNode[] | undefined; +function sortNodes( + nodes: readonly DirectiveNode[] | undefined +): readonly DirectiveNode[] | undefined; +function sortNodes( + nodes: readonly any[] | undefined +): readonly any[] | undefined { + if (nodes) { + if (nodes.length === 0) { + return []; + } + + if (isOfKindList(nodes, Kind.DIRECTIVE)) { + return sortBy(nodes, 'name.value'); + } + + if (isOfKindList(nodes, Kind.VARIABLE_DEFINITION)) { + return sortBy(nodes, 'variable.name.value'); + } + + if (isOfKindList(nodes, Kind.ARGUMENT)) { + return sortBy(nodes, 'name.value'); + } + + if ( + isOfKindList(nodes, [ + Kind.FIELD, + Kind.FRAGMENT_SPREAD, + Kind.INLINE_FRAGMENT, + ]) + ) { + return sortBy(nodes, 'kind', 'name.value'); + } + + return sortBy(nodes, 
'kind', 'name.value'); + } + + return; +} + +function isOfKindList( + nodes: readonly any[], + kind: string | string[] +): nodes is T[] { + return typeof kind === 'string' + ? nodes[0].kind === kind + : kind.includes(nodes[0].kind); +} + +function isOperationDef(def: DefinitionNode): def is OperationDefinitionNode { + return def.kind === Kind.OPERATION_DEFINITION; +} + +function dropUnusedDefinitions(doc: DocumentNode, operationName?: string) { + if (!operationName) { + return doc; + } + + return separateOperations(doc)[operationName] ?? doc; +} diff --git a/packages/libraries/core/tsconfig.json b/packages/libraries/core/tsconfig.json new file mode 100644 index 000000000..02c9a0e67 --- /dev/null +++ b/packages/libraries/core/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.json", + "include": ["src"], + "compilerOptions": { + "baseUrl": ".", + "outDir": "dist", + "rootDir": "src", + "target": "es2017", + "module": "esnext" + } +} diff --git a/packages/services/api/.gitignore b/packages/services/api/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/api/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/api/CHANGELOG.md b/packages/services/api/CHANGELOG.md new file mode 100644 index 000000000..77ffbbb63 --- /dev/null +++ b/packages/services/api/CHANGELOG.md @@ -0,0 +1,8 @@ +# @hive/api + +## 0.0.2 + +### Patch Changes + +- Updated dependencies [ac9b868c] + - @graphql-hive/core@0.2.0 diff --git a/packages/services/api/LICENSE b/packages/services/api/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/api/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, 
modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/services/api/package.json b/packages/services/api/package.json new file mode 100644 index 000000000..52e33279b --- /dev/null +++ b/packages/services/api/package.json @@ -0,0 +1,60 @@ +{ + "name": "@hive/api", + "type": "module", + "private": true, + "version": "0.0.2", + "license": "MIT", + "peerDependencies": { + "graphql": "^16.0.0", + "reflect-metadata": "^0.1.0" + }, + "dependencies": { + "@graphql-yoga/common": "2.5.0", + "cross-undici-fetch": "0.4.3", + "@trpc/client": "9.23.2", + "@graphql-hive/core": "0.2.0", + "@graphql-inspector/core": "3.1.2", + "@graphql-tools/load-files": "6.5.0", + "@graphql-tools/schema": "8.2.0", + "@octokit/app": "12.0.5", + "@sentry/node": "6.19.7", + "@sentry/types": "6.19.7", + "@slack/web-api": "6.4.0", + "@theguild/buddy": "0.1.0", + "agentkeepalive": "4.2.0", + "abort-controller": "3.0.0", + "auth0": "2.37.0", + "dataloader": "2.0.0", + "date-fns": "2.25.0", + "emittery": "0.10.0", + "got": "12.0.4", + "graphql-modules": "2.0.0", + "graphql-parse-resolve-info": "4.12.0", + "graphql-scalars": "1.12.0", + "human-id": "2.0.1", + "ioredis": "4.28.3", + "jwks-rsa": "2.0.5", + "jsonwebtoken": "8.5.1", + 
"lodash": "4.17.21", + "lru-cache": "7.9.0", + "mixpanel": "0.14.0", + "ms": "2.1.3", + "param-case": "3.0.4", + "p-retry": "5.1.1", + "p-timeout": "5.0.2", + "uuid": "8.3.2" + }, + "devDependencies": { + "@types/auth0": "2.34.2", + "@types/ioredis": "4.28.7", + "@types/lodash": "4.14.182", + "@types/lru-cache": "7.6.1", + "@types/ms": "0.7.31", + "@types/ioredis-mock": "5.6.0", + "@types/uuid": "8.3.4", + "ioredis-mock": "7.4.0" + }, + "engines": { + "node": ">=12" + } +} diff --git a/packages/services/api/src/context.ts b/packages/services/api/src/context.ts new file mode 100644 index 000000000..8d52ae023 --- /dev/null +++ b/packages/services/api/src/context.ts @@ -0,0 +1,12 @@ +export {}; + +declare global { + namespace GraphQLModules { + interface GlobalContext { + req: any; + requestId: string; + user: any; + headers: Record; + } + } +} diff --git a/packages/services/api/src/create.ts b/packages/services/api/src/create.ts new file mode 100644 index 000000000..226905764 --- /dev/null +++ b/packages/services/api/src/create.ts @@ -0,0 +1,214 @@ +import { createApplication, Provider, Scope } from 'graphql-modules'; +import { activityModule } from './modules/activity'; +import { authModule } from './modules/auth'; +import { labModule } from './modules/lab'; +import { operationsModule } from './modules/operations'; +import { + ClickHouseConfig, + CLICKHOUSE_CONFIG, +} from './modules/operations/providers/tokens'; +import { organizationModule } from './modules/organization'; +import { persistedOperationModule } from './modules/persisted-operations'; +import { projectModule } from './modules/project'; +import { schemaModule } from './modules/schema'; +import { sharedModule } from './modules/shared'; +import { HttpClient } from './modules/shared/providers/http-client'; +import { IdTranslator } from './modules/shared/providers/id-translator'; +import { IdempotentRunner } from './modules/shared/providers/idempotent-runner'; +import { Logger } from 
'./modules/shared/providers/logger'; +import { MessageBus } from './modules/shared/providers/message-bus'; +import { + CryptoProvider, + encryptionSecretProvider, +} from './modules/shared/providers/crypto'; +import { + RedisConfig, + REDIS_CONFIG, + RedisProvider, +} from './modules/shared/providers/redis'; +import { Storage } from './modules/shared/providers/storage'; +import { Tracking } from './modules/shared/providers/tracking'; +import { targetModule } from './modules/target'; +import { integrationsModule } from './modules/integrations'; +import { + GITHUB_APP_CONFIG, + GitHubApplicationConfig, +} from './modules/integrations/providers/github-integration-manager'; +import { alertsModule } from './modules/alerts'; +import { tokenModule } from './modules/token'; +import { feedbackModule } from './modules/feedback'; +import { TokensConfig, TOKENS_CONFIG } from './modules/token/providers/tokens'; +import { + WebhooksConfig, + WEBHOOKS_CONFIG, +} from './modules/alerts/providers/tokens'; +import { + SchemaServiceConfig, + SCHEMA_SERVICE_CONFIG, +} from './modules/schema/providers/orchestrators/tokens'; +import { CDN_CONFIG, CDNConfig } from './modules/cdn/providers/tokens'; +import { cdnModule } from './modules/cdn'; +import { adminModule } from './modules/admin'; +import { + FEEDBACK_SLACK_CHANNEL, + FEEDBACK_SLACK_TOKEN, +} from './modules/feedback/providers/tokens'; +import { usageEstimationModule } from './modules/usage-estimation'; +import { + UsageEstimationServiceConfig, + USAGE_ESTIMATION_SERVICE_CONFIG, +} from './modules/usage-estimation/providers/tokens'; +import { rateLimitModule } from './modules/rate-limit'; +import { + RateLimitServiceConfig, + RATE_LIMIT_SERVICE_CONFIG, +} from './modules/rate-limit/providers/tokens'; +import { + BillingConfig, + BILLING_CONFIG, +} from './modules/billing/providers/tokens'; +import { billingModule } from './modules/billing'; + +const modules = [ + sharedModule, + authModule, + organizationModule, + projectModule, + 
targetModule, + schemaModule, + activityModule, + operationsModule, + tokenModule, + persistedOperationModule, + labModule, + integrationsModule, + alertsModule, + feedbackModule, + cdnModule, + adminModule, + usageEstimationModule, + rateLimitModule, + billingModule, +]; + +const providers: Provider[] = [ + HttpClient, + IdTranslator, + MessageBus, + Tracking, + RedisProvider, + IdempotentRunner, + CryptoProvider, +]; + +export function createRegistry({ + tokens, + webhooks, + schemaService, + usageEstimationService, + rateLimitService, + logger, + storage, + clickHouse, + redis, + githubApp, + cdn, + encryptionSecret, + feedback, + billing, +}: { + logger: Logger; + storage: Storage; + clickHouse: ClickHouseConfig; + redis: RedisConfig; + tokens: TokensConfig; + webhooks: WebhooksConfig; + schemaService: SchemaServiceConfig; + usageEstimationService: UsageEstimationServiceConfig; + rateLimitService: RateLimitServiceConfig; + githubApp: GitHubApplicationConfig; + cdn: CDNConfig; + encryptionSecret: string; + feedback: { + token: string; + channel: string; + }; + billing: BillingConfig; +}) { + return createApplication({ + modules, + providers: providers.concat([ + { + provide: Logger, + useValue: logger, + scope: Scope.Singleton, + }, + { + provide: Storage, + useValue: storage, + scope: Scope.Singleton, + }, + { + provide: CLICKHOUSE_CONFIG, + useValue: clickHouse, + scope: Scope.Singleton, + }, + { + provide: TOKENS_CONFIG, + useValue: tokens, + scope: Scope.Singleton, + }, + { + provide: BILLING_CONFIG, + useValue: billing, + scope: Scope.Singleton, + }, + { + provide: WEBHOOKS_CONFIG, + useValue: webhooks, + scope: Scope.Singleton, + }, + { + provide: SCHEMA_SERVICE_CONFIG, + useValue: schemaService, + scope: Scope.Singleton, + }, + { + provide: USAGE_ESTIMATION_SERVICE_CONFIG, + useValue: usageEstimationService, + scope: Scope.Singleton, + }, + { + provide: RATE_LIMIT_SERVICE_CONFIG, + useValue: rateLimitService, + scope: Scope.Singleton, + }, + { + provide: 
REDIS_CONFIG, + useValue: redis, + scope: Scope.Singleton, + }, + { + provide: GITHUB_APP_CONFIG, + useValue: githubApp, + scope: Scope.Singleton, + }, + { + provide: CDN_CONFIG, + useValue: cdn, + scope: Scope.Singleton, + }, + { + provide: FEEDBACK_SLACK_CHANNEL, + useValue: feedback.channel, + scope: Scope.Singleton, + }, + { + provide: FEEDBACK_SLACK_TOKEN, + useValue: feedback.token, + scope: Scope.Singleton, + }, + encryptionSecretProvider(encryptionSecret), + ]), + }); +} diff --git a/packages/services/api/src/index.ts b/packages/services/api/src/index.ts new file mode 100644 index 000000000..cce3e5f27 --- /dev/null +++ b/packages/services/api/src/index.ts @@ -0,0 +1,30 @@ +import './context'; +export type { Application as Registry } from 'graphql-modules'; +export { createRegistry } from './create'; +export type { LogFn, Logger } from './modules/shared/providers/logger'; +export type { Storage } from './modules/shared/providers/storage'; +export type { + ActivityObject, + Member, + Organization, + PersistedOperation, + Project, + Schema, + SchemaObject, + SchemaVersion, + Target, + TargetSettings, + Token, + User, + AlertChannel, + Alert, + OrganizationBilling, +} from './shared/entities'; +export { minifySchema } from './shared/schema'; +export { HiveError } from './shared/errors'; +export { OrganizationType, ProjectType } from './__generated__/types'; +export type { AuthProvider } from './__generated__/types'; +export { HttpClient } from './modules/shared/providers/http-client'; +export { OperationsManager } from './modules/operations/providers/operations-manager'; +export { OperationsReader } from './modules/operations/providers/operations-reader'; +export { ClickHouse } from './modules/operations/providers/clickhouse-client'; diff --git a/packages/services/api/src/modules/activity/index.ts b/packages/services/api/src/modules/activity/index.ts new file mode 100644 index 000000000..3ba701444 --- /dev/null +++ 
b/packages/services/api/src/modules/activity/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import { ActivityManager } from './providers/activity-manager'; +import typeDefs from './module.graphql'; + +export const activityModule = createModule({ + id: 'activity', + dirname: __dirname, + typeDefs, + resolvers, + providers: [ActivityManager], +}); diff --git a/packages/services/api/src/modules/activity/module.graphql.ts b/packages/services/api/src/modules/activity/module.graphql.ts new file mode 100644 index 000000000..903504f6a --- /dev/null +++ b/packages/services/api/src/modules/activity/module.graphql.ts @@ -0,0 +1,175 @@ +import { gql } from 'graphql-modules'; + +export default gql` + interface Activity { + id: ID! + type: String! + createdAt: DateTime! + } + + extend type Query { + organizationActivities( + selector: OrganizationActivitiesSelector! + ): ActivityConnection! + projectActivities(selector: ProjectActivitiesSelector!): ActivityConnection! + targetActivities(selector: TargetActivitiesSelector!): ActivityConnection! + } + + input OrganizationActivitiesSelector { + organization: ID! + limit: Int! + } + + input ProjectActivitiesSelector { + organization: ID! + project: ID! + limit: Int! + } + + input TargetActivitiesSelector { + organization: ID! + project: ID! + target: ID! + limit: Int! + } + + type ActivityConnection { + nodes: [Activity!]! + total: Int! + } + + type OrganizationCreatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + } + + type OrganizationPlanChangeActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + newPlan: BillingPlanType! + previousPlan: BillingPlanType! + } + + type OrganizationNameUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! 
+ value: String! + } + + type OrganizationIdUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + value: String! + } + + type MemberAddedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + } + + type MemberDeletedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + email: String! + } + + type ProjectCreatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + } + + type ProjectDeletedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + name: String! + cleanId: String! + } + + type ProjectNameUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + value: String! + } + + type ProjectIdUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + value: String! + } + + type TargetCreatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + target: Target! + } + + type TargetDeletedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + name: String! + cleanId: String! + } + + type TargetNameUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! + project: Project! + target: Target! + value: String! + } + + type TargetIdUpdatedActivity implements Activity { + id: ID! + type: String! + createdAt: DateTime! + organization: Organization! + user: User! 
+ project: Project! + target: Target! + value: String! + } +`; diff --git a/packages/services/api/src/modules/activity/providers/activities.ts b/packages/services/api/src/modules/activity/providers/activities.ts new file mode 100644 index 000000000..d7018dd9f --- /dev/null +++ b/packages/services/api/src/modules/activity/providers/activities.ts @@ -0,0 +1,167 @@ +interface User { + id: string; + externalAuthUserId: string; +} + +interface BaseActivity { + type: string; + selector: OrganizationSelector | ProjectSelector | TargetSelector; + user?: User; +} + +interface UserSelector { + user: string; +} + +interface OrganizationSelector { + organization: string; +} + +interface ProjectSelector extends OrganizationSelector { + project: string; +} + +interface PersistedOperationSelector extends ProjectSelector { + organization: string; + project: string; + operation: string; +} + +interface TargetSelector extends ProjectSelector { + target: string; +} + +export interface OrganizationCreatedActivity extends BaseActivity { + type: 'ORGANIZATION_CREATED'; + selector: OrganizationSelector; + user: User; +} + +export interface OrganizationNameUpdatedActivity extends BaseActivity { + type: 'ORGANIZATION_NAME_UPDATED'; + selector: OrganizationSelector; + meta: { + value: string; + }; +} + +export interface OrganizationIdUpdatedActivity extends BaseActivity { + type: 'ORGANIZATION_ID_UPDATED'; + selector: OrganizationSelector; + meta: { + value: string; + }; +} + +export interface MemberAddedActivity extends BaseActivity { + type: 'MEMBER_ADDED'; + selector: OrganizationSelector & UserSelector; +} + +export interface MemberDeletedActivity extends BaseActivity { + type: 'MEMBER_DELETED'; + selector: OrganizationSelector; + meta: { + email: string; + }; +} + +export interface ProjectCreatedActivity extends BaseActivity { + type: 'PROJECT_CREATED'; + selector: ProjectSelector; + meta: { + projectType: string; + }; +} + +export interface ProjectDeletedActivity extends BaseActivity 
{ + type: 'PROJECT_DELETED'; + selector: OrganizationSelector; + meta: { + name: string; + cleanId: string; + }; +} + +export interface ProjectNameUpdatedActivity extends BaseActivity { + type: 'PROJECT_NAME_UPDATED'; + selector: ProjectSelector; + meta: { + value: string; + }; +} + +export interface ProjectIdUpdatedActivity extends BaseActivity { + type: 'PROJECT_ID_UPDATED'; + selector: ProjectSelector; + meta: { + value: string; + }; +} + +export interface PersistedOperationCreatedActivity extends BaseActivity { + type: 'PERSISTED_OPERATION_CREATED'; + selector: PersistedOperationSelector; +} + +export interface PersistedOperationDeletedActivity extends BaseActivity { + type: 'PERSISTED_OPERATION_DELETED'; + selector: PersistedOperationSelector; +} + +export interface TargetCreatedActivity extends BaseActivity { + type: 'TARGET_CREATED'; + selector: TargetSelector; +} + +export interface TargetDeletedActivity extends BaseActivity { + type: 'TARGET_DELETED'; + selector: ProjectSelector; + meta: { + name: string; + cleanId: string; + }; +} + +export interface TargetNameUpdatedActivity extends BaseActivity { + type: 'TARGET_NAME_UPDATED'; + selector: TargetSelector; + meta: { + value: string; + }; +} + +export interface TargetIdUpdatedActivity extends BaseActivity { + type: 'TARGET_ID_UPDATED'; + selector: TargetSelector; + meta: { + value: string; + }; +} + +export interface OrganizationPlanUpdated extends BaseActivity { + type: 'ORGANIZATION_PLAN_UPDATED'; + selector: OrganizationSelector; + meta: { + newPlan: string; + previousPlan: string; + }; +} + +export type Activity = + | OrganizationCreatedActivity + | OrganizationNameUpdatedActivity + | OrganizationIdUpdatedActivity + | OrganizationPlanUpdated + | MemberAddedActivity + | MemberDeletedActivity + | ProjectCreatedActivity + | ProjectDeletedActivity + | ProjectNameUpdatedActivity + | ProjectIdUpdatedActivity + | TargetCreatedActivity + | TargetDeletedActivity + | TargetNameUpdatedActivity + | 
TargetIdUpdatedActivity; + +export type ActivityTypes = Activity['type']; diff --git a/packages/services/api/src/modules/activity/providers/activity-manager.ts b/packages/services/api/src/modules/activity/providers/activity-manager.ts new file mode 100644 index 000000000..23842f749 --- /dev/null +++ b/packages/services/api/src/modules/activity/providers/activity-manager.ts @@ -0,0 +1,103 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { ActivityObject } from '../../../shared/entities'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import { ProjectAccessScope } from '../../auth/providers/project-access'; +import { Logger } from '../../shared/providers/logger'; +import { + Storage, + OrganizationSelector, + ProjectSelector, + TargetSelector, +} from '../../shared/providers/storage'; +import { Tracking } from '../../shared/providers/tracking'; +import { Activity } from './activities'; + +interface PaginationSelector { + limit: number; +} + +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class ActivityManager { + private logger: Logger; + + constructor( + logger: Logger, + private authManager: AuthManager, + private storage: Storage, + private tracking: Tracking + ) { + this.logger = logger.child({ + source: 'ActivityManager', + }); + } + + async create(activity: Activity): Promise { + try { + this.logger.debug('Creating an activity'); + + const user = activity.user + ? activity.user.id + : (await this.authManager.getCurrentUser()).id; + + await this.storage.createActivity({ + organization: activity.selector.organization, + project: + 'project' in activity.selector + ? activity.selector.project + : undefined, + target: + 'target' in activity.selector ? activity.selector.target : undefined, + user, + type: activity.type, + meta: 'meta' in activity ? 
activity.meta : {}, + }); + + this.logger.debug(`Created activity ${activity.type}`); + + await this.tracking.track({ + event: activity.type, + data: { + ...activity.selector, + ...('meta' in activity ? activity.meta : {}), + }, + user: activity.user, + }); + } catch (error) { + this.logger.error(`Failed to create an activity: ${error}`, error); + } + } + + public async getByOrganization( + selector: OrganizationSelector & PaginationSelector + ): Promise { + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.READ, + }); + return this.storage.getActivities(selector); + } + + public async getByProject( + selector: ProjectSelector & PaginationSelector + ): Promise { + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getActivities(selector); + } + + public async getByTarget( + selector: TargetSelector & PaginationSelector + ): Promise { + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getActivities(selector); + } +} diff --git a/packages/services/api/src/modules/activity/resolvers.ts b/packages/services/api/src/modules/activity/resolvers.ts new file mode 100644 index 000000000..7d758a5f9 --- /dev/null +++ b/packages/services/api/src/modules/activity/resolvers.ts @@ -0,0 +1,156 @@ +import { ActivityModule } from './__generated__/types'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { ActivityManager } from './providers/activity-manager'; +import { ActivityObject } from '../../shared/entities'; +import { createConnection } from '../../shared/schema'; + +export const resolvers: ActivityModule.Resolvers = { + Query: { + async organizationActivities(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + + return injector.get(ActivityManager).getByOrganization({ + organization, + 
limit: selector.limit, + }); + }, + async projectActivities(_, { selector }, { injector }) { + const [organization, project] = await Promise.all([ + injector.get(IdTranslator).translateOrganizationId(selector), + injector.get(IdTranslator).translateProjectId(selector), + ]); + + return injector.get(ActivityManager).getByProject({ + organization, + project, + limit: selector.limit, + }); + }, + async targetActivities(_, { selector }, { injector }) { + const [organization, project, target] = await Promise.all([ + injector.get(IdTranslator).translateOrganizationId(selector), + injector.get(IdTranslator).translateProjectId(selector), + injector.get(IdTranslator).translateTargetId(selector), + ]); + + return injector.get(ActivityManager).getByTarget({ + organization, + project, + target, + limit: selector.limit, + }); + }, + }, + OrganizationCreatedActivity: { + __isTypeOf(activity) { + return activity.type === 'ORGANIZATION_CREATED'; + }, + }, + OrganizationPlanChangeActivity: { + __isTypeOf(activity) { + return activity.type === 'ORGANIZATION_PLAN_UPDATED'; + }, + newPlan(activity: any) { + return (activity as ActivityObject).meta.newPlan; + }, + previousPlan(activity: any) { + return (activity as ActivityObject).meta.previousPlan; + }, + }, + OrganizationNameUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'ORGANIZATION_NAME_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + OrganizationIdUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'ORGANIZATION_ID_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + MemberAddedActivity: { + __isTypeOf(activity) { + return activity.type === 'MEMBER_ADDED'; + }, + }, + MemberDeletedActivity: { + __isTypeOf(activity) { + return activity.type === 'MEMBER_DELETED'; + }, + email(activity: any) { + return (activity as ActivityObject).meta.email; + }, + }, + ProjectCreatedActivity: { + 
__isTypeOf(activity) { + return activity.type === 'PROJECT_CREATED'; + }, + }, + ProjectDeletedActivity: { + __isTypeOf(activity) { + return activity.type === 'PROJECT_DELETED'; + }, + name(activity: any) { + return (activity as ActivityObject).meta.name; + }, + cleanId(activity: any) { + return (activity as ActivityObject).meta.cleanId; + }, + }, + ProjectNameUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'PROJECT_NAME_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + ProjectIdUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'PROJECT_ID_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + TargetCreatedActivity: { + __isTypeOf(activity) { + return activity.type === 'TARGET_CREATED'; + }, + }, + TargetDeletedActivity: { + __isTypeOf(activity) { + return activity.type === 'TARGET_DELETED'; + }, + name(activity: any) { + return (activity as ActivityObject).meta.name; + }, + cleanId(activity: any) { + return (activity as ActivityObject).meta.cleanId; + }, + }, + TargetNameUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'TARGET_NAME_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + TargetIdUpdatedActivity: { + __isTypeOf(activity) { + return activity.type === 'TARGET_ID_UPDATED'; + }, + value(activity: any) { + return (activity as ActivityObject).meta.value; + }, + }, + ActivityConnection: createConnection(), +}; diff --git a/packages/services/api/src/modules/admin/index.ts b/packages/services/api/src/modules/admin/index.ts new file mode 100644 index 000000000..5a79007c8 --- /dev/null +++ b/packages/services/api/src/modules/admin/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import { AdminManager } from './providers/admin-manager'; +import typeDefs from './module.graphql'; + 
+export const adminModule = createModule({ + id: 'admin', + dirname: __dirname, + typeDefs, + resolvers, + providers: [AdminManager], +}); diff --git a/packages/services/api/src/modules/admin/module.graphql.ts b/packages/services/api/src/modules/admin/module.graphql.ts new file mode 100644 index 000000000..f6fc0fc41 --- /dev/null +++ b/packages/services/api/src/modules/admin/module.graphql.ts @@ -0,0 +1,35 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Query { + admin: AdminQuery! + } + + type AdminQuery { + stats(daysLimit: Int): AdminStats! + } + + type AdminStats { + organizations: [AdminOrganizationStats!]! + general: AdminGeneralStats! + } + + type AdminOrganizationStats { + organization: Organization! + versions: Int! + users: Int! + projects: Int! + targets: Int! + persistedOperations: Int! + operations: SafeInt! + } + + type AdminGeneralStats { + operationsOverTime: [AdminOperationPoint!]! + } + + type AdminOperationPoint { + date: DateTime! + count: SafeInt! + } +`; diff --git a/packages/services/api/src/modules/admin/providers/admin-manager.ts b/packages/services/api/src/modules/admin/providers/admin-manager.ts new file mode 100644 index 000000000..330a2f1a7 --- /dev/null +++ b/packages/services/api/src/modules/admin/providers/admin-manager.ts @@ -0,0 +1,96 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { Logger } from '../../shared/providers/logger'; +import { Storage } from '../../shared/providers/storage'; +import { atomic } from '../../../shared/helpers'; +import { OperationsReader } from '../../operations/providers/operations-reader'; + +/** + * Responsible for auth checks. + * Talks to Storage. 
+ */ +@Injectable({ + scope: Scope.Operation, +}) +export class AdminManager { + private logger: Logger; + + constructor( + logger: Logger, + private storage: Storage, + private authManager: AuthManager, + private operationsReader: OperationsReader + ) { + this.logger = logger.child({ source: 'AdminManager' }); + } + + async getStats(daysLimit?: number | null) { + this.logger.debug('Fetching admin stats'); + const user = await this.authManager.getCurrentUser(); + + if (!user.isAdmin) { + throw new Error('GO AWAY'); + } + + return this.storage.adminGetStats(daysLimit); + } + + async getOperationsOverTime({ daysLimit }: { daysLimit: number }) { + this.logger.debug( + 'Fetching collected operations over time (admin, daysLimit=%s)', + daysLimit + ); + const user = await this.authManager.getCurrentUser(); + + if (!user.isAdmin) { + throw new Error('GO AWAY'); + } + + const points = await this.operationsReader.adminOperationsOverTime({ + daysLimit, + }); + + return points.map((point) => ({ + date: point.date, + count: point.total, + })); + } + + @atomic((arg: { daysLimit: number }) => arg.daysLimit + '') + async countOperationsPerOrganization({ daysLimit }: { daysLimit: number }) { + this.logger.info( + 'Counting collected operations per organization (admin, daysLimit=%s)', + daysLimit + ); + const user = await this.authManager.getCurrentUser(); + + if (user.isAdmin) { + const pairs = await this.storage.adminGetOrganizationsTargetPairs(); + const operations = + await this.operationsReader.adminCountOperationsPerTarget({ + daysLimit, + }); + + const organizationCountMap = new Map(); + const targetOrganizationMap = new Map( + pairs.map((p) => [p.target, p.organization]) + ); + + for (const op of operations) { + const organizationId = targetOrganizationMap.get(op.target); + + if (organizationId) { + const total = organizationCountMap.get(organizationId); + organizationCountMap.set(organizationId, (total ?? 
0) + op.total); + } + } + + return Array.from(organizationCountMap.entries()).map((entry) => ({ + organization: entry[0], + total: entry[1], + })); + } + + throw new Error('Go away'); + } +} diff --git a/packages/services/api/src/modules/admin/resolvers.ts b/packages/services/api/src/modules/admin/resolvers.ts new file mode 100644 index 000000000..c790698c5 --- /dev/null +++ b/packages/services/api/src/modules/admin/resolvers.ts @@ -0,0 +1,48 @@ +import { AdminManager } from './providers/admin-manager'; +import { AdminModule } from './__generated__/types'; + +export const resolvers: AdminModule.Resolvers = { + Query: { + admin() { + return {}; + }, + }, + AdminQuery: { + stats(_, { daysLimit }) { + return { + daysLimit, + }; + }, + }, + AdminStats: { + organizations({ daysLimit }, __, { injector }) { + return injector.get(AdminManager).getStats(daysLimit); + }, + general({ daysLimit }) { + return { daysLimit }; + }, + }, + AdminGeneralStats: { + operationsOverTime({ daysLimit }, _, { injector }) { + return injector.get(AdminManager).getOperationsOverTime({ + // Max days limit is 30 (that's the default TTL in ClickHouse table) + daysLimit: daysLimit ?? 30, + }); + }, + }, + AdminOrganizationStats: { + async operations(stats, _, { injector }) { + const results = await injector + .get(AdminManager) + .countOperationsPerOrganization({ + // Max days limit is 30 (that's the default TTL in ClickHouse table) + daysLimit: stats.daysLimit ?? 30, + }); + + return ( + results.find((r) => r.organization === stats.organization.id)?.total ?? 
+ 0 + ); + }, + }, +}; diff --git a/packages/services/api/src/modules/alerts/index.ts b/packages/services/api/src/modules/alerts/index.ts new file mode 100644 index 000000000..8e9c5bde4 --- /dev/null +++ b/packages/services/api/src/modules/alerts/index.ts @@ -0,0 +1,18 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { AlertsManager } from './providers/alerts-manager'; +import { SlackCommunicationAdapter } from './providers/adapters/slack'; +import { WebhookCommunicationAdapter } from './providers/adapters/webhook'; + +export const alertsModule = createModule({ + id: 'alerts', + dirname: __dirname, + typeDefs, + resolvers, + providers: [ + AlertsManager, + SlackCommunicationAdapter, + WebhookCommunicationAdapter, + ], +}); diff --git a/packages/services/api/src/modules/alerts/module.graphql.ts b/packages/services/api/src/modules/alerts/module.graphql.ts new file mode 100644 index 000000000..636470b79 --- /dev/null +++ b/packages/services/api/src/modules/alerts/module.graphql.ts @@ -0,0 +1,88 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + addAlertChannel(input: AddAlertChannelInput!): AlertChannel! + deleteAlertChannels(input: DeleteAlertChannelsInput!): [AlertChannel!]! + addAlert(input: AddAlertInput!): Alert! + deleteAlerts(input: DeleteAlertsInput!): [Alert!]! + } + + extend type Query { + alertChannels(selector: ProjectSelectorInput!): [AlertChannel!]! + alerts(selector: ProjectSelectorInput!): [Alert!]! + } + + enum AlertChannelType { + SLACK + WEBHOOK + } + + enum AlertType { + SCHEMA_CHANGE_NOTIFICATIONS + } + + input AddAlertChannelInput { + organization: ID! + project: ID! + name: String! + type: AlertChannelType! + slack: SlackChannelInput + webhook: WebhookChannelInput + } + + input SlackChannelInput { + channel: String! + } + + input WebhookChannelInput { + endpoint: String! 
+ } + + input DeleteAlertChannelsInput { + organization: ID! + project: ID! + channels: [ID!]! + } + + input AddAlertInput { + organization: ID! + project: ID! + target: ID! + channel: ID! + type: AlertType! + } + + input DeleteAlertsInput { + organization: ID! + project: ID! + alerts: [ID!]! + } + + interface AlertChannel { + id: ID! + name: String! + type: AlertChannelType! + } + + type AlertSlackChannel implements AlertChannel { + id: ID! + name: String! + type: AlertChannelType! + channel: String! + } + + type AlertWebhookChannel implements AlertChannel { + id: ID! + name: String! + type: AlertChannelType! + endpoint: String! + } + + type Alert { + id: ID! + type: AlertType! + channel: AlertChannel! + target: Target! + } +`; diff --git a/packages/services/api/src/modules/alerts/providers/adapters/common.ts b/packages/services/api/src/modules/alerts/providers/adapters/common.ts new file mode 100644 index 000000000..852d3629e --- /dev/null +++ b/packages/services/api/src/modules/alerts/providers/adapters/common.ts @@ -0,0 +1,70 @@ +import type * as Types from '../../../../__generated__/types'; +import { + Alert, + AlertChannel, + Organization, + Project, + Target, + SchemaVersion, +} from '../../../../shared/entities'; + +export interface SchemaChangeNotificationInput { + event: { + organization: Pick; + project: Pick; + target: Pick; + schema: Pick; + changes: readonly Types.SchemaChange[]; + errors: readonly Types.SchemaError[]; + initial: boolean; + }; + alert: Alert; + channel: AlertChannel; + integrations: { + slack: { + token: string; + }; + }; +} + +export interface ChannelConfirmationInput { + event: { + kind: 'created' | 'deleted'; + organization: Pick; + project: Pick; + }; + channel: AlertChannel; + integrations: { + slack: { + token: string; + }; + }; +} + +export interface CommunicationAdapter { + sendSchemaChangeNotification( + input: SchemaChangeNotificationInput + ): Promise; + sendChannelConfirmation(input: ChannelConfirmationInput): Promise; +} 
+ +export function slackCoderize(msg: string): string { + return quotesTransformer(msg, '`'); +} + +export function quotesTransformer(msg: string, symbols = '**') { + const findSingleQuotes = /'([^']+)'/gim; + const findDoubleQuotes = /"([^"]+)"/gim; + + function transformm(_: string, value: string) { + return `${symbols}${value}${symbols}`; + } + + return msg + .replace(findSingleQuotes, transformm) + .replace(findDoubleQuotes, transformm); +} + +export function filterChangesByLevel(level: Types.CriticalityLevel) { + return (change: Types.SchemaChange) => change.criticality === level; +} diff --git a/packages/services/api/src/modules/alerts/providers/adapters/slack.ts b/packages/services/api/src/modules/alerts/providers/adapters/slack.ts new file mode 100644 index 000000000..162c809b8 --- /dev/null +++ b/packages/services/api/src/modules/alerts/providers/adapters/slack.ts @@ -0,0 +1,180 @@ +import { Injectable } from 'graphql-modules'; +import { WebClient, MessageAttachment } from '@slack/web-api'; +import { + CommunicationAdapter, + SchemaChangeNotificationInput, + filterChangesByLevel, + slackCoderize, + ChannelConfirmationInput, +} from './common'; +import type * as Types from '../../../../__generated__/types'; +import { Logger } from '../../../shared/providers/logger'; + +@Injectable() +export class SlackCommunicationAdapter implements CommunicationAdapter { + private logger: Logger; + + constructor(logger: Logger) { + this.logger = logger.child({ service: 'SlackCommunicationAdapter' }); + } + + private createLink({ text, url }: { text: string; url: string }) { + return `<${url}|${text}>`; + } + + async sendSchemaChangeNotification(input: SchemaChangeNotificationInput) { + this.logger.debug( + `Sending Schema Change Notifications over Slack (organization=%s, project=%s, target=%s)`, + input.event.organization.id, + input.event.project.id, + input.event.target.id + ); + + if (!input.integrations.slack.token) { + this.logger.debug(`Slack Integration is not 
available`); + return; + } + + try { + const client = new WebClient(input.integrations.slack.token); + + const totalChanges = input.event.changes.length; + const projectLink = this.createLink({ + text: input.event.project.name, + url: `https://app.graphql-hive.com/${input.event.organization.cleanId}/${input.event.project.cleanId}`, + }); + const targetLink = this.createLink({ + text: input.event.target.name, + url: `https://app.graphql-hive.com/${input.event.organization.cleanId}/${input.event.project.cleanId}/${input.event.target.cleanId}`, + }); + const viewLink = this.createLink({ + text: 'view details', + url: `http://app.graphql-hive.com/${input.event.organization.cleanId}/${input.event.project.cleanId}/${input.event.target.cleanId}/history/${input.event.schema.id}`, + }); + + if (input.event.initial) { + await client.chat.postMessage({ + channel: input.channel.slackChannel!, + text: `:bee: Hi, I received your *first* schema in project ${projectLink}, target ${targetLink} (${viewLink}):`, + mrkdwn: true, + }); + } else { + await client.chat.postMessage({ + channel: input.channel.slackChannel!, + text: `:bee: Hi, I found *${totalChanges} ${this.pluralize( + 'change', + totalChanges + )}* in project ${projectLink}, target ${targetLink} (${viewLink}):`, + mrkdwn: true, + attachments: createAttachments(input.event.changes), + }); + } + } catch (error) { + this.logger.error(`Failed to send Slack notification`, error); + } + } + + async sendChannelConfirmation(input: ChannelConfirmationInput) { + this.logger.debug( + `Sending Channel Confirmation over Slack (organization=%s, project=%s)`, + input.event.organization.id, + input.event.project.id + ); + + const token = input.integrations.slack.token; + + if (!token) { + this.logger.debug(`Slack Integration is not available`); + return; + } + + const actionMessage = + input.event.kind === 'created' + ? 
`I will send here notifications` + : `I will no longer send here notifications`; + + try { + const projectLink = this.createLink({ + text: input.event.project.name, + url: `https://app.graphql-hive.com/${input.event.organization.cleanId}/${input.event.project.cleanId}`, + }); + + const client = new WebClient(token); + await client.chat.postMessage({ + channel: input.channel.slackChannel!, + text: [ + `:wave: Hi! I'm the notification :bee:.`, + `${actionMessage} about your ${projectLink} project.`, + ].join('\n'), + }); + } catch (error) { + this.logger.error(`Failed to send Slack notification`, error); + } + } + + private pluralize(word: string, num: number): string { + return word + (num > 1 ? 's' : ''); + } +} + +function createAttachments(changes: readonly Types.SchemaChange[]) { + const breakingChanges = changes.filter(filterChangesByLevel('Breaking')); + const dangerousChanges = changes.filter(filterChangesByLevel('Dangerous')); + const safeChanges = changes.filter(filterChangesByLevel('Safe')); + + const attachments: MessageAttachment[] = []; + + if (breakingChanges.length) { + attachments.push( + renderAttachments({ + color: '#E74C3B', + title: 'Breaking changes', + changes: breakingChanges, + }) + ); + } + + if (dangerousChanges.length) { + attachments.push( + renderAttachments({ + color: '#F0C418', + title: 'Dangerous changes', + changes: dangerousChanges, + }) + ); + } + + if (safeChanges.length) { + attachments.push( + renderAttachments({ + color: '#23B99A', + title: 'Safe changes', + changes: safeChanges, + }) + ); + } + + return attachments; +} + +function renderAttachments({ + changes, + title, + color, +}: { + color: string; + title: string; + changes: readonly Types.SchemaChange[]; +}): MessageAttachment { + const text = changes + .map((change) => slackCoderize(change.message)) + .join('\n'); + + return { + mrkdwn_in: ['text'], + color, + author_name: title, + text, + fallback: text, + }; +} diff --git 
a/packages/services/api/src/modules/alerts/providers/adapters/webhook.ts b/packages/services/api/src/modules/alerts/providers/adapters/webhook.ts new file mode 100644 index 000000000..bf6e3cbd1 --- /dev/null +++ b/packages/services/api/src/modules/alerts/providers/adapters/webhook.ts @@ -0,0 +1,56 @@ +import { Injectable, Inject } from 'graphql-modules'; +import type { + CommunicationAdapter, + SchemaChangeNotificationInput, +} from './common'; +import { Logger } from '../../../shared/providers/logger'; +import { HttpClient } from '../../../shared/providers/http-client'; +import { WEBHOOKS_CONFIG } from '../tokens'; +import type { WebhooksConfig } from '../tokens'; + +@Injectable() +export class WebhookCommunicationAdapter implements CommunicationAdapter { + private logger: Logger; + + constructor( + logger: Logger, + private http: HttpClient, + @Inject(WEBHOOKS_CONFIG) private config: WebhooksConfig + ) { + this.logger = logger.child({ service: 'WebhookCommunicationAdapter' }); + } + + async sendSchemaChangeNotification(input: SchemaChangeNotificationInput) { + this.logger.debug( + `Sending Schema Change Notifications over Webhook (organization=%s, project=%s, target=%s)`, + input.event.organization.id, + input.event.project.id, + input.event.target.id + ); + try { + await this.http.post(this.config.endpoint + '/schedule', { + headers: { + 'Accept-Encoding': 'gzip, deflate, br', + 'Content-Type': 'application/json', + }, + retry: { limit: 3 }, + timeout: { + socket: 1000, + connect: 1000, + secureConnect: 1000, + request: 10_000, + }, + json: { + endpoint: input.channel.webhookEndpoint, + event: input.event, + }, + }); + } catch (error) { + this.logger.error(`Failed to send Webhook notification`, error); + } + } + + async sendChannelConfirmation() { + // I don't think we need to implement this for webhooks + } +} diff --git a/packages/services/api/src/modules/alerts/providers/alerts-manager.ts b/packages/services/api/src/modules/alerts/providers/alerts-manager.ts 
new file mode 100644 index 000000000..1d806ba47 --- /dev/null +++ b/packages/services/api/src/modules/alerts/providers/alerts-manager.ts @@ -0,0 +1,350 @@ +import { Injectable, Scope } from 'graphql-modules'; +import type { AlertsModule } from '../__generated__/types'; +import type { AlertChannel, Alert } from '../../../shared/entities'; +import { cache } from '../../../shared/helpers'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { OrganizationManager } from '../../organization/providers/organization-manager'; +import { ProjectManager } from '../../project/providers/project-manager'; +import { Logger } from '../../shared/providers/logger'; +import { Storage } from '../../shared/providers/storage'; +import type { ProjectSelector } from '../../shared/providers/storage'; +import { Tracking } from '../../shared/providers/tracking'; +import { SlackIntegrationManager } from '../../integrations/providers/slack-integration-manager'; +import { IntegrationsAccessContext } from '../../integrations/providers/integrations-access-context'; +import { SchemaChangeNotificationInput } from './adapters/common'; +import { SlackCommunicationAdapter } from './adapters/slack'; +import { WebhookCommunicationAdapter } from './adapters/webhook'; +import { ProjectAccessScope } from '../../auth/providers/project-access'; +import { TargetAccessScope } from '../../auth/providers/target-access'; + +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class AlertsManager { + private logger: Logger; + + constructor( + logger: Logger, + private authManager: AuthManager, + private slackIntegrationManager: SlackIntegrationManager, + private slack: SlackCommunicationAdapter, + private webhook: WebhookCommunicationAdapter, + private organizationManager: OrganizationManager, + private projectManager: ProjectManager, + private storage: Storage, + private tracking: Tracking + ) { + this.logger = logger.child({ + source: 'AlertsManager', + }); + } + + async 
addChannel( + input: AlertsModule.AddAlertChannelInput + ): Promise { + this.logger.debug( + 'Adding Alert Channel (organization=%s, project=%s, type=%s)', + input.organization, + input.project, + input.type + ); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.ALERTS, + }); + await this.tracking.track({ + event: 'ADDED_ALERT_CHANNEL', + data: { + organization: input.organization, + project: input.project, + type: input.type, + }, + }); + + const channel = await this.storage.addAlertChannel(input); + + await this.triggerChannelConfirmation({ + kind: 'created', + channel, + organization: input.organization, + project: input.project, + }); + + return channel; + } + + async deleteChannels( + input: ProjectSelector & { + channels: readonly string[]; + } + ): Promise { + this.logger.debug( + 'Deleting Alert Channels (organization=%s, project=%s, size=%s)', + input.organization, + input.project, + input.channels.length + ); + await this.tracking.track({ + event: 'DELETED_ALERT_CHANNELS', + data: { + organization: input.organization, + project: input.project, + size: input.channels.length, + }, + }); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.ALERTS, + }); + const channels = await this.storage.deleteAlertChannels(input); + + await Promise.all( + channels.map((channel) => + this.triggerChannelConfirmation({ + kind: 'deleted', + channel, + organization: input.organization, + project: input.project, + }) + ) + ); + + return channels; + } + + @cache( + (selector) => selector.project + selector.organization + ) + async getChannels( + selector: ProjectSelector + ): Promise { + this.logger.debug( + 'Fetching Alert Channels (organization=%s, project=%s)', + selector.organization, + selector.project + ); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getAlertChannels(selector); + } + + async addAlert(input: 
AlertsModule.AddAlertInput): Promise { + this.logger.debug( + 'Adding Alert (organization=%s, project=%s, type=%s)', + input.organization, + input.project, + input.type + ); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.ALERTS, + }); + await this.tracking.track({ + event: 'ADDED_ALERT', + data: { + organization: input.organization, + project: input.project, + type: input.type, + }, + }); + + return this.storage.addAlert(input); + } + + async deleteAlerts( + input: ProjectSelector & { + alerts: readonly string[]; + } + ): Promise { + this.logger.debug( + 'Deleting Alerts (organization=%s, project=%s, size=%s)', + input.organization, + input.project, + input.alerts.length + ); + await this.tracking.track({ + event: 'DELETED_ALERTS', + data: { + organization: input.organization, + project: input.project, + size: input.alerts.length, + }, + }); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.ALERTS, + }); + return this.storage.deleteAlerts(input); + } + + async getAlerts(selector: ProjectSelector): Promise { + this.logger.debug( + 'Fetching Alerts (organization=%s, project=%s)', + selector.organization, + selector.project + ); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getAlerts(selector); + } + + async triggerSchemaChangeNotifications( + event: SchemaChangeNotificationInput['event'] + ) { + const organization = event.organization.id; + const project = event.project.id; + const target = event.target.id; + + this.logger.debug( + 'Triggering Schema Change Notifications (organization=%s, project=%s, target=%s)', + organization, + project, + target + ); + + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + + const [channels, alerts] = await Promise.all([ + this.getChannels({ + organization, + project, + }), + this.getAlerts({ + 
organization, + project, + }), + ]); + + const matchingAlerts = alerts.filter( + (alert) => + alert.type === 'SCHEMA_CHANGE_NOTIFICATIONS' && + alert.targetId === target + ); + const pairs = matchingAlerts.map((alert) => { + return { + alert, + channel: channels.find((channel) => channel.id === alert.channelId)!, + }; + }); + + const slackToken = await this.slackIntegrationManager.getToken({ + organization: event.organization.id, + project: event.project.id, + target: event.target.id, + context: IntegrationsAccessContext.SchemaPublishing, + }); + + const integrations: SchemaChangeNotificationInput['integrations'] = { + slack: { + token: slackToken!, + }, + }; + + // Let's not leak any data :) + const safeEvent: SchemaChangeNotificationInput['event'] = { + organization: { + id: event.organization.id, + cleanId: event.organization.cleanId, + name: event.organization.name, + }, + project: { + id: event.project.id, + cleanId: event.project.cleanId, + name: event.project.name, + }, + target: { + id: event.target.id, + cleanId: event.target.cleanId, + name: event.target.name, + }, + schema: { + id: event.schema.id, + commit: event.schema.commit, + valid: event.schema.valid, + }, + changes: event.changes, + errors: event.errors, + initial: event.initial, + }; + + await Promise.all( + pairs.map(({ channel, alert }) => { + if (channel.type === 'SLACK') { + return this.slack.sendSchemaChangeNotification({ + event: safeEvent, + alert, + channel, + integrations, + }); + } + + return this.webhook.sendSchemaChangeNotification({ + event: safeEvent, + alert, + channel, + integrations, + }); + }) + ); + } + + async triggerChannelConfirmation(input: { + kind: 'created' | 'deleted'; + channel: AlertChannel; + organization: string; + project: string; + }) { + const channel = input.channel; + const [organization, project] = await Promise.all([ + this.organizationManager.getOrganization({ + organization: input.organization, + }), + this.projectManager.getProject({ + organization: 
input.organization, + project: input.project, + }), + ]); + + if (channel.type === 'SLACK') { + const slackToken = await this.slackIntegrationManager.getToken({ + organization: organization.id, + project: project.id, + context: IntegrationsAccessContext.ChannelConfirmation, + }); + + this.slack.sendChannelConfirmation({ + event: { + kind: 'created', + organization: { + id: organization.id, + cleanId: organization.cleanId, + name: organization.name, + }, + project: { + id: project.id, + cleanId: project.cleanId, + name: project.name, + }, + }, + channel, + integrations: { + slack: { + token: slackToken!, + }, + }, + }); + } else { + this.webhook.sendChannelConfirmation(); + } + } +} diff --git a/packages/services/api/src/modules/alerts/providers/tokens.ts b/packages/services/api/src/modules/alerts/providers/tokens.ts new file mode 100644 index 000000000..c2150c04c --- /dev/null +++ b/packages/services/api/src/modules/alerts/providers/tokens.ts @@ -0,0 +1,9 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface WebhooksConfig { + endpoint: string; +} + +export const WEBHOOKS_CONFIG = new InjectionToken( + 'webhooks-endpoint' +); diff --git a/packages/services/api/src/modules/alerts/resolvers.ts b/packages/services/api/src/modules/alerts/resolvers.ts new file mode 100644 index 000000000..ac7232825 --- /dev/null +++ b/packages/services/api/src/modules/alerts/resolvers.ts @@ -0,0 +1,126 @@ +import type { AlertsModule } from './__generated__/types'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { AlertsManager } from './providers/alerts-manager'; +import { TargetManager } from '../target/providers/target-manager'; + +export const resolvers: AlertsModule.Resolvers = { + Mutation: { + async addAlertChannel(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + ]); 
+ + return injector.get(AlertsManager).addChannel({ + organization, + project, + name: input.name, + type: input.type, + slack: input.slack, + webhook: input.webhook, + }); + }, + async deleteAlertChannels(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + ]); + + return injector.get(AlertsManager).deleteChannels({ + organization, + project, + channels: input.channels, + }); + }, + async addAlert(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + return injector.get(AlertsManager).addAlert({ + organization, + project, + target, + channel: input.channel, + type: input.type, + }); + }, + async deleteAlerts(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + ]); + + return injector.get(AlertsManager).deleteAlerts({ + organization, + project, + alerts: input.alerts, + }); + }, + }, + Query: { + async alerts(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + ]); + + return injector.get(AlertsManager).getAlerts({ + organization, + project, + }); + }, + async alertChannels(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + ]); + + return 
injector.get(AlertsManager).getChannels({ + organization, + project, + }); + }, + }, + Alert: { + async channel(alert, _, { injector }) { + const channels = await injector.get(AlertsManager).getChannels({ + organization: alert.organizationId, + project: alert.projectId, + }); + + return channels.find((c) => c.id === alert.channelId)!; + }, + target(alert, _, { injector }) { + return injector.get(TargetManager).getTarget({ + organization: alert.organizationId, + project: alert.projectId, + target: alert.targetId, + }); + }, + }, + AlertSlackChannel: { + __isTypeOf(channel) { + return channel.type === 'SLACK'; + }, + channel(channel) { + return channel.slackChannel!; + }, + }, + AlertWebhookChannel: { + __isTypeOf(channel) { + return channel.type === 'WEBHOOK'; + }, + endpoint(channel) { + return channel.webhookEndpoint!; + }, + }, +}; diff --git a/packages/services/api/src/modules/auth/index.ts b/packages/services/api/src/modules/auth/index.ts new file mode 100644 index 000000000..9eb1f45ff --- /dev/null +++ b/packages/services/api/src/modules/auth/index.ts @@ -0,0 +1,24 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import { AuthManager } from './providers/auth-manager'; +import { ApiTokenProvider } from './providers/tokens'; +import { OrganizationAccess } from './providers/organization-access'; +import { ProjectAccess } from './providers/project-access'; +import { TargetAccess } from './providers/target-access'; +import { UserManager } from './providers/user-manager'; +import typeDefs from './module.graphql'; + +export const authModule = createModule({ + id: 'auth', + dirname: __dirname, + typeDefs, + resolvers, + providers: [ + AuthManager, + UserManager, + ApiTokenProvider, + OrganizationAccess, + ProjectAccess, + TargetAccess, + ], +}); diff --git a/packages/services/api/src/modules/auth/module.graphql.ts b/packages/services/api/src/modules/auth/module.graphql.ts new file mode 100644 index 000000000..b3219673c --- 
/dev/null +++ b/packages/services/api/src/modules/auth/module.graphql.ts @@ -0,0 +1,78 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Query { + me: User! + } + + extend type Mutation { + updateMe(input: UpdateMeInput!): User! + } + + input UpdateMeInput { + fullName: String! + displayName: String! + } + + type User { + id: ID! + email: String! + fullName: String! + displayName: String! + provider: AuthProvider! + } + + type UserConnection { + nodes: [User!]! + total: Int! + } + + type Member { + id: ID! + user: User! + organizationAccessScopes: [OrganizationAccessScope!]! + projectAccessScopes: [ProjectAccessScope!]! + targetAccessScopes: [TargetAccessScope!]! + } + + type MemberConnection { + nodes: [Member!]! + total: Int! + } + + enum AuthProvider { + GOOGLE + GITHUB + """ + Username-Password-Authentication + """ + AUTH0 + } + + enum OrganizationAccessScope { + READ + DELETE + SETTINGS + INTEGRATIONS + MEMBERS + } + + enum ProjectAccessScope { + READ + DELETE + SETTINGS + ALERTS + OPERATIONS_STORE_READ + OPERATIONS_STORE_WRITE + } + + enum TargetAccessScope { + READ + DELETE + SETTINGS + REGISTRY_READ + REGISTRY_WRITE + TOKENS_READ + TOKENS_WRITE + } +`; diff --git a/packages/services/api/src/modules/auth/providers/auth-manager.ts b/packages/services/api/src/modules/auth/providers/auth-manager.ts new file mode 100644 index 000000000..5f91a94cb --- /dev/null +++ b/packages/services/api/src/modules/auth/providers/auth-manager.ts @@ -0,0 +1,302 @@ +import { Injectable, Inject, Scope, CONTEXT } from 'graphql-modules'; +import type { User } from '../../../shared/entities'; +import type { Listify, MapToArray } from '../../../shared/helpers'; +import { AccessError } from '../../../shared/errors'; +import { share } from '../../../shared/helpers'; +import { createOrUpdateUser } from '../../../shared/mixpanel'; +import { Storage } from '../../shared/providers/storage'; +import { MessageBus } from '../../shared/providers/message-bus'; 
import { IdempotentRunner } from '../../shared/providers/idempotent-runner';
import { TokenStorage } from '../../token/providers/token-storage';
import {
  ENSURE_PERSONAL_ORGANIZATION_EVENT,
  EnsurePersonalOrganizationEventPayload,
} from '../../organization/providers/events';
import { ApiToken } from './tokens';
import {
  OrganizationAccess,
  OrganizationAccessScope,
  OrganizationUserScopesSelector,
} from './organization-access';
import {
  ProjectAccess,
  ProjectAccessScope,
  ProjectUserScopesSelector,
} from './project-access';
import {
  TargetAccess,
  TargetAccessScope,
  TargetUserScopesSelector,
} from './target-access';
import { UserManager } from './user-manager';

export interface OrganizationAccessSelector {
  organization: string;
  scope: OrganizationAccessScope;
}

export interface ProjectAccessSelector {
  organization: string;
  project: string;
  scope: ProjectAccessScope;
}

export interface TargetAccessSelector {
  organization: string;
  project: string;
  target: string;
  scope: TargetAccessScope;
}

/**
 * Responsible for auth checks.
 * Talks to Storage.
 *
 * Every `ensure*` method has two modes: when an X-API-Token was provided
 * (`apiToken` is set) access is checked against the token's scopes,
 * otherwise against the scopes of the currently authenticated user.
 */
@Injectable({
  scope: Scope.Operation,
  global: true,
})
export class AuthManager {
  // Decoded auth payload taken from the GraphQL context; shape depends on
  // the auth provider (see the custom-claim keys in getCurrentUser).
  private user: any;

  constructor(
    @Inject(ApiToken) private apiToken: string,
    @Inject(CONTEXT) context: any,
    private organizationAccess: OrganizationAccess,
    private projectAccess: ProjectAccess,
    private targetAccess: TargetAccess,
    private userManager: UserManager,
    private tokenStorage: TokenStorage,
    private messageBus: MessageBus,
    private storage: Storage,
    private idempotentRunner: IdempotentRunner
  ) {
    this.user = context.user;
  }

  /**
   * Throws AccessError unless the caller (token or user) has `scope`
   * on the given target. Accepts a single target id or a list of them.
   */
  async ensureTargetAccess(
    selector: Listify<TargetAccessSelector, 'target'>
  ): Promise<void | never> {
    if (hasManyTargets(selector)) {
      // Fan out once for all targets; Promise.all rejects on the first
      // denied target. Previously this fan-out was duplicated verbatim
      // in both the token and the user branch below.
      await Promise.all(
        selector.target.map((target) =>
          this.ensureTargetAccess({
            ...selector,
            target,
          })
        )
      );
      return;
    }

    if (this.apiToken) {
      await this.targetAccess.ensureAccessForToken({
        ...(selector as TargetAccessSelector),
        token: this.apiToken,
      });
    } else {
      const user = await this.getCurrentUser();
      await this.targetAccess.ensureAccessForUser({
        ...(selector as TargetAccessSelector),
        user: user.id,
      });
    }
  }

  /** Throws AccessError unless the caller has `scope` on the project. */
  async ensureProjectAccess(
    selector: ProjectAccessSelector
  ): Promise<void | never> {
    if (this.apiToken) {
      await this.projectAccess.ensureAccessForToken({
        ...selector,
        token: this.apiToken,
      });
    } else {
      const user = await this.getCurrentUser();
      await this.projectAccess.ensureAccessForUser({
        ...selector,
        user: user.id,
      });
    }
  }

  /** Throws AccessError unless the caller has `scope` on the organization. */
  async ensureOrganizationAccess(
    selector: OrganizationAccessSelector
  ): Promise<void | never> {
    if (this.apiToken) {
      await this.organizationAccess.ensureAccessForToken({
        ...selector,
        token: this.apiToken,
      });
    } else {
      const user = await this.getCurrentUser();

      // If a user is an admin, we can allow access for all data
      if (user.isAdmin) {
        return;
      }

      await this.organizationAccess.ensureAccessForUser({
        ...selector,
        user: user.id,
      });
    }
  }

  /**
   * Non-throwing variant of ensureOrganizationAccess for users.
   * NOTE: not implemented for API tokens — throws in that case.
   */
  async checkOrganizationAccess(
    selector: OrganizationAccessSelector
  ): Promise<boolean> {
    if (this.apiToken) {
      throw new Error(
        'checkOrganizationAccess for token is not implemented yet'
      );
    }

    const user = await this.getCurrentUser();

    return this.organizationAccess.checkAccessForUser({
      ...selector,
      user: user.id,
    });
  }

  /** Returns the API token or throws when the request carried none. */
  ensureApiToken(): string | never {
    if (this.apiToken) {
      return this.apiToken;
    }

    throw new AccessError('X-API-Token is missing');
  }

  // Resolves the external auth id used for analytics and upserts the
  // user profile in Mixpanel as a side effect. `share` memoizes the
  // promise for the lifetime of this operation-scoped instance.
  getUserIdForTracking: () => Promise<string> = share(async () => {
    const user = await (this.apiToken
      ? this.getOrganizationOwnerByToken()
      : this.getCurrentUser());

    createOrUpdateUser({
      id: user.externalAuthUserId,
      email: user.email,
    });

    return user.externalAuthUserId;
  });

  // Maps an API token to the owner of its organization (memoized).
  getOrganizationOwnerByToken: () => Promise<User | never> = share(async () => {
    const token = this.ensureApiToken();
    const result = await this.tokenStorage.getToken({ token });

    await this.ensureOrganizationAccess({
      organization: result.organization,
      scope: OrganizationAccessScope.READ,
    });

    const member = await this.storage.getOrganizationOwner({
      organization: result.organization,
    });

    return member.user;
  });

  // Loads (and lazily creates, first login) the internal user record for
  // the authenticated principal. Memoized per operation via `share`.
  getCurrentUser: () => Promise<(User & { isAdmin: boolean }) | never> = share(
    async () => {
      if (!this.user) {
        throw new AccessError('Authorization token is missing');
      }

      // Custom JWT claims injected by the auth provider.
      const info = (this.user as any)['https://graphql-hive.com/userinfo'];
      const metadata = (this.user as any)['https://graphql-hive.com/metadata'];

      let internalUser = await this.storage.getUserByExternalId({
        external: info.user_id,
      });

      if (!internalUser) {
        // Guard first-login user creation against concurrent requests.
        internalUser = await this.idempotentRunner.run({
          identifier: `user:create:${info.user_id}`,
          executor: () => this.ensureInternalUser(info),
          ttl: 60,
        });
      }

      return {
        ...internalUser,
        isAdmin: metadata?.admin === true,
      };
    }
  );

  // Creates the internal user row if missing and kicks off creation of
  // the user's personal organization (fire-and-forget via message bus).
  private async ensureInternalUser(input: { user_id: string; email: string }) {
    let internalUser = await this.storage.getUserByExternalId({
      external: input.user_id,
    });

    if (!internalUser) {
      internalUser = await this.userManager.createUser({
        external: input.user_id,
        email: input.email,
      });
    }

    await this.messageBus.emit<EnsurePersonalOrganizationEventPayload>(
      ENSURE_PERSONAL_ORGANIZATION_EVENT,
      {
        name: internalUser.displayName,
        user: {
          id: internalUser.id,
          externalAuthUserId: internalUser.externalAuthUserId,
        },
      }
    );

    return internalUser;
  }

  /** Updates display/full name of the authenticated user. */
  async updateCurrentUser(input: {
    displayName: string;
    fullName: string;
  }): Promise<User> {
    const user = await this.getCurrentUser();
    return this.userManager.updateUser({
      id: user.id,
      ...input,
    });
  }

  /** True when the request is authenticated as a user (not a token). */
  isUser() {
    return !!this.user;
  }

  getMemberOrganizationScopes(selector: OrganizationUserScopesSelector) {
    return this.organizationAccess.getMemberScopes(selector);
  }

  getMemberProjectScopes(selector: ProjectUserScopesSelector) {
    return this.projectAccess.getMemberScopes(selector);
  }

  getMemberTargetScopes(selector: TargetUserScopesSelector) {
    return this.targetAccess.getMemberScopes(selector);
  }

  /** Drops all per-operation access caches after scope mutations. */
  resetAccessCache() {
    this.organizationAccess.resetAccessCache();
    this.projectAccess.resetAccessCache();
    this.targetAccess.resetAccessCache();
  }
}

// Type guard: does the selector carry a list of targets?
function hasManyTargets(
  selector: Listify<TargetAccessSelector, 'target'>
): selector is MapToArray<TargetAccessSelector, 'target'> {
  return Array.isArray(selector.target);
}

// ── packages/services/api/src/modules/auth/providers/organization-access.ts ──
import { Injectable, Scope, Inject, forwardRef } from 'graphql-modules';
import Dataloader from 'dataloader';
import { Logger } from '../../shared/providers/logger';
import { Storage } from
'../../shared/providers/storage'; +import { Token } from '../../../shared/entities'; +import { AccessError } from '../../../shared/errors'; +import DataLoader from 'dataloader'; +import { + TokenStorage, + TokenSelector, +} from '../../token/providers/token-storage'; +import type { ProjectAccessScope } from './project-access'; +import type { TargetAccessScope } from './target-access'; + +export interface OrganizationUserScopesSelector { + user: string; + organization: string; +} + +export interface OrganizationUserAccessSelector { + user: string; + organization: string; + scope: OrganizationAccessScope; +} + +interface OrganizationTokenAccessSelector { + token: string; + organization: string; + scope: OrganizationAccessScope; +} + +export enum OrganizationAccessScope { + /** + * Read organization data (projects, targets, etc.) + */ + READ = 'organization:read', + /** + * Who can delete the organization + */ + DELETE = 'organization:delete', + /** + * Who can modify organization's settings + */ + SETTINGS = 'organization:settings', + /** + * Who can add/remove 3rd-party integrations (Slack, etc.) 
+ */ + INTEGRATIONS = 'organization:integrations', + /** + * Who can manage members + */ + MEMBERS = 'organization:members', +} + +const organizationAccessScopeValues = Object.values(OrganizationAccessScope); + +function isOrganizationScope(scope: any): scope is OrganizationAccessScope { + return organizationAccessScopeValues.includes(scope); +} + +@Injectable({ + scope: Scope.Operation, +}) +export class OrganizationAccess { + private logger: Logger; + private userAccess: Dataloader< + OrganizationUserAccessSelector, + boolean, + string + >; + private tokenAccess: Dataloader< + OrganizationTokenAccessSelector, + boolean, + string + >; + private allScopes: DataLoader< + OrganizationUserScopesSelector, + ReadonlyArray< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >, + string + >; + private scopes: DataLoader< + OrganizationUserScopesSelector, + readonly OrganizationAccessScope[], + string + >; + tokenInfo: DataLoader; + + constructor( + logger: Logger, + private storage: Storage, + @Inject(forwardRef(() => TokenStorage)) private tokenStorage: TokenStorage + ) { + this.logger = logger.child({ + source: 'OrganizationAccess', + }); + this.userAccess = new Dataloader( + async (selectors) => { + const scopes = await this.scopes.loadMany(selectors); + + return selectors.map((selector, i) => { + const scopesForSelector = scopes[i]; + + if (scopesForSelector instanceof Error) { + this.logger.warn( + `OrganizationAccess:user (error=%s, selector=%o)`, + scopesForSelector.message, + selector + ); + return false; + } + + return scopesForSelector.includes(selector.scope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'OrganizationAccess:user', + organization: selector.organization, + user: selector.user, + scope: selector.scope, + }); + }, + } + ); + this.tokenAccess = new Dataloader( + (selectors) => + Promise.all( + selectors.map(async (selector) => { + const tokenInfo = await this.tokenInfo.load(selector); + + if 
(tokenInfo?.organization === selector.organization) { + return tokenInfo.scopes.includes(selector.scope); + } + + return false; + }) + ), + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'OrganizationAccess:token', + organization: selector.organization, + token: selector.token, + scope: selector.scope, + }); + }, + } + ); + this.allScopes = new Dataloader( + async (selectors) => { + const scopesPerSelector = + await this.storage.getOrganizationMemberAccessPairs(selectors); + + return selectors.map((_, i) => scopesPerSelector[i]); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'OrganizationAccess:allScopes', + organization: selector.organization, + user: selector.user, + }); + }, + } + ); + this.scopes = new Dataloader( + async (selectors) => { + const scopesPerSelector = await this.allScopes.loadMany(selectors); + + return selectors.map((selector, i) => { + const scopes = scopesPerSelector[i]; + + if (scopes instanceof Error) { + this.logger.warn( + `OrganizationAccess:scopes (error=%s, selector=%o)`, + scopes.message, + selector + ); + return []; + } + + return scopes.filter(isOrganizationScope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'OrganizationAccess:scopes', + organization: selector.organization, + user: selector.user, + }); + }, + } + ); + this.tokenInfo = new Dataloader( + (selectors) => + Promise.all( + selectors.map((selector) => this.tokenStorage.getToken(selector)) + ), + { + cacheKeyFn(selector) { + return selector.token; + }, + } + ); + } + + async ensureAccessForToken( + selector: OrganizationTokenAccessSelector + ): Promise { + const canAccess = await this.tokenAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing ${selector.scope} permission`); + } + } + + async ensureAccessForUser( + selector: OrganizationUserAccessSelector + ): Promise { + const canAccess = await this.userAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing 
${selector.scope} permission`); + } + } + + async checkAccessForUser( + selector: OrganizationUserAccessSelector + ): Promise { + return this.userAccess.load(selector); + } + + async getMemberScopes(selector: OrganizationUserScopesSelector) { + return this.scopes.load(selector); + } + + async getAllScopes(selectors: readonly OrganizationUserScopesSelector[]) { + return this.allScopes.loadMany(selectors); + } + + resetAccessCache() { + this.userAccess.clearAll(); + this.tokenAccess.clearAll(); + this.allScopes.clearAll(); + this.scopes.clearAll(); + this.tokenInfo.clearAll(); + } +} diff --git a/packages/services/api/src/modules/auth/providers/project-access.ts b/packages/services/api/src/modules/auth/providers/project-access.ts new file mode 100644 index 000000000..631996e2c --- /dev/null +++ b/packages/services/api/src/modules/auth/providers/project-access.ts @@ -0,0 +1,199 @@ +import { Injectable, Scope } from 'graphql-modules'; +import Dataloader from 'dataloader'; +import { Logger } from '../../shared/providers/logger'; +import { AccessError } from '../../../shared/errors'; +import { OrganizationAccess } from './organization-access'; + +export interface ProjectUserAccessSelector { + user: string; + organization: string; + project: string; + scope: ProjectAccessScope; +} + +export interface ProjectUserScopesSelector { + user: string; + organization: string; +} + +interface ProjectTokenAccessSelector { + token: string; + organization: string; + project: string; + scope: ProjectAccessScope; +} + +export enum ProjectAccessScope { + /** + * Read project data (targets, etc.) 
+ */ + READ = 'project:read', + /** + * Who can delete the project + */ + DELETE = 'project:delete', + /** + * Who can modify projects's name + */ + SETTINGS = 'project:settings', + /** + * Who can manage alerts + */ + ALERTS = 'project:alerts', + /** + * Who can read Operations Store + */ + OPERATIONS_STORE_READ = 'project:operations-store:read', + /** + * Who can write to Operations Store + */ + OPERATIONS_STORE_WRITE = 'project:operations-store:write', +} + +const projectAccessScopeValues = Object.values(ProjectAccessScope); + +function isProjectScope(scope: any): scope is ProjectAccessScope { + return projectAccessScopeValues.includes(scope); +} + +@Injectable({ + scope: Scope.Operation, +}) +export class ProjectAccess { + private logger: Logger; + private userAccess: Dataloader; + private tokenAccess: Dataloader; + private scopes: Dataloader< + ProjectUserScopesSelector, + readonly ProjectAccessScope[], + string + >; + + constructor(logger: Logger, private organizationAccess: OrganizationAccess) { + this.logger = logger.child({ + source: 'ProjectAccess', + }); + this.userAccess = new Dataloader( + async (selectors) => { + const scopes = await this.scopes.loadMany(selectors); + + return selectors.map((selector, i) => { + const scopesForSelector = scopes[i]; + + if (scopesForSelector instanceof Error) { + this.logger.warn( + `ProjectAccess:user (error=%s, selector=%o)`, + scopesForSelector.message, + selector + ); + return false; + } + + return scopesForSelector.includes(selector.scope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'ProjectAccess:user', + organization: selector.organization, + project: selector.project, + user: selector.user, + scope: selector.scope, + }); + }, + } + ); + this.tokenAccess = new Dataloader( + (selectors) => + Promise.all( + selectors.map(async (selector) => { + const tokenInfo = await this.organizationAccess.tokenInfo.load( + selector + ); + + if ( + tokenInfo?.organization === selector.organization 
&& + tokenInfo?.project === selector.project + ) { + return tokenInfo.scopes.includes(selector.scope); + } + + return false; + }) + ), + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'ProjectAccess:token', + organization: selector.organization, + project: selector.project, + token: selector.token, + scope: selector.scope, + }); + }, + } + ); + this.scopes = new Dataloader( + async (selectors) => { + const scopesPerSelector = await this.organizationAccess.getAllScopes( + selectors + ); + + return selectors.map((selector, i) => { + const scopes = scopesPerSelector[i]; + + if (scopes instanceof Error) { + this.logger.debug( + `ProjectAccess:scopes (error=%s, selector=%o)`, + scopes.message, + selector + ); + return []; + } + + return scopes.filter(isProjectScope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'ProjectAccess:scopes', + organization: selector.organization, + user: selector.user, + }); + }, + } + ); + } + + async ensureAccessForToken( + selector: ProjectTokenAccessSelector + ): Promise { + const canAccess = await this.tokenAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing ${selector.scope} permission`); + } + } + + async ensureAccessForUser( + selector: ProjectUserAccessSelector + ): Promise { + const canAccess = await this.userAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing ${selector.scope} permission`); + } + } + + async getMemberScopes(selector: ProjectUserScopesSelector) { + return this.scopes.load(selector); + } + + resetAccessCache() { + this.userAccess.clearAll(); + this.tokenAccess.clearAll(); + this.scopes.clearAll(); + } +} diff --git a/packages/services/api/src/modules/auth/providers/target-access.ts b/packages/services/api/src/modules/auth/providers/target-access.ts new file mode 100644 index 000000000..4ec6183f9 --- /dev/null +++ b/packages/services/api/src/modules/auth/providers/target-access.ts @@ -0,0 +1,209 @@ +import { Injectable, Scope 
} from 'graphql-modules'; +import Dataloader from 'dataloader'; +import { Logger } from '../../shared/providers/logger'; +import { AccessError } from '../../../shared/errors'; +import { OrganizationAccess } from './organization-access'; + +export interface TargetUserAccessSelector { + user: string; + organization: string; + project: string; + target: string; + scope: TargetAccessScope; +} + +export interface TargetUserScopesSelector { + user: string; + organization: string; +} + +interface TargetTokenAccessSelector { + token: string; + organization: string; + project: string; + target: string; + scope: TargetAccessScope; +} + +export enum TargetAccessScope { + /** + * Read target data + */ + READ = 'target:read', + /** + * Who can delete the target + */ + DELETE = 'target:delete', + /** + * Who can modify targets's name etc + */ + SETTINGS = 'target:settings', + /** + * Who can read registry + */ + REGISTRY_READ = 'target:registry:read', + /** + * Who can manage registry + */ + REGISTRY_WRITE = 'target:registry:write', + /** + * Who can read tokens + */ + TOKENS_READ = 'target:tokens:read', + /** + * Who can manage tokens + */ + TOKENS_WRITE = 'target:tokens:write', +} + +const targetAccessScopeValues = Object.values(TargetAccessScope); + +function isTargetScope(scope: any): scope is TargetAccessScope { + return targetAccessScopeValues.includes(scope); +} + +@Injectable({ + scope: Scope.Operation, +}) +export class TargetAccess { + private logger: Logger; + private userAccess: Dataloader; + private tokenAccess: Dataloader; + private scopes: Dataloader< + TargetUserScopesSelector, + readonly TargetAccessScope[], + string + >; + + constructor(logger: Logger, private organizationAccess: OrganizationAccess) { + this.logger = logger.child({ + source: 'TargetAccess', + }); + this.userAccess = new Dataloader( + async (selectors) => { + const scopes = await this.scopes.loadMany(selectors); + + return selectors.map((selector, i) => { + const scopesForSelector = scopes[i]; + 
+ if (scopesForSelector instanceof Error) { + this.logger.warn( + `TargetAccess:user (error=%s, selector=%o)`, + scopesForSelector.message, + selector + ); + return false; + } + + return scopesForSelector.includes(selector.scope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'TargetAccess:user', + organization: selector.organization, + project: selector.project, + target: selector.target, + user: selector.user, + scope: selector.scope, + }); + }, + } + ); + this.tokenAccess = new Dataloader( + (selectors) => + Promise.all( + selectors.map(async (selector) => { + const tokenInfo = await this.organizationAccess.tokenInfo.load( + selector + ); + + if ( + tokenInfo?.organization === selector.organization && + tokenInfo?.project === selector.project && + tokenInfo?.target === selector.target + ) { + return tokenInfo.scopes.includes(selector.scope); + } + + return false; + }) + ), + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'TargetAccess:token', + organization: selector.organization, + project: selector.project, + target: selector.target, + token: selector.token, + scope: selector.scope, + }); + }, + } + ); + + this.scopes = new Dataloader( + async (selectors) => { + const scopesPerSelector = await this.organizationAccess.getAllScopes( + selectors + ); + + return selectors.map((selector, i) => { + const scopes = scopesPerSelector[i]; + + if (scopes instanceof Error) { + this.logger.warn( + `TargetAccess:scopes (error=%s, selector=%o)`, + scopes.message, + selector + ); + return []; + } + + return scopes.filter(isTargetScope); + }); + }, + { + cacheKeyFn(selector) { + return JSON.stringify({ + type: 'TargetAccess:scopes', + organization: selector.organization, + user: selector.user, + }); + }, + } + ); + } + + async ensureAccessForToken( + selector: TargetTokenAccessSelector + ): Promise { + const canAccess = await this.tokenAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing ${selector.scope} 
permission`); + } + } + + async ensureAccessForUser( + selector: TargetUserAccessSelector + ): Promise { + const canAccess = await this.userAccess.load(selector); + + if (!canAccess) { + throw new AccessError(`Missing ${selector.scope} permission`); + } + } + + async getMemberScopes(selector: TargetUserScopesSelector) { + return this.scopes.load(selector); + } + + resetAccessCache() { + this.userAccess.clearAll(); + this.tokenAccess.clearAll(); + this.scopes.clearAll(); + } +} diff --git a/packages/services/api/src/modules/auth/providers/tokens.ts b/packages/services/api/src/modules/auth/providers/tokens.ts new file mode 100644 index 000000000..a7ef66057 --- /dev/null +++ b/packages/services/api/src/modules/auth/providers/tokens.ts @@ -0,0 +1,29 @@ +import { + InjectionToken, + FactoryProvider, + Scope, + CONTEXT, +} from 'graphql-modules'; + +export const ApiToken = new InjectionToken('x-api-token'); +export const ApiTokenProvider: FactoryProvider = { + provide: ApiToken, + useFactory(context: { headers: Record }) { + let token: string | undefined; + + for (const headerName in context.headers) { + if (headerName.toLowerCase() === 'x-api-token') { + const values = context.headers[headerName]; + const singleValue = Array.isArray(values) ? values[0] : values; + + if (singleValue && singleValue !== '') { + token = singleValue; + } + } + } + + return token; + }, + deps: [CONTEXT], + scope: Scope.Operation, +}; diff --git a/packages/services/api/src/modules/auth/providers/user-manager.ts b/packages/services/api/src/modules/auth/providers/user-manager.ts new file mode 100644 index 000000000..e51b25d16 --- /dev/null +++ b/packages/services/api/src/modules/auth/providers/user-manager.ts @@ -0,0 +1,32 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { Logger } from '../../shared/providers/logger'; +import { Storage } from '../../shared/providers/storage'; + +/** + * Responsible for auth checks. + * Talks to Storage. 
+ */ +@Injectable({ + scope: Scope.Operation, +}) +export class UserManager { + private logger: Logger; + + constructor(logger: Logger, private storage: Storage) { + this.logger = logger.child({ + source: 'UserManager', + }); + } + + async createUser(input: { external: string; email: string }) { + this.logger.info('Creating new user (input=%o)', input); + const user = await this.storage.createUser(input); + + return user; + } + + updateUser(input: { displayName: string; fullName: string; id: string }) { + this.logger.info('Updating user (input=%o)', input); + return this.storage.updateUser(input); + } +} diff --git a/packages/services/api/src/modules/auth/resolvers.ts b/packages/services/api/src/modules/auth/resolvers.ts new file mode 100644 index 000000000..893a5b8a8 --- /dev/null +++ b/packages/services/api/src/modules/auth/resolvers.ts @@ -0,0 +1,73 @@ +import { AuthModule } from './__generated__/types'; +import { AuthManager } from './providers/auth-manager'; +import { createConnection } from '../../shared/schema'; +import { OrganizationAccessScope } from './providers/organization-access'; +import { ProjectAccessScope } from './providers/project-access'; +import { TargetAccessScope } from './providers/target-access'; + +export const resolvers: AuthModule.Resolvers & { + OrganizationAccessScope: { + [K in AuthModule.OrganizationAccessScope]: OrganizationAccessScope; + }; + ProjectAccessScope: { + [K in AuthModule.ProjectAccessScope]: ProjectAccessScope; + }; + TargetAccessScope: { + [K in AuthModule.TargetAccessScope]: TargetAccessScope; + }; +} = { + Query: { + me: (_, __, { injector }) => injector.get(AuthManager).getCurrentUser(), + }, + Mutation: { + updateMe(_, { input }, { injector }) { + return injector.get(AuthManager).updateCurrentUser(input); + }, + }, + OrganizationAccessScope: { + READ: OrganizationAccessScope.READ, + DELETE: OrganizationAccessScope.DELETE, + MEMBERS: OrganizationAccessScope.MEMBERS, + SETTINGS: OrganizationAccessScope.SETTINGS, + 
INTEGRATIONS: OrganizationAccessScope.INTEGRATIONS, + }, + ProjectAccessScope: { + READ: ProjectAccessScope.READ, + DELETE: ProjectAccessScope.DELETE, + ALERTS: ProjectAccessScope.ALERTS, + SETTINGS: ProjectAccessScope.SETTINGS, + OPERATIONS_STORE_READ: ProjectAccessScope.OPERATIONS_STORE_READ, + OPERATIONS_STORE_WRITE: ProjectAccessScope.OPERATIONS_STORE_WRITE, + }, + TargetAccessScope: { + READ: TargetAccessScope.READ, + REGISTRY_READ: TargetAccessScope.REGISTRY_READ, + REGISTRY_WRITE: TargetAccessScope.REGISTRY_WRITE, + DELETE: TargetAccessScope.DELETE, + SETTINGS: TargetAccessScope.SETTINGS, + TOKENS_READ: TargetAccessScope.TOKENS_READ, + TOKENS_WRITE: TargetAccessScope.TOKENS_WRITE, + }, + Member: { + organizationAccessScopes(member, _, { injector }) { + return injector.get(AuthManager).getMemberOrganizationScopes({ + user: member.id, + organization: member.organization, + }); + }, + projectAccessScopes(member, _, { injector }) { + return injector.get(AuthManager).getMemberProjectScopes({ + user: member.id, + organization: member.organization, + }); + }, + targetAccessScopes(member, _, { injector }) { + return injector.get(AuthManager).getMemberTargetScopes({ + user: member.id, + organization: member.organization, + }); + }, + }, + UserConnection: createConnection(), + MemberConnection: createConnection(), +}; diff --git a/packages/services/api/src/modules/billing/index.ts b/packages/services/api/src/modules/billing/index.ts new file mode 100644 index 000000000..a80c82e50 --- /dev/null +++ b/packages/services/api/src/modules/billing/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { BillingProvider } from './providers/billing.provider'; + +export const billingModule = createModule({ + id: 'billing', + dirname: __dirname, + typeDefs, + resolvers, + providers: [BillingProvider], +}); diff --git 
a/packages/services/api/src/modules/billing/module.graphql.ts b/packages/services/api/src/modules/billing/module.graphql.ts new file mode 100644 index 000000000..24a20dcb1 --- /dev/null +++ b/packages/services/api/src/modules/billing/module.graphql.ts @@ -0,0 +1,97 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Organization { + plan: BillingPlanType! + billingConfiguration: BillingConfiguration! + } + + type BillingConfiguration { + hasActiveSubscription: Boolean! + paymentMethod: BillingPaymentMethod + billingAddress: BillingDetails + invoices: [BillingInvoice!] + upcomingInvoice: BillingInvoice + } + + type BillingInvoice { + id: ID! + amount: Float! + date: DateTime! + periodStart: DateTime! + periodEnd: DateTime! + pdfLink: String + } + + type BillingPaymentMethod { + brand: String! + last4: String! + expMonth: Int! + expYear: Int! + } + + type BillingDetails { + city: String + country: String + line1: String + line2: String + postalCode: Int + state: String + } + + extend type Query { + billingPlans: [BillingPlan!]! + } + + type BillingPlan { + id: ID! + planType: BillingPlanType! + name: String! + description: String + basePrice: Float + includedOperationsLimit: SafeInt + includedSchemaPushLimit: SafeInt + pricePerOperationsUnit: Float + pricePerSchemaPushUnit: Float + rateLimit: UsageRateLimitType! + retentionInDays: Int! + } + + enum UsageRateLimitType { + MONTHLY_QUOTA + MONTHLY_LIMITED + UNLIMITED + } + + enum BillingPlanType { + HOBBY + PRO + ENTERPRISE + } + + extend type Mutation { + upgradeToPro(input: UpgradeToProInput!): ChangePlanResult! + downgradeToHobby(input: DowngradeToHobbyInput!): ChangePlanResult! + updateOrgRateLimit( + selector: OrganizationSelectorInput! + monthlyLimits: RateLimitInput! + ): Organization! + } + + input DowngradeToHobbyInput { + organization: OrganizationSelectorInput! + } + + input UpgradeToProInput { + organization: OrganizationSelectorInput! 
+ paymentMethodId: String + couponCode: String + monthlyLimits: RateLimitInput! + } + + type ChangePlanResult { + previousPlan: BillingPlanType! + newPlan: BillingPlanType! + organization: Organization! + } +`; diff --git a/packages/services/api/src/modules/billing/providers/billing.provider.ts b/packages/services/api/src/modules/billing/providers/billing.provider.ts new file mode 100644 index 000000000..1941a7607 --- /dev/null +++ b/packages/services/api/src/modules/billing/providers/billing.provider.ts @@ -0,0 +1,121 @@ +import { Inject, Injectable, Scope } from 'graphql-modules'; +import { Logger } from '../../shared/providers/logger'; +import { BILLING_CONFIG } from './tokens'; +import type { BillingConfig } from './tokens'; +import type { + StripeBillingApi, + StripeBillingMutationInput, + StripeBillingQueryInput, +} from '@hive/stripe-billing'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; +import { OrganizationSelector } from '../../../__generated__/types'; +import { OrganizationBilling } from '../../../shared/entities'; +import { Storage } from '../../shared/providers/storage'; + +@Injectable({ + global: true, + scope: Scope.Singleton, +}) +export class BillingProvider { + private logger: Logger; + private billingService; + + enabled = false; + + constructor( + logger: Logger, + private storage: Storage, + @Inject(BILLING_CONFIG) billingConfig: BillingConfig + ) { + this.logger = logger.child({ source: 'BillingProvider' }); + this.billingService = billingConfig.endpoint + ? 
createTRPCClient({ + url: `${billingConfig.endpoint}/trpc`, + fetch, + }) + : null; + + if (billingConfig.endpoint) { + this.enabled = true; + } + } + + upgradeToPro( + input: StripeBillingMutationInput<'createSubscriptionForOrganization'> + ) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } + + return this.billingService.mutation( + 'createSubscriptionForOrganization', + input + ); + } + + syncOrganization( + input: StripeBillingMutationInput<'syncOrganizationToStripe'> + ) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } + + return this.billingService.mutation('syncOrganizationToStripe', input); + } + + async getAvailablePrices() { + if (!this.billingService) { + return null; + } + + return await this.billingService.query('availablePrices'); + } + + async getOrganizationBillingParticipant( + selector: OrganizationSelector + ): Promise { + this.logger.debug('Fetching organization billing (selector=%o)', selector); + + return this.storage.getOrganizationBilling({ + organization: selector.organization, + }); + } + + getActiveSubscription(input: StripeBillingQueryInput<'activeSubscription'>) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } + + return this.billingService.query('activeSubscription', input); + } + + invoices(input: StripeBillingQueryInput<'invoices'>) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } + + return this.billingService.query('invoices', input); + } + + upcomingInvoice(input: StripeBillingQueryInput<'upcomingInvoice'>) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } + + return this.billingService.query('upcomingInvoice', input); + } + + async downgradeToHobby( + input: StripeBillingMutationInput<'cancelSubscriptionForOrganization'> + ) { + if (!this.billingService) { + throw new Error(`Billing service is not configured!`); + } 
+ + return await this.billingService.mutation( + 'cancelSubscriptionForOrganization', + input + ); + } +} diff --git a/packages/services/api/src/modules/billing/providers/tokens.ts b/packages/services/api/src/modules/billing/providers/tokens.ts new file mode 100644 index 000000000..df3d295ae --- /dev/null +++ b/packages/services/api/src/modules/billing/providers/tokens.ts @@ -0,0 +1,9 @@ +import { InjectionToken } from 'graphql-modules'; + +export type BillingConfig = { + endpoint: string | null; +}; + +export const BILLING_CONFIG = new InjectionToken( + 'billing-config' +); diff --git a/packages/services/api/src/modules/billing/resolvers.ts b/packages/services/api/src/modules/billing/resolvers.ts new file mode 100644 index 000000000..276a60493 --- /dev/null +++ b/packages/services/api/src/modules/billing/resolvers.ts @@ -0,0 +1,295 @@ +import { EnvelopError } from '@graphql-yoga/common'; +import { BillingPlanType } from '../../__generated__/types'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { OrganizationAccessScope } from '../auth/providers/organization-access'; +import { OrganizationManager } from '../organization/providers/organization-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { BillingProvider } from './providers/billing.provider'; +import { BillingModule } from './__generated__/types'; + +const USAGE_DEFAULT_LIMITATIONS: Record< + 'HOBBY' | 'PRO' | 'ENTERPRISE', + { operations: number; schemaPushes: number; retention: number } +> = { + HOBBY: { + operations: 1_000_000, + schemaPushes: 50, + retention: 3, + }, + PRO: { + operations: 5_000_000, + schemaPushes: 500, + retention: 180, + }, + ENTERPRISE: { + operations: 0, // unlimited + schemaPushes: 0, // unlimited + retention: 360, + }, +}; + +export const resolvers: BillingModule.Resolvers = { + BillingInvoice: { + id: (i) => i.id || 'upcoming', + amount: (i) => parseFloat((i.total / 100).toFixed(2)), + pdfLink: (i) => i.invoice_pdf || 
null, + date: (i) => new Date(i.created * 1000).toISOString(), + periodStart: (i) => new Date(i.period_start * 1000).toISOString(), + periodEnd: (i) => new Date(i.period_end * 1000).toISOString(), + }, + Organization: { + plan: (org) => (org.billingPlan || 'HOBBY') as BillingPlanType, + billingConfiguration: async (org, _args, { injector }) => { + const billingRecord = await injector + .get(BillingProvider) + .getOrganizationBillingParticipant({ organization: org.id }); + + if (!billingRecord) { + return { + hasActiveSubscription: false, + paymentMethod: null, + billingAddress: null, + invoices: null, + upcomingInvoice: null, + }; + } + + const subscriptionInfo = await injector + .get(BillingProvider) + .getActiveSubscription({ + organizationId: billingRecord.organizationId, + }); + + if (!subscriptionInfo) { + return { + hasActiveSubscription: false, + paymentMethod: null, + billingAddress: null, + invoices: null, + upcomingInvoice: null, + }; + } + + const [invoices, upcomingInvoice] = await Promise.all([ + injector.get(BillingProvider).invoices({ + organizationId: billingRecord.organizationId, + }), + injector.get(BillingProvider).upcomingInvoice({ + organizationId: billingRecord.organizationId, + }), + ]); + + return { + hasActiveSubscription: subscriptionInfo.subscription !== null, + paymentMethod: subscriptionInfo.paymentMethod?.card || null, + billingAddress: subscriptionInfo.paymentMethod?.billing_details || null, + invoices, + upcomingInvoice, + }; + }, + }, + BillingPaymentMethod: { + brand: (bpm) => bpm.brand, + last4: (bpm) => bpm.last4, + expMonth: (bpm) => bpm.exp_month, + expYear: (bpm) => bpm.exp_year, + }, + BillingDetails: { + city: (bd) => bd.address?.city || null, + country: (bd) => bd.address?.country || null, + line1: (bd) => bd.address?.line1 || null, + line2: (bd) => bd.address?.line2 || null, + postalCode: (bd) => + bd.address?.postal_code ? 
parseInt(bd.address?.postal_code) : null, + state: (bd) => bd.address?.state || null, + }, + Query: { + billingPlans: async (root, args, { injector }) => { + const availablePrices = await injector + .get(BillingProvider) + .getAvailablePrices(); + + if (!availablePrices) { + return []; + } + + return [ + { + id: 'HOBBY', + planType: 'HOBBY', + basePrice: 0, + name: 'Hobby', + description: + 'Free for non-commercial use, startups, side-projects and just experiments.', + includedOperationsLimit: USAGE_DEFAULT_LIMITATIONS.HOBBY.operations, + includedSchemaPushLimit: USAGE_DEFAULT_LIMITATIONS.HOBBY.schemaPushes, + rateLimit: 'MONTHLY_LIMITED', + pricePerOperationsUnit: 0, + pricePerSchemaPushUnit: 0, + retentionInDays: USAGE_DEFAULT_LIMITATIONS.HOBBY.retention, + }, + { + id: 'PRO', + planType: 'PRO', + basePrice: availablePrices.basePrice.unit_amount! / 100, + name: 'Pro', + description: + 'For production-ready applications that requires long retention, high ingestion capacity and unlimited access to all Hive features.', + includedOperationsLimit: USAGE_DEFAULT_LIMITATIONS.PRO.operations, + includedSchemaPushLimit: USAGE_DEFAULT_LIMITATIONS.PRO.schemaPushes, + pricePerOperationsUnit: + availablePrices.operationsPrice.tiers![1].unit_amount! / 100, + pricePerSchemaPushUnit: + availablePrices.schemaPushesPrice.tiers![1].unit_amount! 
/ 100, + retentionInDays: USAGE_DEFAULT_LIMITATIONS.PRO.retention, + rateLimit: 'MONTHLY_QUOTA', + }, + { + id: 'ENTERPRISE', + planType: 'ENTERPRISE', + name: 'Enterprise', + description: + 'For enterprise and organization that requires custom setup and custn data ingestion rates.', + includedOperationsLimit: + USAGE_DEFAULT_LIMITATIONS.ENTERPRISE.operations, + includedSchemaPushLimit: + USAGE_DEFAULT_LIMITATIONS.ENTERPRISE.schemaPushes, + retentionInDays: USAGE_DEFAULT_LIMITATIONS.ENTERPRISE.retention, + rateLimit: 'UNLIMITED', + }, + ]; + }, + }, + Mutation: { + updateOrgRateLimit: async (_, args, { injector }) => { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId({ + organization: args.selector.organization, + }); + + return injector.get(OrganizationManager).updateRateLimits({ + organization: organizationId, + monthlyRateLimit: { + retentionInDays: USAGE_DEFAULT_LIMITATIONS.PRO.retention, + operations: args.monthlyLimits.operations, + schemaPush: args.monthlyLimits.schemaPushes, + }, + }); + }, + downgradeToHobby: async (_, args, { injector }) => { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId({ + organization: args.input.organization.organization, + }); + await injector.get(AuthManager).ensureOrganizationAccess({ + organization: organizationId, + scope: OrganizationAccessScope.SETTINGS, + }); + + let organization = await injector + .get(OrganizationManager) + .getOrganization({ + organization: organizationId, + }); + + if (organization.billingPlan === 'PRO') { + // Configure user to use Stripe payments, create billing participant record for the org + await injector.get(BillingProvider).downgradeToHobby({ + organizationId, + }); + + // Upgrade the actual org plan to HOBBY + organization = await injector + .get(OrganizationManager) + .updatePlan({ plan: 'HOBBY', organization: organizationId }); + + // Upgrade the limits + organization = await injector + .get(OrganizationManager) + 
.updateRateLimits({ + organization: organizationId, + monthlyRateLimit: { + retentionInDays: USAGE_DEFAULT_LIMITATIONS.HOBBY.retention, + operations: USAGE_DEFAULT_LIMITATIONS.HOBBY.operations, + schemaPush: USAGE_DEFAULT_LIMITATIONS.HOBBY.schemaPushes, + }, + }); + + return { + previousPlan: 'PRO', + newPlan: 'HOBBY', + organization, + }; + } else { + throw new EnvelopError( + `Unable to downgrade from Pro from your current plan` + ); + } + }, + upgradeToPro: async (root, args, { injector }) => { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId({ + organization: args.input.organization.organization, + }); + await injector.get(AuthManager).ensureOrganizationAccess({ + organization: organizationId, + scope: OrganizationAccessScope.SETTINGS, + }); + + let organization = await injector + .get(OrganizationManager) + .getOrganization({ + organization: organizationId, + }); + + if (organization.billingPlan === 'HOBBY') { + // Configure user to use Stripe payments, create billing participant record for the org + await injector.get(BillingProvider).upgradeToPro({ + organizationId, + couponCode: args.input.couponCode, + paymentMethodId: args.input.paymentMethodId, + reserved: { + operations: Math.floor( + args.input.monthlyLimits.operations / 1_000_000 + ), + schemaPushes: args.input.monthlyLimits.schemaPushes, + }, + }); + + // Upgrade the actual org plan to PRO + organization = await injector + .get(OrganizationManager) + .updatePlan({ plan: 'PRO', organization: organizationId }); + + // Upgrade the limits + organization = await injector + .get(OrganizationManager) + .updateRateLimits({ + organization: organizationId, + monthlyRateLimit: { + retentionInDays: USAGE_DEFAULT_LIMITATIONS.PRO.retention, + operations: + args.input.monthlyLimits.operations || + USAGE_DEFAULT_LIMITATIONS.PRO.operations, + schemaPush: + args.input.monthlyLimits.schemaPushes || + USAGE_DEFAULT_LIMITATIONS.PRO.schemaPushes, + }, + }); + + return { + 
previousPlan: 'HOBBY', + newPlan: 'PRO', + organization, + }; + } else { + throw new EnvelopError( + `Unable to upgrade to Pro from your current plan` + ); + } + }, + }, +}; diff --git a/packages/services/api/src/modules/cdn/index.ts b/packages/services/api/src/modules/cdn/index.ts new file mode 100644 index 000000000..76aa379fd --- /dev/null +++ b/packages/services/api/src/modules/cdn/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { CdnProvider } from './providers/cdn.provider'; + +export const cdnModule = createModule({ + id: 'cdn', + dirname: __dirname, + typeDefs, + resolvers, + providers: [CdnProvider], +}); diff --git a/packages/services/api/src/modules/cdn/module.graphql.ts b/packages/services/api/src/modules/cdn/module.graphql.ts new file mode 100644 index 000000000..7098b28af --- /dev/null +++ b/packages/services/api/src/modules/cdn/module.graphql.ts @@ -0,0 +1,12 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + createCdnToken(selector: TargetSelectorInput!): CdnTokenResult! + } + + type CdnTokenResult { + token: String! + url: String! 
+ } +`; diff --git a/packages/services/api/src/modules/cdn/providers/cdn.provider.ts b/packages/services/api/src/modules/cdn/providers/cdn.provider.ts new file mode 100644 index 000000000..980f405fd --- /dev/null +++ b/packages/services/api/src/modules/cdn/providers/cdn.provider.ts @@ -0,0 +1,106 @@ +import { Injectable, Inject, Scope } from 'graphql-modules'; +import { createHmac } from 'crypto'; +import type { Span } from '@sentry/types'; +import { HiveError } from '../../../shared/errors'; +import { HttpClient } from '../../shared/providers/http-client'; +import { Logger } from '../../shared/providers/logger'; +import { sentry } from '../../../shared/sentry'; +import { CDN_CONFIG } from './tokens'; +import type { CDNConfig } from './tokens'; + +type CdnResourceType = 'schema' | 'supergraph' | 'metadata'; + +@Injectable({ + scope: Scope.Singleton, + global: true, +}) +export class CdnProvider { + private logger: Logger; + private encoder: TextEncoder; + private secretKeyData: Uint8Array; + + constructor( + logger: Logger, + private httpClient: HttpClient, + @Inject(CDN_CONFIG) private config: CDNConfig + ) { + this.logger = logger.child({ source: 'CdnProvider' }); + this.encoder = new TextEncoder(); + this.secretKeyData = this.encoder.encode(this.config.authPrivateKey); + } + + getCdnUrlForTarget(targetId: string): string { + return `${this.config.baseUrl}/${targetId}`; + } + + generateToken(targetId: string): string { + return createHmac('sha256', this.secretKeyData) + .update(this.encoder.encode(targetId)) + .digest('base64'); + } + + pushToCDN( + url: string, + body: string, + span?: Span + ): Promise<{ success: boolean }> { + return this.httpClient.put<{ success: boolean }>( + url, + { + headers: { + 'content-type': 'text/plain', + authorization: `Bearer ${this.config.cloudflare.authToken}`, + }, + body, + responseType: 'json', + retry: { + limit: 3, + }, + timeout: { + request: 10_000, + }, + }, + span + ); + } + + @sentry('CdnProvider.publish') + async 
publish( + { + targetId, + resourceType, + value, + }: { + targetId: string; + resourceType: CdnResourceType; + value: string; + }, + span?: Span + ): Promise { + const target = `target:${targetId}`; + this.logger.info( + `Publishing data to CDN based on target: "${target}", resourceType is: ${resourceType} ...` + ); + const CDN_SOURCE = `${this.config.cloudflare.basePath}/${this.config.cloudflare.accountId}/storage/kv/namespaces/${this.config.cloudflare.namespaceId}/values/${target}`; + + this.logger.info(`Data published to CDN: ${value}`); + const result = await this.pushToCDN( + `${CDN_SOURCE}:${resourceType}`, + value, + span + ); + + if (!result.success) { + return Promise.reject( + new HiveError( + `Failed to publish to CDN, response: ${JSON.stringify(result)}` + ) + ); + } + + this.logger.info( + `Published to CDN based on target: "${target}", resourceType is: ${resourceType} is done, response: %o`, + result + ); + } +} diff --git a/packages/services/api/src/modules/cdn/providers/tokens.ts b/packages/services/api/src/modules/cdn/providers/tokens.ts new file mode 100644 index 000000000..ddb7d1761 --- /dev/null +++ b/packages/services/api/src/modules/cdn/providers/tokens.ts @@ -0,0 +1,14 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface CDNConfig { + cloudflare: { + basePath: string; + accountId: string; + authToken: string; + namespaceId: string; + }; + baseUrl: string; + authPrivateKey: string; +} + +export const CDN_CONFIG = new InjectionToken('cdn-config'); diff --git a/packages/services/api/src/modules/cdn/resolvers.ts b/packages/services/api/src/modules/cdn/resolvers.ts new file mode 100644 index 000000000..44755e1fd --- /dev/null +++ b/packages/services/api/src/modules/cdn/resolvers.ts @@ -0,0 +1,31 @@ +import { CdnModule } from './__generated__/types'; +import { CdnProvider } from './providers/cdn.provider'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { AuthManager } from 
'../auth/providers/auth-manager'; +import { TargetAccessScope } from '../auth/providers/target-access'; + +export const resolvers: CdnModule.Resolvers = { + Mutation: { + createCdnToken: async (_, { selector }, { injector }) => { + const translator = injector.get(IdTranslator); + const cdn = injector.get(CdnProvider); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + await injector.get(AuthManager).ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return { + token: cdn.generateToken(target), + url: cdn.getCdnUrlForTarget(target), + }; + }, + }, +}; diff --git a/packages/services/api/src/modules/feedback/index.ts b/packages/services/api/src/modules/feedback/index.ts new file mode 100644 index 000000000..a977b6fd5 --- /dev/null +++ b/packages/services/api/src/modules/feedback/index.ts @@ -0,0 +1,10 @@ +import { createModule } from 'graphql-modules'; +import typeDefs from './module.graphql'; +import { resolvers } from './resolvers'; + +export const feedbackModule = createModule({ + id: 'feedback', + dirname: __dirname, + typeDefs, + resolvers, +}); diff --git a/packages/services/api/src/modules/feedback/module.graphql.ts b/packages/services/api/src/modules/feedback/module.graphql.ts new file mode 100644 index 000000000..cdddd9eb9 --- /dev/null +++ b/packages/services/api/src/modules/feedback/module.graphql.ts @@ -0,0 +1,7 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + sendFeedback(feedback: String!): Boolean! 
+ } +`; diff --git a/packages/services/api/src/modules/feedback/providers/tokens.ts b/packages/services/api/src/modules/feedback/providers/tokens.ts new file mode 100644 index 000000000..35fb8a396 --- /dev/null +++ b/packages/services/api/src/modules/feedback/providers/tokens.ts @@ -0,0 +1,8 @@ +import { InjectionToken } from 'graphql-modules'; + +export const FEEDBACK_SLACK_TOKEN = new InjectionToken( + 'FEEDBACK_SLACK_TOKEN' +); +export const FEEDBACK_SLACK_CHANNEL = new InjectionToken( + 'FEEDBACK_SLACK_CHANNEL' +); diff --git a/packages/services/api/src/modules/feedback/resolvers.ts b/packages/services/api/src/modules/feedback/resolvers.ts new file mode 100644 index 000000000..1dee89404 --- /dev/null +++ b/packages/services/api/src/modules/feedback/resolvers.ts @@ -0,0 +1,48 @@ +import type { FeedbackModule } from './__generated__/types'; +import * as Sentry from '@sentry/node'; +import { WebClient } from '@slack/web-api'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { Tracking } from '../shared/providers/tracking'; +import { + FEEDBACK_SLACK_CHANNEL, + FEEDBACK_SLACK_TOKEN, +} from './providers/tokens'; + +export const resolvers: FeedbackModule.Resolvers = { + Mutation: { + async sendFeedback(_, { feedback }, { injector }) { + const auth = injector.get(AuthManager); + const tracking = injector.get(Tracking); + const user = await auth.getCurrentUser(); + const slack = new WebClient(injector.get(FEEDBACK_SLACK_TOKEN)); + + await Promise.all([ + tracking.track({ + event: 'FEEDBACK', + data: { + feedback, + }, + }), + slack.chat.postMessage({ + channel: injector.get(FEEDBACK_SLACK_CHANNEL), + mrkdwn: true, + text: [`Got a feedback from \`${user.email}\``, `> ${feedback}`].join( + '\n' + ), + }), + ]).catch((error) => { + console.log('Feedback.sendFeedback error', error); + Sentry.captureException(error, { + extra: { + feedback, + }, + user: { + email: user.email, + }, + }); + }); + + return true; + }, + }, +}; diff --git 
a/packages/services/api/src/modules/integrations/index.ts b/packages/services/api/src/modules/integrations/index.ts new file mode 100644 index 000000000..de1772dfe --- /dev/null +++ b/packages/services/api/src/modules/integrations/index.ts @@ -0,0 +1,13 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { SlackIntegrationManager } from './providers/slack-integration-manager'; +import { GitHubIntegrationManager } from './providers/github-integration-manager'; + +export const integrationsModule = createModule({ + id: 'integrations', + dirname: __dirname, + typeDefs, + resolvers, + providers: [SlackIntegrationManager, GitHubIntegrationManager], +}); diff --git a/packages/services/api/src/modules/integrations/module.graphql.ts b/packages/services/api/src/modules/integrations/module.graphql.ts new file mode 100644 index 000000000..d461b2795 --- /dev/null +++ b/packages/services/api/src/modules/integrations/module.graphql.ts @@ -0,0 +1,39 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + addSlackIntegration(input: AddSlackIntegrationInput!): Boolean! + deleteSlackIntegration(input: OrganizationSelectorInput!): Boolean! + addGitHubIntegration(input: AddGitHubIntegrationInput!): Boolean! + deleteGitHubIntegration(input: OrganizationSelectorInput!): Boolean! + } + + extend type Query { + hasSlackIntegration(selector: OrganizationSelectorInput!): Boolean! + hasGitHubIntegration(selector: OrganizationSelectorInput!): Boolean! + gitHubIntegration(selector: OrganizationSelectorInput!): GitHubIntegration! + organizationByGitHubInstallationId(installation: ID!): Organization + } + + input AddSlackIntegrationInput { + organization: ID! + token: String! + } + + input AddGitHubIntegrationInput { + organization: ID! + installationId: ID! + } + + type GitHubIntegration { + repositories: [GitHubRepository!]! 
+ } + + type GitHubRepository { + nameWithOwner: String! + } + + extend type Project { + gitRepository: String + } +`; diff --git a/packages/services/api/src/modules/integrations/providers/github-integration-manager.ts b/packages/services/api/src/modules/integrations/providers/github-integration-manager.ts new file mode 100644 index 000000000..813de7810 --- /dev/null +++ b/packages/services/api/src/modules/integrations/providers/github-integration-manager.ts @@ -0,0 +1,213 @@ +import { Injectable, Inject, InjectionToken, Scope } from 'graphql-modules'; +import type { IntegrationsModule } from '../__generated__/types'; +import { App } from '@octokit/app'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import { Logger } from '../../shared/providers/logger'; +import { Storage, OrganizationSelector } from '../../shared/providers/storage'; +import { Tracking } from '../../shared/providers/tracking'; + +export interface GitHubApplicationConfig { + appId: number; + privateKey: string; +} + +export const GITHUB_APP_CONFIG = new InjectionToken( + 'GitHubApplicationConfig' +); + +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class GitHubIntegrationManager { + private logger: Logger; + private app: App; + + constructor( + logger: Logger, + private authManager: AuthManager, + private storage: Storage, + private tracking: Tracking, + @Inject(GITHUB_APP_CONFIG) private config: GitHubApplicationConfig + ) { + this.logger = logger.child({ + source: 'GitHubIntegrationManager', + }); + this.app = new App({ + appId: this.config.appId, + privateKey: this.config.privateKey, + log: this.logger, + }); + } + + async register( + input: OrganizationSelector & { + installationId: string; + } + ): Promise { + this.logger.debug( + 'Registering GitHub integration (organization=%s)', + input.organization + ); + await this.authManager.ensureOrganizationAccess({ + 
...input, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + await this.tracking.track({ + event: 'ADDED_GITHUB_INTEGRATION', + data: { + organization: input.organization, + }, + }); + await this.storage.addGitHubIntegration({ + organization: input.organization, + installationId: input.installationId, + }); + } + + async unregister(input: OrganizationSelector): Promise { + this.logger.debug( + 'Removing GitHub integration (organization=%s)', + input.organization + ); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + await this.tracking.track({ + event: 'DELETED_GITHUB_INTEGRATION', + data: { + organization: input.organization, + }, + }); + await this.storage.deleteGitHubIntegration({ + organization: input.organization, + }); + } + + async isAvailable(selector: OrganizationSelector): Promise { + this.logger.debug( + 'Checking GitHub integration (organization=%s)', + selector.organization + ); + const installationId = await this.getInstallationId({ + organization: selector.organization, + }); + + return typeof installationId === 'string'; + } + + async getInstallationId( + selector: OrganizationSelector + ): Promise { + this.logger.debug( + 'Fetching GitHub integration token (organization=%s)', + selector.organization + ); + + return this.storage.getGitHubIntegrationInstallationId({ + organization: selector.organization, + }); + } + + async getRepositories( + selector: OrganizationSelector + ): Promise { + const installationId = await this.getInstallationId(selector); + this.logger.debug('Fetching repositories'); + + if (installationId) { + const octokit = await this.app.getInstallationOctokit( + parseInt(installationId, 10) + ); + + return octokit + .request('GET /installation/repositories') + .then((result) => + result.data.repositories.map((repo) => { + return { + nameWithOwner: repo.full_name, + }; + }) + ) + .catch((e) => { + this.logger.warn('Failed to fetch repositories', e); + 
this.logger.error(e); + return Promise.resolve([]); + }); + } + + return []; + } + + async getOrganization(selector: { installation: string }) { + const organization = + await this.storage.getOrganizationByGitHubInstallationId({ + installationId: selector.installation, + }); + + if (!organization) { + return null; + } + + await this.authManager.ensureOrganizationAccess({ + organization: organization.id, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + + return organization; + } + + async createCheckRun( + input: OrganizationSelector & { + repositoryName: string; + repositoryOwner: string; + name: string; + sha: string; + conclusion: 'success' | 'neutral' | 'failure'; + output?: { + /** The title of the check run. */ + title: string; + /** The summary of the check run. This parameter supports Markdown. */ + summary: string; + }; + } + ) { + this.logger.debug( + 'Creating check-run (owner=%s, name=%s, sha=%s)', + input.repositoryOwner, + input.repositoryName, + input.sha + ); + const installationId = await this.getInstallationId({ + organization: input.organization, + }); + + if (!installationId) { + throw new Error( + 'GitHub Integration not found. Please install our GraphQL Hive GitHub Application.' 
+ ); + } + + const octokit = await this.app.getInstallationOctokit( + parseInt(installationId, 10) + ); + + const result = await octokit.request( + 'POST /repos/{owner}/{repo}/check-runs', + { + owner: input.repositoryOwner, + repo: input.repositoryName, + name: input.name, + head_sha: input.sha, + conclusion: input.conclusion, + output: input.output, + } + ); + + this.logger.debug('Check-run created (link=%s)', result.data.url); + + return result.data.url; + } +} diff --git a/packages/services/api/src/modules/integrations/providers/integrations-access-context.ts b/packages/services/api/src/modules/integrations/providers/integrations-access-context.ts new file mode 100644 index 000000000..263803f0a --- /dev/null +++ b/packages/services/api/src/modules/integrations/providers/integrations-access-context.ts @@ -0,0 +1,5 @@ +export enum IntegrationsAccessContext { + Integrations, + ChannelConfirmation, + SchemaPublishing, +} diff --git a/packages/services/api/src/modules/integrations/providers/slack-integration-manager.ts b/packages/services/api/src/modules/integrations/providers/slack-integration-manager.ts new file mode 100644 index 000000000..4d8b5864f --- /dev/null +++ b/packages/services/api/src/modules/integrations/providers/slack-integration-manager.ts @@ -0,0 +1,180 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import { ProjectAccessScope } from '../../auth/providers/project-access'; +import { TargetAccessScope } from '../../auth/providers/target-access'; +import { Logger } from '../../shared/providers/logger'; +import { CryptoProvider } from '../../shared/providers/crypto'; +import { + Storage, + OrganizationSelector, + ProjectSelector, + TargetSelector, +} from '../../shared/providers/storage'; +import { Tracking } from '../../shared/providers/tracking'; +import { AccessError } from 
'../../../shared/errors'; +import { IntegrationsAccessContext } from './integrations-access-context'; + +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class SlackIntegrationManager { + private logger: Logger; + + constructor( + logger: Logger, + private authManager: AuthManager, + private storage: Storage, + private tracking: Tracking, + private crypto: CryptoProvider + ) { + this.logger = logger.child({ + source: 'SlackIntegrationManager', + }); + } + + async register( + input: OrganizationSelector & { + token: string; + } + ): Promise { + this.logger.debug( + 'Registering Slack integration (organization=%s)', + input.organization + ); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + await this.tracking.track({ + event: 'ADDED_SLACK_INTEGRATION', + data: { + organization: input.organization, + }, + }); + await this.storage.addSlackIntegration({ + organization: input.organization, + token: this.crypto.encrypt(input.token), + }); + } + + async unregister(input: OrganizationSelector): Promise { + this.logger.debug( + 'Removing Slack integration (organization=%s)', + input.organization + ); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + await this.tracking.track({ + event: 'DELETED_SLACK_INTEGRATION', + data: { + organization: input.organization, + }, + }); + await this.storage.deleteSlackIntegration({ + organization: input.organization, + }); + } + + async isAvailable(selector: OrganizationSelector): Promise { + this.logger.debug( + 'Checking Slack integration (organization=%s)', + selector.organization + ); + const token = await this.getToken({ + organization: selector.organization, + context: IntegrationsAccessContext.Integrations, + }); + + return typeof token === 'string'; + } + + async getToken( + selector: OrganizationSelector & { + context: IntegrationsAccessContext.Integrations; + } + ): Promise; + 
async getToken( + selector: ProjectSelector & { + context: IntegrationsAccessContext.ChannelConfirmation; + } + ): Promise; + async getToken( + selector: TargetSelector & { + context: IntegrationsAccessContext.SchemaPublishing; + } + ): Promise; + async getToken( + selector: + | (OrganizationSelector & { + context: IntegrationsAccessContext.Integrations; + }) + | (ProjectSelector & { + context: IntegrationsAccessContext.ChannelConfirmation; + }) + | (TargetSelector & { + context: IntegrationsAccessContext.SchemaPublishing; + }) + ): Promise { + switch (selector.context) { + case IntegrationsAccessContext.Integrations: { + this.logger.debug( + 'Fetching Slack integration token (organization=%s, context: %s)', + selector.organization, + selector.context + ); + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.INTEGRATIONS, + }); + break; + } + case IntegrationsAccessContext.ChannelConfirmation: { + this.logger.debug( + 'Fetching Slack integration token (organization=%s, project=%s, context: %s)', + selector.organization, + selector.project, + selector.context + ); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.ALERTS, + }); + break; + } + case IntegrationsAccessContext.SchemaPublishing: { + this.logger.debug( + 'Fetching Slack integration token (organization=%s, project=%s, target=%s context: %s)', + selector.organization, + selector.project, + selector.target, + selector.context + ); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + break; + } + default: { + throw new AccessError('wrong context'); + } + } + + let token = await this.storage.getSlackIntegrationToken({ + organization: selector.organization, + }); + + if (token) { + /** + * Token is possibly not encrypted, that's why we pass `true` as second argument. 
+ */ + token = this.crypto.decrypt(token, true); + } + + return token; + } +} diff --git a/packages/services/api/src/modules/integrations/resolvers.ts b/packages/services/api/src/modules/integrations/resolvers.ts new file mode 100644 index 000000000..5a9597117 --- /dev/null +++ b/packages/services/api/src/modules/integrations/resolvers.ts @@ -0,0 +1,93 @@ +import type { IntegrationsModule } from './__generated__/types'; +import { SlackIntegrationManager } from './providers/slack-integration-manager'; +import { GitHubIntegrationManager } from './providers/github-integration-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; + +export const resolvers: IntegrationsModule.Resolvers = { + Mutation: { + async addSlackIntegration(_, { input }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + await injector.get(SlackIntegrationManager).register({ + organization, + token: input.token, + }); + + return true; + }, + async deleteSlackIntegration(_, { input }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + await injector.get(SlackIntegrationManager).unregister({ + organization, + }); + + return true; + }, + async addGitHubIntegration(_, { input }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + await injector.get(GitHubIntegrationManager).register({ + organization, + installationId: input.installationId, + }); + + return true; + }, + async deleteGitHubIntegration(_, { input }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + await injector.get(GitHubIntegrationManager).unregister({ + organization, + }); + + return true; + }, + }, + Query: { + async hasSlackIntegration(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + 
.translateOrganizationId(selector); + + return injector.get(SlackIntegrationManager).isAvailable({ + organization, + }); + }, + async hasGitHubIntegration(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + + return injector.get(GitHubIntegrationManager).isAvailable({ + organization, + }); + }, + async gitHubIntegration(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + + return { + repositories: await injector + .get(GitHubIntegrationManager) + .getRepositories({ + organization, + }), + }; + }, + organizationByGitHubInstallationId(_, { installation }, { injector }) { + return injector.get(GitHubIntegrationManager).getOrganization({ + installation, + }); + }, + }, +}; diff --git a/packages/services/api/src/modules/lab/index.ts b/packages/services/api/src/modules/lab/index.ts new file mode 100644 index 000000000..5bfdb2044 --- /dev/null +++ b/packages/services/api/src/modules/lab/index.ts @@ -0,0 +1,11 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; + +export const labModule = createModule({ + id: 'lab', + dirname: __dirname, + typeDefs, + resolvers, + providers: [], +}); diff --git a/packages/services/api/src/modules/lab/module.graphql.ts b/packages/services/api/src/modules/lab/module.graphql.ts new file mode 100644 index 000000000..235e194cc --- /dev/null +++ b/packages/services/api/src/modules/lab/module.graphql.ts @@ -0,0 +1,12 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Query { + lab(selector: TargetSelectorInput!): Lab + } + + type Lab { + schema: String! 
+ mocks: JSON + } +`; diff --git a/packages/services/api/src/modules/lab/resolvers.ts b/packages/services/api/src/modules/lab/resolvers.ts new file mode 100644 index 000000000..3e2f3df77 --- /dev/null +++ b/packages/services/api/src/modules/lab/resolvers.ts @@ -0,0 +1,61 @@ +import type { LabModule } from './__generated__/types'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { SchemaManager } from '../schema/providers/schema-manager'; +import { ProjectManager } from '../project/providers/project-manager'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { createSchemaObject } from '../../shared/entities'; +import { TargetAccessScope } from '../auth/providers/target-access'; + +export const resolvers: LabModule.Resolvers = { + Query: { + async lab(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + await injector.get(AuthManager).ensureTargetAccess({ + organization: organization, + project: project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + const schemaManager = injector.get(SchemaManager); + + const latestSchema = await schemaManager.getMaybeLatestValidVersion({ + organization, + project, + target, + }); + + if (!latestSchema) { + return null; + } + + const [schemas, { type }] = await Promise.all([ + schemaManager.getSchemasOfVersion({ + organization, + project, + target, + version: latestSchema.id, + }), + injector.get(ProjectManager).getProject({ + organization, + project, + }), + ]); + + const orchestrator = schemaManager.matchOrchestrator(type); + + const schema = await orchestrator.build(schemas.map(createSchemaObject)); + + return { + schema: schema.raw, + mocks: {}, + }; + }, + }, +}; diff --git a/packages/services/api/src/modules/operations/index.ts 
b/packages/services/api/src/modules/operations/index.ts new file mode 100644 index 000000000..0e089e7bd --- /dev/null +++ b/packages/services/api/src/modules/operations/index.ts @@ -0,0 +1,14 @@ +import { createModule } from 'graphql-modules'; +import typeDefs from './module.graphql'; +import { resolvers } from './resolvers'; +import { OperationsManager } from './providers/operations-manager'; +import { OperationsReader } from './providers/operations-reader'; +import { ClickHouse } from './providers/clickhouse-client'; + +export const operationsModule = createModule({ + id: 'operations', + dirname: __dirname, + typeDefs, + resolvers, + providers: [OperationsManager, OperationsReader, ClickHouse], +}); diff --git a/packages/services/api/src/modules/operations/module.graphql.ts b/packages/services/api/src/modules/operations/module.graphql.ts new file mode 100644 index 000000000..381e34e37 --- /dev/null +++ b/packages/services/api/src/modules/operations/module.graphql.ts @@ -0,0 +1,143 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Query { + fieldStats(selector: FieldStatsInput!): FieldStats! + fieldListStats(selector: FieldListStatsInput!): [FieldStats!]! + operationsStats(selector: OperationsStatsSelectorInput!): OperationsStats! + hasCollectedOperations(selector: TargetSelectorInput!): Boolean! + } + + input OperationsStatsSelectorInput { + organization: ID! + project: ID! + target: ID! + period: DateRangeInput! + operations: [ID!] + } + + input DateRangeInput { + from: DateTime! + to: DateTime! + } + + type DateRange { + from: DateTime! + to: DateTime! + } + + input FieldStatsInput { + target: String! + project: String! + organization: String! + type: String! + field: String! + argument: String + period: DateRangeInput! + operationHash: String + } + + input FieldListStatsInput { + target: String! + project: String! + organization: String! + period: DateRangeInput! + fields: [FieldTypePairInput!]! 
+ operationHash: String + } + + input FieldTypePairInput { + type: String! + field: String! + argument: String + } + + type FieldStats { + type: String! + field: String! + argument: String + count: SafeInt! + percentage: Float! + } + + type OperationsStats { + requestsOverTime(resolution: Int!): [RequestsOverTime!]! + failuresOverTime(resolution: Int!): [FailuresOverTime!]! + durationOverTime(resolution: Int!): [DurationOverTime!]! + totalRequests(resolution: Int!): SafeInt! + totalFailures(resolution: Int!): SafeInt! + totalOperations: Int! + durationHistogram(resolution: Int!): [DurationHistogram!]! + duration: DurationStats! + operations: OperationStatsConnection! + clients: ClientStatsConnection! + } + + type OperationStatsConnection { + nodes: [OperationStats!]! + total: Int! + } + + type ClientStatsConnection { + nodes: [ClientStats!]! + total: Int! + } + + type DurationStats { + p75: Int! + p90: Int! + p95: Int! + p99: Int! + } + + type OperationStats { + id: ID! + document: String! + operationHash: String + kind: String! + name: String! + """ + Total number of requests + """ + count: SafeInt! + """ + Number of requests that succeeded + """ + countOk: SafeInt! + percentage: Float! + duration: DurationStats! + } + + type ClientStats { + name: String! + versions: [ClientVersionStats!]! + count: Int! + percentage: Float! + } + + type ClientVersionStats { + version: String! + count: Int! + percentage: Float! + } + + type RequestsOverTime { + date: DateTime! + value: SafeInt! + } + + type FailuresOverTime { + date: DateTime! + value: SafeInt! + } + + type DurationOverTime { + date: DateTime! + duration: DurationStats! + } + + type DurationHistogram { + duration: Int! + count: SafeInt! 
+ } +`; diff --git a/packages/services/api/src/modules/operations/providers/clickhouse-client.ts b/packages/services/api/src/modules/operations/providers/clickhouse-client.ts new file mode 100644 index 000000000..b7b74ccce --- /dev/null +++ b/packages/services/api/src/modules/operations/providers/clickhouse-client.ts @@ -0,0 +1,157 @@ +import { Injectable, Inject } from 'graphql-modules'; +import Agent from 'agentkeepalive'; +import type { Span } from '@sentry/types'; +import { CLICKHOUSE_CONFIG } from './tokens'; +import type { ClickHouseConfig } from './tokens'; +import { HttpClient } from '../../shared/providers/http-client'; +import { atomic } from '../../../shared/helpers'; +import { Logger } from '../../shared/providers/logger'; + +export interface QueryResponse { + data: readonly T[]; + rows: number; + statistics: { + elapsed: number; + }; +} + +export type RowOf> = T extends QueryResponse< + infer R +> + ? R + : never; + +const agentConfig: Agent.HttpOptions = { + // Keep sockets around in a pool to be used by other requests in the future + keepAlive: true, + // Sets the working socket to timeout after N ms of inactivity on the working socket + timeout: 60_000, + // Sets the free socket to timeout after N ms of inactivity on the free socket + freeSocketTimeout: 30_000, + // Sets the socket active time to live + socketActiveTTL: 60_000, + maxSockets: 35, + maxFreeSockets: 10, + scheduling: 'lifo', +}; + +const httpAgent = new Agent(agentConfig); +const httpsAgent = new Agent.HttpsAgent(agentConfig); + +@Injectable() +export class ClickHouse { + private logger: Logger; + + constructor( + @Inject(CLICKHOUSE_CONFIG) private config: ClickHouseConfig, + private httpClient: HttpClient, + logger: Logger + ) { + this.logger = logger.child({ + service: 'ClickHouse', + }); + } + + @atomic(({ query }: { query: string }) => query) + async query({ + query, + queryId, + timeout, + span: parentSpan, + }: { + query: string; + queryId: string; + timeout: number; + span?: 
Span; + }): Promise> { + const span = parentSpan?.startChild({ + op: queryId, + }); + const startedAt = Date.now(); + const endpoint = `${this.config.protocol ?? 'https'}://${ + this.config.host + }:${this.config.port}`; + + this.logger.debug(`Execution ClickHouse Query: %s`, query); + + const response = await this.httpClient + .post>( + endpoint, + { + context: { + description: `ClickHouse - ${queryId}`, + }, + body: query, + headers: { + 'Accept-Encoding': 'gzip', + Accept: 'application/json', + }, + searchParams: { + default_format: 'JSON', + }, + username: this.config.username, + password: this.config.password, + decompress: true, + timeout: { + lookup: 1000, + connect: 1000, + secureConnect: 1000, + request: timeout, + }, + retry: { + calculateDelay: (info) => { + if (info.attemptCount >= 6) { + // After 5 retries, stop. + return 0; + } + + const delayBy = info.attemptCount * 250; + + this.logger.debug( + `Retry (delay=%s, attempt=%s, reason=%s, queryId=%s)`, + delayBy, + info.attemptCount, + info.error.message, + queryId + ); + + return delayBy; + }, + }, + responseType: 'json', + agent: { + http: httpAgent, + https: httpsAgent, + }, + }, + span + ) + .finally(() => { + span?.finish(); + }); + const endedAt = (Date.now() - startedAt) / 1000; + + this.config.onReadEnd?.(queryId, { + totalSeconds: endedAt, + elapsedSeconds: response.statistics.elapsed, + }); + + return response; + } + + translateWindow({ + value, + unit, + }: { + value: number; + unit: 'd' | 'h' | 'm'; + }): string { + const unitMap = { + d: 'DAY', + h: 'HOUR', + m: 'MINUTE', + }; + + return `${value} ${unitMap[unit]}`; + } +} diff --git a/packages/services/api/src/modules/operations/providers/helpers.ts b/packages/services/api/src/modules/operations/providers/helpers.ts new file mode 100644 index 000000000..d0cf54880 --- /dev/null +++ b/packages/services/api/src/modules/operations/providers/helpers.ts @@ -0,0 +1,56 @@ +import type { DateRange } from '../../../shared/entities'; + +export const 
maxResolution = 90; + +export function calculateTimeWindow({ + period, + resolution, +}: { + period: DateRange; + resolution: number; +}): { + value: number; + unit: 'd' | 'h' | 'm'; +} { + if (!Number.isInteger(resolution)) { + throw new Error( + `Invalid resolution. Expected an integer, received ${resolution}` + ); + } + + if (resolution < 10 || resolution > maxResolution) { + throw new Error( + `Invalid resolution. Expected 10 <= x <= ${maxResolution}, received ${resolution}` + ); + } + + const distanceInMinutes = + (period.to.getTime() - period.from.getTime()) / 1000 / 60; + + const divideBy = { + m: 1, + h: 60, + d: 60 * 24, + }; + + const value = Math.ceil(distanceInMinutes / resolution); + const unit = calculateUnit(value); + const correctedValue = Math.ceil(value / divideBy[unit]); + + return { + value: correctedValue, + unit: calculateUnit(value), + }; +} + +function calculateUnit(minutes: number) { + if (minutes < 60) { + return 'm' as const; + } + + if (minutes < 60 * 24) { + return 'h' as const; + } + + return 'd' as const; +} diff --git a/packages/services/api/src/modules/operations/providers/operations-manager.ts b/packages/services/api/src/modules/operations/providers/operations-manager.ts new file mode 100644 index 000000000..8a6b4c913 --- /dev/null +++ b/packages/services/api/src/modules/operations/providers/operations-manager.ts @@ -0,0 +1,508 @@ +import { Injectable, Scope } from 'graphql-modules'; +import LRU from 'lru-cache'; +import type { DateRange } from '../../../shared/entities'; +import type { Optional, Listify } from '../../../shared/helpers'; +import { cache } from '../../../shared/helpers'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { TargetAccessScope } from '../../auth/providers/target-access'; +import { Logger } from '../../shared/providers/logger'; +import type { TargetSelector } from '../../shared/providers/storage'; +import { OperationsReader } from './operations-reader'; + +const DAY_IN_MS = 
86_400_000; +const lru = new LRU({ + max: 500, + ttl: 30 * DAY_IN_MS, + stale: false, +}); + +async function hasCollectedOperationsCached( + target: string, + checkFn: () => Promise +) { + if (lru.get(target)) { + return true; + } + + const collected = await checkFn(); + + if (collected) { + lru.set(target, true); + } + + return collected; +} + +interface ReadFieldStatsInput extends TargetSelector { + type: string; + field: string; + argument?: string; + period: DateRange; +} + +interface ReadFieldStatsOutput { + type: string; + field: string; + argument?: string; + period: DateRange; + count: number; + percentage: number; +} + +/** + * Responsible for auth checks. + * Talks to Storage. + */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class OperationsManager { + private logger: Logger; + + constructor( + logger: Logger, + private authManager: AuthManager, + private reader: OperationsReader + ) { + this.logger = logger.child({ source: 'OperationsManager' }); + } + + async countUniqueOperations({ + organization, + project, + target, + period, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Counting unique operations (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return ( + await this.reader.countUniqueDocuments({ + target, + period, + operations, + }) + ).length; + } + + async hasCollectedOperations({ + organization, + project, + target, + }: TargetSelector) { + this.logger.info( + 'Checking existence of collected operations (target=%s)', + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return hasCollectedOperationsCached(target, () => + this.reader + .countOperations({ + target, + }) + .then((r) => r.total > 0) + ); + } + + async 
countRequests({ + organization, + project, + target, + period, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Counting requests (period=%s, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader + .countOperations({ + target, + period, + operations, + }) + .then((r) => r.total); + } + + async countFailures({ + organization, + project, + target, + period, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Counting failures (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.countFailures({ + target, + period, + operations, + }); + } + + async readFieldStats( + input: ReadFieldStatsInput + ): Promise; + async readFieldStats( + input: Optional + ): Promise>; + async readFieldStats( + input: ReadFieldStatsInput + ): Promise { + const { type, field, argument, period, organization, project, target } = + input; + this.logger.info('Counting a field (period=%o, target=%s)', period, target); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + const [totalField, total] = await Promise.all([ + this.reader.countField({ + type, + field, + argument, + target, + period, + }), + this.reader.countOperations({ target, period }).then((r) => r.total), + ]); + + return { + type, + field, + argument, + period, + count: totalField, + percentage: total === 0 ? 
0 : (totalField / total) * 100, + }; + } + async readFieldListStats({ + fields, + period, + organization, + project, + target, + unsafe__itIsMeInspector, + }: { + fields: ReadonlyArray<{ + type: string; + field?: string | null; + argument?: string | null; + }>; + period: DateRange; + /** + * Skips the access check. + * A token created for one target can't access data from the other targets. + * This is a workaround for the inspector only. + * TODO: let's think how to solve it well, soon. + */ + unsafe__itIsMeInspector?: boolean; + } & Listify) { + this.logger.info('Counting fields (period=%o, target=%s)', period, target); + + if (!unsafe__itIsMeInspector) { + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + } + + const [totalFields, total] = await Promise.all([ + this.reader.countFields({ + fields, + target, + period, + }), + this.reader.countOperations({ target, period }).then((r) => r.total), + ]); + + return Object.keys(totalFields).map((id) => { + const [type, field, argument] = id.split('.'); + const totalField = totalFields[id] ?? 0; + + return { + type, + field, + argument, + period, + count: totalField, + percentage: total === 0 ? 
0 : (totalField / total) * 100, + }; + }); + } + + async readOperationsStats({ + period, + organization, + project, + target, + operations, + }: { + period: DateRange; + operations?: readonly string[]; + } & TargetSelector) { + this.logger.info( + 'Reading operations stats (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.countUniqueDocuments({ + target, + period, + operations, + }); + } + + async readRequestsOverTime({ + period, + resolution, + organization, + project, + target, + operations, + }: { + period: DateRange; + resolution: number; + operations?: readonly string[]; + } & TargetSelector) { + this.logger.info( + 'Reading requests over time (period=%o, resolution=%s, target=%s)', + period, + resolution, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.requestsOverTime({ + target, + period, + resolution, + operations, + }); + } + + async readFailuresOverTime({ + period, + resolution, + organization, + project, + target, + operations, + }: { + period: DateRange; + resolution: number; + operations?: readonly string[]; + } & TargetSelector) { + this.logger.info( + 'Reading failures over time (period=%o, resolution=%s, target=%s)', + period, + resolution, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.failuresOverTime({ + target, + period, + resolution, + operations, + }); + } + + async readDurationOverTime({ + period, + resolution, + organization, + project, + target, + operations, + }: { + period: DateRange; + resolution: number; + operations?: readonly string[]; + } & TargetSelector) { + this.logger.info( + 'Reading duration over time (period=%o, resolution=%s, target=%s)', 
+ period, + resolution, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.durationOverTime({ + target, + period, + resolution, + operations, + }); + } + + async readGeneralDurationPercentiles({ + period, + organization, + project, + target, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Reading overall duration percentiles (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.generalDurationPercentiles({ + target, + period, + operations, + }); + } + + @cache<{ period: DateRange } & TargetSelector>((selector) => + JSON.stringify(selector) + ) + async readDetailedDurationPercentiles({ + period, + organization, + project, + target, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Reading detailed duration percentiles (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.durationPercentiles({ + target, + period, + operations, + }); + } + + async readDurationHistogram({ + period, + organization, + project, + target, + operations, + }: { period: DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Reading duration histogram (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.durationHistogram({ + target, + period, + operations, + }); + } + + async readUniqueClients({ + period, + organization, + project, + target, + operations, + }: { period: 
DateRange; operations?: readonly string[] } & TargetSelector) { + this.logger.info( + 'Counting unique clients (period=%o, target=%s)', + period, + target + ); + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.REGISTRY_READ, + }); + + return this.reader.countUniqueClients({ + target, + period, + operations, + }); + } +} diff --git a/packages/services/api/src/modules/operations/providers/operations-reader.ts b/packages/services/api/src/modules/operations/providers/operations-reader.ts new file mode 100644 index 000000000..06159ba3e --- /dev/null +++ b/packages/services/api/src/modules/operations/providers/operations-reader.ts @@ -0,0 +1,965 @@ +import { Injectable } from 'graphql-modules'; +import { format, addMinutes, subDays, parse, isAfter } from 'date-fns'; +import type { Span } from '@sentry/types'; +import { ClickHouse, RowOf } from './clickhouse-client'; +import { calculateTimeWindow, maxResolution } from './helpers'; +import type { DateRange } from '../../../shared/entities'; +import { sentry } from '../../../shared/sentry'; + +function formatDate(date: Date): string { + return format( + addMinutes(date, date.getTimezoneOffset()), + 'yyyy-MM-dd HH:mm:ss' + ); +} + +export interface Percentiles { + p75: number; + p90: number; + p95: number; + p99: number; +} + +export interface ESPercentiles { + '75.0': number; + '90.0': number; + '95.0': number; + '99.0': number; +} + +// Remove after ES is no longer used +function toESPercentiles(item: Percentiles | number[]): ESPercentiles { + if (Array.isArray(item)) { + return { + '75.0': item[0], + '90.0': item[1], + '95.0': item[2], + '99.0': item[3], + }; + } + + return { + '75.0': item.p75, + '90.0': item.p90, + '95.0': item.p95, + '99.0': item.p99, + }; +} + +function ensureNumber(value: number | string): number { + if (typeof value === 'number') { + return value; + } + + return parseFloat(value); +} + +function canUseHourlyAggTable({ + period, + resolution 
= maxResolution, +}: { + period?: DateRange; + resolution?: number; +}): boolean { + if (period) { + const distance = period.to.getTime() - period.from.getTime(); + const distanceInHours = distance / 1000 / 60 / 60; + + // We can't show data in 90 time-windows from past 24 hours (based on hourly table) + if (distanceInHours < resolution) { + return false; + } + } + + return true; +} + +const schemaCoordinatesDailyStartedAt = parse( + '2022-01-25 00:00:00', + 'yyyy-MM-dd HH:mm:ss', + new Date() +); + +function canUseSchemaCoordinatesDailyTable(period?: DateRange): boolean { + if (period) { + return isAfter(period.from, schemaCoordinatesDailyStartedAt); + } + + return false; +} + +@Injectable({ + global: true, +}) +export class OperationsReader { + constructor(private clickHouse: ClickHouse) {} + + @sentry('OperationsReader.countField') + async countField({ + type, + field, + argument, + target, + period, + }: { + type: string; + field?: string; + argument?: string; + target: string; + period: DateRange; + }) { + return this.countFields({ + target, + period, + fields: [{ type, field, argument }], + }).then((r) => r[this.makeId({ type, field, argument })]); + } + + @sentry('OperationsReader.countFields') + async countFields( + { + fields, + target, + period, + operations, + }: { + fields: ReadonlyArray<{ + type: string; + field?: string | null; + argument?: string | null; + }>; + target: string | readonly string[]; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise> { + // Once we collect data from more than 30 days, we can leave on this part of code + if (canUseSchemaCoordinatesDailyTable(period)) { + const coordinates = fields.map((selector) => this.makeId(selector)); + + const res = await this.clickHouse.query<{ + total: string; + coordinate: string; + }>({ + query: ` + SELECT + coordinate, + sum(total) as total + FROM schema_coordinates_daily + ${this.createFilter({ + target, + period, + operations, + extra: [`( coordinate IN 
('${coordinates.join(`', '`)}') )`], + })} + GROUP BY coordinate + `, + queryId: 'count_fields_v2', + timeout: 30_000, + span, + }); + + const stats: Record = {}; + for (const row of res.data) { + stats[row.coordinate] = ensureNumber(row.total); + } + + for (const selector of fields) { + const key = this.makeId(selector); + + if (typeof stats[key] !== 'number') { + stats[key] = 0; + } + } + + return stats; + } + + // TODO: Remove after 2022-02-25 + + const sep = `_${Math.random().toString(36).substr(2, 5)}_`; + + function createAlias(selector: { + type: string; + field?: string | null; + argument?: string | null; + }) { + return [selector.type, selector.field, selector.argument] + .filter(Boolean) + .join(sep); + } + + function extractSelector(alias: string): { + type: string; + field?: string | null; + argument?: string | null; + } { + const [type, field, argument] = alias.split(sep); + + return { + type, + field, + argument, + }; + } + + const counters: string[] = []; + const conditions: string[] = []; + + for (const selector of fields) { + const alias = createAlias(selector); + const id = this.makeId(selector); + + counters.push(`sum(has(schema, '${id}')) as ${alias}`); + conditions.push(`has(schema, '${id}')`); + } + + const res = await this.clickHouse.query<{ + [key: string]: string; + }>({ + query: ` + SELECT + ${counters.join(', ')} + FROM operations_new + ${this.createFilter({ + target, + period, + operations, + extra: [`( ${conditions.join(' OR ')} )`], + })} + `, + queryId: 'count_fields', + timeout: 60_000, + span, + }); + + const row = res.data[0]; + const stats: Record = {}; + + Object.keys(row).forEach((alias) => { + const selector = extractSelector(alias); + const total = ensureNumber(row[alias]); + + stats[this.makeId(selector)] = total; + }); + + for (const selector of fields) { + const key = this.makeId(selector); + + if (typeof stats[key] !== 'number') { + stats[key] = 0; + } + } + + return stats; + } + + 
@sentry('OperationsReader.countOperations') + async countOperations( + { + target, + period, + operations, + }: { + target: string | readonly string[]; + period?: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise<{ + total: number; + ok: number; + notOk: number; + }> { + const result = await this.clickHouse.query<{ + total: number; + totalOk: number; + }>( + canUseHourlyAggTable({ period }) + ? { + query: ` + SELECT + sum(total) as total, + sum(total_ok) as totalOk + FROM operations_new_hourly_mv + ${this.createFilter({ + target, + period, + operations, + })} + `, + queryId: 'count_operations_mv', + timeout: 15_000, + span, + } + : { + query: ` + SELECT + count() as total, + sum(ok) as totalOk + FROM operations_new + ${this.createFilter({ + target, + period, + operations, + })} + `, + queryId: 'count_operations', + timeout: 15_000, + span, + } + ); + const total = ensureNumber(result.data[0].total); + const totalOk = ensureNumber(result.data[0].totalOk); + + return { + total, + ok: totalOk, + notOk: total - totalOk, + }; + } + + @sentry('OperationsReader.countFailures') + async countFailures({ + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }): Promise { + return this.countOperations({ target, period, operations }).then( + (r) => r.notOk + ); + } + + @sentry('OperationsReader.countUniqueDocuments') + async countUniqueDocuments( + { + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise< + Array<{ + document: string; + operationHash?: string; + operationName: string; + kind: string; + count: number; + countOk: number; + percentage: number; + }> + > { + const result = await this.clickHouse.query<{ + total: string; + totalOk: string; + hash: string; + }>( + canUseHourlyAggTable({ period }) + ? 
{ + query: ` + SELECT + sum(total) as total, + sum(total_ok) as totalOk, + hash + FROM operations_new_hourly_mv + ${this.createFilter({ + target, + period, + operations, + })} + GROUP BY hash + `, + queryId: 'count_unique_documents_mv', + timeout: 15_000, + span, + } + : { + query: ` + SELECT + count() as total, + sum(ok) as totalOk, + hash + FROM operations_new + ${this.createFilter({ + target, + period, + operations, + })} + GROUP BY hash + `, + queryId: 'count_unique_documents', + timeout: 15_000, + span, + } + ); + const total = result.data.reduce( + (sum, row) => sum + parseInt(row.total, 10), + 0 + ); + + const registryResult = await this.clickHouse.query<{ + name?: string; + body: string; + hash: string; + operation: string; + }>({ + query: ` + SELECT + name, + body, + hash, + operation + FROM operations_registry FINAL + ${this.createFilter({ + target, + operations, + })}`, + queryId: 'operations_registry', + timeout: 15_000, + span, + }); + + const operationsMap = new Map>(); + + for (const row of registryResult.data) { + operationsMap.set(row.hash, row); + } + + return result.data.map((row) => { + const rowTotal = parseInt(row.total, 10); + const rowTotalOk = parseInt(row.totalOk, 10); + const op = operationsMap.get(row.hash); + const { name, body, operation } = op ?? { + name: 'missing', + body: 'missing', + operation: 'missing', + }; + + return { + document: body, + operationName: `${row.hash.substr(0, 4)}_${name ?? 
'anonymous'}`, + operationHash: row.hash, + kind: operation, + count: rowTotal, + countOk: rowTotalOk, + percentage: (rowTotal / total) * 100, + }; + }); + } + + @sentry('OperationsReader.countUniqueClients') + async countUniqueClients( + { + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise< + Array<{ + name: string; + count: number; + percentage: number; + versions: Array<{ + version: string; + count: number; + percentage: number; + }>; + }> + > { + const result = await this.clickHouse.query<{ + total: string; + client_name: string; + client_version: string; + }>({ + query: ` + SELECT + COUNT(*) as total, + client_name, + client_version + FROM operations_new + ${this.createFilter({ + target, + period, + operations, + })} + GROUP BY client_name, client_version + `, + queryId: 'count_unique_clients', + timeout: 15_000, + span, + }); + + const total = result.data.reduce( + (sum, row) => sum + parseInt(row.total, 10), + 0 + ); + + const clientMap = new Map< + string, + { + name: string; + total: number; + versions: Array<{ + total: number; + version: string; + }>; + } + >(); + + for (const row of result.data) { + const client_name = !row.client_name ? 'unknown' : row.client_name; + const client_version = !row.client_version + ? 
'unknown' + : row.client_version; + + if (!clientMap.has(client_name)) { + clientMap.set(client_name, { + name: client_name, + total: 0, + versions: [], + }); + } + + const client = clientMap.get(client_name)!; + + client.total += ensureNumber(row.total); + client.versions.push({ + total: ensureNumber(row.total), + version: client_version, + }); + } + + return Array.from(clientMap.values()).map((client) => { + return { + name: client.name, + versions: client.versions.map((version) => ({ + version: version.version, + count: version.total, + percentage: (version.total / client.total) * 100, + })), + count: client.total, + percentage: (client.total / total) * 100, + }; + }); + } + + @sentry('OperationsReader.requestsOverTime') + async requestsOverTime({ + target, + period, + resolution, + operations, + }: { + target: string; + period: DateRange; + resolution: number; + operations?: readonly string[]; + }) { + const results = await this.getDurationAndCountOverTime({ + target, + period, + resolution, + operations, + }); + + return results.map((row) => ({ + date: row.date, + value: row.total, + })); + } + + @sentry('OperationsReader.failuresOverTime') + async failuresOverTime({ + target, + period, + resolution, + operations, + }: { + target: string; + period: DateRange; + resolution: number; + operations?: readonly string[]; + }) { + const result = await this.getDurationAndCountOverTime({ + target, + period, + resolution, + operations, + }); + + return result.map((row) => ({ + date: row.date, + value: row.total - row.totalOk, + })); + } + + @sentry('OperationsReader.durationOverTime') + async durationOverTime({ + target, + period, + resolution, + operations, + }: { + target: string; + period: DateRange; + resolution: number; + operations?: readonly string[]; + }): Promise< + Array<{ + date: any; + duration: ESPercentiles; + }> + > { + return this.getDurationAndCountOverTime({ + target, + period, + resolution, + operations, + }); + } + + 
@sentry('OperationsReader.durationHistogram') + async durationHistogram( + { + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise< + Array<{ + duration: number; + count: number; + }> + > { + const result = await this.clickHouse.query<{ + latency: number; + total: number; + }>({ + query: ` + WITH histogram(90)(logDuration) AS hist + SELECT + arrayJoin(hist).1 as latency, + arrayJoin(hist).3 as total + FROM + ( + SELECT log10(duration) as logDuration + FROM operations_new + ${this.createFilter({ target, period, operations })} + ) + ORDER BY latency + `, + queryId: 'duration_histogram', + timeout: 60_000, + span, + }); + + return result.data.map((row) => { + return { + duration: Math.round(Math.pow(10, row.latency)), + count: Math.round(row.total), + }; + }); + } + + @sentry('OperationsReader.generalDurationPercentiles') + async generalDurationPercentiles( + { + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ): Promise { + const result = await this.clickHouse.query<{ + percentiles: [number, number, number, number]; + }>( + canUseHourlyAggTable({ period }) + ? 
{ + query: ` + SELECT + quantilesMerge(0.75, 0.90, 0.95, 0.99)(duration_quantiles) as percentiles + FROM operations_new_hourly_mv + ${this.createFilter({ target, period, operations })} + `, + queryId: 'general_duration_percentiles_mv', + timeout: 15_000, + span, + } + : { + query: ` + SELECT + quantiles(0.75, 0.90, 0.95, 0.99)(duration) as percentiles + FROM operations_new + ${this.createFilter({ target, period, operations })} + `, + queryId: 'general_duration_percentiles', + timeout: 15_000, + span, + } + ); + + return toESPercentiles(result.data[0].percentiles); + } + + @sentry('OperationsReader.durationPercentiles') + async durationPercentiles( + { + target, + period, + operations, + }: { + target: string; + period: DateRange; + operations?: readonly string[]; + }, + span?: Span + ) { + const result = await this.clickHouse.query<{ + hash: string; + percentiles: [number, number, number, number]; + }>( + canUseHourlyAggTable({ period }) + ? { + query: ` + SELECT + hash, + quantilesMerge(0.75, 0.90, 0.95, 0.99)(duration_quantiles) as percentiles + FROM operations_new_hourly_mv + ${this.createFilter({ target, period, operations })} + GROUP BY hash + `, + queryId: 'duration_percentiles_mv', + timeout: 15_000, + span, + } + : { + query: ` + SELECT + hash, + quantiles(0.75, 0.90, 0.95, 0.99)(duration) as percentiles + FROM operations_new + ${this.createFilter({ target, period, operations })} + GROUP BY hash + `, + queryId: 'duration_percentiles', + timeout: 15_000, + span, + } + ); + + const collection = new Map(); + + result.data.forEach((row) => { + collection.set(row.hash, toESPercentiles(row.percentiles)); + }); + + return collection; + } + + @sentry('OperationsReader.getDurationAndCountOverTime') + private async getDurationAndCountOverTime( + { + target, + period, + resolution, + operations, + }: { + target: string; + period: DateRange; + resolution: number; + operations?: readonly string[]; + }, + span?: Span + ) { + // multiply by 1000 to convert to milliseconds 
+ const result = await this.clickHouse.query<{ + date: number; + total: number; + totalOk: number; + percentiles: [number, number, number, number]; + }>( + canUseHourlyAggTable({ period }) + ? { + query: ` + SELECT + multiply( + toUnixTimestamp( + toStartOfInterval(timestamp, INTERVAL ${this.clickHouse.translateWindow( + calculateTimeWindow({ period, resolution }) + )}, 'UTC'), + 'UTC'), + 1000) as date, + quantilesMerge(0.75, 0.90, 0.95, 0.99)(duration_quantiles) as percentiles, + sum(total) as total, + sum(total_ok) as totalOk + FROM operations_new_hourly_mv + ${this.createFilter({ target, period, operations })} + GROUP BY date + ORDER BY date + `, + queryId: 'duration_and_count_over_time_mv', + timeout: 15_000, + span, + } + : { + query: ` + SELECT + multiply( + toUnixTimestamp( + toStartOfInterval(timestamp, INTERVAL ${this.clickHouse.translateWindow( + calculateTimeWindow({ period, resolution }) + )}, 'UTC'), + 'UTC'), + 1000) as date, + quantiles(0.75, 0.90, 0.95, 0.99)(duration) as percentiles, + count(*) as total, + sum(ok) as totalOk + FROM operations_new + ${this.createFilter({ target, period, operations })} + GROUP BY date + ORDER BY date + `, + queryId: 'duration_and_count_over_time', + timeout: 15_000, + span, + } + ); + + return result.data.map((row) => { + return { + date: ensureNumber(row.date) as any, + total: ensureNumber(row.total), + totalOk: ensureNumber(row.totalOk), + duration: toESPercentiles(row.percentiles), + }; + }); + } + + async adminCountOperationsPerTarget({ daysLimit }: { daysLimit: number }) { + const result = await this.clickHouse.query<{ + total: string; + target: string; + }>({ + query: `SELECT sum(total) as total, target from operations_new_hourly_mv WHERE timestamp >= subtractDays(NOW(), ${daysLimit}) GROUP BY target`, + queryId: 'admin_operations_per_target', + timeout: 15_000, + }); + + return result.data.map((row) => ({ + total: ensureNumber(row.total), + target: row.target, + })); + } + + async adminOperationsOverTime({ 
daysLimit }: { daysLimit: number }) { + const result = await this.clickHouse.query<{ + date: number; + total: string; + }>({ + query: ` + SELECT + multiply( + toUnixTimestamp( + toStartOfInterval(timestamp, INTERVAL ${this.clickHouse.translateWindow( + calculateTimeWindow({ + period: { + from: subDays(new Date(), daysLimit), + to: new Date(), + }, + resolution: 90, + }) + )}, 'UTC'), + 'UTC'), + 1000) as date, + sum(total) as total + FROM operations_new_hourly_mv + WHERE timestamp >= subtractDays(NOW(), ${daysLimit}) + GROUP BY date + ORDER BY date + `, + queryId: 'admin_operations_per_target', + timeout: 15_000, + }); + + return result.data.map((row) => ({ + date: ensureNumber(row.date) as any, + total: ensureNumber(row.total), + })); + } + + public createFilter({ + target, + period, + operations, + extra = [], + }: { + target?: string | readonly string[]; + period?: DateRange; + operations?: readonly string[]; + extra?: string[]; + }) { + const where: string[] = []; + + if (target) { + if (Array.isArray(target)) { + where.push(`target IN (${target.map((t) => `'${t}'`).join(', ')})`); + } else { + where.push(`target = '${target}'`); + } + } + + if (period) { + where.push( + `timestamp >= toDateTime('${formatDate(period.from)}', 'UTC')` + ); + where.push(`timestamp <= toDateTime('${formatDate(period.to)}', 'UTC')`); + } + + if (operations?.length) { + where.push(`(hash) IN (${operations.map((op) => `'${op}'`).join(',')})`); + } + + if (extra.length) { + where.push(...extra); + } + + const statement = where.length ? 
` PREWHERE ${where.join(' AND ')} ` : ' '; + + return statement; + } + + private makeId({ + type, + field, + argument, + }: { + type: string; + field?: string | null; + argument?: string | null; + }): string { + return [type, field, argument].filter(Boolean).join('.'); + } +} diff --git a/packages/services/api/src/modules/operations/providers/tokens.ts b/packages/services/api/src/modules/operations/providers/tokens.ts new file mode 100644 index 000000000..c10830d24 --- /dev/null +++ b/packages/services/api/src/modules/operations/providers/tokens.ts @@ -0,0 +1,20 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface ClickHouseConfig { + host: string; + port: number; + protocol?: string; + username?: string; + password?: string; + onReadEnd?: ( + label: string, + timings: { + totalSeconds: number; + elapsedSeconds: number; + } + ) => void; +} + +export const CLICKHOUSE_CONFIG = new InjectionToken( + 'clickhouse-config' +); diff --git a/packages/services/api/src/modules/operations/resolvers.ts b/packages/services/api/src/modules/operations/resolvers.ts new file mode 100644 index 000000000..0bf03ecf9 --- /dev/null +++ b/packages/services/api/src/modules/operations/resolvers.ts @@ -0,0 +1,283 @@ +import { hash, nsToMs, parseDateRangeInput } from '../../shared/helpers'; +import { createConnection } from '../../shared/schema'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { OperationsManager } from './providers/operations-manager'; +import { OperationsModule } from './__generated__/types'; + +export const resolvers: OperationsModule.Resolvers = { + Query: { + async hasCollectedOperations(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return 
injector.get(OperationsManager).hasCollectedOperations({ + organization, + project, + target, + }); + }, + async fieldStats(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(OperationsManager).readFieldStats({ + organization, + project, + target, + type: selector.type, + field: selector.field, + argument: selector.argument ?? undefined, + period: parseDateRangeInput(selector.period), + }); + }, + async fieldListStats(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(OperationsManager).readFieldListStats({ + organization, + project, + target, + fields: selector.fields, + period: parseDateRangeInput(selector.period), + }); + }, + async operationsStats(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + const operations = selector.operations ?? 
[]; + + return { + period: parseDateRangeInput(selector.period), + organization, + project, + target, + operations, + }; + }, + }, + OperationsStats: { + async operations( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + const operationsManager = injector.get(OperationsManager); + const [operations, durations] = await Promise.all([ + operationsManager.readOperationsStats({ + organization, + project, + target, + period, + operations: operationsFilter, + }), + operationsManager.readDetailedDurationPercentiles({ + organization, + project, + target, + period, + operations: operationsFilter, + }), + ]); + + return operations + .map((op) => { + return { + id: hash(`${op.operationName}__${op.document}`), + kind: op.kind, + document: op.document, + name: op.operationName, + count: op.count, + countOk: op.countOk, + percentage: op.percentage, + duration: durations.get(op.operationHash!)!, + operationHash: op.operationHash, + }; + }) + .sort((a, b) => b.count - a.count); + }, + totalRequests( + { organization, project, target, period, operations }, + _, + { injector } + ) { + return injector.get(OperationsManager).countRequests({ + organization, + project, + target, + period, + operations, + }); + }, + totalFailures( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + return injector.get(OperationsManager).countFailures({ + organization, + project, + target, + period, + operations: operationsFilter, + }); + }, + totalOperations( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + return injector.get(OperationsManager).countUniqueOperations({ + organization, + project, + target, + period, + operations: operationsFilter, + }); + }, + requestsOverTime( + { organization, project, target, period, operations: operationsFilter }, + { resolution }, + { injector } + ) { + return 
injector.get(OperationsManager).readRequestsOverTime({ + target, + project, + organization, + period, + resolution, + operations: operationsFilter, + }); + }, + failuresOverTime( + { organization, project, target, period, operations: operationsFilter }, + { resolution }, + { injector } + ) { + return injector.get(OperationsManager).readFailuresOverTime({ + target, + project, + organization, + period, + resolution, + operations: operationsFilter, + }); + }, + durationOverTime( + { organization, project, target, period, operations: operationsFilter }, + { resolution }, + { injector } + ) { + return injector.get(OperationsManager).readDurationOverTime({ + target, + project, + organization, + period, + resolution, + operations: operationsFilter, + }); + }, + clients( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + return injector.get(OperationsManager).readUniqueClients({ + target, + project, + organization, + period, + operations: operationsFilter, + }); + }, + duration( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + return injector.get(OperationsManager).readGeneralDurationPercentiles({ + organization, + project, + target, + period, + operations: operationsFilter, + }); + }, + async durationHistogram( + { organization, project, target, period, operations: operationsFilter }, + _, + { injector } + ) { + const histogram = await injector + .get(OperationsManager) + .readDurationHistogram({ + organization, + project, + target, + period, + operations: operationsFilter, + }); + + const uniqueDurations = new Map< + number, + { + duration: number; + count: number; + } + >(); + + for (let i = 0; i < histogram.length; i++) { + const node = histogram[i]; + const slot = Math.floor(nsToMs(node.duration) / 50); + + if (uniqueDurations.has(slot)) { + uniqueDurations.get(slot)!.count += node.count; + } else { + uniqueDurations.set(slot, { + duration: (slot + 1) * 50, + 
count: node.count, + }); + } + } + + return Array.from(uniqueDurations.values()); + }, + }, + DurationStats: { + p75(value) { + return transformPercentile(value['75.0']); + }, + p90(value) { + return transformPercentile(value['90.0']); + }, + p95(value) { + return transformPercentile(value['95.0']); + }, + p99(value) { + return transformPercentile(value['99.0']); + }, + }, + OperationStatsConnection: createConnection(), + ClientStatsConnection: createConnection(), +}; + +function transformPercentile(value: number | null): number { + return value ? Math.round(nsToMs(value)) : 0; +} diff --git a/packages/services/api/src/modules/organization/index.ts b/packages/services/api/src/modules/organization/index.ts new file mode 100644 index 000000000..ccfe19d7b --- /dev/null +++ b/packages/services/api/src/modules/organization/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import { OrganizationManager } from './providers/organization-manager'; +import typeDefs from './module.graphql'; + +export const organizationModule = createModule({ + id: 'organization', + dirname: __dirname, + typeDefs, + resolvers, + providers: [OrganizationManager], +}); diff --git a/packages/services/api/src/modules/organization/module.graphql.ts b/packages/services/api/src/modules/organization/module.graphql.ts new file mode 100644 index 000000000..6c145ab4e --- /dev/null +++ b/packages/services/api/src/modules/organization/module.graphql.ts @@ -0,0 +1,99 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Query { + organization(selector: OrganizationSelectorInput!): OrganizationPayload + organizationByInviteCode(code: String!): OrganizationByInviteCodePayload + organizations: OrganizationConnection! + } + + extend type Mutation { + createOrganization(input: CreateOrganizationInput!): OrganizationPayload! + deleteOrganization( + selector: OrganizationSelectorInput! + ): OrganizationPayload! 
+ deleteOrganizationMembers( + selector: OrganizationMembersSelectorInput! + ): OrganizationPayload! + joinOrganization(code: String!): JoinOrganizationPayload! + resetInviteCode(selector: OrganizationSelectorInput!): OrganizationPayload! + updateOrganizationName( + input: UpdateOrganizationNameInput! + ): OrganizationPayload! + updateOrganizationMemberAccess( + input: OrganizationMemberAccessInput! + ): OrganizationPayload! + } + + input OrganizationSelectorInput { + organization: ID! + } + + type OrganizationSelector { + organization: ID! + } + + input OrganizationMembersSelectorInput { + organization: ID! + users: [ID!]! + } + + input OrganizationMemberAccessInput { + organization: ID! + user: ID! + organizationScopes: [OrganizationAccessScope!]! + projectScopes: [ProjectAccessScope!]! + targetScopes: [TargetAccessScope!]! + } + + input CreateOrganizationInput { + name: String! + } + + input UpdateOrganizationNameInput { + organization: ID! + name: String! + } + + enum OrganizationType { + PERSONAL + REGULAR + } + + type Organization { + id: ID! + cleanId: ID! + name: String! + type: OrganizationType! + owner: Member! + me: Member! + members: MemberConnection! + inviteCode: String! + } + + type OrganizationConnection { + nodes: [Organization!]! + total: Int! + } + + type OrganizationInvitationError { + message: String! + } + + type OrganizationInvitationPayload { + name: String! + } + + union JoinOrganizationPayload = + OrganizationInvitationError + | OrganizationPayload + + union OrganizationByInviteCodePayload = + OrganizationInvitationError + | OrganizationInvitationPayload + + type OrganizationPayload { + selector: OrganizationSelector! + organization: Organization! 
+ } +`; diff --git a/packages/services/api/src/modules/organization/providers/events.ts b/packages/services/api/src/modules/organization/providers/events.ts new file mode 100644 index 000000000..4691d3a7d --- /dev/null +++ b/packages/services/api/src/modules/organization/providers/events.ts @@ -0,0 +1,9 @@ +export const ENSURE_PERSONAL_ORGANIZATION_EVENT = + 'ensure-personal-organization-event'; +export interface EnsurePersonalOrganizationEventPayload { + name: string; + user: { + id: string; + externalAuthUserId: string; + }; +} diff --git a/packages/services/api/src/modules/organization/providers/organization-manager.ts b/packages/services/api/src/modules/organization/providers/organization-manager.ts new file mode 100644 index 000000000..d35db9646 --- /dev/null +++ b/packages/services/api/src/modules/organization/providers/organization-manager.ts @@ -0,0 +1,576 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { paramCase } from 'param-case'; +import { Organization, OrganizationType } from '../../../shared/entities'; +import { HiveError } from '../../../shared/errors'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { Logger } from '../../shared/providers/logger'; +import { Storage } from '../../shared/providers/storage'; +import type { OrganizationSelector } from '../../shared/providers/storage'; +import { + share, + cache, + uuid, + diffArrays, + pushIfMissing, +} from '../../../shared/helpers'; +import { MessageBus } from '../../shared/providers/message-bus'; +import { ActivityManager } from '../../activity/providers/activity-manager'; +import { BillingProvider } from '../../billing/providers/billing.provider'; +import { TokenStorage } from '../../token/providers/token-storage'; +import { Tracking } from '../../shared/providers/tracking'; +import { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import { ProjectAccessScope } from '../../auth/providers/project-access'; +import { 
TargetAccessScope } from '../../auth/providers/target-access'; +import { + EnsurePersonalOrganizationEventPayload, + ENSURE_PERSONAL_ORGANIZATION_EVENT, +} from './events'; + +const reservedNames = [ + 'registry', + 'server', + 'usage', + 'graphql', + 'api', + 'auth', + 'home', + 'register', + 'login', + 'logout', + 'signup', + 'signin', + 'signout', + 'sign-up', + 'sign-in', + 'sign-out', + 'manage', + 'admin', + 'stats', + 'internal', + 'general', + 'dashboard', + 'index', + 'contact', + 'docs', + 'documentation', + 'help', + 'support', + 'faq', + 'knowledge', + 'internal', +]; + +/** + * Responsible for auth checks. + * Talks to Storage. + */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class OrganizationManager { + private logger: Logger; + + constructor( + logger: Logger, + private storage: Storage, + private authManager: AuthManager, + private tokenStorage: TokenStorage, + private messageBus: MessageBus, + private activityManager: ActivityManager, + private tracking: Tracking, + private billingProvider: BillingProvider + ) { + this.logger = logger.child({ source: 'OrganizationManager' }); + this.messageBus.on( + ENSURE_PERSONAL_ORGANIZATION_EVENT, + (data) => this.ensurePersonalOrganization(data) + ); + } + + getOrganizationFromToken: () => Promise = share( + async () => { + const token = this.authManager.ensureApiToken(); + const result = await this.tokenStorage.getToken({ token }); + + await this.authManager.ensureOrganizationAccess({ + organization: result.organization, + scope: OrganizationAccessScope.READ, + }); + + return this.storage.getOrganization({ + organization: result.organization, + }); + } + ); + + getOrganizationIdByToken: () => Promise = share(async () => { + const token = this.authManager.ensureApiToken(); + const { organization } = await this.tokenStorage.getToken({ + token, + }); + + return organization; + }); + + async getOrganization(selector: OrganizationSelector): Promise { + this.logger.debug('Fetching 
organization (selector=%o)', selector); + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.READ, + }); + return this.storage.getOrganization(selector); + } + + async getOrganizations(): Promise { + this.logger.debug('Fetching organizations'); + const user = await this.authManager.getCurrentUser(); + return this.storage.getOrganizations({ user: user.id }); + } + + async getOrganizationByInviteCode({ + code, + }: { + code: string; + }): Promise { + this.logger.debug('Fetching organization (inviteCode=%s)', code); + const organization = await this.storage.getOrganizationByInviteCode({ + inviteCode: code, + }); + + if (!organization) { + return { + message: 'Invitation expired', + }; + } + + const hasAccess = await this.authManager.checkOrganizationAccess({ + organization: organization.id, + scope: OrganizationAccessScope.READ, + }); + + if (hasAccess) { + return { + message: "You're already a member", + }; + } + + return organization; + } + + @cache((selector: OrganizationSelector) => selector.organization) + async getOrganizationMembers(selector: OrganizationSelector) { + return this.storage.getOrganizationMembers(selector); + } + + async getOrganizationMember( + selector: OrganizationSelector & { user: string } + ) { + return this.storage.getOrganizationMember(selector); + } + + async getOrganizationOwner(selector: OrganizationSelector) { + return this.storage.getOrganizationOwner(selector); + } + + async createOrganization(input: { + name: string; + type: OrganizationType; + user: { + id: string; + externalAuthUserId: string; + }; + }): Promise { + const { name, type, user } = input; + this.logger.info('Creating an organization (input=%o)', input); + let cleanId = paramCase(name); + + if ( + reservedNames.includes(cleanId) || + (await this.storage.getOrganizationByCleanId({ cleanId })) + ) { + cleanId = paramCase(`${name}-${uuid(4)}`); + } + + const organization = await this.storage.createOrganization({ + name, + 
cleanId, + type, + user: user.id, + scopes: [ + ...Object.values(OrganizationAccessScope), + ...Object.values(ProjectAccessScope), + ...Object.values(TargetAccessScope), + ], + }); + + await this.activityManager.create({ + type: 'ORGANIZATION_CREATED', + selector: { + organization: organization.id, + }, + user, + }); + + return organization; + } + + async deleteOrganization( + selector: OrganizationSelector + ): Promise { + this.logger.info( + 'Deleting an organization (organization=%s)', + selector.organization + ); + await this.authManager.ensureOrganizationAccess({ + organization: selector.organization, + scope: OrganizationAccessScope.DELETE, + }); + + const organization = await this.getOrganization({ + organization: selector.organization, + }); + + if (organization.type === OrganizationType.PERSONAL) { + throw new HiveError(`Cannot remove a personal organization`); + } + + await this.tracking.track({ + event: 'ORGANIZATION_DELETED', + data: { + ...selector, + name: organization.name, + }, + }); + + const [deletedOrganization] = await Promise.all([ + this.storage.deleteOrganization({ + organization: organization.id, + }), + this.tokenStorage.invalidateOrganization({ + organization: selector.organization, + }), + ]); + + // Because we checked the access before, it's stale by now + this.authManager.resetAccessCache(); + + return deletedOrganization; + } + + async updatePlan( + input: { + plan: string; + } & OrganizationSelector + ): Promise { + const { plan } = input; + this.logger.info('Updating an organization plan (input=%o)', input); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.SETTINGS, + }); + const organization = await this.getOrganization({ + organization: input.organization, + }); + + const result = await this.storage.updateOrganizationPlan({ + billingPlan: plan, + organization: organization.id, + }); + + await this.activityManager.create({ + type: 'ORGANIZATION_PLAN_UPDATED', + selector: { + 
organization: organization.id, + }, + meta: { + newPlan: plan, + previousPlan: organization.billingPlan, + }, + }); + + return result; + } + + async updateRateLimits( + input: Pick & OrganizationSelector + ): Promise { + const { monthlyRateLimit } = input; + this.logger.info('Updating an organization plan (input=%o)', input); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.SETTINGS, + }); + const organization = await this.getOrganization({ + organization: input.organization, + }); + + const result = await this.storage.updateOrganizationRateLimits({ + monthlyRateLimit, + organization: organization.id, + }); + + if (this.billingProvider.enabled) { + await this.billingProvider.syncOrganization({ + organizationId: organization.id, + reserved: { + operations: Math.floor(input.monthlyRateLimit.operations / 1_000_000), + schemaPushes: input.monthlyRateLimit.schemaPush, + }, + }); + } + + return result; + } + + async updateName( + input: { + name: string; + } & OrganizationSelector + ): Promise { + const { name } = input; + this.logger.info('Updating an organization name (input=%o)', input); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.SETTINGS, + }); + const [user, organization] = await Promise.all([ + this.authManager.getCurrentUser(), + this.getOrganization({ + organization: input.organization, + }), + ]); + + if (organization.type === OrganizationType.PERSONAL) { + throw new HiveError(`Cannot rename a personal organization`); + } + + const result = await this.storage.updateOrganizationName({ + name, + organization: organization.id, + user: user.id, + }); + + await this.activityManager.create({ + type: 'ORGANIZATION_NAME_UPDATED', + selector: { + organization: organization.id, + }, + meta: { + value: result.name, + }, + }); + + return result; + } + + async joinOrganization({ + code, + }: { + code: string; + }): Promise { + this.logger.info('Joining an organization 
(code=%s)', code); + const organization = await this.getOrganizationByInviteCode({ + code, + }); + + if ('message' in organization) { + return organization; + } + + if (organization.type === OrganizationType.PERSONAL) { + throw new HiveError(`Cannot join a personal organization`); + } + + const user = await this.authManager.getCurrentUser(); + + await this.storage.addOrganizationMember({ + user: user.id, + organization: organization.id, + scopes: [ + OrganizationAccessScope.READ, + ProjectAccessScope.READ, + ProjectAccessScope.OPERATIONS_STORE_READ, + TargetAccessScope.READ, + TargetAccessScope.REGISTRY_READ, + ], + }); + + // Because we checked the access before, it's stale by now + this.authManager.resetAccessCache(); + + await this.activityManager.create({ + type: 'MEMBER_ADDED', + selector: { + organization: organization.id, + user: user.id, + }, + }); + + return this.storage.getOrganization({ + organization: organization.id, + }); + } + + async deleteMembers( + selector: { + users: readonly string[]; + } & OrganizationSelector + ): Promise { + this.logger.info( + 'Deleting a member from an organization (selector=%o)', + selector + ); + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.MEMBERS, + }); + const owner = await this.getOrganizationOwner(selector); + const { users, organization } = selector; + + if (users.some((user) => user === owner.id)) { + throw new HiveError(`Cannot remove the owner from the organization`); + } + + const members = await this.storage.getOrganizationMembers({ + organization, + }); + + await this.storage.deleteOrganizationMembers({ + users, + organization, + }); + + await Promise.all( + users.map((user) => { + const member = members.find((m) => m.id === user); + + if (member) { + return this.activityManager.create({ + type: 'MEMBER_DELETED', + selector: { + organization, + }, + meta: { + email: member.user.email, + }, + }); + } + }) + ); + + // Because we checked the access before, 
it's stale by now + this.authManager.resetAccessCache(); + + return this.storage.getOrganization({ + organization, + }); + } + + async updateMemberAccess( + input: { + user: string; + organizationScopes: readonly OrganizationAccessScope[]; + projectScopes: readonly ProjectAccessScope[]; + targetScopes: readonly TargetAccessScope[]; + } & OrganizationSelector + ) { + this.logger.info( + 'Updating a member access in an organization (input=%o)', + input + ); + await this.authManager.ensureOrganizationAccess({ + ...input, + scope: OrganizationAccessScope.MEMBERS, + }); + + const currentUser = await this.authManager.getCurrentUser(); + + const [currentMember, member] = await Promise.all([ + this.getOrganizationMember({ + organization: input.organization, + user: currentUser.id, + }), + this.getOrganizationMember({ + organization: input.organization, + user: input.user, + }), + ]); + + const newScopes = [ + ...input.organizationScopes, + ...input.projectScopes, + ...input.targetScopes, + ]; + + // See what scopes were removed or added + const modifiedScopes = diffArrays(member.scopes, newScopes); + + // Check if the current user has rights to update these member scopes + // User can't manage other user's scope if he's missing the scope as well + const currentUserMissingScopes = modifiedScopes.filter( + (scope) => !currentMember.scopes.includes(scope) + ); + + if (currentUserMissingScopes.length > 0) { + this.logger.debug(`Logged user scopes: %o`, currentMember.scopes); + throw new HiveError( + `No access to modify the scopes: ${currentUserMissingScopes.join(', ')}` + ); + } + + // Ensure user still has read-only access + pushIfMissing(newScopes, TargetAccessScope.READ); + pushIfMissing(newScopes, ProjectAccessScope.READ); + pushIfMissing(newScopes, OrganizationAccessScope.READ); + + // Update the scopes + await this.storage.updateOrganizationMemberAccess({ + organization: input.organization, + user: input.user, + scopes: newScopes, + }); + + // Because we checked the 
access before, it's stale by now + this.authManager.resetAccessCache(); + + return this.storage.getOrganization({ + organization: input.organization, + }); + } + + async resetInviteCode(selector: OrganizationSelector) { + this.logger.info( + 'Resetting an organization invite code (selector=%o)', + selector + ); + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.MEMBERS, + }); + return this.storage.updateOrganizationInviteCode({ + organization: selector.organization, + inviteCode: Math.random().toString(16).substr(2, 10), + }); + } + + async ensurePersonalOrganization( + payload: EnsurePersonalOrganizationEventPayload + ) { + const myOrg = await this.storage.getMyOrganization({ + user: payload.user.id, + }); + + if (!myOrg) { + this.logger.info( + 'Detected missing personal organization (user=%s)', + payload.user.id + ); + await this.createOrganization({ + name: payload.name, + user: payload.user, + type: OrganizationType.PERSONAL, + }); + } + } +} diff --git a/packages/services/api/src/modules/organization/resolvers.ts b/packages/services/api/src/modules/organization/resolvers.ts new file mode 100644 index 000000000..9fa57722b --- /dev/null +++ b/packages/services/api/src/modules/organization/resolvers.ts @@ -0,0 +1,196 @@ +import type { OrganizationModule } from './__generated__/types'; +import { createConnection } from '../../shared/schema'; +import { OrganizationType } from '../../shared/entities'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { OrganizationManager } from './providers/organization-manager'; +import { AuthManager } from '../auth/providers/auth-manager'; + +export const resolvers: OrganizationModule.Resolvers = { + Query: { + async organization(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + + return { + selector, + organization: await injector.get(OrganizationManager).getOrganization({ + 
organization, + }), + }; + }, + async organizations(_, __, { injector }) { + return injector.get(OrganizationManager).getOrganizations(); + }, + async organizationByInviteCode(_, { code }, { injector }) { + const organization = await injector + .get(OrganizationManager) + .getOrganizationByInviteCode({ + code, + }); + + if ('message' in organization) { + return organization; + } + + return { + __typename: 'OrganizationInvitationPayload', + name: organization.name, + }; + }, + }, + Mutation: { + async createOrganization(_, { input }, { injector }) { + const user = await injector.get(AuthManager).getCurrentUser(); + const organization = await injector + .get(OrganizationManager) + .createOrganization({ + name: input.name, + type: OrganizationType.REGULAR, + user, + }); + + return { + selector: { + organization: organization.cleanId, + }, + organization, + }; + }, + async deleteOrganization(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const organizationId = await translator.translateOrganizationId({ + organization: selector.organization, + }); + const organization = await injector + .get(OrganizationManager) + .deleteOrganization({ + organization: organizationId, + }); + return { + selector: { + organization: organizationId, + }, + organization, + }; + }, + async updateOrganizationName(_, { input }, { injector }) { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + const organization = await injector.get(OrganizationManager).updateName({ + name: input.name, + organization: organizationId, + }); + + return { + selector: { + organization: organization.cleanId, + }, + organization, + }; + }, + async joinOrganization(_, { code }, { injector }) { + const organization = await injector + .get(OrganizationManager) + .joinOrganization({ code }); + + if ('message' in organization) { + return organization; + } + + return { + __typename: 'OrganizationPayload', + selector: { + organization: 
organization.cleanId, + }, + organization, + }; + }, + async resetInviteCode(_, { selector }, { injector }) { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + const organizationManager = injector.get(OrganizationManager); + const organization = organizationManager.resetInviteCode({ + organization: organizationId, + }); + + return { + selector, + organization, + }; + }, + async deleteOrganizationMembers(_, { selector }, { injector }) { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + const organization = await injector + .get(OrganizationManager) + .deleteMembers({ organization: organizationId, users: selector.users }); + + return { + selector, + organization, + }; + }, + async updateOrganizationMemberAccess(_, { input }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(input); + + return { + selector: { + organization: input.organization, + }, + organization: await injector + .get(OrganizationManager) + .updateMemberAccess({ + organization, + user: input.user, + organizationScopes: input.organizationScopes, + projectScopes: input.projectScopes, + targetScopes: input.targetScopes, + }), + }; + }, + }, + Organization: { + __isTypeOf(organization) { + return !!organization.id; + }, + owner(organization, _, { injector }) { + return injector + .get(OrganizationManager) + .getOrganizationOwner({ organization: organization.id }); + }, + async me(organization, _, { injector }) { + const me = await injector.get(AuthManager).getCurrentUser(); + const members = await injector + .get(OrganizationManager) + .getOrganizationMembers({ organization: organization.id }); + + return members.find((m) => m.id === me.id)!; + }, + members(organization, _, { injector }) { + return injector + .get(OrganizationManager) + .getOrganizationMembers({ organization: organization.id }); + }, + }, + OrganizationInvitationError: { + 
__isTypeOf(obj) { + return !!obj.message; + }, + }, + OrganizationInvitationPayload: { + __isTypeOf(organization) { + return !!organization.name; + }, + name(organization) { + return organization.name; + }, + }, + OrganizationConnection: createConnection(), +}; diff --git a/packages/services/api/src/modules/persisted-operations/index.ts b/packages/services/api/src/modules/persisted-operations/index.ts new file mode 100644 index 000000000..a5e99f500 --- /dev/null +++ b/packages/services/api/src/modules/persisted-operations/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import typeDefs from './module.graphql'; +import { PersistedOperationManager } from './providers/persisted-operation-manager'; +import { resolvers } from './resolvers'; + +export const persistedOperationModule = createModule({ + id: 'persisted-operations', + dirname: __dirname, + typeDefs, + resolvers, + providers: [PersistedOperationManager], +}); diff --git a/packages/services/api/src/modules/persisted-operations/module.graphql.ts b/packages/services/api/src/modules/persisted-operations/module.graphql.ts new file mode 100644 index 000000000..499288b12 --- /dev/null +++ b/packages/services/api/src/modules/persisted-operations/module.graphql.ts @@ -0,0 +1,84 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + """ + Requires API Token + """ + publishPersistedOperations( + input: [PublishPersistedOperationInput!]! + ): PublishPersistedOperationPayload! + """ + Requires API Token + """ + deletePersistedOperation( + selector: PersistedOperationSelectorInput! + ): DeletePersistedOperationPayload! + } + + extend type Query { + """ + Requires API Token + """ + storedOperations: [PersistedOperation!]! + """ + Requires API Token + """ + comparePersistedOperations(hashes: [String!]!): [String!]! + persistedOperation( + selector: PersistedOperationSelectorInput! + ): PersistedOperation + persistedOperations( + selector: ProjectSelectorInput! 
+ ): PersistedOperationConnection! + } + + input PersistedOperationSelectorInput { + organization: ID! + project: ID! + operation: ID! + } + + type PersistedOperationSelector { + organization: ID! + project: ID! + operation: ID + } + + extend type Project { + persistedOperations: PersistedOperationConnection! + } + + type PersistedOperationConnection { + nodes: [PersistedOperation!]! + total: Int! + } + + type PersistedOperation { + id: ID! + operationHash: ID! + content: String! + name: String! + kind: String! + } + + input PublishPersistedOperationInput { + content: String! + operationHash: String + } + + type PublishPersistedOperationPayload { + summary: PublishPersistedOperationsSummary! + operations: [PersistedOperation!]! + } + + type PublishPersistedOperationsSummary { + total: Int! + unchanged: Int! + } + + type DeletePersistedOperationPayload { + selector: PersistedOperationSelector! + deletedPersistedOperation: PersistedOperation! + } +`; diff --git a/packages/services/api/src/modules/persisted-operations/providers/persisted-operation-manager.ts b/packages/services/api/src/modules/persisted-operations/providers/persisted-operation-manager.ts new file mode 100644 index 000000000..5902511b4 --- /dev/null +++ b/packages/services/api/src/modules/persisted-operations/providers/persisted-operation-manager.ts @@ -0,0 +1,208 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { hashOperation, normalizeOperation } from '@graphql-hive/core'; +import { parse, Kind, OperationDefinitionNode, DefinitionNode } from 'graphql'; +import { PersistedOperationsModule } from '../__generated__/types'; +import type { PersistedOperation } from '../../../shared/entities'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { Logger } from '../../shared/providers/logger'; +import { + PersistedOperationSelector, + ProjectSelector, + Storage, +} from '../../shared/providers/storage'; +import { Tracking } from '../../shared/providers/tracking'; 
+import { ProjectAccessScope } from '../../auth/providers/project-access'; + +/** + * Responsible for auth checks. + * Talks to Storage. + */ +@Injectable({ + scope: Scope.Operation, +}) +export class PersistedOperationManager { + private logger: Logger; + + constructor( + logger: Logger, + private storage: Storage, + private authManager: AuthManager, + private tracking: Tracking + ) { + this.logger = logger.child({ source: 'PersistedOperationManager' }); + } + + async createPersistedOperations( + operationList: readonly PersistedOperationsModule.PublishPersistedOperationInput[], + project: string, + organization: string + ): Promise { + this.logger.info( + 'Creating persisted operations (project=%s, organization=%s, size=%s)', + project, + organization, + operationList.length + ); + await this.authManager.ensureProjectAccess({ + project, + organization, + scope: ProjectAccessScope.OPERATIONS_STORE_WRITE, + }); + + await this.tracking.track({ + event: 'PERSISTED_OPERATIONS_CREATED', + data: { + size: operationList.length, + project, + organization, + }, + }); + + const operations = operationList.map((operation) => { + const document = parse(operation.content); + const normalizedDocument = normalizeOperation({ + document, + hideLiterals: true, + removeAliases: true, + }); + const operationHash = + operation.operationHash || hashOperation(normalizedDocument); + const op = document.definitions.find(isOperation)!; + + return { + operationHash, + project, + organization, + name: op.name?.value ?? 
'anonymous', + kind: op.operation, + content: normalizedDocument, + }; + }); + + const hashesToPublish = await this.comparePersistedOperations({ + organization, + project, + hashes: operations.map((op) => op.operationHash), + }); + + const publishedOperations = await Promise.all( + operations + .filter((op) => hashesToPublish.includes(op.operationHash)) + .map((operation) => this.storage.insertPersistedOperation(operation)) + ); + + const unchangedOperations = await this.getSelectedPersistedOperations( + { organization, project }, + operations + .filter((op) => !hashesToPublish.includes(op.operationHash)) + .map((op) => op.operationHash) + ); + const total = operations.length; + const unchanged = total - hashesToPublish.length; + + return { + summary: { + total, + unchanged, + }, + operations: [...publishedOperations, ...unchangedOperations], + }; + } + + async deletePersistedOperation({ + organization, + project, + operation, + }: PersistedOperationSelector): Promise { + this.logger.info( + 'Deleting an operation (operation=%s, project=%s, organization=%s)', + operation, + project, + organization + ); + await this.authManager.ensureProjectAccess({ + project, + organization, + scope: ProjectAccessScope.OPERATIONS_STORE_WRITE, + }); + + await this.tracking.track({ + event: 'PERSISTED_OPERATIONS_DELETED', + data: { + project, + organization, + }, + }); + + const result = await this.storage.deletePersistedOperation({ + project, + organization, + operation, + }); + + return result; + } + + async comparePersistedOperations( + selector: ProjectSelector & { + hashes: readonly string[]; + } + ): Promise { + this.logger.debug('Fetching persisted operations (selector=%o)', selector); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.OPERATIONS_STORE_READ, + }); + return this.storage.comparePersistedOperations(selector); + } + + async getPersistedOperations( + selector: ProjectSelector + ): Promise { + this.logger.debug('Fetching 
persisted operations (selector=%o)', selector); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.OPERATIONS_STORE_READ, + }); + return this.storage.getPersistedOperations(selector); + } + + async getPersistedOperation( + selector: PersistedOperationSelector + ): Promise { + this.logger.debug('Fetching target (selector=%o)', selector); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.OPERATIONS_STORE_READ, + }); + return this.storage.getPersistedOperation(selector); + } + + private async getSelectedPersistedOperations( + selector: ProjectSelector, + hashes: readonly string[] + ): Promise { + this.logger.debug( + 'Fetching selected persisted operations (selector=%o, size=%s)', + selector, + hashes.length + ); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.OPERATIONS_STORE_READ, + }); + + if (hashes.length === 0) { + return []; + } + + return this.storage.getSelectedPersistedOperations({ + ...selector, + hashes, + }); + } +} + +function isOperation(def: DefinitionNode): def is OperationDefinitionNode { + return def.kind === Kind.OPERATION_DEFINITION; +} diff --git a/packages/services/api/src/modules/persisted-operations/resolvers.ts b/packages/services/api/src/modules/persisted-operations/resolvers.ts new file mode 100644 index 000000000..cc2356167 --- /dev/null +++ b/packages/services/api/src/modules/persisted-operations/resolvers.ts @@ -0,0 +1,118 @@ +import { createConnection } from '../../shared/schema'; +import { OrganizationManager } from '../organization/providers/organization-manager'; +import { ProjectManager } from '../project/providers/project-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { PersistedOperationManager } from './providers/persisted-operation-manager'; +import type { PersistedOperationsModule } from './__generated__/types'; + +export const resolvers: 
PersistedOperationsModule.Resolvers = { + Query: { + async storedOperations(_, _2, { injector }) { + const [organization, project] = await Promise.all([ + injector.get(OrganizationManager).getOrganizationIdByToken(), + injector.get(ProjectManager).getProjectIdByToken(), + ]); + + return injector.get(PersistedOperationManager).getPersistedOperations({ + organization, + project, + }); + }, + async persistedOperation(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, operation] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translatePersistedOperationHash(selector), + ]); + + return injector.get(PersistedOperationManager).getPersistedOperation({ + organization, + project, + operation, + }); + }, + async persistedOperations(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + ]); + + return injector.get(PersistedOperationManager).getPersistedOperations({ + organization, + project, + }); + }, + async comparePersistedOperations(_, { hashes }, { injector }) { + const [organization, project] = await Promise.all([ + injector.get(OrganizationManager).getOrganizationIdByToken(), + injector.get(ProjectManager).getProjectIdByToken(), + ]); + + return injector + .get(PersistedOperationManager) + .comparePersistedOperations({ + organization, + project, + hashes, + }); + }, + }, + Mutation: { + async publishPersistedOperations(_, { input }, { injector }) { + if (input.length === 0) { + return { + summary: { + total: 0, + unchanged: 0, + }, + operations: [], + }; + } + + const [organization, project] = await Promise.all([ + injector.get(OrganizationManager).getOrganizationIdByToken(), + injector.get(ProjectManager).getProjectIdByToken(), + ]); + + return injector + 
.get(PersistedOperationManager) + .createPersistedOperations(input, project, organization); + }, + async deletePersistedOperation(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organizationId, projectId, operationId] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translatePersistedOperationHash(selector), + ]); + + const persistedOperation = await injector + .get(PersistedOperationManager) + .deletePersistedOperation({ + organization: organizationId, + project: projectId, + operation: operationId, + }); + + return { + selector: { + organization: organizationId, + project: projectId, + operation: operationId, + }, + deletedPersistedOperation: persistedOperation, + }; + }, + }, + Project: { + persistedOperations(project, _, { injector }) { + return injector.get(PersistedOperationManager).getPersistedOperations({ + project: project.id, + organization: project.orgId, + }); + }, + }, + PersistedOperationConnection: createConnection(), +}; diff --git a/packages/services/api/src/modules/project/index.ts b/packages/services/api/src/modules/project/index.ts new file mode 100644 index 000000000..bf144351b --- /dev/null +++ b/packages/services/api/src/modules/project/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import { ProjectManager } from './providers/project-manager'; +import typeDefs from './module.graphql'; + +export const projectModule = createModule({ + id: 'project', + dirname: __dirname, + typeDefs, + resolvers, + providers: [ProjectManager], +}); diff --git a/packages/services/api/src/modules/project/module.graphql.ts b/packages/services/api/src/modules/project/module.graphql.ts new file mode 100644 index 000000000..e2cd57ead --- /dev/null +++ b/packages/services/api/src/modules/project/module.graphql.ts @@ -0,0 +1,88 @@ +import { gql } from 'graphql-modules'; + +export 
default gql` + extend type Query { + project(selector: ProjectSelectorInput!): Project + projects(selector: OrganizationSelectorInput!): ProjectConnection! + } + + extend type Mutation { + createProject(input: CreateProjectInput!): CreateProjectPayload! + updateProjectName(input: UpdateProjectNameInput!): UpdateProjectPayload! + updateProjectGitRepository( + input: UpdateProjectGitRepositoryInput! + ): UpdateProjectPayload! + deleteProject(selector: ProjectSelectorInput!): DeleteProjectPayload! + } + + input ProjectSelectorInput { + organization: ID! + project: ID! + } + + type ProjectSelector { + organization: ID! + project: ID! + } + + enum ProjectType { + FEDERATION + STITCHING + SINGLE + CUSTOM + } + + extend type Organization { + projects: ProjectConnection! + } + + type Project { + id: ID! + cleanId: ID! + name: String! + type: ProjectType! + buildUrl: String + validationUrl: String + } + + type ProjectConnection { + nodes: [Project!]! + total: Int! + } + + input CreateProjectInput { + name: String! + type: ProjectType! + organization: ID! + buildUrl: String + validationUrl: String + } + + input UpdateProjectNameInput { + name: String! + organization: ID! + project: ID! + } + + input UpdateProjectGitRepositoryInput { + gitRepository: String + organization: ID! + project: ID! + } + + type CreateProjectPayload { + selector: ProjectSelector! + createdProject: Project! + createdTarget: Target! + } + + type UpdateProjectPayload { + selector: ProjectSelector! + updatedProject: Project! + } + + type DeleteProjectPayload { + selector: ProjectSelector! + deletedProject: Project! 
+ } +`; diff --git a/packages/services/api/src/modules/project/providers/project-manager.ts b/packages/services/api/src/modules/project/providers/project-manager.ts new file mode 100644 index 000000000..3b67b2cb0 --- /dev/null +++ b/packages/services/api/src/modules/project/providers/project-manager.ts @@ -0,0 +1,201 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { paramCase } from 'param-case'; +import type { Project, ProjectType } from '../../../shared/entities'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { Logger } from '../../shared/providers/logger'; +import { + Storage, + OrganizationSelector, + ProjectSelector, +} from '../../shared/providers/storage'; +import { NullableAndPartial, share, uuid } from '../../../shared/helpers'; +import { SchemaManager } from '../../schema/providers/schema-manager'; +import type { CustomOrchestratorConfig } from '../../schema/providers/orchestrators/custom'; +import { ActivityManager } from '../../activity/providers/activity-manager'; +import { TokenStorage } from '../../token/providers/token-storage'; +import { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import { ProjectAccessScope } from '../../auth/providers/project-access'; + +/** + * Responsible for auth checks. + * Talks to Storage. 
+ */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class ProjectManager { + private logger: Logger; + + constructor( + logger: Logger, + private storage: Storage, + private authManager: AuthManager, + private schemaManager: SchemaManager, + private tokenStorage: TokenStorage, + private activityManager: ActivityManager + ) { + this.logger = logger.child({ source: 'ProjectManager' }); + } + + async createProject( + input: { + name: string; + type: ProjectType; + } & OrganizationSelector & + NullableAndPartial + ): Promise { + const { name, type, organization, buildUrl, validationUrl } = input; + this.logger.info('Creating a project (input=%o)', input); + let cleanId = paramCase(name); + + if (await this.storage.getProjectByCleanId({ cleanId, organization })) { + cleanId = paramCase(`${name}-${uuid(4)}`); + } + + const orchestrator = this.schemaManager.matchOrchestrator(type); + + orchestrator.ensureConfig({ buildUrl, validationUrl }); + + // create project + const project = await this.storage.createProject({ + name, + cleanId, + type, + organization, + buildUrl, + validationUrl, + }); + + await this.activityManager.create({ + type: 'PROJECT_CREATED', + selector: { + organization, + project: project.id, + }, + meta: { + projectType: type, + }, + }); + + return project; + } + + async deleteProject({ + organization, + project, + }: ProjectSelector): Promise { + this.logger.info( + 'Deleting a project (project=%s, organization=%s)', + project, + organization + ); + await this.authManager.ensureProjectAccess({ + project, + organization, + scope: ProjectAccessScope.DELETE, + }); + + const [result] = await Promise.all([ + this.storage.deleteProject({ + project, + organization, + }), + this.tokenStorage.invalidateProject({ + project, + organization, + }), + ]); + + await this.activityManager.create({ + type: 'PROJECT_DELETED', + selector: { + organization, + }, + meta: { + name: result.name, + cleanId: result.cleanId, + }, + }); + + return result; + } 
+ + getProjectIdByToken: () => Promise = share(async () => { + const token = this.authManager.ensureApiToken(); + const { project } = await this.tokenStorage.getToken({ token }); + + return project; + }); + + async getProject(selector: ProjectSelector): Promise { + this.logger.debug('Fetching project (selector=%o)', selector); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getProject(selector); + } + + async getProjects(selector: OrganizationSelector): Promise { + this.logger.debug('Fetching projects (selector=%o)', selector); + await this.authManager.ensureOrganizationAccess({ + ...selector, + scope: OrganizationAccessScope.READ, + }); + return this.storage.getProjects(selector); + } + + async updateName( + input: { + name: string; + } & ProjectSelector + ): Promise { + const { name, organization, project } = input; + this.logger.info('Updating a project name (input=%o)', input); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.SETTINGS, + }); + const user = await this.authManager.getCurrentUser(); + + const result = await this.storage.updateProjectName({ + name, + organization, + project, + user: user.id, + }); + + await this.activityManager.create({ + type: 'PROJECT_NAME_UPDATED', + selector: { + organization, + project, + }, + meta: { + value: name, + }, + }); + + return result; + } + + async updateGitRepository( + input: { + gitRepository?: string | null; + } & ProjectSelector + ): Promise { + const { gitRepository, organization, project } = input; + this.logger.info('Updating a project git repository (input=%o)', input); + await this.authManager.ensureProjectAccess({ + ...input, + scope: ProjectAccessScope.SETTINGS, + }); + + return this.storage.updateProjectGitRepository({ + gitRepository: gitRepository?.trim() === '' ? 
null : gitRepository, + organization, + project, + }); + } +} diff --git a/packages/services/api/src/modules/project/resolvers.ts b/packages/services/api/src/modules/project/resolvers.ts new file mode 100644 index 000000000..c8b149ce8 --- /dev/null +++ b/packages/services/api/src/modules/project/resolvers.ts @@ -0,0 +1,129 @@ +import type { ProjectModule } from './__generated__/types'; +import { ProjectType } from '../../shared/entities'; +import { createConnection } from '../../shared/schema'; +import { ProjectManager } from './providers/project-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { TargetManager } from '../target/providers/target-manager'; + +export const resolvers: ProjectModule.Resolvers & { ProjectType: any } = { + Query: { + async project(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + ]); + return injector.get(ProjectManager).getProject({ + project, + organization, + }); + }, + async projects(_, { selector }, { injector }) { + const organization = await injector + .get(IdTranslator) + .translateOrganizationId(selector); + return injector.get(ProjectManager).getProjects({ organization }); + }, + }, + Mutation: { + async createProject(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const organization = await translator.translateOrganizationId({ + organization: input.organization, + }); + const project = await injector.get(ProjectManager).createProject({ + ...input, + organization, + }); + const target = await injector.get(TargetManager).createTarget({ + name: 'experiment', + project: project.id, + organization, + }); + return { + selector: { + organization: input.organization, + project: project.cleanId, + }, + createdProject: project, + createdTarget: target, + }; + }, + async deleteProject(_, { 
selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organizationId, projectId] = await Promise.all([ + translator.translateOrganizationId({ + organization: selector.organization, + }), + translator.translateProjectId({ + organization: selector.organization, + project: selector.project, + }), + ]); + const deletedProject = await injector.get(ProjectManager).deleteProject({ + organization: organizationId, + project: projectId, + }); + return { + selector: { + organization: organizationId, + project: projectId, + }, + deletedProject, + }; + }, + async updateProjectName(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organizationId, projectId] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + ]); + + const project = await injector.get(ProjectManager).updateName({ + name: input.name, + organization: organizationId, + project: projectId, + }); + + return { + selector: { + organization: input.organization, + project: input.project, + }, + updatedProject: project, + }; + }, + async updateProjectGitRepository(_, { input }, { injector }) { + const [organization, project] = await Promise.all([ + injector.get(IdTranslator).translateOrganizationId(input), + injector.get(IdTranslator).translateProjectId(input), + ]); + + return { + selector: { + organization: input.organization, + project: input.project, + }, + updatedProject: await injector.get(ProjectManager).updateGitRepository({ + project, + organization, + gitRepository: input.gitRepository, + }), + }; + }, + }, + ProjectType: { + FEDERATION: ProjectType.FEDERATION, + STITCHING: ProjectType.STITCHING, + SINGLE: ProjectType.SINGLE, + CUSTOM: ProjectType.CUSTOM, + }, + Organization: { + projects(organization, _, { injector }) { + return injector + .get(ProjectManager) + .getProjects({ organization: organization.id }); + }, + }, + ProjectConnection: createConnection(), +}; diff --git 
a/packages/services/api/src/modules/rate-limit/index.ts b/packages/services/api/src/modules/rate-limit/index.ts new file mode 100644 index 000000000..a20c2fa93 --- /dev/null +++ b/packages/services/api/src/modules/rate-limit/index.ts @@ -0,0 +1,12 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { RateLimitProvider } from './providers/rate-limit.provider'; + +export const rateLimitModule = createModule({ + id: 'rate-limit', + dirname: __dirname, + typeDefs, + resolvers, + providers: [RateLimitProvider], +}); diff --git a/packages/services/api/src/modules/rate-limit/module.graphql.ts b/packages/services/api/src/modules/rate-limit/module.graphql.ts new file mode 100644 index 000000000..482101f78 --- /dev/null +++ b/packages/services/api/src/modules/rate-limit/module.graphql.ts @@ -0,0 +1,19 @@ +import { gql } from 'graphql-modules'; + +export default gql` + type RateLimit { + limitedForOperations: Boolean! + limitedForSchemaPushes: Boolean! + operations: SafeInt! + schemaPushes: SafeInt! + } + + input RateLimitInput { + operations: SafeInt! + schemaPushes: SafeInt! + } + + extend type Organization { + rateLimit: RateLimit! 
+ } +`; diff --git a/packages/services/api/src/modules/rate-limit/providers/rate-limit.provider.ts b/packages/services/api/src/modules/rate-limit/providers/rate-limit.provider.ts new file mode 100644 index 000000000..2c7d7638d --- /dev/null +++ b/packages/services/api/src/modules/rate-limit/providers/rate-limit.provider.ts @@ -0,0 +1,61 @@ +import { Inject, Injectable, Scope } from 'graphql-modules'; +import { sentry } from '../../../shared/sentry'; +import { Logger } from '../../shared/providers/logger'; +import { RATE_LIMIT_SERVICE_CONFIG } from './tokens'; +import type { RateLimitServiceConfig } from './tokens'; +import type { RateLimitApi, RateLimitQueryInput } from '@hive/rate-limit'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; + +@Injectable({ + global: true, + scope: Scope.Singleton, +}) +export class RateLimitProvider { + private logger: Logger; + private rateLimit; + + constructor( + logger: Logger, + @Inject(RATE_LIMIT_SERVICE_CONFIG) + rateLimitServiceConfig: RateLimitServiceConfig + ) { + this.logger = logger.child({ service: 'RateLimitProvider' }); + this.rateLimit = rateLimitServiceConfig.endpoint + ? 
createTRPCClient({ + url: `${rateLimitServiceConfig.endpoint}/trpc`, + fetch, + }) + : null; + } + + async assertRateLimit(input: RateLimitQueryInput<'checkRateLimit'>) { + const limit = await this.checkRateLimit(input); + + if (limit.limited) { + throw new Error(`Monthly limit for ${input.type} has reached!`); + } + + return limit; + } + + @sentry('RateLimitProvider.checkRateLimit') + async checkRateLimit(input: RateLimitQueryInput<'checkRateLimit'>) { + if (this.rateLimit === null) { + this.logger.warn( + `Unable to check rate-limit for input: %o , service information is not available`, + input + ); + + return { + limited: false, + }; + } + + this.logger.debug( + `Checking rate limit for target id="${input.id}", type=${input.type}` + ); + + return await this.rateLimit.query('checkRateLimit', input); + } +} diff --git a/packages/services/api/src/modules/rate-limit/providers/tokens.ts b/packages/services/api/src/modules/rate-limit/providers/tokens.ts new file mode 100644 index 000000000..8b5562380 --- /dev/null +++ b/packages/services/api/src/modules/rate-limit/providers/tokens.ts @@ -0,0 +1,8 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface RateLimitServiceConfig { + endpoint: string | null; +} + +export const RATE_LIMIT_SERVICE_CONFIG = + new InjectionToken('rate-limit-service-config'); diff --git a/packages/services/api/src/modules/rate-limit/resolvers.ts b/packages/services/api/src/modules/rate-limit/resolvers.ts new file mode 100644 index 000000000..4317e8609 --- /dev/null +++ b/packages/services/api/src/modules/rate-limit/resolvers.ts @@ -0,0 +1,38 @@ +import { RateLimitProvider } from './providers/rate-limit.provider'; +import { RateLimitModule } from './__generated__/types'; + +export const resolvers: RateLimitModule.Resolvers = { + Organization: { + rateLimit: async (org, args, { injector }) => { + let limitedForOperations = false; + let limitedForSchemaPushes = false; + + try { + const [organizationRateLimit, schemaPushLimit] = 
await Promise.all([ + injector.get(RateLimitProvider).checkRateLimit({ + entityType: 'organization', + id: org.id, + type: 'operations-reporting', + }), + injector.get(RateLimitProvider).checkRateLimit({ + entityType: 'organization', + id: org.id, + type: 'schema-push', + }), + ]); + + limitedForOperations = organizationRateLimit.limited; + limitedForSchemaPushes = schemaPushLimit.limited; + } catch (e) { + // nothing to do here + } + + return { + limitedForOperations, + limitedForSchemaPushes, + operations: org.monthlyRateLimit.operations, + schemaPushes: org.monthlyRateLimit.schemaPush, + }; + }, + }, +}; diff --git a/packages/services/api/src/modules/schema/__tests__/schema-publisher.spec.ts b/packages/services/api/src/modules/schema/__tests__/schema-publisher.spec.ts new file mode 100644 index 000000000..977e23932 --- /dev/null +++ b/packages/services/api/src/modules/schema/__tests__/schema-publisher.spec.ts @@ -0,0 +1,935 @@ +test.todo('move the code below to integration tests'); + +// /* eslint-disable import/no-extraneous-dependencies */ +// import 'reflect-metadata'; +// import { testkit } from 'graphql-modules'; +// import { parse } from 'graphql'; +// import { schemaModule } from '../index'; +// import { sharedModule } from '../../shared/index'; +// import { Logger } from '../../shared/providers/logger'; +// import { Storage } from '../../shared/providers/storage'; +// import { ProjectManager } from '../../project/providers/project-manager'; +// import { +// ProjectType, +// OrganizationType, +// Organization, +// Project, +// Target, +// } from '../../../shared/entities'; +// import { TargetManager } from '../../target/providers/target-manager'; +// import { OrganizationManager } from '../../organization/providers/organization-manager'; +// import { AuthManager } from '../../auth/providers/auth-manager'; +// import { HttpClient } from '../../shared/providers/http-client'; +// import { Tracking } from '../../shared/providers/tracking'; +// import { 
REDIS_INSTANCE } from '../../shared/providers/redis'; +// import { OperationsManager } from '../../operations/providers/operations-manager'; +// import { AlertsManager } from '../../alerts/providers/alerts-manager'; +// import { CdnProvider } from '../../cdn/providers/cdn.provider'; + +// const Redis = require('ioredis-mock'); + +// const schemaPublishMutation = parse(/* GraphQL */ ` +// mutation schemaPublish( +// $input: SchemaPublishInput! +// $includeMissingServiceError: Boolean = false +// ) { +// schemaPublish(input: $input) { +// __typename +// ... on SchemaPublishSuccess { +// initial +// valid +// message +// changes { +// total +// nodes { +// criticality +// path +// } +// } +// } +// ... on SchemaPublishError { +// valid +// changes { +// total +// nodes { +// criticality +// path +// } +// } +// errors { +// total +// } +// } +// ... on SchemaPublishMissingServiceError +// @include(if: $includeMissingServiceError) { +// message +// } +// } +// } +// `); + +// const emptyLogger = () => { +// const logger = { +// log() {}, +// warn() {}, +// error() {}, +// info() {}, +// debug() {}, +// child() { +// return logger; +// }, +// }; +// return { +// provide: Logger, +// useValue: logger, +// }; +// }; + +// function createProviders({ +// organization, +// project, +// target, +// alertsManager, +// cdnProvider, +// }: { +// organization: Organization; +// project: Project; +// target: Target; +// alertsManager?: any; +// cdnProvider?: any; +// }) { +// return [ +// emptyLogger(), +// HttpClient, +// Tracking, +// { +// provide: AuthManager, +// useValue: { +// ensureApiToken() { +// return 'api-token'; +// }, +// async ensureOrganizationAccess() {}, +// async ensureProjectAccess() {}, +// async ensureTargetAccess() {}, +// } as Pick< +// AuthManager, +// | 'ensureApiToken' +// | 'ensureOrganizationAccess' +// | 'ensureProjectAccess' +// | 'ensureTargetAccess' +// >, +// }, +// { +// provide: OperationsManager, +// useValue: {}, +// }, +// { +// provide: 
AlertsManager, +// useValue: alertsManager ?? { +// triggerSchemaChangeNotifications() {}, +// }, +// }, +// { +// provide: CdnProvider, +// useValue: cdnProvider ?? { +// publish() {}, +// }, +// }, +// { +// provide: OrganizationManager, +// useValue: { +// async getOrganization() { +// return organization; +// }, +// async getOrganizationIdByToken() { +// return organization.id; +// }, +// } as Pick< +// OrganizationManager, +// 'getOrganization' | 'getOrganizationIdByToken' +// >, +// }, +// { +// provide: ProjectManager, +// useValue: { +// async getProject() { +// return project; +// }, +// async getProjectIdByToken() { +// return project.id; +// }, +// } as Pick, +// }, +// { +// provide: TargetManager, +// useValue: { +// async getTarget() { +// return target; +// }, +// async getTargetIdByToken() { +// return target.id; +// }, +// } as Pick, +// }, + +// { +// provide: REDIS_INSTANCE, +// useFactory() { +// return new Redis({ +// db: 0, +// maxRetriesPerRequest: null, +// enableReadyCheck: false, +// }); +// }, +// }, +// ]; +// } + +// // TODO: Move it to integration tests +// describe.skip('publish', () => { +// test('publish initial schema', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 'project-name', +// type: ProjectType.SINGLE, +// orgId: organization.id, +// }; + +// const target = { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const insertSchema = jest.fn< +// ReturnType, +// Parameters +// >((input) => +// Promise.resolve({ +// ...input, +// id: 'schema-id', +// source: input.schema, +// date: new Date().toISOString(), +// metadata: input.metadata ? 
JSON.parse(input.metadata) : null, +// }) +// ); + +// const createVersion = jest.fn< +// ReturnType, +// Parameters +// >((input) => +// Promise.resolve({ +// id: 'version-id', +// date: Date.now(), +// url: input.url, +// valid: input.valid, +// commit: input.commit, +// base_schema: input.base_schema, +// }) +// ); + +// const input = { +// author: 'Kamil', +// commit: 'commit', +// sdl: 'type Query { foo: String }', +// }; + +// const triggerSchemaChangeNotifications = jest.fn(() => Promise.resolve()); +// const cdnProviderPublish = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// alertsManager: { triggerSchemaChangeNotifications }, +// cdnProvider: { +// publish: cdnProviderPublish, +// }, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [], +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return ''; +// }, +// insertSchema, +// createVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// | 'getBaseSchema' +// >, +// }, +// ], +// }); + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// variableValues: { +// input, +// }, +// }); + +// expect(result.errors).not.toBeDefined(); + +// // It should insert schema +// expect(insertSchema).toHaveBeenCalledWith({ +// schema: input.sdl, +// commit: input.commit, +// author: input.author, +// organization: organization.id, +// project: project.id, +// target: target.id, +// }); + +// // Create a new version of schema +// expect(createVersion).toHaveBeenCalledWith({ +// valid: true, +// commit: 'schema-id', +// commits: ['schema-id'], +// organization: organization.id, +// project: project.id, +// target: target.id, +// base_schema: 
'', +// }); + +// expect(triggerSchemaChangeNotifications).toHaveBeenCalledTimes(1); +// expect(cdnProviderPublish).toHaveBeenCalledTimes(1); + +// expect(result.data).toEqual({ +// schemaPublish: { +// __typename: 'SchemaPublishSuccess', +// initial: true, +// valid: true, +// message: null, +// changes: { +// nodes: [], +// total: 0, +// }, +// }, +// }); +// }); + +// test('update service url without db inserts', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 'project-name', +// type: ProjectType.SINGLE, +// orgId: organization.id, +// }; + +// const target = { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const insertSchema = jest.fn(() => +// Promise.reject(`You should not be here!`) +// ); + +// const createVersion = jest.fn(() => +// Promise.reject(`You should not be here!`) +// ); + +// const updateSchemaUrlOfVersion = jest.fn(async () => {}); + +// const input = { +// author: 'Kamil', +// commit: 'commit', +// sdl: 'type Query { foo: String }', +// url: 'https://api.com', +// service: 'service', +// }; + +// const triggerSchemaChangeNotifications = jest.fn(() => Promise.resolve()); +// const cdnProviderPublish = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// alertsManager: { triggerSchemaChangeNotifications }, +// cdnProvider: { +// publish: cdnProviderPublish, +// }, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [ +// { +// id: 'existing-schema-commit-id', +// author: 'existing-author', +// commit: 
'existing-commit', +// source: input.sdl, +// service: input.service, +// target: 'existing-target', +// date: new Date().toISOString(), +// }, +// ], +// version: 'existing-version-id', +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return ''; +// }, +// insertSchema, +// createVersion, +// updateSchemaUrlOfVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// | 'updateSchemaUrlOfVersion' +// | 'getBaseSchema' +// >, +// }, +// ], +// }); + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// variableValues: { +// input, +// }, +// }); + +// expect(result.errors).not.toBeDefined(); + +// // Make sure it doesn't create new version or schema +// expect(insertSchema).not.toHaveBeenCalled(); +// expect(createVersion).not.toHaveBeenCalled(); + +// // No notifications +// expect(triggerSchemaChangeNotifications).not.toHaveBeenCalled(); +// // Update CDN +// expect(cdnProviderPublish).toHaveBeenCalledTimes(1); + +// // Make sure it updates the url of the existing version +// expect(updateSchemaUrlOfVersion).toHaveBeenCalledWith({ +// version: 'existing-version-id', +// commit: 'existing-schema-commit-id', +// url: input.url, +// target: target.id, +// project: project.id, +// organization: organization.id, +// }); + +// expect(result.data).toEqual({ +// schemaPublish: { +// __typename: 'SchemaPublishSuccess', +// initial: false, +// valid: true, +// message: `New service url: ${input.url} (previously: empty)`, +// changes: { +// nodes: [], +// total: 0, +// }, +// }, +// }); +// }); + +// test('do not update service url if exactly the same', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 
'project-name', +// type: ProjectType.SINGLE, +// orgId: organization.id, +// }; + +// const target = { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const insertSchema = jest.fn(() => +// Promise.reject(`You should not be here!`) +// ); + +// const createVersion = jest.fn(() => +// Promise.reject(`You should not be here!`) +// ); + +// const updateSchemaUrlOfVersion = jest.fn(async () => {}); + +// const input = { +// author: 'Kamil', +// commit: 'commit', +// sdl: 'type Query { foo: String }', +// url: 'https://api.com', +// service: 'service', +// }; + +// const triggerSchemaChangeNotifications = jest.fn(() => Promise.resolve()); +// const cdnProviderPublish = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// alertsManager: { triggerSchemaChangeNotifications }, +// cdnProvider: { +// publish: cdnProviderPublish, +// }, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [ +// { +// id: 'existing-schema-commit-id', +// author: 'existing-author', +// commit: 'existing-commit', +// url: input.url, +// source: input.sdl, +// service: input.service, +// target: 'existing-target', +// date: new Date().toISOString(), +// }, +// ], +// version: 'existing-version-id', +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return ''; +// }, +// insertSchema, +// createVersion, +// updateSchemaUrlOfVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// | 'updateSchemaUrlOfVersion' +// | 'getBaseSchema' +// >, +// }, +// ], +// }); + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// 
variableValues: { +// input, +// }, +// }); + +// // Make sure it doesn't create new version or schema +// expect(insertSchema).not.toHaveBeenCalled(); +// expect(createVersion).not.toHaveBeenCalled(); + +// // No notifications +// expect(triggerSchemaChangeNotifications).not.toHaveBeenCalled(); +// // No CDN update +// expect(cdnProviderPublish).not.toHaveBeenCalled(); + +// // Do not update if the url is the same +// expect(updateSchemaUrlOfVersion).not.toHaveBeenCalled(); + +// expect(result.errors).not.toBeDefined(); +// expect(result.data).toEqual({ +// schemaPublish: { +// __typename: 'SchemaPublishSuccess', +// initial: false, +// valid: true, +// message: null, +// changes: { +// nodes: [], +// total: 0, +// }, +// }, +// }); +// }); + +// test('creating root type should not appear as breaking change', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 'project-name', +// type: ProjectType.SINGLE, +// orgId: organization.id, +// }; + +// const target = { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const createVersion = jest.fn(); + +// const existingSDL = 'type Query { foo: String }'; +// const input = { +// author: 'Kamil', +// commit: 'commit', +// sdl: `${existingSDL} type Subscription { onFoo: String }`, +// }; + +// const triggerSchemaChangeNotifications = jest.fn(() => Promise.resolve()); +// const cdnProviderPublish = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// alertsManager: { triggerSchemaChangeNotifications }, +// cdnProvider: { +// publish: cdnProviderPublish, 
+// }, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [ +// { +// id: 'existing-schema-commit-id', +// author: 'existing-author', +// commit: 'existing-commit', +// source: existingSDL, +// date: new Date().toISOString(), +// target: 'existing-target', +// }, +// ], +// version: 'existing-version-id', +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return ''; +// }, +// async insertSchema(obj) { +// return { +// id: 'new-schema-commit-id', +// author: obj.author, +// commit: obj.commit, +// source: obj.schema, +// target: obj.target, +// date: new Date().toISOString(), +// }; +// }, +// createVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// | 'getBaseSchema' +// >, +// }, +// ], +// }); + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// variableValues: { +// input, +// }, +// }); + +// expect(result.errors).not.toBeDefined(); + +// expect(triggerSchemaChangeNotifications).toHaveBeenCalledTimes(1); +// expect(cdnProviderPublish).toHaveBeenCalledTimes(1); + +// expect(result.data).toEqual({ +// schemaPublish: { +// __typename: 'SchemaPublishSuccess', +// initial: false, +// message: null, +// valid: true, +// changes: { +// nodes: [ +// { +// criticality: 'Safe', +// path: ['Subscription'], +// }, +// ], +// total: 1, +// }, +// }, +// }); +// }); + +// test('can not update stitching project without specified service input if SchemaPublishMissingServiceError is selected', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 'project-name', +// type: ProjectType.STITCHING, +// orgId: organization.id, +// }; + +// const target 
= { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const createVersion = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [], +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return null; +// }, +// async insertSchema(obj) { +// return { +// id: 'new-schema-commit-id', +// author: obj.author, +// commit: obj.commit, +// source: obj.schema, +// target: obj.target, +// date: new Date().toISOString(), +// }; +// }, +// createVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// >, +// }, +// ], +// }); + +// const input = { +// author: 'n1', +// commit: 'commit', +// sdl: `type Subscription { onFoo: String }`, +// }; + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// variableValues: { +// input, +// includeMissingServiceError: true, +// }, +// }); + +// expect(result.data).toBeDefined(); +// expect(result.errors).toBeUndefined(); + +// expect(result.data?.schemaPublish).toEqual({ +// __typename: 'SchemaPublishMissingServiceError', +// message: +// 'Can not publish schema for a stitching project without a service name.', +// }); +// }); + +// test('can not update federation project without specified service input if SchemaPublishMissingServiceError is selected', async () => { +// const organization = { +// id: 'org-id', +// cleanId: 'ogr-clean-id', +// name: 'org-name', +// inviteCode: 'invite', +// type: OrganizationType.REGULAR, +// }; + +// const project = { +// id: 'project-id', +// cleanId: 'project-clean-id', +// name: 
'project-name', +// type: ProjectType.FEDERATION, +// orgId: organization.id, +// }; + +// const target = { +// id: 'target-id', +// cleanId: 'target-clean-id', +// name: 'target-name', +// projectId: project.id, +// orgId: organization.id, +// }; + +// const createVersion = jest.fn(); + +// const mod = testkit.testModule(schemaModule, { +// replaceExtensions: true, +// inheritTypeDefs: [sharedModule], +// providers: [ +// ...createProviders({ +// organization, +// project, +// target, +// }), +// { +// provide: Storage, +// useValue: { +// async getLatestSchemas() { +// return { +// schemas: [], +// }; +// }, +// async getMaybeSchema() { +// return null; +// }, +// async getBaseSchema() { +// return null; +// }, +// async insertSchema(obj) { +// return { +// id: 'new-schema-commit-id', +// author: obj.author, +// commit: obj.commit, +// source: obj.schema, +// target: obj.target, +// date: new Date().toISOString(), +// }; +// }, +// createVersion, +// } as Pick< +// Storage, +// | 'getLatestSchemas' +// | 'getMaybeSchema' +// | 'insertSchema' +// | 'createVersion' +// >, +// }, +// ], +// }); + +// const input = { +// author: 'Kamil', +// commit: 'commit', +// sdl: `type Subscription { onFoo: String }`, +// }; + +// const result = await testkit.execute(mod, { +// document: schemaPublishMutation, +// variableValues: { +// input, +// includeMissingServiceError: true, +// }, +// }); + +// expect(result.data).toBeDefined(); +// expect(result.errors).toBeUndefined(); + +// expect(result.data?.schemaPublish).toEqual({ +// __typename: 'SchemaPublishMissingServiceError', +// message: +// 'Can not publish schema for a federation project without a service name.', +// }); +// }); +// }); diff --git a/packages/services/api/src/modules/schema/index.ts b/packages/services/api/src/modules/schema/index.ts new file mode 100644 index 000000000..f2c967754 --- /dev/null +++ b/packages/services/api/src/modules/schema/index.ts @@ -0,0 +1,22 @@ +import { createModule } from 
'graphql-modules'; +import { resolvers } from './resolvers'; +import { SchemaManager } from './providers/schema-manager'; +import { SchemaPublisher } from './providers/schema-publisher'; +import { SchemaValidator } from './providers/schema-validator'; +import { orchestrators } from './providers/orchestrators'; +import { Inspector } from './providers/inspector'; +import typeDefs from './module.graphql'; + +export const schemaModule = createModule({ + id: 'schema', + dirname: __dirname, + typeDefs, + resolvers, + providers: [ + SchemaManager, + SchemaValidator, + SchemaPublisher, + Inspector, + ...orchestrators, + ], +}); diff --git a/packages/services/api/src/modules/schema/module.graphql.ts b/packages/services/api/src/modules/schema/module.graphql.ts new file mode 100644 index 000000000..1ad2d1c1c --- /dev/null +++ b/packages/services/api/src/modules/schema/module.graphql.ts @@ -0,0 +1,270 @@ +import { gql } from 'graphql-modules'; + +export default gql` + extend type Mutation { + """ + Requires API Token + """ + schemaPublish(input: SchemaPublishInput!): SchemaPublishPayload! + """ + Requires API Token + """ + schemaCheck(input: SchemaCheckInput!): SchemaCheckPayload! + updateSchemaVersionStatus(input: SchemaVersionUpdateInput!): SchemaVersion! + updateBaseSchema(input: UpdateBaseSchemaInput!): Target! + updateSchemaServiceName(input: UpdateSchemaServiceNameInput!): Target! + schemaSyncCDN(input: SchemaSyncCDNInput!): SchemaSyncCDNPayload! + } + + extend type Query { + schemaCompare(selector: SchemaCompareInput!): SchemaComparePayload! + schemaCompareToPrevious( + selector: SchemaCompareToPreviousInput! + ): SchemaComparePayload! + schemaVersions( + selector: SchemaVersionsInput! + after: ID + limit: Int! + ): SchemaVersionConnection! + schemaVersion(selector: SchemaVersionInput!): SchemaVersion! + """ + Requires API Token + """ + latestVersion: SchemaVersion! + """ + Requires API Token + """ + latestValidVersion: SchemaVersion! 
+ } + + extend type Target { + latestSchemaVersion: SchemaVersion + baseSchema: String + hasSchema: Boolean! + } + + type SchemaConnection { + nodes: [Schema!]! + total: Int! + } + + type Schema { + id: ID! + author: String! + source: String! + date: DateTime! + commit: ID! + url: String + service: String + metadata: String + } + + union SchemaPublishPayload = + SchemaPublishSuccess + | SchemaPublishError + | SchemaPublishMissingServiceError + | GitHubSchemaPublishSuccess + | GitHubSchemaPublishError + + input SchemaPublishInput { + service: ID + url: String + sdl: String! + author: String! + commit: String! + force: Boolean + metadata: String + """ + Talk to GitHub Application and create a check-run + """ + github: Boolean + } + + union SchemaCheckPayload = + SchemaCheckSuccess + | SchemaCheckError + | GitHubSchemaCheckSuccess + | GitHubSchemaCheckError + + enum CriticalityLevel { + Breaking + Dangerous + Safe + } + + type SchemaChange { + criticality: CriticalityLevel! + message: String! + path: [String!] + } + + type SchemaError { + message: String! + path: [String!] + } + + type SchemaChangeConnection { + nodes: [SchemaChange!]! + total: Int! + } + + type SchemaErrorConnection { + nodes: [SchemaError!]! + total: Int! + } + + type SchemaCheckSuccess { + valid: Boolean! + changes: SchemaChangeConnection + } + + type SchemaCheckError { + valid: Boolean! + changes: SchemaChangeConnection + errors: SchemaErrorConnection! + } + + type GitHubSchemaCheckSuccess { + message: String! + } + + type GitHubSchemaCheckError { + message: String! + } + + type GitHubSchemaPublishSuccess { + message: String! + } + + type GitHubSchemaPublishError { + message: String! + } + + type SchemaPublishSuccess { + initial: Boolean! + valid: Boolean! + message: String + changes: SchemaChangeConnection + } + + type SchemaPublishError { + valid: Boolean! + changes: SchemaChangeConnection + errors: SchemaErrorConnection! + } + + type SchemaPublishMissingServiceError { + message: String! 
+ } + + input SchemaCheckInput { + service: ID + sdl: String! + github: GitHubSchemaCheckInput + } + + input GitHubSchemaCheckInput { + commit: String! + } + + input SchemaCompareInput { + organization: ID! + project: ID! + target: ID! + after: ID! + before: ID! + } + + input SchemaCompareToPreviousInput { + organization: ID! + project: ID! + target: ID! + version: ID! + } + + input SchemaVersionUpdateInput { + organization: ID! + project: ID! + target: ID! + version: ID! + valid: Boolean! + } + + type SchemaCompareResult { + changes: SchemaChangeConnection! + diff: SchemaDiff! + initial: Boolean! + } + + type SchemaCompareError { + message: String! + } + + union SchemaComparePayload = SchemaCompareResult | SchemaCompareError + + type SchemaDiff { + after: String! + before: String! + } + + input SchemaVersionsInput { + organization: ID! + project: ID! + target: ID! + } + + input SchemaVersionInput { + organization: ID! + project: ID! + target: ID! + version: ID! + } + + input UpdateBaseSchemaInput { + organization: ID! + project: ID! + target: ID! + newBase: String + } + + input UpdateSchemaServiceNameInput { + organization: ID! + project: ID! + target: ID! + version: ID! + name: String! + newName: String! + } + + type SchemaVersion { + id: ID! + valid: Boolean! + date: DateTime! + commit: Schema! + baseSchema: String + schemas: SchemaConnection! + supergraph: String + sdl: String + } + + type SchemaVersionConnection { + nodes: [SchemaVersion!]! + pageInfo: PageInfo! + } + + input SchemaSyncCDNInput { + organization: ID! + project: ID! + target: ID! + } + + type SchemaSyncCDNSuccess { + message: String! + } + + type SchemaSyncCDNError { + message: String! 
+ } + + union SchemaSyncCDNPayload = SchemaSyncCDNSuccess | SchemaSyncCDNError +`; diff --git a/packages/services/api/src/modules/schema/providers/inspector.ts b/packages/services/api/src/modules/schema/providers/inspector.ts new file mode 100644 index 000000000..3c5b8b72d --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/inspector.ts @@ -0,0 +1,171 @@ +import { Injectable, Scope } from 'graphql-modules'; +import type { GraphQLSchema } from 'graphql'; +import { + diff, + Change, + CriticalityLevel, + DiffRule, +} from '@graphql-inspector/core'; +import type * as Types from '../../../__generated__/types'; +import type { TargetSettings } from '../../../shared/entities'; +import { Logger } from '../../shared/providers/logger'; +import { sentry } from '../../../shared/sentry'; +import { createPeriod } from '../../../shared/helpers'; +import { OperationsManager } from '../../operations/providers/operations-manager'; +import { TargetManager } from '../../target/providers/target-manager'; + +const criticalityMap: Record = { + [CriticalityLevel.Breaking]: 'Breaking', + [CriticalityLevel.NonBreaking]: 'Safe', + [CriticalityLevel.Dangerous]: 'Dangerous', +}; + +@Injectable({ + scope: Scope.Operation, +}) +export class Inspector { + private logger: Logger; + + constructor( + logger: Logger, + private operationsManager: OperationsManager, + private targetManager: TargetManager + ) { + this.logger = logger.child({ service: 'Inspector' }); + } + + @sentry('Inspector.diff') + async diff( + existing: GraphQLSchema, + incoming: GraphQLSchema, + selector?: Types.TargetSelector + ): Promise { + this.logger.debug('Comparing Schemas'); + + const changes = await diff(existing, incoming, [DiffRule.considerUsage], { + checkUsage: async (fields) => { + this.logger.debug('Checking usage (fields=%s)', fields.length); + const BREAKING = false; + const NOT_BREAKING = true; + const allUsed = fields.map(() => BREAKING); + + if (!(selector && 'organization' in selector) || 
fields.length === 0) { + this.logger.debug('Mark all as used'); + return allUsed; + } + + const settings = await this.getSettings({ selector }); + + if (!settings) { + return allUsed; + } + + this.logger.debug('Usage validation enabled'); + + const statsList = await this.getStats({ + selector, + settings, + fields, + }); + + if (!statsList) { + return allUsed; + } + + this.logger.debug('Got the stats'); + + function useStats({ + type, + field, + argument, + }: { + type: string; + field?: string; + argument?: string; + }) { + const stats = statsList!.find( + (s) => + s.field === field && s.type === type && s.argument === argument + ); + + if (!stats) { + return NOT_BREAKING; + } + + const aboveThreshold = + stats.percentage > settings!.validation.percentage; + return aboveThreshold ? BREAKING : NOT_BREAKING; + } + + return fields.map(useStats); + }, + }); + + return changes + .map((change) => this.translateChange(change)) + .sort((a, b) => a.criticality.localeCompare(b.criticality)); + } + + private translateChange(change: Change): Types.SchemaChange { + return { + message: change.message, + criticality: criticalityMap[change.criticality.level], + path: change.path?.split('.'), + }; + } + + private async getSettings({ selector }: { selector: Types.TargetSelector }) { + try { + const settings = await this.targetManager.getTargetSettings(selector); + + if (!settings.validation.enabled) { + this.logger.debug('Usage validation disabled'); + this.logger.debug('Mark all as used'); + return null; + } + + if ( + settings.validation.enabled && + settings.validation.targets.length === 0 + ) { + this.logger.debug( + 'Usage validation enabled but no targets to check against' + ); + this.logger.debug('Mark all as used'); + return null; + } + + return settings; + } catch (error: any) { + this.logger.error(`Failed to get settings`, error); + return null; + } + } + + private async getStats({ + fields, + settings, + selector, + }: { + settings: TargetSettings; + selector: 
Types.TargetSelector; + fields: ReadonlyArray<{ + type: string; + field?: string | null; + argument?: string | null; + }>; + }) { + try { + return await this.operationsManager.readFieldListStats({ + fields, + period: createPeriod(`${settings.validation.period}d`), + target: settings.validation.targets, + project: selector.project, + organization: selector.organization, + unsafe__itIsMeInspector: true, + }); + } catch (error: any) { + this.logger.error(`Failed to read stats`, error); + } + } +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/custom.ts b/packages/services/api/src/modules/schema/providers/orchestrators/custom.ts new file mode 100644 index 000000000..b2f81ff45 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/custom.ts @@ -0,0 +1,120 @@ +import { Injectable } from 'graphql-modules'; +import { parse } from 'graphql'; +import { Logger } from '../../../shared/providers/logger'; +import { HiveError } from '../../../../shared/errors'; +import { HttpClient } from '../../../shared/providers/http-client'; +import { + Orchestrator, + ProjectType, + emptySource, + SchemaObject, +} from '../../../../shared/entities'; +import type { SchemaError } from '../../../../__generated__/types'; +import { SchemaBuildError } from './errors'; +import { sentry } from '../../../../shared/sentry'; + +export interface CustomOrchestratorConfig { + validationUrl: string; + buildUrl: string; +} + +type BuildResponse = BuildFailureResponse | BuildSuccessResponse; + +interface BuildFailureResponse { + errors: SchemaError[]; +} + +interface BuildSuccessResponse { + schema: string; +} + +@Injectable() +export class CustomOrchestrator implements Orchestrator { + type = ProjectType.CUSTOM; + private logger: Logger; + + constructor(logger: Logger, private http: HttpClient) { + this.logger = logger.child({ service: 'CustomOrchestrator' }); + } + + ensureConfig(config: CustomOrchestratorConfig) { + if (!config) { + throw new 
HiveError('Config is missing'); + } + + if (!config.buildUrl) { + throw new HiveError('Build endpoint is missing'); + } + + if (!config.validationUrl) { + throw new HiveError('Validation endpoint is missing'); + } + } + + @sentry('CustomOrchestrator.validate') + async validate( + schemas: SchemaObject[], + config: CustomOrchestratorConfig + ): Promise { + this.logger.debug('Validating Custom Schemas'); + return this.http.post(config.validationUrl, { + responseType: 'json', + headers: { + Accept: 'application/json', + 'Accept-Encoding': 'gzip, deflate, br', + 'Content-Type': 'application/json', + }, + json: { + schemas: schemas.map((s) => s.raw), + }, + }); + } + + @sentry('CustomOrchestrator.build') + async build( + schemas: SchemaObject[], + config: CustomOrchestratorConfig + ): Promise { + this.logger.debug('Building Custom Schema'); + try { + const response = await this.http.post(config.buildUrl, { + responseType: 'json', + headers: { + Accept: 'application/json', + 'Accept-Encoding': 'gzip, deflate, br', + 'Content-Type': 'application/json', + }, + json: { + schemas: schemas.map((s) => s.raw), + }, + }); + + if (hasErrors(response)) { + throw new HiveError( + [ + `Schema couldn't be build:`, + response.errors.map((error) => `\t - ${error.message}`), + ].join('\n') + ); + } + + const raw = response.schema; + + return { + raw, + document: parse(raw), + source: emptySource, + }; + } catch (error: any) { + throw new SchemaBuildError(error); + } + } + + async supergraph() { + return null; + } +} + +function hasErrors(response: any): response is BuildFailureResponse { + return response.errors; +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/errors.ts b/packages/services/api/src/modules/schema/providers/orchestrators/errors.ts new file mode 100644 index 000000000..52cb2f314 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/errors.ts @@ -0,0 +1,8 @@ +import { HiveError } from 
'../../../../shared/errors'; + +export class SchemaBuildError extends HiveError { + constructor(error: Error) { + super(`Failed to build schema: ${error.message}`, {}); + Object.assign(this, { originalError: error }); + } +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/federation.ts b/packages/services/api/src/modules/schema/providers/orchestrators/federation.ts new file mode 100644 index 000000000..2a879c84a --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/federation.ts @@ -0,0 +1,89 @@ +import { Injectable, Inject } from 'graphql-modules'; +import { parse } from 'graphql'; +import { Logger } from '../../../shared/providers/logger'; +import { sentry } from '../../../../shared/sentry'; +import { + Orchestrator, + ProjectType, + SchemaObject, +} from '../../../../shared/entities'; +import { SchemaBuildError } from './errors'; +import { SCHEMA_SERVICE_CONFIG } from './tokens'; +import type { SchemaServiceConfig } from './tokens'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; +import type { SchemaBuilderApi } from '@hive/schema'; + +@Injectable() +export class FederationOrchestrator implements Orchestrator { + type = ProjectType.FEDERATION; + private logger: Logger; + private schemaService; + + constructor( + logger: Logger, + @Inject(SCHEMA_SERVICE_CONFIG) private serviceConfig: SchemaServiceConfig + ) { + this.logger = logger.child({ service: 'FederationOrchestrator' }); + this.schemaService = createTRPCClient({ + url: `${serviceConfig.endpoint}/trpc`, + fetch, + }); + } + + ensureConfig() {} + + @sentry('FederationOrchestrator.validate') + async validate(schemas: SchemaObject[]) { + this.logger.debug('Validating Federated Schemas'); + + const result = await this.schemaService.mutation('validate', { + type: 'federation', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return result.errors; + } + + 
@sentry('FederationOrchestrator.build') + async build(schemas: SchemaObject[]): Promise { + this.logger.debug('Building Federated Schemas'); + + try { + const result = await this.schemaService.mutation('build', { + type: 'federation', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return { + document: parse(result.raw), + raw: result.raw, + source: result.source, + }; + } catch (error) { + throw new SchemaBuildError(error as Error); + } + } + + @sentry('FederationOrchestrator.supergraph') + async supergraph(schemas: SchemaObject[]): Promise { + this.logger.debug('Generating Federated Supergraph'); + + const result = await this.schemaService.mutation('supergraph', { + type: 'federation', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + url: s.url, + })), + }); + + return result.supergraph; + } +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/index.ts b/packages/services/api/src/modules/schema/providers/orchestrators/index.ts new file mode 100644 index 000000000..f97cb08f2 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/index.ts @@ -0,0 +1,12 @@ +import { Provider } from 'graphql-modules'; +import { SingleOrchestrator } from './single'; +import { FederationOrchestrator } from './federation'; +import { StitchingOrchestrator } from './stitching'; +import { CustomOrchestrator } from './custom'; + +export const orchestrators: Provider[] = [ + SingleOrchestrator, + FederationOrchestrator, + StitchingOrchestrator, + CustomOrchestrator, +]; diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/single.ts b/packages/services/api/src/modules/schema/providers/orchestrators/single.ts new file mode 100644 index 000000000..6202b2b9c --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/single.ts @@ -0,0 +1,89 @@ +import { Injectable, Inject } from 'graphql-modules'; +import { parse } from 'graphql'; 
+import { Logger } from '../../../shared/providers/logger'; +import { HiveError } from '../../../../shared/errors'; +import { + Orchestrator, + ProjectType, + SchemaObject, +} from '../../../../shared/entities'; +import { SchemaBuildError } from './errors'; +import { SCHEMA_SERVICE_CONFIG } from './tokens'; +import type { SchemaServiceConfig } from './tokens'; +import { sentry } from '../../../../shared/sentry'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; +import type { SchemaBuilderApi } from '@hive/schema'; + +@Injectable() +export class SingleOrchestrator implements Orchestrator { + type = ProjectType.SINGLE; + private logger: Logger; + private schemaService; + + constructor( + logger: Logger, + @Inject(SCHEMA_SERVICE_CONFIG) serviceConfig: SchemaServiceConfig + ) { + this.logger = logger.child({ service: 'SingleOrchestrator' }); + this.schemaService = createTRPCClient({ + url: `${serviceConfig.endpoint}/trpc`, + fetch, + }); + } + + ensureConfig() {} + + @sentry('SingleOrchestrator.validate') + async validate(schemas: SchemaObject[]) { + this.logger.debug('Validating Single Schema'); + if (schemas.length > 1) { + this.logger.debug('More than one schema (sources=%o)', { + sources: schemas.map((s) => s.source), + }); + throw new HiveError('too many schemas'); + } + + const result = await this.schemaService.mutation('validate', { + type: 'single', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return result.errors; + } + + @sentry('SingleOrchestrator.build') + async build(schemas: SchemaObject[]) { + try { + if (schemas.length > 1) { + this.logger.error('More than one schema (sources=%o)', { + sources: schemas.map((s) => s.source), + }); + throw new HiveError('too many schemas'); + } + + const result = await this.schemaService.mutation('build', { + type: 'single', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return { + document: 
parse(result.raw), + raw: result.raw, + source: result.source, + }; + } catch (error) { + throw new SchemaBuildError(error as Error); + } + } + + async supergraph() { + return null; + } +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/stitching.ts b/packages/services/api/src/modules/schema/providers/orchestrators/stitching.ts new file mode 100644 index 000000000..3eb19a7c9 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/stitching.ts @@ -0,0 +1,76 @@ +import { Injectable, Inject } from 'graphql-modules'; +import { parse } from 'graphql'; +import { Logger } from '../../../shared/providers/logger'; +import { + Orchestrator, + ProjectType, + SchemaObject, +} from '../../../../shared/entities'; +import { SchemaBuildError } from './errors'; +import { SCHEMA_SERVICE_CONFIG } from './tokens'; +import type { SchemaServiceConfig } from './tokens'; +import { sentry } from '../../../../shared/sentry'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; +import type { SchemaBuilderApi } from '@hive/schema'; + +@Injectable() +export class StitchingOrchestrator implements Orchestrator { + type = ProjectType.STITCHING; + private logger: Logger; + private schemaService; + + constructor( + logger: Logger, + @Inject(SCHEMA_SERVICE_CONFIG) private serviceConfig: SchemaServiceConfig + ) { + this.logger = logger.child({ service: 'StitchingOrchestrator' }); + this.schemaService = createTRPCClient({ + url: `${serviceConfig.endpoint}/trpc`, + fetch, + }); + } + + ensureConfig() {} + + @sentry('StitchingOrchestrator.validate') + async validate(schemas: SchemaObject[]) { + this.logger.debug('Validating Stitched Schemas'); + + const result = await this.schemaService.mutation('validate', { + type: 'stitching', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return result.errors; + } + + @sentry('StitchingOrchestrator.build') + async build(schemas: 
SchemaObject[]): Promise { + this.logger.debug('Building Stitched Schemas'); + try { + const result = await this.schemaService.mutation('build', { + type: 'stitching', + schemas: schemas.map((s) => ({ + raw: s.raw, + source: s.source, + })), + }); + + return { + document: parse(result.raw), + raw: result.raw, + source: result.source, + }; + } catch (error) { + throw new SchemaBuildError(error as Error); + } + } + + async supergraph() { + return null; + } +} diff --git a/packages/services/api/src/modules/schema/providers/orchestrators/tokens.ts b/packages/services/api/src/modules/schema/providers/orchestrators/tokens.ts new file mode 100644 index 000000000..a06b68f11 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/orchestrators/tokens.ts @@ -0,0 +1,9 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface SchemaServiceConfig { + endpoint: string; +} + +export const SCHEMA_SERVICE_CONFIG = new InjectionToken( + 'schema-service-config' +); diff --git a/packages/services/api/src/modules/schema/providers/schema-manager.ts b/packages/services/api/src/modules/schema/providers/schema-manager.ts new file mode 100644 index 000000000..4b42ff00b --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/schema-manager.ts @@ -0,0 +1,473 @@ +import { Injectable, Scope } from 'graphql-modules'; +import lodash from 'lodash'; +import { SchemaVersion } from '../../../shared/mappers'; +import { Orchestrator, ProjectType } from '../../../shared/entities'; +import { atomic, stringifySelector } from '../../../shared/helpers'; +import { HiveError } from '../../../shared/errors'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { Logger } from '../../shared/providers/logger'; +import { Storage, TargetSelector } from '../../shared/providers/storage'; +import { CustomOrchestrator } from './orchestrators/custom'; +import { FederationOrchestrator } from './orchestrators/federation'; +import { SingleOrchestrator } 
from './orchestrators/single'; +import { StitchingOrchestrator } from './orchestrators/stitching'; +import { Tracking } from '../../shared/providers/tracking'; +import { TargetAccessScope } from '../../auth/providers/target-access'; + +interface VersionSelector extends TargetSelector { + version: string; +} + +type Paginated = T & { + after?: string | null; + limit: number; +}; + +/** + * Responsible for auth checks. + * Talks to Storage. + */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class SchemaManager { + private logger: Logger; + + constructor( + logger: Logger, + private authManager: AuthManager, + private storage: Storage, + private singleOrchestrator: SingleOrchestrator, + private stitchingOrchestrator: StitchingOrchestrator, + private federationOrchestrator: FederationOrchestrator, + private customOrchestrator: CustomOrchestrator, + private tracking: Tracking + ) { + this.logger = logger.child({ source: 'SchemaManager' }); + } + + async hasSchema(selector: TargetSelector) { + this.logger.debug( + 'Checking if schema is available (selector=%o)', + selector + ); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.hasSchema(selector); + } + + async getSchemasOfVersion( + selector: { + version: string; + includeMetadata?: boolean; + } & TargetSelector + ) { + this.logger.debug('Fetching schemas (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.getSchemasOfVersion(selector); + } + + async getSchemasOfPreviousVersion( + selector: { + version: string; + } & TargetSelector + ) { + this.logger.debug( + 'Fetching schemas from the previous version (selector=%o)', + selector + ); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.getSchemasOfPreviousVersion(selector); + } + + 
async getLatestSchemas(selector: TargetSelector) { + this.logger.debug('Fetching latest schemas (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.getLatestSchemas(selector); + } + + async getMaybeLatestValidVersion(selector: TargetSelector) { + this.logger.debug('Fetching latest valid version (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + + const version = await this.storage.getMaybeLatestValidVersion(selector); + + if (!version) { + return null; + } + + return { + ...version, + project: selector.project, + target: selector.target, + organization: selector.organization, + }; + } + + async getLatestValidVersion(selector: TargetSelector) { + this.logger.debug('Fetching latest valid version (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return { + ...(await this.storage.getLatestValidVersion(selector)), + project: selector.project, + target: selector.target, + organization: selector.organization, + }; + } + + async getLatestVersion(selector: TargetSelector) { + this.logger.debug('Fetching latest version (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return { + ...(await this.storage.getLatestVersion(selector)), + project: selector.project, + target: selector.target, + organization: selector.organization, + }; + } + + async getMaybeLatestVersion(selector: TargetSelector) { + this.logger.debug('Fetching maybe latest version (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + + const latest = await this.storage.getMaybeLatestVersion(selector); + + if (!latest) { + return null; + } + + return { + ...latest, + 
project: selector.project, + target: selector.target, + organization: selector.organization, + }; + } + + async getSchemaVersion(selector: TargetSelector & { version: string }) { + this.logger.debug('Fetching single schema version (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + const result = await this.storage.getVersion(selector); + + return { + project: selector.project, + target: selector.target, + organization: selector.organization, + ...result, + }; + } + + async getSchemaVersions(selector: Paginated) { + this.logger.debug('Fetching published schemas (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + const result = await this.storage.getVersions(selector); + + return { + nodes: result.versions.map((r) => ({ + ...r, + project: selector.project, + target: selector.target, + organization: selector.organization, + })), + hasMore: result.hasMore, + }; + } + + async updateSchemaVersionStatus( + input: TargetSelector & { version: string; valid: boolean } + ): Promise { + this.logger.debug('Updating schema version status (input=%o)', input); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + + await this.tracking.track({ + event: 'SCHEMA_VERSION_STATUS_UPDATED', + ...input, + }); + + return { + ...(await this.storage.updateVersionStatus(input)), + organization: input.organization, + project: input.project, + target: input.target, + }; + } + + async updateSchemaUrl( + input: TargetSelector & { + version: string; + commit: string; + url?: string | null; + } + ) { + this.logger.debug('Updating schema version status (input=%o)', input); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + await this.tracking.track({ + event: 'SCHEMA_URL_UPDATED', + ...input, + }); + await 
this.storage.updateSchemaUrlOfVersion(input); + } + + async getCommit(selector: { commit: string } & TargetSelector) { + this.logger.debug('Fetching schema (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.getSchema({ + commit: selector.commit, + target: selector.target, + }); + } + + @atomic(stringifySelector) + async getCommits(selector: VersionSelector) { + this.logger.debug('Fetching schemas (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return this.storage.getSchemasOfVersion(selector); + } + + async createVersion( + input: { + commit: string; + schema: string; + author: string; + valid: boolean; + service?: string | null; + commits: string[]; + url?: string | null; + base_schema: string | null; + metadata: string | null; + } & TargetSelector + ) { + this.logger.info( + 'Creating a new version (input=%o)', + lodash.omit(input, ['schema']) + ); + const { + valid, + project, + organization, + target, + commit, + schema, + author, + commits, + url, + metadata, + } = input; + let service = input.service; + + await this.authManager.ensureTargetAccess({ + project, + organization, + target, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + + if (service) { + service = service.toLowerCase(); + } + + // if schema exists + const existingSchema = await this.storage.getMaybeSchema({ + commit, + service, + organization, + project, + target, + }); + + if (existingSchema) { + if (service) { + throw new HiveError( + `Only one service schema per commit per target is allowed` + ); + } + + throw new HiveError(`Only one schema per commit per target is allowed`); + } + + // insert new schema + const insertedSchema = await this.insertSchema({ + organization, + project, + target, + schema, + service, + commit, + author, + url, + metadata, + }); + + // finally create a version + return 
this.storage.createVersion({ + valid, + organization, + project, + target, + commit: insertedSchema.id, + commits: commits.concat(insertedSchema.id), + url, + base_schema: input.base_schema, + }); + } + + matchOrchestrator(projectType: ProjectType): Orchestrator | never { + switch (projectType) { + case ProjectType.SINGLE: { + return this.singleOrchestrator; + } + case ProjectType.STITCHING: { + return this.stitchingOrchestrator; + } + case ProjectType.FEDERATION: { + return this.federationOrchestrator; + } + case ProjectType.CUSTOM: { + return this.customOrchestrator; + } + default: { + throw new HiveError( + `Couldn't find an orchestrator for project type "${projectType}"` + ); + } + } + } + + private async insertSchema( + input: { + schema: string; + commit: string; + author: string; + service?: string | null; + url?: string | null; + metadata: string | null; + } & TargetSelector + ) { + this.logger.info( + 'Inserting schema (input=%o)', + lodash.omit(input, ['schema']) + ); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + return this.storage.insertSchema(input); + } + + async getBaseSchema(selector: TargetSelector) { + this.logger.debug('Fetching base schema (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + return await this.storage.getBaseSchema(selector); + } + async updateBaseSchema( + selector: TargetSelector, + newBaseSchema: string | null + ) { + this.logger.debug('Updating base schema (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.REGISTRY_READ, + }); + await this.storage.updateBaseSchema(selector, newBaseSchema); + } + + async updateServiceName( + input: TargetSelector & { + version: string; + name: string; + newName: string; + projectType: ProjectType; + } + ) { + this.logger.debug('Updating service name (input=%o)', input); + await 
this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + + if ( + input.projectType !== ProjectType.FEDERATION && + input.projectType !== ProjectType.STITCHING + ) { + throw new HiveError( + `Project type "${input.projectType}" doesn't support service name updates` + ); + } + + const schemas = await this.storage.getSchemasOfVersion({ + version: input.version, + target: input.target, + project: input.project, + organization: input.organization, + }); + + const schema = schemas.find((s) => s.service === input.name); + + if (!schema) { + throw new HiveError(`Couldn't find service "${input.name}"`); + } + + if (input.newName.trim().length === 0) { + throw new HiveError(`Service name can't be empty`); + } + + const duplicatedSchema = schemas.find((s) => s.service === input.newName); + + if (duplicatedSchema) { + throw new HiveError(`Service "${input.newName}" already exists`); + } + + await this.storage.updateServiceName({ + organization: input.organization, + project: input.project, + target: input.target, + commit: schema.id, + name: input.newName, + }); + } +} diff --git a/packages/services/api/src/modules/schema/providers/schema-publisher.ts b/packages/services/api/src/modules/schema/providers/schema-publisher.ts new file mode 100644 index 000000000..00f2f7818 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/schema-publisher.ts @@ -0,0 +1,935 @@ +import { Injectable, Scope } from 'graphql-modules'; +import lodash from 'lodash'; +import type { Span } from '@sentry/types'; +import { + Schema, + Target, + Project, + ProjectType, + createSchemaObject, + Orchestrator, +} from '../../../shared/entities'; +import * as Types from '../../../__generated__/types'; +import { ProjectManager } from '../../project/providers/project-manager'; +import { Logger } from '../../shared/providers/logger'; +import { updateSchemas } from '../../../shared/schema'; +import { SchemaManager } from './schema-manager'; +import { 
SchemaValidator } from './schema-validator';
import { sentry } from '../../../shared/sentry';
import type { TargetSelector } from '../../shared/providers/storage';
import { IdempotentRunner } from '../../shared/providers/idempotent-runner';
import { bolderize } from '../../../shared/markdown';
import { Tracking } from '../../shared/providers/tracking';
import { AlertsManager } from '../../alerts/providers/alerts-manager';
import { TargetManager } from '../../target/providers/target-manager';
import { CdnProvider } from '../../cdn/providers/cdn.provider';
import { OrganizationManager } from '../../organization/providers/organization-manager';
import { AuthManager } from '../../auth/providers/auth-manager';
import { TargetAccessScope } from '../../auth/providers/target-access';
import { GitHubIntegrationManager } from '../../integrations/providers/github-integration-manager';

type CheckInput = Omit<
  Types.SchemaCheckInput,
  'project' | 'organization' | 'target'
> &
  TargetSelector;

type PublishInput = Types.SchemaPublishInput &
  TargetSelector & {
    checksum: string;
    isSchemaPublishMissingServiceErrorSelected: boolean;
  };

// FIX: the generic parameters below were stripped during extraction
// ("type BreakPromise = T extends Promise ? U : never" is not valid TS).
// Restored to the conventional awaited-type helper.
type BreakPromise<T> = T extends Promise<infer U> ? U : never;

// NOTE(review): the type argument of ReturnType was also lost in extraction.
// Restored to the method whose resolved value `publish` returns — confirm
// against the original source.
type PublishResult = BreakPromise<
  ReturnType<SchemaPublisher['internalPublish']>
>;

@Injectable({
  scope: Scope.Operation,
})
export class SchemaPublisher {
  private logger: Logger;

  constructor(
    logger: Logger,
    private authManager: AuthManager,
    private schemaManager: SchemaManager,
    private targetManager: TargetManager,
    private projectManager: ProjectManager,
    private organizationManager: OrganizationManager,
    private schemaValidator: SchemaValidator,
    private alertsManager: AlertsManager,
    private cdn: CdnProvider,
    private tracking: Tracking,
    private gitHubIntegrationManager: GitHubIntegrationManager,
    private idempotentRunner: IdempotentRunner
  ) {
    this.logger = logger.child({ service: 'SchemaPublisher' });
  }

  /**
   * Validates a proposed schema against the latest published set without
   * persisting anything. Optionally reports the result as a GitHub check-run.
   */
  @sentry('SchemaPublisher.check')
  async check(input: CheckInput) {
    // Omit the (potentially huge) SDL body from the log line.
    this.logger.info('Checking schema (input=%o)', lodash.omit(input, ['sdl']));

    await this.authManager.ensureTargetAccess({
      target: input.target,
      project: input.project,
      organization: input.organization,
      scope: TargetAccessScope.REGISTRY_READ,
    });

    const [project, latest] = await Promise.all([
      this.projectManager.getProject({
        organization: input.organization,
        project: input.project,
      }),
      this.schemaManager.getLatestSchemas({
        organization: input.organization,
        project: input.project,
        target: input.target,
      }),
    ]);

    const schemas = latest.schemas;

    await this.tracking.track({
      event: 'SCHEMA_CHECK',
      data: {
        organization: input.organization,
        project: input.project,
        target: input.target,
        projectType: project.type,
      },
    });

    if (input.github) {
      await this.tracking.track({
        event: 'SCHEMA_CHECK_GITHUB',
        data: {
          organization: input.organization,
          project: input.project,
          target: input.target,
          projectType: project.type,
        },
      });
    }

    const baseSchema = await this.schemaManager.getBaseSchema({
      organization: input.organization,
      project: input.project,
      target: input.target,
    });
    const orchestrator =
this.schemaManager.matchOrchestrator(project.type); + const incomingSchema: Schema = { + id: 'temp', + author: 'temp', + source: input.sdl, + service: input.service, + target: input.target, + commit: 'temp', + date: new Date().toISOString(), + }; + const { schemas: newSchemas } = updateSchemas(schemas, incomingSchema); + + const validationResult = await this.schemaValidator.validate({ + orchestrator, + incoming: incomingSchema, + before: schemas, + after: newSchemas, + selector: { + organization: input.organization, + project: input.project, + target: input.target, + }, + baseSchema: baseSchema, + }); + + if (input.github) { + if (!project.gitRepository) { + return { + __typename: 'GitHubSchemaCheckError' as const, + message: 'Git repository is not configured for this project', + }; + } + const [repositoryOwner, repositoryName] = + project.gitRepository.split('/'); + + try { + let title: string; + let summary: string; + + if (validationResult.valid) { + if (validationResult.changes.length === 0) { + title = 'No changes'; + summary = 'No changes detected'; + } else { + title = 'No breaking changes'; + summary = this.changesToMarkdown(validationResult.changes); + } + } else { + title = `Detected ${validationResult.errors.length} error${ + validationResult.errors.length === 1 ? '' : 's' + }`; + summary = [ + validationResult.errors + ? this.errorsToMarkdown(validationResult.errors) + : null, + validationResult.changes + ? this.changesToMarkdown(validationResult.changes) + : null, + ] + .filter(Boolean) + .join('\n\n'); + } + + await this.gitHubIntegrationManager.createCheckRun({ + name: 'GraphQL Hive - schema:check', + conclusion: validationResult.valid ? 
'success' : 'failure', + sha: input.github.commit, + organization: input.organization, + repositoryOwner, + repositoryName, + output: { + title, + summary, + }, + }); + return { + __typename: 'GitHubSchemaCheckSuccess' as const, + message: 'Check-run created', + }; + } catch (error: any) { + return { + __typename: 'GitHubSchemaCheckError' as const, + message: `Failed to create the check-run: ${error.message}`, + }; + } + } + + return validationResult; + } + + @sentry('SchemaPublisher.publish') + async publish(input: PublishInput, span?: Span): Promise { + this.logger.debug('Schema publication (checksum=%s)', input.checksum); + return this.idempotentRunner.run({ + identifier: `schema:publish:${input.checksum}`, + executor: () => this.internalPublish(input), + ttl: 60, + span, + }); + } + + @sentry('SchemaPublisher.sync') + public async sync(selector: TargetSelector, span?: Span) { + this.logger.info('Syncing CDN with DB (target=%s)', selector.target); + await this.authManager.ensureTargetAccess({ + target: selector.target, + project: selector.project, + organization: selector.organization, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + try { + const [latestVersion, project, target] = await Promise.all([ + this.schemaManager.getLatestValidVersion(selector), + this.projectManager.getProject({ + organization: selector.organization, + project: selector.project, + }), + this.targetManager.getTarget({ + organization: selector.organization, + project: selector.project, + target: selector.target, + }), + ]); + + const schemas = await this.schemaManager.getSchemasOfVersion({ + organization: selector.organization, + project: selector.project, + target: selector.target, + version: latestVersion.id, + includeMetadata: true, + }); + + this.logger.info( + 'Deploying version to CDN (version=%s)', + latestVersion.id + ); + await this.updateCDN( + { + target, + project, + supergraph: + project.type === ProjectType.FEDERATION + ? 
await this.schemaManager + .matchOrchestrator(project.type) + .supergraph(schemas.map(createSchemaObject)) + : null, + schemas, + }, + span + ); + } catch (error) { + this.logger.error(`Failed to sync with CDN ` + String(error), error); + throw error; + } + } + + public async updateVersionStatus( + input: TargetSelector & { version: string; valid: boolean } + ) { + const updateResult = await this.schemaManager.updateSchemaVersionStatus( + input + ); + + if (updateResult.valid === true) { + // Now, when fetching the latest valid version, we should be able to detect + // if it's the version we just updated or not. + // Why? + // Because we change its status to valid + // and `getLatestValidVersion` calls for fresh data from DB + const latestVersion = await this.schemaManager.getLatestValidVersion( + input + ); + + // if it is the latest version, we should update the CDN + if (latestVersion.id === updateResult.id) { + this.logger.info( + 'Version is now promoted to latest valid (version=%s)', + latestVersion.id + ); + const [project, target, schemas] = await Promise.all([ + this.projectManager.getProject({ + organization: input.organization, + project: input.project, + }), + this.targetManager.getTarget({ + organization: input.organization, + project: input.project, + target: input.target, + }), + this.schemaManager.getSchemasOfVersion({ + organization: input.organization, + project: input.project, + target: input.target, + version: latestVersion.id, + includeMetadata: true, + }), + ]); + + this.logger.info( + 'Deploying version to CDN (version=%s)', + latestVersion.id + ); + await this.updateCDN({ + target, + project, + supergraph: + project.type === ProjectType.FEDERATION + ? 
await this.schemaManager + .matchOrchestrator(project.type) + .supergraph(schemas.map(createSchemaObject)) + : null, + schemas, + }); + } + } + + return updateResult; + } + + private validateMetadata( + metadataRaw: string | null | undefined + ): Record | null { + if (metadataRaw) { + try { + return JSON.parse(metadataRaw); + } catch (e) { + throw new Error( + `Failed to parse schema metadata JSON: ${ + e instanceof Error ? e.message : e + }` + ); + } + } + + return null; + } + + private async internalPublish(input: PublishInput) { + const [organizationId, projectId, targetId] = [ + input.organization, + input.project, + input.target, + ]; + this.logger.info('Publishing schema (input=%o)', { + ...lodash.omit(input, [ + 'sdl', + 'organization', + 'project', + 'target', + 'metadata', + ]), + organization: organizationId, + project: projectId, + target: targetId, + sdl: input.sdl.length, + checksum: input.checksum, + metadata: !!input.metadata, + }); + + await this.authManager.ensureTargetAccess({ + target: targetId, + project: projectId, + organization: organizationId, + scope: TargetAccessScope.REGISTRY_WRITE, + }); + + const [project, target, latest, baseSchema] = await Promise.all([ + this.projectManager.getProject({ + organization: organizationId, + project: projectId, + }), + this.targetManager.getTarget({ + organization: organizationId, + project: projectId, + target: targetId, + }), + this.schemaManager.getLatestSchemas({ + // here we get an empty list of schemas + organization: organizationId, + project: projectId, + target: targetId, + }), + this.schemaManager.getBaseSchema({ + organization: organizationId, + project: projectId, + target: targetId, + }), + ]); + + const schemas = latest.schemas; + + await this.tracking.track({ + event: 'SCHEMA_PUBLISH', + data: { + organization: organizationId, + project: projectId, + target: targetId, + projectType: project.type, + }, + }); + + this.logger.debug(`Found ${schemas.length} most recent schemas`); + + if ( + 
input.isSchemaPublishMissingServiceErrorSelected && + (project.type === ProjectType.STITCHING || + project.type === ProjectType.FEDERATION) && + input.service == null + ) { + const missingServiceNameMessage = `Can not publish schema for a '${project.type.toLowerCase()}' project without a service name.`; + + if (input.github) { + return this.createPublishCheckRun({ + force: false, + initial: false, + input, + project, + valid: false, + changes: [], + errors: [ + { + message: missingServiceNameMessage, + }, + ], + }); + } + return { + __typename: 'SchemaPublishMissingServiceError' as const, + message: missingServiceNameMessage, + }; + } + + const isInitialSchema = schemas.length === 0; + const orchestrator = this.schemaManager.matchOrchestrator(project.type); + const incomingSchema: Schema = { + id: 'new-schema', + author: input.author, + source: input.sdl, + service: input.service, + commit: input.commit, + target: targetId, + date: new Date().toISOString(), + url: input.url, + metadata: this.validateMetadata(input.metadata), + }; + + const { schemas: newSchemas, swappedSchema: previousSchema } = + updateSchemas(schemas, incomingSchema); + + this.logger.debug(`Produced ${newSchemas.length} new schemas`); + + const { errors, changes, valid } = await this.schemaValidator.validate({ + orchestrator, + incoming: incomingSchema, + before: schemas, + after: newSchemas, + selector: { + organization: organizationId, + project: projectId, + target: targetId, + }, + baseSchema: baseSchema, + }); + + if (errors.length === 0 && changes.length === 0 && schemas.length !== 0) { + const updated: string[] = []; + + if ( + latest.version && + previousSchema && + (previousSchema.url ?? null) !== (incomingSchema.url ?? null) + ) { + this.logger.debug( + 'New url detected: %s (previously: %s)', + incomingSchema.url, + previousSchema.url + ); + + updated.push( + `New service url: ${incomingSchema.url ?? 'empty'} (previously: ${ + previousSchema.url ?? 
'empty' + })` + ); + + await this.schemaManager.updateSchemaUrl({ + organization: organizationId, + project: projectId, + target: targetId, + version: latest.version, + commit: previousSchema.id, + url: incomingSchema.url ?? null, + }); + + this.publishToCDN({ + valid, + target, + project, + orchestrator, + schemas: newSchemas, + }); + } + + if (incomingSchema.metadata && latest.version && previousSchema) { + this.publishToCDN({ + valid, + target, + project, + orchestrator, + schemas: newSchemas, + }); + + updated.push('Schema metadata'); + } + + if (input.github) { + return this.createPublishCheckRun({ + force: input.force, + initial: isInitialSchema, + input, + project, + valid, + changes, + errors, + updates: updated, + }); + } + + return { + __typename: valid + ? ('SchemaPublishSuccess' as const) + : ('SchemaPublishError' as const), + initial: isInitialSchema, + valid, + errors, + changes, + message: updated.length === 0 ? null : `Updated: ${updated.join('\n')}`, + }; + } + + // if we detect any changes + if (errors.length === 0 || input.force) { + await this.publishNewVersion({ + input, + valid, + schemas: newSchemas, + newSchema: incomingSchema, + organizationId, + target, + project, + changes, + errors, + initial: isInitialSchema, + }); + this.publishToCDN({ + valid, + target, + project, + orchestrator, + schemas: newSchemas, + }); + } + + if (input.github) { + return this.createPublishCheckRun({ + force: input.force, + initial: isInitialSchema, + input, + project, + valid, + changes, + errors, + }); + } + + return { + __typename: valid + ? 
('SchemaPublishSuccess' as const) + : ('SchemaPublishError' as const), + initial: isInitialSchema, + valid, + errors, + changes, + }; + } + @sentry('SchemaPublisher.publishNewVersion') + private async publishNewVersion({ + valid, + input, + target, + project, + organizationId, + newSchema, + schemas, + changes, + errors, + initial, + }: { + valid: boolean; + input: PublishInput; + target: Target; + project: Project; + organizationId: string; + newSchema: Schema; + schemas: readonly Schema[]; + changes: readonly Types.SchemaChange[]; + errors: readonly Types.SchemaError[]; + initial: boolean; + }) { + const commits = schemas + .filter((s) => s.id !== newSchema.id) // do not include the incoming schema + .map((s) => s.id); + + this.logger.debug(`Assigning ${commits.length} schemas to new version`); + const baseSchema = await this.schemaManager.getBaseSchema({ + organization: await input.organization, + project: await input.project, + target: await input.target, + }); + const [schemaVersion, organization] = await Promise.all([ + this.schemaManager.createVersion({ + valid, + organization: organizationId, + project: project.id, + target: target.id, + commit: input.commit, + commits, + service: input.service, + schema: input.sdl, + author: input.author, + url: input.url, + base_schema: baseSchema, + metadata: input.metadata ?? 
null, + }), + this.organizationManager.getOrganization({ + organization: organizationId, + }), + ]); + + this.alertsManager + .triggerSchemaChangeNotifications({ + organization, + project, + target, + schema: schemaVersion, + changes, + errors, + initial, + }) + .catch((err) => { + this.logger.error('Failed to trigger schema change notifications', err); + }); + } + + @sentry('SchemaPublisher.publishToCDN') + private async publishToCDN({ + valid, + target, + project, + orchestrator, + schemas, + }: { + valid: boolean; + target: Target; + project: Project; + orchestrator: Orchestrator; + schemas: readonly Schema[]; + }) { + try { + if (valid) { + this.updateCDN({ + target, + project, + schemas, + supergraph: + project.type === ProjectType.FEDERATION + ? await orchestrator.supergraph(schemas.map(createSchemaObject)) + : null, + }); + } + } catch (e) { + this.logger.error(`Failed to publish to CDN!`, e); + } + } + + private async updateCDN( + { + target, + project, + supergraph, + schemas, + }: { + target: Target; + project: Project; + schemas: readonly Schema[]; + supergraph?: string | null; + }, + span?: Span + ) { + const publishMetadata = async () => { + const metadata: Array> = []; + for (const schema of schemas) { + if (!schema.metadata) { + continue; + } + metadata.push(schema.metadata); + } + if (metadata.length > 0) { + await this.cdn.publish( + { + targetId: target.id, + resourceType: 'metadata', + value: JSON.stringify( + metadata.length === 1 ? metadata[0] : metadata + ), + }, + span + ); + } + }; + + const publishSchema = async () => { + await this.cdn.publish( + { + targetId: target.id, + resourceType: 'schema', + value: JSON.stringify( + schemas.length > 1 + ? 
schemas.map((s) => ({ + sdl: s.source, + url: s.url, + name: s.service, + date: s.date, + })) + : { + sdl: schemas[0].source, + url: schemas[0].url, + name: schemas[0].service, + date: schemas[0].date, + } + ), + }, + span + ); + }; + + const actions = [publishSchema(), publishMetadata()]; + + if (project.type === ProjectType.FEDERATION) { + if (supergraph) { + this.logger.debug('Publishing supergraph to CDN'); + + actions.push( + this.cdn.publish( + { + targetId: target.id, + resourceType: 'supergraph', + value: supergraph, + }, + span + ) + ); + } + } + + await Promise.all(actions); + } + + private async createPublishCheckRun({ + initial, + force, + input, + project, + valid, + changes, + errors, + updates, + }: { + initial: boolean; + force?: boolean | null; + input: PublishInput; + project: Project; + valid: boolean; + changes: readonly Types.SchemaChange[]; + errors: readonly Types.SchemaError[]; + updates?: string[]; + }) { + if (!project.gitRepository) { + return { + __typename: 'GitHubSchemaPublishError' as const, + message: 'Git repository is not configured for this project', + }; + } + const [repositoryOwner, repositoryName] = project.gitRepository.split('/'); + + try { + let title: string; + let summary: string; + + if (valid) { + if (initial) { + title = 'Schema published'; + summary = 'Initial Schema published'; + } else if (changes.length === 0) { + title = 'No changes'; + summary = 'No changes detected'; + } else { + title = 'No breaking changes'; + summary = this.changesToMarkdown(changes); + } + } else { + title = `Detected ${errors.length} error${ + errors.length === 1 ? '' : 's' + }`; + summary = [ + errors ? this.errorsToMarkdown(errors) : null, + changes ? 
this.changesToMarkdown(changes) : null, + ] + .filter(Boolean) + .join('\n\n'); + } + + if (updates?.length) { + summary += `\n\n${updates.map((val) => `- ${val}`).join('\n')}`; + } + + if (valid === false && force === true) { + title += ' (forced)'; + } + + await this.gitHubIntegrationManager.createCheckRun({ + name: 'GraphQL Hive - schema:publish', + conclusion: valid ? 'success' : force ? 'neutral' : 'failure', + sha: input.commit, + organization: input.organization, + repositoryOwner, + repositoryName, + output: { + title, + summary, + }, + }); + return { + __typename: 'GitHubSchemaPublishSuccess' as const, + message: title, + }; + } catch (error: any) { + return { + __typename: 'GitHubSchemaPublishError' as const, + message: `Failed to create the check-run: ${error.message}`, + }; + } + } + + private errorsToMarkdown(errors: readonly Types.SchemaError[]): string { + return ['', ...errors.map((error) => `- ${bolderize(error.message)}`)].join( + '\n' + ); + } + + private changesToMarkdown(changes: readonly Types.SchemaChange[]): string { + const breakingChanges = changes.filter(filterChangesByLevel('Breaking')); + const dangerousChanges = changes.filter(filterChangesByLevel('Dangerous')); + const safeChanges = changes.filter(filterChangesByLevel('Safe')); + + const lines: string[] = [ + `## Found ${changes.length} change${changes.length > 1 ? 
's' : ''}`, + '', + ]; + + if (breakingChanges.length) { + lines.push(`Breaking: ${breakingChanges.length}`); + } + + if (dangerousChanges.length) { + lines.push(`Dangerous: ${dangerousChanges.length}`); + } + + if (safeChanges.length) { + lines.push(`Safe: ${safeChanges.length}`); + } + + if (breakingChanges.length) { + writeChanges('Breaking', breakingChanges, lines); + } + + if (dangerousChanges.length) { + writeChanges('Dangerous', dangerousChanges, lines); + } + + if (safeChanges.length) { + writeChanges('Safe', safeChanges, lines); + } + + return lines.join('\n'); + } +} + +function filterChangesByLevel(level: Types.CriticalityLevel) { + return (change: Types.SchemaChange) => change.criticality === level; +} + +function writeChanges( + type: string, + changes: readonly Types.SchemaChange[], + lines: string[] +): void { + lines.push( + ...['', `### ${type} changes`].concat( + changes.map((change) => ` - ${bolderize(change.message)}`) + ) + ); +} diff --git a/packages/services/api/src/modules/schema/providers/schema-validator.ts b/packages/services/api/src/modules/schema/providers/schema-validator.ts new file mode 100644 index 000000000..f1379a5c5 --- /dev/null +++ b/packages/services/api/src/modules/schema/providers/schema-validator.ts @@ -0,0 +1,134 @@ +import { Injectable, Scope } from 'graphql-modules'; +import { + createSchemaObject, + Orchestrator, + Schema, + SchemaObject, +} from '../../../shared/entities'; +import { buildSchema, findSchema, hashSchema } from '../../../shared/schema'; +import * as Types from '../../../__generated__/types'; +import { Logger } from '../../shared/providers/logger'; +import { sentry } from '../../../shared/sentry'; +import { Inspector } from './inspector'; + +@Injectable({ + scope: Scope.Operation, +}) +export class SchemaValidator { + private logger: Logger; + + constructor(logger: Logger, private inspector: Inspector) { + this.logger = logger.child({ service: 'SchemaValidator' }); + } + + 
@sentry('SchemaValidator.validate') + async validate({ + orchestrator, + selector, + incoming, + before, + after, + baseSchema, + }: { + orchestrator: Orchestrator; + incoming: Schema; + before: readonly Schema[]; + after: readonly Schema[]; + selector: Types.TargetSelector; + baseSchema: string | null; + }) { + this.logger.debug('Validating Schema'); + const existing = findSchema(before, incoming); + const afterWithBase = after.map((schema, index) => { + let source = ''; + if (index === 0) { + source = (baseSchema || '') + schema.source; + } else { + source = schema.source; + } + return { + id: schema.id, + author: schema.author, + source: source, + date: schema.date, + commit: schema.commit, + url: schema.url, + service: schema.service, + target: schema.target, + }; + }); + const afterSchemasWithBase: SchemaObject[] = + afterWithBase.map(createSchemaObject); + const afterSchemas: SchemaObject[] = after.map(createSchemaObject); + const beforeSchemas: SchemaObject[] = before.map(createSchemaObject); + + const isInitialSchema = beforeSchemas.length === 0; + const isIdentical = + existing && hashSchema(existing) === hashSchema(incoming); + + if (isIdentical) { + return { + valid: true, + errors: [], + changes: [], + }; + } + + const errors = await orchestrator.validate(afterSchemasWithBase); + + if (isInitialSchema) { + if (errors.length > 0) { + errors.push({ + message: `Note: If this is your first schema publish, please make sure it's fully valid and standalone.`, + }); + } + + return { + valid: errors.length === 0, + errors: errors, + changes: [], + }; + } + + let changes: Types.SchemaChange[] = []; + + try { + const [existingSchema, incomingSchema] = await Promise.all([ + orchestrator.build(beforeSchemas), + orchestrator.build(afterSchemas), + ]); + if (existingSchema) { + changes = await this.inspector.diff( + buildSchema(existingSchema), + buildSchema(incomingSchema), + selector + ); + + changes.forEach((change) => { + if (change.criticality === 'Breaking') { + 
errors.push({ + message: `Breaking Change: ${change.message}`, + path: change.path, + }); + } + }); + } + } catch (error) { + errors.push({ + message: `Failed to compare schemas: ${(error as Error).message}`, + }); + } + + const hasErrors = errors.length > 0; + const hasBreakingChanges = changes.some( + (change) => change.criticality === 'Breaking' + ); + const valid = !hasErrors && !hasBreakingChanges; + + return { + valid, + errors, + changes, + }; + } +} diff --git a/packages/services/api/src/modules/schema/resolvers.ts b/packages/services/api/src/modules/schema/resolvers.ts new file mode 100644 index 000000000..71a94464f --- /dev/null +++ b/packages/services/api/src/modules/schema/resolvers.ts @@ -0,0 +1,443 @@ +import { createHash } from 'crypto'; +import type { SchemaModule } from './__generated__/types'; +import { SchemaManager } from './providers/schema-manager'; +import { SchemaPublisher } from './providers/schema-publisher'; +import { Inspector } from './providers/inspector'; +import { buildSchema, createConnection } from '../../shared/schema'; +import { createSchemaObject, ProjectType } from '../../shared/entities'; +import { ProjectManager } from '../project/providers/project-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { OrganizationManager } from '../organization/providers/organization-manager'; +import { SchemaBuildError } from './providers/orchestrators/errors'; +import { TargetManager } from '../target/providers/target-manager'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { parseResolveInfo } from 'graphql-parse-resolve-info'; +import { RateLimitProvider } from '../rate-limit/providers/rate-limit.provider'; + +export const resolvers: SchemaModule.Resolvers = { + Mutation: { + async schemaCheck(_, { input }, { injector }) { + const [organization, project, target] = await Promise.all([ + injector.get(OrganizationManager).getOrganizationIdByToken(), + 
injector.get(ProjectManager).getProjectIdByToken(), + injector.get(TargetManager).getTargetIdByToken(), + ]); + + return injector.get(SchemaPublisher).check({ + ...input, + organization, + project, + target, + }); + }, + async schemaPublish(_, { input }, { injector }, info) { + const [organization, project, target] = await Promise.all([ + injector.get(OrganizationManager).getOrganizationIdByToken(), + injector.get(ProjectManager).getProjectIdByToken(), + injector.get(TargetManager).getTargetIdByToken(), + ]); + const token = injector.get(AuthManager).ensureApiToken(); + + await injector.get(RateLimitProvider).assertRateLimit({ + entityType: 'target', + id: target, + type: 'schema-push', + token, + }); + + const checksum = createHash('md5') + .update(JSON.stringify(input)) + .update(token) + .digest('base64'); + + const parsedResolveInfoFragment = parseResolveInfo(info); + + // We only want to resolve to SchemaPublishMissingServiceError if it is selected by the operation. + // NOTE: This should be removed once the usage of cli versions that don't request on 'SchemaPublishMissingServiceError' is becomes pretty low. 
+ const isSchemaPublishMissingServiceErrorSelected = + !!parsedResolveInfoFragment?.fieldsByTypeName[ + 'SchemaPublishMissingServiceError' + ]; + + return injector.get(SchemaPublisher).publish({ + ...input, + checksum, + organization, + project, + target, + isSchemaPublishMissingServiceErrorSelected, + }); + }, + async updateSchemaVersionStatus(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + return injector.get(SchemaPublisher).updateVersionStatus({ + version: input.version, + valid: input.valid, + organization, + project, + target, + }); + }, + async updateBaseSchema(_, { input }, { injector }) { + const schemaManager = injector.get(SchemaManager); + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + const selector = { organization, project, target }; + await schemaManager.updateBaseSchema( + selector, + input.newBase ? 
input.newBase : null + ); + return injector.get(TargetManager).getTarget({ + organization, + target, + project, + }); + }, + async updateSchemaServiceName(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + const { type: projectType } = await injector + .get(ProjectManager) + .getProject({ + organization, + project, + }); + + await injector.get(SchemaManager).updateServiceName({ + organization, + project, + target, + version: input.version, + name: input.name, + newName: input.newName, + projectType, + }); + + return injector.get(TargetManager).getTarget({ + organization, + project, + target, + }); + }, + async schemaSyncCDN(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + try { + await injector.get(SchemaPublisher).sync({ + organization, + project, + target, + }); + + return { + __typename: 'SchemaSyncCDNSuccess', + message: 'CDN is now up to date with the latest version', + }; + } catch (error) { + return { + __typename: 'SchemaSyncCDNError', + message: + error instanceof Error ? 
error.message : 'Failed to sync with CDN', + }; + } + }, + }, + Query: { + async schemaCompare(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const schemaManager = injector.get(SchemaManager); + const projectManager = injector.get(ProjectManager); + + const [organizationId, projectId, targetId] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + const project = await projectManager.getProject({ + organization: organizationId, + project: projectId, + }); + const orchestrator = schemaManager.matchOrchestrator(project.type); + + // TODO: collect stats from a period between these two versions + const [schemasBefore, schemasAfter] = await Promise.all([ + injector.get(SchemaManager).getSchemasOfVersion({ + organization: organizationId, + project: projectId, + target: targetId, + version: selector.before, + }), + injector.get(SchemaManager).getSchemasOfVersion({ + organization: organizationId, + project: projectId, + target: targetId, + version: selector.after, + }), + ]); + + return Promise.all([ + orchestrator.build(schemasBefore.map(createSchemaObject)), + orchestrator.build(schemasAfter.map(createSchemaObject)), + ]).catch((reason) => { + if (reason instanceof SchemaBuildError) { + return Promise.resolve({ + message: reason.message, + }); + } + + return Promise.reject(reason); + }); + }, + async schemaCompareToPrevious(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const schemaManager = injector.get(SchemaManager); + const projectManager = injector.get(ProjectManager); + + const [organizationId, projectId, targetId] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + const project = await projectManager.getProject({ + organization: organizationId, + project: projectId, + 
}); + const orchestrator = schemaManager.matchOrchestrator(project.type); + + // TODO: collect stats from a period between these two versions + const [schemasBefore, schemasAfter] = await Promise.all([ + injector.get(SchemaManager).getSchemasOfPreviousVersion({ + organization: organizationId, + project: projectId, + target: targetId, + version: selector.version, + }), + injector.get(SchemaManager).getSchemasOfVersion({ + organization: organizationId, + project: projectId, + target: targetId, + version: selector.version, + }), + ]); + + return Promise.all([ + schemasBefore.length + ? orchestrator.build(schemasBefore.map(createSchemaObject)) + : null, + orchestrator.build(schemasAfter.map(createSchemaObject)), + ]).catch((reason) => { + if (reason instanceof SchemaBuildError) { + return Promise.resolve({ + message: reason.message, + }); + } + + return Promise.reject(reason); + }); + }, + async schemaVersions(_, { selector, after, limit }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(SchemaManager).getSchemaVersions({ + organization, + project, + target, + after, + limit, + }); + }, + async schemaVersion(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(SchemaManager).getSchemaVersion({ + organization, + project, + target, + version: selector.version, + }); + }, + async latestVersion(_, __, { injector }) { + const target = await injector.get(TargetManager).getTargetFromToken(); + + return injector.get(SchemaManager).getLatestValidVersion({ + organization: target.orgId, + 
project: target.projectId, + target: target.id, + }); + }, + async latestValidVersion(_, __, { injector }) { + const target = await injector.get(TargetManager).getTargetFromToken(); + + return injector.get(SchemaManager).getLatestValidVersion({ + organization: target.orgId, + project: target.projectId, + target: target.id, + }); + }, + }, + Target: { + latestSchemaVersion(target, _, { injector }) { + return injector.get(SchemaManager).getMaybeLatestVersion({ + target: target.id, + project: target.projectId, + organization: target.orgId, + }); + }, + baseSchema(target, _, { injector }) { + return injector.get(SchemaManager).getBaseSchema({ + target: target.id, + project: target.projectId, + organization: target.orgId, + }); + }, + hasSchema(target, _, { injector }) { + return injector.get(SchemaManager).hasSchema({ + target: target.id, + project: target.projectId, + organization: target.orgId, + }); + }, + }, + SchemaVersion: { + commit(version, _, { injector }) { + return injector.get(SchemaManager).getCommit({ + commit: version.commit, + organization: version.organization, + project: version.project, + target: version.target, + }); + }, + schemas(version, _, { injector }) { + return injector.get(SchemaManager).getCommits({ + version: version.id, + organization: version.organization, + project: version.project, + target: version.target, + }); + }, + async supergraph(version, _, { injector }) { + const project = await injector.get(ProjectManager).getProject({ + organization: version.organization, + project: version.project, + }); + + if (project.type !== ProjectType.FEDERATION) { + return null; + } + + const schemaManager = injector.get(SchemaManager); + const orchestrator = schemaManager.matchOrchestrator(project.type); + + const schemas = await schemaManager.getCommits({ + version: version.id, + organization: version.organization, + project: version.project, + target: version.target, + }); + + return orchestrator.supergraph(schemas.map(createSchemaObject)); + }, + 
async sdl(version, _, { injector }) { + const project = await injector.get(ProjectManager).getProject({ + organization: version.organization, + project: version.project, + }); + + const schemaManager = injector.get(SchemaManager); + const orchestrator = schemaManager.matchOrchestrator(project.type); + + const schemas = await schemaManager.getCommits({ + version: version.id, + organization: version.organization, + project: version.project, + target: version.target, + }); + + return (await orchestrator.build(schemas.map(createSchemaObject))).raw; + }, + async baseSchema(version) { + return version.base_schema || null; + }, + }, + SchemaCompareError: { + __isTypeOf(error) { + return 'message' in error; + }, + }, + SchemaCompareResult: { + __isTypeOf(obj) { + return Array.isArray(obj); + }, + initial([before]) { + return !before; + }, + changes([before, after], _, { injector }) { + if (!before) { + return []; + } + + return injector + .get(Inspector) + .diff(buildSchema(before), buildSchema(after)); + }, + diff([before, after]) { + return { + before: before ? 
before.raw : '', + after: after.raw, + }; + }, + }, + SchemaConnection: createConnection(), + SchemaVersionConnection: { + pageInfo(info) { + return { + hasMore: info.hasMore, + }; + }, + }, + SchemaChangeConnection: createConnection(), + SchemaErrorConnection: createConnection(), + SchemaCheckSuccess: { + __isTypeOf(obj) { + return obj.valid; + }, + }, + SchemaCheckError: { + __isTypeOf(obj) { + return !obj.valid; + }, + }, +}; diff --git a/packages/services/api/src/modules/shared/__tests__/crypto.spec.ts b/packages/services/api/src/modules/shared/__tests__/crypto.spec.ts new file mode 100644 index 000000000..0484307d9 --- /dev/null +++ b/packages/services/api/src/modules/shared/__tests__/crypto.spec.ts @@ -0,0 +1,44 @@ +import 'reflect-metadata'; +import { testkit } from 'graphql-modules'; +import { CryptoProvider, encryptionSecretProvider } from '../providers/crypto'; + +test('should decrypt encrypted value', () => { + const cryptoProvider = testkit + .testInjector([CryptoProvider, encryptionSecretProvider('secret')]) + .get(CryptoProvider); + const encrypted = cryptoProvider.encrypt('foo'); + + expect(cryptoProvider.decrypt(encrypted)).toBe('foo'); +}); + +test('should read raw value when decrypting (when possiblyRaw is enabled)', () => { + const cryptoProvider = testkit + .testInjector([CryptoProvider, encryptionSecretProvider('secret')]) + .get(CryptoProvider); + + expect(cryptoProvider.decrypt('foo', true)).toBe('foo'); +}); + +test('should NOT read raw value when decrypting', () => { + const cryptoProvider = testkit + .testInjector([CryptoProvider, encryptionSecretProvider('secret')]) + .get(CryptoProvider); + + expect(() => { + cryptoProvider.decrypt('foo'); + }).toThrow(); +}); + +test('should NOT decrypt value encrypted with different secret', () => { + const aCryptoProvider = testkit + .testInjector([CryptoProvider, encryptionSecretProvider('secret')]) + .get(CryptoProvider); + const bCryptoProvider = testkit + .testInjector([CryptoProvider, 
encryptionSecretProvider('other-secret')]) + .get(CryptoProvider); + + const encrypted = aCryptoProvider.encrypt('a'); + expect(() => { + bCryptoProvider.decrypt(encrypted); + }).toThrow(); +}); diff --git a/packages/services/api/src/modules/shared/index.ts b/packages/services/api/src/modules/shared/index.ts new file mode 100644 index 000000000..370b146b2 --- /dev/null +++ b/packages/services/api/src/modules/shared/index.ts @@ -0,0 +1,10 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; + +export const sharedModule = createModule({ + id: 'shared', + dirname: __dirname, + typeDefs, + resolvers, +}); diff --git a/packages/services/api/src/modules/shared/module.graphql.ts b/packages/services/api/src/modules/shared/module.graphql.ts new file mode 100644 index 000000000..924c92751 --- /dev/null +++ b/packages/services/api/src/modules/shared/module.graphql.ts @@ -0,0 +1,19 @@ +import { gql } from 'graphql-modules'; + +export default gql` + scalar DateTime + scalar JSON + scalar SafeInt + + type Query { + noop: Boolean + } + + type Mutation { + noop: Boolean + } + + type PageInfo { + hasMore: Boolean! 
+ } +`; diff --git a/packages/services/api/src/modules/shared/providers/crypto.ts b/packages/services/api/src/modules/shared/providers/crypto.ts new file mode 100644 index 000000000..3b2e5e59b --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/crypto.ts @@ -0,0 +1,56 @@ +import { Injectable, Scope, InjectionToken, Inject } from 'graphql-modules'; +import crypto from 'crypto'; + +const ALG = 'aes256'; +const IN_ENC = 'utf8'; +const OUT_ENC = 'hex'; +const IV = 16; + +const ENCRYPTION_SECRET = new InjectionToken('ENCRYPTION_SECRET'); + +export function encryptionSecretProvider(value: string) { + return { + provide: ENCRYPTION_SECRET, + useValue: crypto.createHash('md5').update(value).digest('hex'), + scope: Scope.Singleton, + }; +} + +@Injectable({ + scope: Scope.Singleton, +}) +export class CryptoProvider { + constructor(@Inject(ENCRYPTION_SECRET) private encryptionSecret: string) {} + + encrypt(text: string) { + const secretBuffer = Buffer.from(this.encryptionSecret, 'latin1'); + const iv = crypto.randomBytes(IV); + const cipher = crypto.createCipheriv(ALG, secretBuffer, iv); + const ciphered = + cipher.update(text, IN_ENC, OUT_ENC) + cipher.final(OUT_ENC); + return iv.toString(OUT_ENC) + ':' + ciphered; + } + + decrypt(text: string, possiblyRaw?: boolean) { + if (possiblyRaw) { + // The result of `encrypt()` is `:` + // We're looking for this pattern here. + // If it has more than 32 characters and `:` after 32 chars, it's encrypted. 
+ const isEncrypted = text.length > 32 && text.indexOf(':') === 32; + + if (!isEncrypted) { + return text; + } + } + + const secretBuffer = Buffer.from(this.encryptionSecret, 'latin1'); + const components = text.split(':'); + const iv = Buffer.from(components.shift() || '', OUT_ENC); + const decipher = crypto.createDecipheriv(ALG, secretBuffer, iv); + + return ( + decipher.update(components.join(':'), OUT_ENC, IN_ENC) + + decipher.final(IN_ENC) + ); + } +} diff --git a/packages/services/api/src/modules/shared/providers/http-client.ts b/packages/services/api/src/modules/shared/providers/http-client.ts new file mode 100644 index 000000000..8bf085384 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/http-client.ts @@ -0,0 +1,83 @@ +import { Injectable } from 'graphql-modules'; +import { got, TimeoutError, HTTPError } from 'got'; +import type { OptionsOfJSONResponseBody } from 'got'; +import * as Sentry from '@sentry/node'; +import type { Span } from '@sentry/types'; + +interface HttpClientOptions extends OptionsOfJSONResponseBody { + method: 'GET' | 'POST' | 'DELETE' | 'PUT'; + context?: { + description?: string; + }; +} + +type HttpOptions = Omit< + HttpClientOptions, + 'method' | 'throwHttpErrors' | 'resolveBodyOnly' +>; + +@Injectable() +export class HttpClient { + get(url: string, opts: HttpOptions, span?: Span): Promise { + return this.request(url, { ...opts, method: 'GET' }, span); + } + post(url: string, opts: HttpOptions = {}, span?: Span): Promise { + return this.request(url, { ...opts, method: 'POST' }, span); + } + put(url: string, opts: HttpOptions = {}, span?: Span): Promise { + return this.request(url, { ...opts, method: 'PUT' }, span); + } + delete(url: string, opts: HttpOptions, span?: Span): Promise { + return this.request(url, { ...opts, method: 'DELETE' }, span); + } + + private request( + url: string, + opts: HttpClientOptions, + upstreamSpan?: Span + ) { + const parentSpan = + upstreamSpan ?? 
Sentry.getCurrentHub().getScope()?.getSpan(); + const span = parentSpan?.startChild({ + op: 'HttpClient', + description: opts?.context?.description ?? `${opts.method} ${url}`, + }); + + const request = got(url, { + ...opts, + throwHttpErrors: true, + }); + + if (!span) { + return request.then((response) => response.body); + } + + return request.then( + (response) => { + span.setHttpStatus(response.statusCode); + + if (typeof response.headers['x-cache'] !== 'undefined') { + span.setTag('cache', response.headers['x-cache'] as string); + } + + span.finish(); + return Promise.resolve(response.body); + }, + (error) => { + console.log('HttpClient.request error', error); + console.error(error); + Sentry.captureException(error); + + if (error instanceof HTTPError) { + span.setHttpStatus(error.response.statusCode); + } + + span.setStatus( + error instanceof TimeoutError ? 'deadline_exceeded' : 'internal_error' + ); + span.finish(); + return Promise.reject(error); + } + ); + } +} diff --git a/packages/services/api/src/modules/shared/providers/id-translator.ts b/packages/services/api/src/modules/shared/providers/id-translator.ts new file mode 100644 index 000000000..07781ea89 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/id-translator.ts @@ -0,0 +1,75 @@ +import { Injectable, Scope } from 'graphql-modules'; +import type { + OrganizationSelector, + ProjectSelector, + TargetSelector, + PersistedOperationSelector, +} from './storage'; +import { Storage } from './storage'; +import { cache, filterSelector } from '../../../shared/helpers'; +import { Logger } from './logger'; + +@Injectable({ + scope: Scope.Operation, +}) +export class IdTranslator { + private logger: Logger; + constructor(private storage: Storage, logger: Logger) { + this.logger = logger.child({ service: 'IdTranslator' }); + } + + @cache((selector) => selector.organization) + translateOrganizationId(selector: OrganizationSelector) { + this.logger.debug( + 'Translating Organization Clean ID 
(selector=%o)', + filterSelector('organization', selector) + ); + return this.storage.getOrganizationId(selector); + } + + @cache((selector) => + [selector.organization, selector.project].join(',') + ) + translateProjectId(selector: ProjectSelector) { + this.logger.debug( + 'Translating Project Clean ID (selector=%o)', + filterSelector('project', selector) + ); + return this.storage.getProjectId(selector); + } + + @cache< + TargetSelector & { + useIds?: boolean; + } + >((selector) => + [ + selector.organization, + selector.project, + selector.target, + selector.useIds, + ].join(',') + ) + translateTargetId( + selector: TargetSelector & { + useIds?: boolean; + } + ) { + this.logger.debug( + 'Translating Target Clean ID (selector=%o)', + filterSelector('target', selector) + ); + return this.storage.getTargetId(selector); + } + + @cache((selector) => + [selector.organization, selector.project, selector.operation].join(',') + ) + translatePersistedOperationHash(selector: PersistedOperationSelector) { + this.logger.debug( + 'Translating Persisted Operation Hash (selector=%o)', + filterSelector('persistedOperation', selector) + ); + return this.storage.getPersistedOperationId(selector); + } +} diff --git a/packages/services/api/src/modules/shared/providers/idempotent-runner.ts b/packages/services/api/src/modules/shared/providers/idempotent-runner.ts new file mode 100644 index 000000000..cf620c6d8 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/idempotent-runner.ts @@ -0,0 +1,262 @@ +import { Injectable, Scope, Inject } from 'graphql-modules'; +import type { Span } from '@sentry/types'; +import { REDIS_INSTANCE } from '../../shared/providers/redis'; +import type { Redis } from '../../shared/providers/redis'; +import { Logger } from '../../shared/providers/logger'; +import { uuid } from '../../../shared/helpers'; + +export enum JobStatus { + PENDING = 'PENDING', + COMPLETED = 'COMPLETED', +} + +export interface JobPending { + status: 
JobStatus.PENDING; +} + +export interface JobCompleted { + status: JobStatus.COMPLETED; + payload: T; +} + +export interface JobExecutorContext { + span?: Span; + attempt: number; +} + +@Injectable({ + scope: Scope.Operation, +}) +export class IdempotentRunner { + private logger: Logger; + constructor(logger: Logger, @Inject(REDIS_INSTANCE) private redis: Redis) { + this.logger = logger.child({ service: 'IdempotentRunner' }); + } + + async run({ + identifier, + executor, + span, + ttl, + }: { + identifier: string; + executor: (context: JobExecutorContext) => Promise; + /** + * In seconds + */ + ttl: number; + span?: Span; + }): Promise { + const traceId = uuid(); + this.logger.debug( + 'Running idempotent job (id=%s, traceId=%s)', + identifier, + traceId + ); + return this.start({ + identifier, + traceId, + executor, + ttl, + context: { + span, + attempt: 1, + }, + }); + } + + private async set( + identifier: string, + job: JobPending, + ttl: number + ): Promise; + private async set( + identifier: string, + job: JobCompleted, + ttl: number + ): Promise; + private async set( + identifier: string, + job: JobPending | JobCompleted, + ttl: number + ): Promise { + if (job.status === JobStatus.PENDING) { + // SET if Not eXists + const inserted = await this.redis.setnx(identifier, JSON.stringify(job)); + + if (inserted) { + // expire if inserted + await this.redis.expire(identifier, ttl); + } + + return inserted === 1; + } + + // remove the key and set + expire + await this.redis.setex(identifier, ttl, JSON.stringify(job)); + return true; + } + + private async get(identifier: string): Promise; + private async get(identifier: string): Promise; + private async get(identifier: string): Promise>; + private async get( + identifier: string + ): Promise> { + const cached = await this.redis.get(identifier); + + if (cached) { + return JSON.parse(cached); + } + + return null; + } + + private async del(identifier: string): Promise { + const result = await this.redis.del(identifier); 
+ return result === 1; + } + + private async start({ + identifier, + traceId, + executor, + ttl, + context, + }: { + identifier: string; + traceId: string; + executor: (context: JobExecutorContext) => Promise; + /** + * In seconds + */ + ttl: number; + context: JobExecutorContext; + }): Promise { + this.logger.debug( + 'Starting new job (id=%s, traceId=%s, attempt=%s)', + identifier, + traceId, + context.attempt + ); + if (context.attempt > 3) { + this.logger.error( + 'Job failed after 3 attempts (id=%s, traceId=%s, attempt=%s)', + identifier, + traceId, + context.attempt + ); + throw new Error(`Job failed after 3 attempts`); + } + + let job = await this.get(identifier); + + if (!job) { + const created = await this.set( + identifier, + { + status: JobStatus.PENDING, + }, + ttl + ); + + if (!created) { + this.logger.debug( + 'Job is pending (id=%s, traceId=%s)', + identifier, + traceId + ); + context.attempt++; + return this.start({ + identifier, + traceId, + executor, + context: { + span: context.span?.startChild({ + op: `Attempt #${context.attempt}`, + }), + attempt: context.attempt, + }, + ttl, + }); + } + + this.logger.debug( + 'Executing job (id=%s, traceId=%s, attempt=%s)', + identifier, + traceId, + context.attempt + ); + const payload = await executor(context).catch(async (error) => { + this.logger.debug( + 'Job execution failed (id=%s, traceId=%s, error=%s)', + identifier, + traceId, + error.message + ); + console.error(error); + await this.del(identifier); + return await Promise.reject(error); + }); + await this.set( + identifier, + { + status: JobStatus.COMPLETED, + payload, + }, + ttl + ); + this.logger.debug( + 'Job completed (id=%s, traceId=%s)', + identifier, + traceId + ); + + return payload; + } + + const startedAt = Date.now(); + while (job && job.status !== JobStatus.COMPLETED) { + this.logger.debug( + 'Awaiting job (id=%s, traceId=%s, time=%s)', + identifier, + traceId, + Date.now() - startedAt + ); + await new Promise((resolve) => 
setTimeout(resolve, 500)); + job = await this.get(identifier); + } + + if (!job) { + this.logger.debug( + 'Job not found, probably failed to complete (id=%s, traceId=%s, attempt=%s)', + identifier, + traceId, + context.attempt + ); + + context.attempt++; + return this.start({ + identifier, + traceId, + executor, + context: { + span: context.span?.startChild({ + op: `Attempt #${context.attempt}`, + }), + attempt: context.attempt, + }, + ttl, + }); + } + + this.logger.debug( + 'Resolving the runner (id=%s, traceId=%s, attempt=%s, status=%s)', + identifier, + traceId, + context.attempt, + job.status + ); + return job.payload; + } +} diff --git a/packages/services/api/src/modules/shared/providers/logger.ts b/packages/services/api/src/modules/shared/providers/logger.ts new file mode 100644 index 000000000..3cf21e1b2 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/logger.ts @@ -0,0 +1,21 @@ +import { Injectable } from 'graphql-modules'; + +export type LogFn = (msg: string, ...args: unknown[]) => void; + +function notImplemented(method: string) { + return () => { + throw new Error(`Method Logger.${method} not implemented`); + }; +} + +@Injectable() +export class Logger { + info: LogFn = notImplemented('info'); + warn: LogFn = notImplemented('warn'); + error: LogFn = notImplemented('error'); + fatal: LogFn = notImplemented('fatal'); + trace: LogFn = notImplemented('trace'); + debug: LogFn = notImplemented('debug'); + child: (bindings: Record) => Logger = + notImplemented('child'); +} diff --git a/packages/services/api/src/modules/shared/providers/message-bus.ts b/packages/services/api/src/modules/shared/providers/message-bus.ts new file mode 100644 index 000000000..0e0738f31 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/message-bus.ts @@ -0,0 +1,20 @@ +import { Injectable, Scope } from 'graphql-modules'; +import Emittery from 'emittery'; + +@Injectable({ + scope: Scope.Operation, +}) +export class MessageBus { + private 
emitter = new Emittery(); + + async on( + event: string, + listener: (payload: TPayload) => Promise + ) { + this.emitter.on(event, listener); + } + + emit(event: string, payload: TPayload) { + return this.emitter.emitSerial(event, payload); + } +} diff --git a/packages/services/api/src/modules/shared/providers/redis.ts b/packages/services/api/src/modules/shared/providers/redis.ts new file mode 100644 index 000000000..90da4ac28 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/redis.ts @@ -0,0 +1,64 @@ +import { InjectionToken } from 'graphql-modules'; +import Redis from 'ioredis'; +import type { FactoryProvider } from 'graphql-modules'; +import type { Redis as RedisInstance } from 'ioredis'; +import type { RedisOptions } from 'ioredis'; +import { Logger } from './logger'; + +export type { RedisInstance as Redis }; + +export type RedisConfig = Required< + Pick +>; + +export const REDIS_CONFIG = new InjectionToken('REDIS_CONFIG'); +export const REDIS_INSTANCE = new InjectionToken( + 'REDIS_INSTANCE' +); + +export const RedisProvider: FactoryProvider = { + provide: REDIS_INSTANCE, + useFactory(config: RedisConfig, mainLogger: Logger) { + const logger = mainLogger.child({ + source: 'Redis', + }); + const redis = new Redis({ + host: config.host, + port: config.port, + password: config.password, + retryStrategy(times) { + return Math.min(times * 500, 2000); + }, + reconnectOnError(error) { + logger.warn('Redis reconnectOnError', error); + return 1; + }, + db: 0, + maxRetriesPerRequest: null, + enableReadyCheck: false, + }); + + redis.on('error', (err) => { + logger.error(err); + }); + + redis.on('connect', () => { + logger.debug('Redis connection established'); + }); + + redis.on('ready', () => { + logger.info('Redis connection ready'); + }); + + redis.on('close', () => { + logger.info('Redis connection closed'); + }); + + redis.on('reconnecting', (timeToReconnect) => { + logger.info('Redis reconnecting in %s', timeToReconnect); + }); + + return 
redis; + }, + deps: [REDIS_CONFIG, Logger], +}; diff --git a/packages/services/api/src/modules/shared/providers/storage.ts b/packages/services/api/src/modules/shared/providers/storage.ts new file mode 100644 index 000000000..70ca19848 --- /dev/null +++ b/packages/services/api/src/modules/shared/providers/storage.ts @@ -0,0 +1,407 @@ +import { Injectable } from 'graphql-modules'; +import type { NullableAndPartial } from '../../../shared/helpers'; +import type { + Member, + Organization, + PersistedOperation, + Project, + Schema, + SchemaVersion, + Target, + User, + ActivityObject, + TargetSettings, + AlertChannel, + Alert, + OrganizationBilling, +} from '../../../shared/entities'; +import type { CustomOrchestratorConfig } from '../../schema/providers/orchestrators/custom'; +import type { + AddAlertChannelInput, + AddAlertInput, +} from '../../../__generated__/types'; +import type { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import type { ProjectAccessScope } from '../../auth/providers/project-access'; +import type { TargetAccessScope } from '../../auth/providers/target-access'; + +type Paginated = T & { + after?: string | null; + limit: number; +}; + +export interface OrganizationSelector { + organization: string; +} + +export interface ProjectSelector extends OrganizationSelector { + project: string; +} + +export interface TargetSelector extends ProjectSelector { + target: string; +} + +export interface PersistedOperationSelector extends ProjectSelector { + operation: string; +} + +export interface Storage { + getUserByExternalId(_: { external: string }): Promise; + getUserById(_: { id: string }): Promise; + + createUser(_: { email: string; external: string }): Promise; + updateUser(_: { + id: string; + fullName: string; + displayName: string; + }): Promise; + + getOrganizationId(_: OrganizationSelector): Promise; + getOrganizationByInviteCode(_: { + inviteCode: string; + }): Promise; + getOrganizationByCleanId(_: { + cleanId: 
string; + }): Promise; + getOrganizationByGitHubInstallationId(_: { + installationId: string; + }): Promise; + getOrganization(_: OrganizationSelector): Promise; + getMyOrganization(_: { user: string }): Promise; + getOrganizations(_: { + user: string; + }): Promise; + createOrganization( + _: Pick & { + user: string; + scopes: ReadonlyArray< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >; + } + ): Promise; + deleteOrganization(_: OrganizationSelector): Promise; + updateOrganizationName( + _: OrganizationSelector & Pick & { user: string } + ): Promise; + updateOrganizationPlan( + _: OrganizationSelector & Pick + ): Promise; + updateOrganizationRateLimits( + _: OrganizationSelector & Pick + ): Promise; + + updateOrganizationInviteCode( + _: OrganizationSelector & Pick + ): Promise; + getOrganizationMembers( + _: OrganizationSelector + ): Promise; + getOrganizationOwner(_: OrganizationSelector): Promise; + getOrganizationOwner(_: OrganizationSelector): Promise; + getOrganizationMember( + _: OrganizationSelector & { user: string } + ): Promise; + getOrganizationMemberAccessPairs( + _: readonly (OrganizationSelector & { user: string })[] + ): Promise< + ReadonlyArray< + ReadonlyArray< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + > + > + >; + hasOrganizationMemberPairs( + _: readonly (OrganizationSelector & { user: string })[] + ): Promise; + hasOrganizationProjectMemberPairs( + _: readonly (ProjectSelector & { user: string })[] + ): Promise; + addOrganizationMember( + _: OrganizationSelector & { + user: string; + scopes: ReadonlyArray< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >; + } + ): Promise; + deleteOrganizationMembers( + _: OrganizationSelector & { users: readonly string[] } + ): Promise; + updateOrganizationMemberAccess( + _: OrganizationSelector & { + user: string; + scopes: ReadonlyArray< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >; + } + ): Promise; + + 
getPersistedOperationId( + _: PersistedOperationSelector + ): Promise; + + getProject(_: ProjectSelector): Promise; + getProjectId(_: ProjectSelector): Promise; + getProjectByCleanId( + _: { cleanId: string } & OrganizationSelector + ): Promise; + getProjects(_: OrganizationSelector): Promise; + createProject( + _: Pick & + NullableAndPartial & + OrganizationSelector + ): Promise; + deleteProject(_: ProjectSelector): Promise; + updateProjectName( + _: ProjectSelector & Pick & { user: string } + ): Promise; + updateProjectGitRepository( + _: ProjectSelector & Pick + ): Promise; + + getTargetId( + _: TargetSelector & { useIds?: boolean } + ): Promise; + getTargetByCleanId( + _: { + cleanId: string; + } & ProjectSelector + ): Promise; + createTarget( + _: Pick & ProjectSelector + ): Promise; + updateTargetName( + _: TargetSelector & Pick & { user: string } + ): Promise; + deleteTarget(_: TargetSelector): Promise; + getTarget(_: TargetSelector): Promise; + getTargets(_: ProjectSelector): Promise; + getTargetSettings(_: TargetSelector): Promise; + setTargetValidation( + _: TargetSelector & { enabled: boolean } + ): Promise; + updateTargetValidationSettings( + _: TargetSelector & Omit + ): Promise; + + hasSchema(_: TargetSelector): Promise; + getLatestSchemas( + _: { + version?: string; + } & TargetSelector + ): Promise< + | { + schemas: readonly Schema[]; + version?: string; + } + | never + >; + getLatestValidVersion(_: TargetSelector): Promise; + getMaybeLatestValidVersion( + _: TargetSelector + ): Promise; + getLatestVersion(_: TargetSelector): Promise; + getMaybeLatestVersion(_: TargetSelector): Promise; + + getSchemasOfVersion( + _: { + version: string; + includeMetadata?: boolean; + } & TargetSelector + ): Promise; + getSchemasOfPreviousVersion( + _: { + version: string; + } & TargetSelector + ): Promise; + getVersions(_: Paginated): Promise< + | { + versions: readonly SchemaVersion[]; + hasMore: boolean; + } + | never + >; + getVersion( + _: TargetSelector & { 
version: string } + ): Promise; + + updateSchemaUrlOfVersion( + _: TargetSelector & { version: string; url?: string | null; commit: string } + ): Promise; + updateServiceName( + _: TargetSelector & { commit: string; name: string } + ): Promise; + + insertSchema( + _: { + schema: string; + commit: string; + author: string; + service?: string | null; + url?: string | null; + metadata: string | null; + } & TargetSelector + ): Promise; + + createVersion( + _: { + valid: boolean; + url?: string | null; + commit: string; + commits: string[]; + base_schema: string | null; + } & TargetSelector + ): Promise; + + updateVersionStatus( + _: { + valid: boolean; + version: string; + } & TargetSelector + ): Promise; + + getSchema(_: { commit: string; target: string }): Promise; + + getSchemaPushCount(_: { + targetIds: string[]; + startTime: Date; + endTime: Date; + }): Promise; + + getAllSchemaPushesGrouped(_: { startTime: Date; endTime: Date }): Promise< + { + total: number; + target: string; + }[] + >; + + getMaybeSchema( + _: { + commit: string; + service?: string | null; + } & TargetSelector + ): Promise; + + createActivity( + _: { + user: string; + type: string; + meta: object; + } & OrganizationSelector & + Partial> + ): Promise; + + getActivities( + _: (OrganizationSelector | ProjectSelector | TargetSelector) & { + limit: number; + } + ): Promise; + + getPersistedOperations( + _: ProjectSelector + ): Promise; + + getSelectedPersistedOperations( + _: ProjectSelector & { hashes: readonly string[] } + ): Promise; + + comparePersistedOperations( + _: ProjectSelector & { + hashes: readonly string[]; + } + ): Promise; + + getPersistedOperation( + _: PersistedOperationSelector + ): Promise; + + insertPersistedOperation( + _: { + operationHash: string; + name: string; + kind: string; + content: string; + } & ProjectSelector + ): Promise; + + deletePersistedOperation( + _: PersistedOperationSelector + ): Promise; + + addSlackIntegration( + _: OrganizationSelector & { token: string 
} + ): Promise; + deleteSlackIntegration(_: OrganizationSelector): Promise; + getSlackIntegrationToken( + _: OrganizationSelector + ): Promise; + + addGitHubIntegration( + _: OrganizationSelector & { installationId: string } + ): Promise; + deleteGitHubIntegration(_: OrganizationSelector): Promise; + getGitHubIntegrationInstallationId( + _: OrganizationSelector + ): Promise; + + addAlertChannel(_: AddAlertChannelInput): Promise; + deleteAlertChannels( + _: ProjectSelector & { + channels: readonly string[]; + } + ): Promise; + getAlertChannels(_: ProjectSelector): Promise; + + addAlert(_: AddAlertInput): Promise; + deleteAlerts( + _: ProjectSelector & { + alerts: readonly string[]; + } + ): Promise; + getAlerts(_: ProjectSelector): Promise; + + adminGetStats(daysLimit?: number | null): Promise< + ReadonlyArray<{ + organization: Organization; + versions: number; + users: number; + projects: number; + targets: number; + persistedOperations: number; + daysLimit?: number | null; + }> + >; + + adminGetOrganizationsTargetPairs(): Promise< + ReadonlyArray<{ + organization: string; + target: string; + }> + >; + + getGetOrganizationsAndTargetPairsWithLimitInfo(): Promise< + ReadonlyArray<{ + organization: string; + target: string; + limit_operations_monthly: number; + limit_schema_push_monthly: number; + limit_retention_days: number; + }> + >; + + getBillingParticipants(): Promise>; + getOrganizationBilling( + _: OrganizationSelector + ): Promise; + deleteOrganizationBilling(_: OrganizationSelector): Promise; + + createOrganizationBilling( + _: OrganizationBilling + ): Promise; + + getBaseSchema(_: TargetSelector): Promise; + updateBaseSchema(_: TargetSelector, base: string | null): Promise; +} + +@Injectable() +export class Storage implements Storage {} diff --git a/packages/services/api/src/modules/shared/providers/tracking.ts b/packages/services/api/src/modules/shared/providers/tracking.ts new file mode 100644 index 000000000..21b10ded4 --- /dev/null +++ 
b/packages/services/api/src/modules/shared/providers/tracking.ts @@ -0,0 +1,33 @@ +import { Injectable, Scope } from 'graphql-modules'; +import * as Sentry from '@sentry/node'; +import { AuthManager } from '../../auth/providers/auth-manager'; +import { track } from '../../../shared/mixpanel'; + +@Injectable({ + scope: Scope.Operation, +}) +export class Tracking { + constructor(private authManager: AuthManager) {} + + async track(event: { + event: string; + data?: Record; + user?: { + id: string; + externalAuthUserId: string; + }; + }) { + try { + track({ + event: event.event, + distinct_id: + event.user?.externalAuthUserId ?? + (await this.authManager.getUserIdForTracking()), + data: event.data, + }); + } catch (error) { + console.log('Tracking.track error', error); + Sentry.captureException(error); + } + } +} diff --git a/packages/services/api/src/modules/shared/resolvers.ts b/packages/services/api/src/modules/shared/resolvers.ts new file mode 100644 index 000000000..8bd19152e --- /dev/null +++ b/packages/services/api/src/modules/shared/resolvers.ts @@ -0,0 +1,25 @@ +import { + DateTimeResolver, + JSONResolver, + SafeIntResolver, +} from 'graphql-scalars'; +import type { SharedModule } from './__generated__/types'; + +// Remove descriptions from resolvers +// `scalar JSON` in `module.graphql.ts` does not have a description +// and it messes up the static analysis +JSONResolver.description = undefined; +DateTimeResolver.description = undefined; +SafeIntResolver.description = undefined; + +export const resolvers: SharedModule.Resolvers = { + DateTime: DateTimeResolver, + JSON: JSONResolver, + SafeInt: SafeIntResolver, + Query: { + noop: () => true, + }, + Mutation: { + noop: () => true, + }, +}; diff --git a/packages/services/api/src/modules/target/index.ts b/packages/services/api/src/modules/target/index.ts new file mode 100644 index 000000000..04e7ae4f3 --- /dev/null +++ b/packages/services/api/src/modules/target/index.ts @@ -0,0 +1,12 @@ +import { createModule } 
import { gql } from 'graphql-modules';

// GraphQL schema for the `target` module: CRUD on targets plus the
// schema-validation settings attached to a target. Selector inputs carry
// clean ids (slugs); resolvers translate them to internal ids via
// IdTranslator before calling TargetManager.
// NOTE(review): no comments are added inside the template literal — the SDL
// is a runtime string and sibling modules strip scalar descriptions for
// static analysis.
export default gql`
  extend type Query {
    target(selector: TargetSelectorInput!): Target
    targets(selector: ProjectSelectorInput!): TargetConnection!
    targetSettings(selector: TargetSelectorInput!): TargetSettings!
  }

  extend type Mutation {
    createTarget(input: CreateTargetInput!): CreateTargetPayload!
    updateTargetName(input: UpdateTargetNameInput!): UpdateTargetPayload!
    deleteTarget(selector: TargetSelectorInput!): DeleteTargetPayload!
    updateTargetValidationSettings(
      input: UpdateTargetValidationSettingsInput!
    ): TargetValidationSettings!
    setTargetValidation(
      input: SetTargetValidationInput!
    ): TargetValidationSettings!
  }

  input TargetSelectorInput {
    organization: ID!
    project: ID!
    target: ID!
  }

  input UpdateTargetValidationSettingsInput {
    organization: ID!
    project: ID!
    target: ID!
    period: Int!
    percentage: Float!
    targets: [ID!]!
  }

  input SetTargetValidationInput {
    organization: ID!
    project: ID!
    target: ID!
    enabled: Boolean!
  }

  type TargetSelector {
    organization: ID!
    project: ID!
    target: ID!
  }

  extend type Project {
    targets: TargetConnection!
  }

  type TargetConnection {
    nodes: [Target!]!
    total: Int!
  }

  type Target {
    id: ID!
    cleanId: ID!
    name: String!
  }

  type TargetSettings {
    id: ID!
    validation: TargetValidationSettings!
  }

  type TargetValidationSettings {
    id: ID!
    enabled: Boolean!
    period: Int!
    percentage: Float!
    targets: [Target!]!
  }

  input CreateTargetInput {
    organization: ID!
    project: ID!
    name: String!
  }

  input UpdateTargetNameInput {
    organization: ID!
    project: ID!
    target: ID!
    name: String!
  }

  type CreateTargetPayload {
    selector: TargetSelector!
    createdTarget: Target!
  }

  type UpdateTargetPayload {
    selector: TargetSelector!
    updatedTarget: Target!
  }

  type DeleteTargetPayload {
    selector: TargetSelector!
    deletedTarget: Target!
  }
`;
+ */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class TargetManager { + private logger: Logger; + + constructor( + logger: Logger, + private storage: Storage, + private tokenStorage: TokenStorage, + private authManager: AuthManager, + private activityManager: ActivityManager, + private tracking: Tracking + ) { + this.logger = logger.child({ source: 'TargetManager' }); + } + + async createTarget({ + name, + project, + organization, + }: { + name: string; + } & ProjectSelector): Promise { + this.logger.info( + 'Creating a target (name=%s, project=%s, organization=%s)', + name, + project, + organization + ); + await this.authManager.ensureProjectAccess({ + project, + organization, + scope: ProjectAccessScope.READ, + }); + + let cleanId = paramCase(name); + + if ( + await this.storage.getTargetByCleanId({ cleanId, project, organization }) + ) { + cleanId = paramCase(`${name}-${uuid(4)}`); + } + + // create target + const target = await this.storage.createTarget({ + name, + cleanId, + project, + organization, + }); + + await this.activityManager.create({ + type: 'TARGET_CREATED', + selector: { + organization, + project, + target: target.id, + }, + }); + + return target; + } + + async deleteTarget({ + organization, + project, + target, + }: TargetSelector): Promise { + this.logger.info( + 'Deleting a target (target=%s, project=%s, organization=%s)', + target, + project, + organization + ); + await this.authManager.ensureTargetAccess({ + project, + organization, + target, + scope: TargetAccessScope.DELETE, + }); + + // create target + const [result] = await Promise.all([ + this.storage.deleteTarget({ + target, + project, + organization, + }), + this.tokenStorage.invalidateTarget({ + target, + project, + organization, + }), + ]); + + await this.activityManager.create({ + type: 'TARGET_DELETED', + selector: { + organization, + project, + }, + meta: { + name: result.name, + cleanId: result.cleanId, + }, + }); + + return result; + } + + async 
getTargets(selector: ProjectSelector): Promise { + this.logger.debug('Fetching targets (selector=%o)', selector); + await this.authManager.ensureProjectAccess({ + ...selector, + scope: ProjectAccessScope.READ, + }); + return this.storage.getTargets(selector); + } + + async getTarget(selector: TargetSelector): Promise { + this.logger.debug('Fetching target (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.READ, + }); + return this.storage.getTarget(selector); + } + + getTargetIdByToken: () => Promise = share(async () => { + const token = this.authManager.ensureApiToken(); + const { target } = await this.tokenStorage.getToken({ token }); + + return target; + }); + + getTargetFromToken: () => Promise = share(async () => { + const token = this.authManager.ensureApiToken(); + const { target, project, organization } = await this.tokenStorage.getToken({ + token, + }); + + await this.authManager.ensureTargetAccess({ + organization, + project, + target, + scope: TargetAccessScope.READ, + }); + + return this.storage.getTarget({ + organization, + project, + target, + }); + }); + + async getTargetSettings(selector: TargetSelector): Promise { + this.logger.debug('Fetching target settings (selector=%o)', selector); + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.READ, + }); + + return this.storage.getTargetSettings(selector); + } + + async setTargetValidaton( + input: { + enabled: boolean; + } & TargetSelector + ): Promise { + this.logger.debug('Setting target validation (input=%o)', input); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.SETTINGS, + }); + + await this.tracking.track({ + event: input.enabled + ? 
'TARGET_VALIDATION_ENABLED' + : 'TARGET_VALIDATION_DISABLED', + data: { + ...input, + }, + }); + + return this.storage.setTargetValidation(input); + } + + async updateTargetValidatonSettings( + input: Omit & TargetSelector + ): Promise { + this.logger.debug('Updating target validation settings (input=%o)', input); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.SETTINGS, + }); + + await this.tracking.track({ + event: 'TARGET_VALIDATION_UPDATED', + data: { + ...input, + }, + }); + + if (input.targets.length === 0) { + throw new HiveError(`No targets specified. Required at least one target`); + } + + // TODO: validation of percentage (0 - 100) and period (1 - 30) + return this.storage.updateTargetValidationSettings(input); + } + + async updateName( + input: { + name: string; + } & TargetSelector + ): Promise { + const { name, organization, project, target } = input; + this.logger.info('Updating a target name (input=%o)', input); + await this.authManager.ensureTargetAccess({ + ...input, + scope: TargetAccessScope.SETTINGS, + }); + const user = await this.authManager.getCurrentUser(); + + const result = await this.storage.updateTargetName({ + name, + organization, + project, + target, + user: user.id, + }); + + await this.activityManager.create({ + type: 'TARGET_NAME_UPDATED', + selector: { + organization, + project, + target, + }, + meta: { + value: name, + }, + }); + + return result; + } +} diff --git a/packages/services/api/src/modules/target/resolvers.ts b/packages/services/api/src/modules/target/resolvers.ts new file mode 100644 index 000000000..ca3bc376a --- /dev/null +++ b/packages/services/api/src/modules/target/resolvers.ts @@ -0,0 +1,231 @@ +import type { TargetModule } from './__generated__/types'; +import { createConnection } from '../../shared/schema'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { TargetManager } from './providers/target-manager'; + +export const resolvers: 
TargetModule.Resolvers = { + Query: { + async target(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(TargetManager).getTarget({ + organization, + target, + project, + }); + }, + async targets(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + ]); + + return injector.get(TargetManager).getTargets({ + organization, + project, + }); + }, + async targetSettings(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + const targetManager = injector.get(TargetManager); + + const settings = await targetManager.getTargetSettings({ + organization, + project, + target, + }); + + const id = target; + + return { + id, + validation: { + id, + ...settings.validation, + targets: await Promise.all( + settings.validation.targets.map((tid) => + targetManager.getTarget({ + organization, + project, + target: tid, + }) + ) + ), + }, + }; + }, + }, + Mutation: { + async createTarget(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project] = await Promise.all([ + translator.translateOrganizationId({ + organization: input.organization, + }), + translator.translateProjectId({ + organization: input.organization, + project: input.project, + }), + ]); + const target = await injector.get(TargetManager).createTarget({ + organization, + project, + name: input.name, + }); + return { + 
    // Deletes a target identified by a clean-id selector.
    async deleteTarget(_, { selector }, { injector }) {
      const translator = injector.get(IdTranslator);
      // Translate clean ids to internal ids in parallel.
      const [organizationId, projectId, targetId] = await Promise.all([
        translator.translateOrganizationId({
          organization: selector.organization,
        }),
        translator.translateProjectId({
          organization: selector.organization,
          project: selector.project,
        }),
        translator.translateTargetId({
          organization: selector.organization,
          project: selector.project,
          target: selector.target,
        }),
      ]);
      const target = await injector.get(TargetManager).deleteTarget({
        organization: organizationId,
        project: projectId,
        target: targetId,
      });
      return {
        // NOTE(review): unlike createTarget/updateTargetName, which echo the
        // clean ids from the input, this selector carries the TRANSLATED
        // internal ids — confirm clients expect that asymmetry.
        selector: {
          organization: organizationId,
          project: projectId,
          target: targetId,
        },
        deletedTarget: target,
      };
    },
translator.translateTargetId(input), + ]); + + const targetManager = injector.get(TargetManager); + const settings = await targetManager.setTargetValidaton({ + organization, + project, + target, + enabled: input.enabled, + }); + + return { + id: target, + ...settings, + targets: await Promise.all( + settings.targets.map((tid) => + targetManager.getTarget({ + organization, + project, + target: tid, + }) + ) + ), + }; + }, + async updateTargetValidationSettings(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + + const targetManager = injector.get(TargetManager); + const settings = await targetManager.updateTargetValidatonSettings({ + period: input.period, + percentage: input.percentage, + target, + project, + organization, + targets: input.targets, + }); + + return { + id: target, + ...settings, + targets: await Promise.all( + settings.targets.map((tid) => + targetManager.getTarget({ + organization, + project, + target: tid, + }) + ) + ), + }; + }, + }, + Project: { + targets(project, _, { injector }) { + return injector.get(TargetManager).getTargets({ + project: project.id, + organization: project.orgId, + }); + }, + }, + TargetConnection: createConnection(), +}; diff --git a/packages/services/api/src/modules/token/index.ts b/packages/services/api/src/modules/token/index.ts new file mode 100644 index 000000000..f4b32ca33 --- /dev/null +++ b/packages/services/api/src/modules/token/index.ts @@ -0,0 +1,13 @@ +import { createModule } from 'graphql-modules'; +import { resolvers } from './resolvers'; +import typeDefs from './module.graphql'; +import { TokenManager } from './providers/token-manager'; +import { TokenStorage } from './providers/token-storage'; + +export const tokenModule = createModule({ + id: 'token', + dirname: __dirname, + typeDefs, + 
import { gql } from 'graphql-modules';

// GraphQL schema for the `token` module: listing/creating/deleting access
// tokens scoped to a target, plus `tokenInfo` for introspecting the token
// attached to the current request (union with TokenNotFoundError when no
// API token is present).
export default gql`
  extend type Query {
    tokens(selector: TargetSelectorInput!): TokenConnection!
    tokenInfo: TokenInfoPayload!
  }

  extend type Mutation {
    createToken(input: CreateTokenInput!): CreateTokenPayload!
    deleteTokens(input: DeleteTokensInput!): DeleteTokensPayload!
  }

  type TokenConnection {
    nodes: [Token!]!
    total: Int!
  }

  type Token {
    id: ID!
    name: String!
    alias: String!
    date: DateTime!
    lastUsedAt: DateTime
  }

  union TokenInfoPayload = TokenInfo | TokenNotFoundError

  type TokenInfo {
    token: Token!
    organization: Organization!
    project: Project!
    target: Target!
    hasTargetScope(scope: TargetAccessScope!): Boolean!
    hasProjectScope(scope: ProjectAccessScope!): Boolean!
    hasOrganizationScope(scope: OrganizationAccessScope!): Boolean!
  }

  type TokenNotFoundError {
    message: String!
  }

  input CreateTokenInput {
    organization: ID!
    project: ID!
    target: ID!
    name: String!
    organizationScopes: [OrganizationAccessScope!]!
    projectScopes: [ProjectAccessScope!]!
    targetScopes: [TargetAccessScope!]!
  }

  input DeleteTokensInput {
    organization: ID!
    project: ID!
    target: ID!
    tokens: [ID!]!
  }

  type DeleteTokensPayload {
    selector: TargetSelector!
    deletedTokens: [ID!]!
  }

  type CreateTokenPayload {
    selector: TargetSelector!
    createdToken: Token!
    secret: String!
  }

  extend type Target {
    tokens: TokenConnection!
  }
`;
+ */ +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class TokenManager { + private logger: Logger; + + constructor( + private authManager: AuthManager, + private tokenStorage: TokenStorage, + private storage: Storage, + private tracking: Tracking, + logger: Logger + ) { + this.logger = logger.child({ + source: 'TokenManager', + }); + } + + async createToken(input: CreateTokenInput): Promise { + await this.authManager.ensureTargetAccess({ + project: input.project, + organization: input.organization, + target: input.target, + scope: TargetAccessScope.TOKENS_WRITE, + }); + + const scopes = [ + ...input.organizationScopes, + ...input.projectScopes, + ...input.targetScopes, + ]; + + const currentUser = await this.authManager.getCurrentUser(); + const currentMember = await this.storage.getOrganizationMember({ + organization: input.organization, + user: currentUser.id, + }); + + const newScopes = [ + ...input.organizationScopes, + ...input.projectScopes, + ...input.targetScopes, + ]; + + // See what scopes were removed or added + const modifiedScopes = diffArrays(currentMember.scopes, newScopes); + + // Check if the current user has rights to set these scopes. 
+ const currentUserMissingScopes = modifiedScopes.filter( + (scope) => !currentMember.scopes.includes(scope) + ); + + if (currentUserMissingScopes.length > 0) { + this.logger.debug(`Logged user scopes: %o`, currentMember.scopes); + throw new HiveError( + `No access to the scopes: ${currentUserMissingScopes.join(', ')}` + ); + } + + pushIfMissing(scopes, TargetAccessScope.READ); + pushIfMissing(scopes, ProjectAccessScope.READ); + pushIfMissing(scopes, OrganizationAccessScope.READ); + + await this.tracking.track({ + event: 'TOKEN_CREATED', + data: { + ...input, + }, + }); + + return this.tokenStorage.createToken({ + organization: input.organization, + project: input.project, + target: input.target, + name: input.name, + scopes, + }); + } + + async deleteTokens( + input: { + tokens: readonly string[]; + } & TargetSelector + ): Promise { + await this.authManager.ensureTargetAccess({ + project: input.project, + organization: input.organization, + target: input.target, + scope: TargetAccessScope.TOKENS_WRITE, + }); + + await this.tracking.track({ + event: 'TOKEN_DELETED', + data: { + organization: input.organization, + project: input.project, + target: input.target, + size: input.tokens.length, + }, + }); + + return this.tokenStorage.deleteTokens(input); + } + + async getTokens(selector: TargetSelector): Promise { + await this.authManager.ensureTargetAccess({ + ...selector, + scope: TargetAccessScope.TOKENS_READ, + }); + return this.tokenStorage.getTokens(selector); + } + + async getCurrentToken(): Promise { + const token = this.authManager.ensureApiToken(); + return this.tokenStorage.getToken({ token }); + } +} diff --git a/packages/services/api/src/modules/token/providers/token-storage.ts b/packages/services/api/src/modules/token/providers/token-storage.ts new file mode 100644 index 000000000..d5d9b1fa7 --- /dev/null +++ b/packages/services/api/src/modules/token/providers/token-storage.ts @@ -0,0 +1,150 @@ +import { Inject, Injectable, Scope } from 'graphql-modules'; 
+import { atomic } from '../../../shared/helpers'; +import { HiveError } from '../../../shared/errors'; +import type { Token } from '../../../shared/entities'; +import { Logger } from '../../shared/providers/logger'; +import { + TargetSelector, + ProjectSelector, + OrganizationSelector, +} from '../../shared/providers/storage'; +import type { TargetAccessScope } from '../../auth/providers/target-access'; +import type { ProjectAccessScope } from '../../auth/providers/project-access'; +import type { OrganizationAccessScope } from '../../auth/providers/organization-access'; +import type { TokensConfig } from './tokens'; +import { TOKENS_CONFIG } from './tokens'; +import type { TokensApi } from '@hive/tokens'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; + +function maskToken(token: string) { + return ( + token.substring(0, 3) + + '*'.repeat(token.length - 6) + + token.substring(token.length - 3) + ); +} + +export interface TokenSelector { + token: string; +} + +interface CreateTokenInput extends TargetSelector { + name: string; + scopes: Array< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >; +} + +export interface CreateTokenResult extends Token { + secret: string; +} + +@Injectable({ + scope: Scope.Operation, + global: true, +}) +export class TokenStorage { + private logger: Logger; + private tokensService; + + constructor( + logger: Logger, + @Inject(TOKENS_CONFIG) tokensConfig: TokensConfig + ) { + this.logger = logger.child({ source: 'TokenStorage' }); + this.tokensService = createTRPCClient({ + url: `${tokensConfig.endpoint}/trpc`, + fetch, + }); + } + + async createToken(input: CreateTokenInput) { + this.logger.debug('Creating new token (input=%o)', input); + + const response = await this.tokensService.mutation('createToken', { + name: input.name, + target: input.target, + project: input.project, + organization: input.organization, + scopes: input.scopes as CreateTokenInput['scopes'], + 
}); + + return response; + } + + async deleteTokens( + input: { + tokens: readonly string[]; + } & TargetSelector + ): Promise { + this.logger.debug('Deleting tokens (input=%o)', input); + + await Promise.all( + input.tokens.map((token) => + this.tokensService.mutation('deleteToken', { token }) + ) + ); + + return input.tokens; + } + + async invalidateTarget(input: TargetSelector) { + this.logger.debug('Invalidating target tokens (input=%o)', input); + + await this.tokensService + .mutation('invalidateTokenByTarget', { + targetId: input.target, + }) + .catch((error) => { + this.logger.error(error); + }); + } + + async invalidateProject(input: ProjectSelector) { + this.logger.debug('Invalidating project tokens (input=%o)', input); + + await this.tokensService + .mutation('invalidateTokenByProject', { + projectId: input.project, + }) + .catch((error) => { + this.logger.error(error); + }); + } + + async invalidateOrganization(input: OrganizationSelector) { + this.logger.debug('Invalidating organization tokens (input=%o)', input); + + await this.tokensService + .mutation('invalidateTokenByOrganization', { + organizationId: input.organization, + }) + .catch((error) => { + this.logger.error(error); + }); + } + + async getTokens(selector: TargetSelector) { + this.logger.debug('Fetching tokens (selector=%o)', selector); + + const response = await this.tokensService.query('targetTokens', { + targetId: selector.target, + }); + + return response || []; + } + + @atomic(({ token }) => token) + async getToken({ token }: TokenSelector) { + this.logger.debug('Fetching token (token=%s)', maskToken(token)); + + try { + return await this.tokensService.query('getToken', { token }); + } catch (e: any) { + this.logger.error(e); + + throw new HiveError('Invalid token provided!'); + } + } +} diff --git a/packages/services/api/src/modules/token/providers/tokens.ts b/packages/services/api/src/modules/token/providers/tokens.ts new file mode 100644 index 000000000..8b1a1c8e3 --- /dev/null +++ 
b/packages/services/api/src/modules/token/providers/tokens.ts @@ -0,0 +1,9 @@ +import { InjectionToken } from 'graphql-modules'; + +export interface TokensConfig { + endpoint: string; +} + +export const TOKENS_CONFIG = new InjectionToken( + 'tokens-endpoint' +); diff --git a/packages/services/api/src/modules/token/resolvers.ts b/packages/services/api/src/modules/token/resolvers.ts new file mode 100644 index 000000000..0e94546c6 --- /dev/null +++ b/packages/services/api/src/modules/token/resolvers.ts @@ -0,0 +1,142 @@ +import type { TokenModule } from './__generated__/types'; +import { createConnection } from '../../shared/schema'; +import { TokenManager } from './providers/token-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { OrganizationManager } from '../organization/providers/organization-manager'; +import { ProjectManager } from '../project/providers/project-manager'; +import { TargetManager } from '../target/providers/target-manager'; + +export const resolvers: TokenModule.Resolvers = { + Query: { + async tokens(_, { selector }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(selector), + translator.translateProjectId(selector), + translator.translateTargetId(selector), + ]); + + return injector.get(TokenManager).getTokens({ + organization, + project, + target, + }); + }, + async tokenInfo(_, __, { injector }) { + try { + injector.get(AuthManager).ensureApiToken(); + } catch (error) { + return { + __typename: 'TokenNotFoundError', + message: (error as Error).message, + }; + } + + return injector.get(TokenManager).getCurrentToken(); + }, + }, + Mutation: { + async createToken(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + 
translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + const token = await injector.get(TokenManager).createToken({ + name: input.name, + target, + project, + organization, + organizationScopes: input.organizationScopes, + projectScopes: input.projectScopes, + targetScopes: input.targetScopes, + }); + + return { + selector: { + organization: input.organization, + project: input.project, + target: input.target, + }, + createdToken: token, + secret: token.secret, + }; + }, + async deleteTokens(_, { input }, { injector }) { + const translator = injector.get(IdTranslator); + const [organization, project, target] = await Promise.all([ + translator.translateOrganizationId(input), + translator.translateProjectId(input), + translator.translateTargetId(input), + ]); + return { + selector: { + organization: input.organization, + project: input.project, + target: input.target, + }, + deletedTokens: await injector.get(TokenManager).deleteTokens({ + target, + project, + organization, + tokens: input.tokens, + }), + }; + }, + }, + Token: { + id(token) { + return token.token; + }, + alias(token) { + return token.tokenAlias; + }, + }, + TokenInfo: { + __isTypeOf(token) { + return 'token' in token; + }, + token(token) { + return token; + }, + organization(token, _, { injector }) { + return injector.get(OrganizationManager).getOrganization({ + organization: token.organization, + }); + }, + project(token, _, { injector }) { + return injector.get(ProjectManager).getProject({ + organization: token.organization, + project: token.project, + }); + }, + target(token, _, { injector }) { + return injector.get(TargetManager).getTarget({ + organization: token.organization, + project: token.project, + target: token.target, + }); + }, + hasOrganizationScope(token, { scope }) { + return token.scopes.includes(scope); + }, + hasProjectScope(token, { scope }) { + return token.scopes.includes(scope); + }, + 
import { createModule } from 'graphql-modules';
import { resolvers } from './resolvers';
import typeDefs from './module.graphql';
import { UsageEstimationProvider } from './providers/usage-estimation.provider';

// GraphQL module exposing usage estimation (operations / schema pushes)
// per target or per organization, backed by UsageEstimationProvider.
export const usageEstimationModule = createModule({
  id: 'usage-estimation',
  dirname: __dirname,
  typeDefs,
  resolvers,
  providers: [UsageEstimationProvider],
});
import { InjectionToken } from 'graphql-modules';

export interface UsageEstimationServiceConfig {
  // Base URL of the usage-estimator service. A null endpoint disables
  // estimation — UsageEstimationProvider then skips creating a client.
  endpoint: string | null;
}

// DI token under which the config above is provided to the module.
export const USAGE_ESTIMATION_SERVICE_CONFIG =
  new InjectionToken(
    'usage-estimation-service-config'
  );
createTRPCClient({ + url: `${usageEstimationConfig.endpoint}/trpc`, + fetch, + }) + : null; + } + + @sentry('UsageEstimation.estimateOperations') + async estimateOperations( + input: UsageEstimatorQueryInput<'estimateOperationsForTarget'> + ): Promise { + this.logger.debug('Estimation operations, input: %o', input); + + if (input.targetIds.length === 0) { + return 0; + } + + if (!this.usageEstimator) { + this.logger.warn( + 'Usage estimator is not available due to missing configuration' + ); + + return null; + } + + const result = await this.usageEstimator.query( + 'estimateOperationsForTarget', + input + ); + + return result.totalOperations; + } + + @sentry('UsageEstimation.estimateSchemaPushes') + async estimateSchemaPushes( + input: UsageEstimatorQueryInput<'estimateSchemaPushesForTarget'> + ): Promise { + this.logger.debug('Estimation schema pushes, input: %o', input); + + if (input.targetIds.length === 0) { + return 0; + } + + if (!this.usageEstimator) { + this.logger.warn( + 'Usage estimator is not available due to missing configuration' + ); + + return null; + } + + const result = await this.usageEstimator.query( + 'estimateSchemaPushesForTarget', + input + ); + + return result.totalSchemaPushes; + } +} diff --git a/packages/services/api/src/modules/usage-estimation/resolvers.ts b/packages/services/api/src/modules/usage-estimation/resolvers.ts new file mode 100644 index 000000000..83ed43029 --- /dev/null +++ b/packages/services/api/src/modules/usage-estimation/resolvers.ts @@ -0,0 +1,103 @@ +import { EnvelopError } from '@graphql-yoga/common'; +import { parseDateRangeInput } from '../../shared/helpers'; +import { AuthManager } from '../auth/providers/auth-manager'; +import { OrganizationAccessScope } from '../auth/providers/organization-access'; +import { ProjectManager } from '../project/providers/project-manager'; +import { IdTranslator } from '../shared/providers/id-translator'; +import { TargetManager } from '../target/providers/target-manager'; +import 
{ UsageEstimationProvider } from './providers/usage-estimation.provider'; +import { UsageEstimationModule } from './__generated__/types'; + +export const resolvers: UsageEstimationModule.Resolvers = { + Query: { + async usageEstimation(root, args) { + const parsedRange = parseDateRangeInput(args.range); + + return { + startTime: parsedRange.from, + endTime: parsedRange.to, + }; + }, + }, + UsageEstimationScope: { + async target(range, args, { injector }) { + const targetId = await injector.get(IdTranslator).translateTargetId({ + organization: args.selector.organization, + project: args.selector.project, + target: args.selector.target, + }); + + return { + ...range, + targets: [targetId], + }; + }, + async org(range, args, { injector }) { + const organizationId = await injector + .get(IdTranslator) + .translateOrganizationId({ + organization: args.selector.organization, + }); + await injector.get(AuthManager).ensureOrganizationAccess({ + organization: organizationId, + scope: OrganizationAccessScope.SETTINGS, + }); + + const projects = await injector.get(ProjectManager).getProjects({ + organization: organizationId, + }); + + const targets = ( + await Promise.all( + projects.map((project) => { + return injector.get(TargetManager).getTargets({ + organization: organizationId, + project: project.id, + }); + }) + ) + ).flat(); + + return { + ...range, + targets: targets.map((t) => t.id), + }; + }, + }, + UsageEstimation: { + operations: async (params, args, { injector }) => { + const result = await injector + .get(UsageEstimationProvider) + .estimateOperations({ + targetIds: params.targets, + endTime: params.endTime.toString(), + startTime: params.startTime.toString(), + }); + + if (!result && result !== 0) { + throw new EnvelopError( + `Failed to estimate usage, please try again later.` + ); + } + + return result; + }, + schemaPushes: async (params, args, { injector }) => { + const result = await injector + .get(UsageEstimationProvider) + .estimateSchemaPushes({ + 
targetIds: params.targets, + endTime: params.endTime.toString(), + startTime: params.startTime.toString(), + }); + + if (!result && result !== 0) { + throw new EnvelopError( + `Failed to estimate usage, please try again later.` + ); + } + + return result; + }, + }, +}; diff --git a/packages/services/api/src/shared/entities.ts b/packages/services/api/src/shared/entities.ts new file mode 100644 index 000000000..815554d43 --- /dev/null +++ b/packages/services/api/src/shared/entities.ts @@ -0,0 +1,202 @@ +import type { DocumentNode } from 'graphql'; +import type { + SchemaError, + AlertChannelType, + AlertType, + AuthProvider, + OrganizationAccessScope, + ProjectAccessScope, + TargetAccessScope, +} from '../__generated__/types'; +import { parse } from 'graphql'; + +export interface Schema { + id: string; + author: string; + source: string; + date: string; + commit: string; + target: string; + url?: string | null; + service?: string | null; + metadata?: Record | null; +} + +export interface DateRange { + from: Date; + to: Date; +} + +export interface SchemaVersion { + id: string; + valid: boolean; + date: number; + commit: string; + base_schema: string | null; +} + +export interface SchemaObject { + document: DocumentNode; + source: string; + url?: string | null; + raw: string; +} + +export interface PersistedOperation { + id: string; + operationHash: string; + name: string; + kind: string; + project: string; + content: string; + date: string; +} + +export const emptySource = '*'; + +export function createSchemaObject(schema: Schema): SchemaObject { + return { + document: parse(schema.source), + raw: schema.source, + source: schema.service ?? emptySource, + url: schema.url ?? 
null, + }; +} + +export enum ProjectType { + FEDERATION = 'FEDERATION', + STITCHING = 'STITCHING', + SINGLE = 'SINGLE', + CUSTOM = 'CUSTOM', +} + +export enum OrganizationType { + PERSONAL = 'PERSONAL', + REGULAR = 'REGULAR', +} + +export interface Organization { + id: string; + cleanId: string; + name: string; + type: OrganizationType; + inviteCode: string; + billingPlan: string; + monthlyRateLimit: { + retentionInDays: number; + operations: number; + schemaPush: number; + }; +} + +export interface OrganizationBilling { + organizationId: string; + externalBillingReference: string; + billingEmailAddress?: string | null; +} + +export interface Project { + id: string; + cleanId: string; + orgId: string; + name: string; + type: ProjectType; + buildUrl?: string | null; + validationUrl?: string | null; + gitRepository?: string | null; +} + +export interface Target { + id: string; + cleanId: string; + projectId: string; + orgId: string; + name: string; +} + +export interface Token { + token: string; + tokenAlias: string; + name: string; + target: string; + project: string; + organization: string; + date: string; + lastUsedAt: string; + scopes: readonly string[]; +} + +export interface User { + id: string; + email: string; + fullName: string; + displayName: string; + provider: AuthProvider; + externalAuthUserId: string; +} + +export interface Member { + id: string; + user: User; + organization: string; + scopes: Array< + OrganizationAccessScope | ProjectAccessScope | TargetAccessScope + >; +} + +export interface TargetSettings { + validation: { + enabled: boolean; + period: number; + percentage: number; + targets: readonly string[]; + }; +} + +export interface Orchestrator { + ensureConfig(config: any): void | never; + validate(schemas: SchemaObject[], config?: any): Promise; + build(schemas: SchemaObject[], config?: any): Promise; + supergraph(schemas: SchemaObject[], config?: any): Promise; +} + +export interface ActivityObject { + id: string; + type: string; + meta: 
any; + createdAt: Date; + target?: Target; + project?: Project; + organization: Organization; + user?: User; +} + +export interface AlertChannel { + id: string; + projectId: string; + type: AlertChannelType; + name: string; + createdAt: string; + slackChannel: string | null; + webhookEndpoint: string | null; +} + +export interface Alert { + id: string; + type: AlertType; + channelId: string; + organizationId: string; + projectId: string; + targetId: string; + createdAt: string; +} + +export interface AdminOrganizationStats { + organization: Organization; + versions: number; + users: number; + projects: number; + targets: number; + persistedOperations: number; + daysLimit?: number | null; +} diff --git a/packages/services/api/src/shared/errors.ts b/packages/services/api/src/shared/errors.ts new file mode 100644 index 000000000..d77a741fc --- /dev/null +++ b/packages/services/api/src/shared/errors.ts @@ -0,0 +1,34 @@ +import { GraphQLError } from 'graphql'; +import { GraphQLYogaError } from '@graphql-yoga/common'; +import type { SchemaError } from '../__generated__/types'; + +export function toSchemaError(error: unknown): SchemaError { + if (isGraphQLError(error)) { + return { + message: error.message, + path: error.path?.map((i) => (typeof i === 'number' ? 
i + '' : i)), + }; + } + + if (error instanceof Error) { + return { + message: error.message, + }; + } + + return { + message: error as string, + }; +} + +export function isGraphQLError(error: unknown): error is GraphQLError { + return error instanceof GraphQLError; +} + +export const HiveError = GraphQLYogaError; + +export class AccessError extends HiveError { + constructor(reason: string) { + super(`No access (reason: "${reason}")`); + } +} diff --git a/packages/services/api/src/shared/helpers.ts b/packages/services/api/src/shared/helpers.ts new file mode 100644 index 000000000..1e98b35b7 --- /dev/null +++ b/packages/services/api/src/shared/helpers.ts @@ -0,0 +1,243 @@ +import type { InjectionToken } from 'graphql-modules'; +import ms from 'ms'; +import { createHash } from 'crypto'; +import type { + OrganizationSelector, + PersistedOperationSelector, + ProjectSelector, + TargetSelector, + DateRangeInput, +} from '../__generated__/types'; +import { DateRange } from './entities'; + +export { + msToNs, + nsToMs, + atomicPromise as atomic, + sharePromise as share, + cacheResult as cache, +} from '@theguild/buddy'; + +export type NullableAndPartial = { + [P in keyof T]?: T[P] | undefined | null; +}; +export type NullableDictionary = { [P in keyof T]: T[P] | null }; + +export type Listify = Omit & { + [key in K]: T[K] | readonly T[K][]; +}; + +export type MapToArray = Omit & { + [key in K]: readonly T[K][]; +}; + +export function uuid(len = 13) { + return Math.random().toString(16).substr(2, len); +} + +export function filterSelector( + kind: 'organization', + selector: OrganizationSelector +): OrganizationSelector; +export function filterSelector( + kind: 'project', + selector: ProjectSelector +): ProjectSelector; +export function filterSelector( + kind: 'target', + selector: TargetSelector +): TargetSelector; +export function filterSelector( + kind: 'persistedOperation', + selector: PersistedOperationSelector +): PersistedOperationSelector; +export function 
filterSelector( + kind: 'organization' | 'project' | 'target' | 'persistedOperation', + selector: any +): any { + switch (kind) { + case 'organization': + return { + organization: selector.organization, + }; + case 'project': + return { + organization: selector.organization, + project: selector.project, + }; + case 'target': + return { + organization: selector.organization, + project: selector.project, + target: selector.target, + }; + case 'persistedOperation': + return { + organization: selector.organization, + project: selector.project, + operationHash: selector.operationHash, // TODO: use translator + }; + } +} + +export function stringifySelector< + T extends { + [key: string]: any; + } +>(obj: T): string { + return JSON.stringify( + Object.keys(obj) + .sort() + .map((key) => [key, obj[key]]) + ); +} + +function validateDateTime(dateTimeString?: string) { + dateTimeString = + dateTimeString === null || dateTimeString === void 0 + ? void 0 + : dateTimeString.toUpperCase(); + + if (!dateTimeString) { + return false; + } + + const RFC_3339_REGEX = + /^(\d{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60))(\.\d{1,})?(([Z])|([+|-]([01][0-9]|2[0-3]):[0-5][0-9]))$/; + // Validate the structure of the date-string + if (!RFC_3339_REGEX.test(dateTimeString)) { + return false; + } + // Check if it is a correct date using the javascript Date parse() method. + const time = Date.parse(dateTimeString); + if (time !== time) { + return false; + } + // Split the date-time-string up into the string-date and time-string part. + // and check whether these parts are RFC 3339 compliant. + const index = dateTimeString.indexOf('T'); + const dateString = dateTimeString.substr(0, index); + const timeString = dateTimeString.substr(index + 1); + return validateDate(dateString) && validateTime(timeString); +} + +function validateTime(time?: string) { + time = time === null || time === void 0 ? 
void 0 : time.toUpperCase(); + + if (!time) { + return false; + } + + const TIME_REGEX = + /^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\.\d{1,})?(([Z])|([+|-]([01][0-9]|2[0-3]):[0-5][0-9]))$/; + return TIME_REGEX.test(time); +} + +function validateDate(datestring: string) { + const RFC_3339_REGEX = /^(\d{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01]))$/; + if (!RFC_3339_REGEX.test(datestring)) { + return false; + } + // Verify the correct number of days for + // the month contained in the date-string. + const year = Number(datestring.substr(0, 4)); + const month = Number(datestring.substr(5, 2)); + const day = Number(datestring.substr(8, 2)); + switch (month) { + case 2: // February + if (leapYear(year) && day > 29) { + return false; + } else if (!leapYear(year) && day > 28) { + return false; + } + return true; + case 4: // April + case 6: // June + case 9: // September + case 11: // November + if (day > 30) { + return false; + } + break; + } + return true; +} + +function leapYear(year: number) { + return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0; +} + +export function parseDateTime(value: number | string | Date): Date { + if (value instanceof Date) { + return value; + } + + if (typeof value === 'string') { + if (validateDateTime(value)) { + return new Date(value); + } + throw new TypeError( + `DateTime cannot represent an invalid date-time-string ${value}.` + ); + } + + if (typeof value === 'number') { + try { + return new Date(value); + } catch (e) { + throw new TypeError( + 'DateTime cannot represent an invalid Unix timestamp ' + value + ); + } + } + + throw new TypeError( + 'DateTime cannot be serialized from a non string, ' + + 'non numeric or non Date type ' + + JSON.stringify(value) + ); +} + +export function parseDateRangeInput(period: DateRangeInput): DateRange { + return { + from: parseDateTime(period.from), + to: parseDateTime(period.to), + }; +} + +export function createPeriod(period: string): DateRange { + const to = new Date(); + const 
from = to.getTime() - ms(period); + + return { + from: parseDateTime(from), + to, + }; +} + +export type TypeOfToken = T extends InjectionToken ? R : unknown; + +export type Optional = Pick> & + Partial>; + +export function hash(key: string): string { + return createHash('md5').update(key).digest('hex'); +} + +/** + * A function that accepts two arrays and returns a difference + */ +export function diffArrays( + left: readonly T[], + right: readonly T[] +): readonly T[] { + return left + .filter((val) => !right.includes(val)) + .concat(right.filter((val) => !left.includes(val))); +} + +export function pushIfMissing(list: T[], item: T): void { + if (!list.includes(item)) { + list.push(item); + } +} diff --git a/packages/services/api/src/shared/mappers.ts b/packages/services/api/src/shared/mappers.ts new file mode 100644 index 000000000..1cfcf39e8 --- /dev/null +++ b/packages/services/api/src/shared/mappers.ts @@ -0,0 +1,84 @@ +import type { + SchemaChange, + SchemaError, + OperationStats, + ClientStats, +} from '../__generated__/types'; +import type { + Member, + Organization, + PersistedOperation, + Project, + SchemaObject, + SchemaVersion as SchemaVersionEntity, + Target, + Token, + User, + ActivityObject, + DateRange, +} from './entities'; + +export interface SchemaVersion extends SchemaVersionEntity { + project: string; + target: string; + organization: string; +} + +export type SchemaChangeConnection = readonly SchemaChange[]; +export type SchemaErrorConnection = readonly SchemaError[]; +export type UserConnection = readonly User[]; +export type MemberConnection = readonly Member[]; +export type ActivityConnection = readonly ActivityObject[]; +export type OrganizationConnection = readonly Organization[]; +export type ProjectConnection = readonly Project[]; +export type TargetConnection = readonly Target[]; +export type PersistedOperationConnection = readonly PersistedOperation[]; +export type SchemaConnection = readonly Schema[]; +export type TokenConnection = 
readonly Token[]; +export type OperationStatsConnection = ReadonlyArray< + Omit & { duration: DurationStats } +>; +export type ClientStatsConnection = readonly ClientStats[]; +export type SchemaVersionConnection = { + nodes: readonly SchemaVersion[]; + hasMore: boolean; +}; +export type SchemaComparePayload = + | SchemaCompareResult + | { + message: string; + }; +export type SchemaCompareResult = + | readonly [SchemaObject, SchemaObject] + | readonly [undefined | null, SchemaObject]; +export interface Schema { + id: string; + author: string; + source: string; + date: string; + service?: string | null; +} + +export interface OperationsStats { + organization: string; + project: string; + target: string; + period: DateRange; + operations: readonly string[]; +} + +export interface DurationStats { + '75.0': number | null; + '90.0': number | null; + '95.0': number | null; + '99.0': number | null; +} + +export type TargetsEstimationDateFilter = { + startTime: Date; + endTime: Date; +}; + +export type TargetsEstimationFilter = TargetsEstimationDateFilter & { + targets: string[]; +}; diff --git a/packages/services/api/src/shared/markdown.ts b/packages/services/api/src/shared/markdown.ts new file mode 100644 index 000000000..3cce5e3af --- /dev/null +++ b/packages/services/api/src/shared/markdown.ts @@ -0,0 +1,16 @@ +export function bolderize(msg: string): string { + return quotesTransformer(msg, '**'); +} + +export function quotesTransformer(msg: string, symbols = '**') { + const findSingleQuotes = /'([^']+)'/gim; + const findDoubleQuotes = /"([^"]+)"/gim; + + function transformm(_: string, value: string) { + return `${symbols}${value}${symbols}`; + } + + return msg + .replace(findSingleQuotes, transformm) + .replace(findDoubleQuotes, transformm); +} diff --git a/packages/services/api/src/shared/mixpanel.ts b/packages/services/api/src/shared/mixpanel.ts new file mode 100644 index 000000000..177c3b934 --- /dev/null +++ b/packages/services/api/src/shared/mixpanel.ts @@ -0,0 
+1,38 @@ +import Mixpanel from 'mixpanel'; + +export const enabled = !!process.env.MIXPANEL_TOKEN; +let mixpanel: Mixpanel.Mixpanel; + +if (enabled) { + mixpanel = Mixpanel.init(process.env.MIXPANEL_TOKEN ?? ''); +} + +export function track(event: { + event: string; + distinct_id: string; + data?: Record; +}) { + if (enabled) { + mixpanel.track(event.event, { + distinct_id: event.distinct_id, + ...(event.data ?? {}), + }); + } +} + +export function createOrUpdateUser(user: { + id: string; + email: string; + name?: string; +}) { + if (enabled) { + mixpanel.people.set(user.id, { + ...('name' in user + ? { + $name: user.name, + } + : {}), // we don't want to set $name as null or undefined + $email: user.email, + }); + } +} diff --git a/packages/services/api/src/shared/schema.ts b/packages/services/api/src/shared/schema.ts new file mode 100644 index 000000000..62fcfc4fb --- /dev/null +++ b/packages/services/api/src/shared/schema.ts @@ -0,0 +1,75 @@ +import { createHash } from 'crypto'; +import { + buildASTSchema, + GraphQLSchema, + lexicographicSortSchema, +} from 'graphql'; +import { Schema, SchemaObject, emptySource } from './entities'; + +export function hashSchema(schema: Schema): string { + return createHash('md5').update(schema.source, 'utf-8').digest('hex'); +} + +/** + * Builds GraphQLSchema without validation of SDL + */ +export function buildSchema(schema: SchemaObject): GraphQLSchema { + return lexicographicSortSchema( + buildASTSchema(schema.document, { + assumeValid: true, + assumeValidSDL: true, + }) + ); +} + +export function findSchema( + schemas: readonly Schema[], + expected: Schema +): Schema | undefined { + return schemas.find((schema) => schema.service === expected.service); +} + +export function updateSchemas( + schemas: readonly Schema[], + incoming: Schema +): { + schemas: readonly Schema[]; + swappedSchema: Schema | null; +} { + let swappedSchema: Schema | null = null; + const newSchemas = schemas.map((schema) => { + const matching = + 
(schema.service ?? emptySource) === (incoming.service ?? emptySource); + + if (matching) { + swappedSchema = schema; + return incoming; + } + + return schema; + }); + + if (!swappedSchema) { + newSchemas.push(incoming); + } + + return { + schemas: newSchemas, + swappedSchema, + }; +} + +export function minifySchema(schema: string): string { + return schema.replace(/\s+/g, ' ').trim(); +} + +export function createConnection() { + return { + nodes(nodes: readonly T[]) { + return nodes ?? []; + }, + total(nodes: readonly T[]) { + return nodes?.length ?? 0; + }, + }; +} diff --git a/packages/services/api/src/shared/sentry.ts b/packages/services/api/src/shared/sentry.ts new file mode 100644 index 000000000..cdd88e0f3 --- /dev/null +++ b/packages/services/api/src/shared/sentry.ts @@ -0,0 +1,70 @@ +import * as Sentry from '@sentry/node'; +import type { Span } from '@sentry/types'; + +export type SentryContext = Parameters[0] & { + captureException?: boolean; +}; + +export function sentry( + name: string, + addToContext?: (...args: any[]) => SentryContext +): MethodDecorator { + return function sentryDecorator(_target, _prop, descriptor) { + const originalMethod = descriptor.value; + + descriptor.value = function wrappedWithSentry(this: any, ...args: any[]) { + let context = { + op: name, + }; + + const lastArgument = + args.length > 0 ? (args[args.length - 1] as Span) : null; + const passedSpan = + lastArgument && 'spanId' in lastArgument ? lastArgument : null; + + if (addToContext) { + context = { + ...addToContext(...args), + ...context, + }; + } + + const parentSpan = + passedSpan ?? Sentry.getCurrentHub().getScope()?.getSpan(); + const span = parentSpan?.startChild( + typeof context === 'string' + ? { + op: context, + } + : context + ); + + if (!span) { + return (originalMethod as any).apply(this, args); + } + + const argsWithoutSpan = passedSpan + ? 
args.slice(0, args.length - 1) + : args; + + return ( + (originalMethod as any).apply( + this, + argsWithoutSpan.concat(span) + ) as Promise + ).then( + (result) => { + span.finish(); + return Promise.resolve(result); + }, + (error) => { + console.log('sentry decorator error', error); + Sentry.captureException(error); + span.setStatus('internal_error'); + span.finish(); + return Promise.reject(error); + } + ); + } as any; + }; +} diff --git a/packages/services/api/tsconfig.json b/packages/services/api/tsconfig.json new file mode 100644 index 000000000..9b376c2b1 --- /dev/null +++ b/packages/services/api/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "../../../tsconfig.json", + "include": ["src"] +} diff --git a/packages/services/cdn-worker/.gitignore b/packages/services/cdn-worker/.gitignore new file mode 100644 index 000000000..b5decdf66 --- /dev/null +++ b/packages/services/cdn-worker/.gitignore @@ -0,0 +1,10 @@ +*-debug.log +*-error.log +/.nyc_output +/dist +/lib +/package-lock.json +/tmp +node_modules +src/sdk.ts +schema.graphql \ No newline at end of file diff --git a/packages/services/cdn-worker/.npmignore b/packages/services/cdn-worker/.npmignore new file mode 100644 index 000000000..9458ec818 --- /dev/null +++ b/packages/services/cdn-worker/.npmignore @@ -0,0 +1,2 @@ +src +examples diff --git a/packages/services/cdn-worker/README.md b/packages/services/cdn-worker/README.md new file mode 100644 index 000000000..20d797d93 --- /dev/null +++ b/packages/services/cdn-worker/README.md @@ -0,0 +1,43 @@ +## Hive CDN Worker + +Hive comes with a CDN worker (deployed to CF Workers), along with KV cache to storage. + +### Standalone Development + +To run Hive CDN locally, you can use the following command: `yarn dev`. + +> Note: during dev, KV is mocked using JS `Map`, so it's ephermal and will be deleted with any change in code. 
+ +To publish manually a schema, for target id `1`: + +``` +curl -X PUT http://localhost:4010/1/storage/kv/namespaces/2/values/target:1:schema --data-raw '{"sdl": "type Query { foo: String }" }' -H 'content-type: text/plain' +``` + +You can also use the following to dump everything stored in the mocked KV: + +``` +curl http://localhost:4010/dump +``` + +To fetch a specific resource, for target id `1`, run one of the following: + +``` +curl http://localhost:4010/1/schema -H "x-hive-cdn-key: fake" +curl http://localhost:4010/1/sdl -H "x-hive-cdn-key: fake" +curl http://localhost:4010/1/introspection -H "x-hive-cdn-key: fake" +``` + +> Hive CDN Auth and access management is not enforced AT ALL during development. + +### Local Development with Hive Server + +Hive server has `CF_BASE_PATH` env var that tells is where to send the published schemas. + +To connect your server to the local, mocked CDN, make sure you have the following in `packages/server/.env`: + +``` +CF_BASE_PATH=http://localhost:4010 +``` + +This way, your local Hive instance will be able to send schema to the locally running CDN Worker. diff --git a/packages/services/cdn-worker/build.mjs b/packages/services/cdn-worker/build.mjs new file mode 100644 index 000000000..728aae227 --- /dev/null +++ b/packages/services/cdn-worker/build.mjs @@ -0,0 +1,21 @@ +import { build } from 'esbuild'; +import { fileURLToPath } from 'url'; +import { dirname } from 'path'; + +(async function main() { + const __dirname = dirname(fileURLToPath(import.meta.url)); + const localBuild = !!process.env.BUILD_FOR_LOCAL; + const outfile = localBuild ? '/dist/dev.js' : '/dist/worker.js'; + + await build({ + entryPoints: [__dirname + (localBuild ? '/src/dev.ts' : '/src/index.ts')], + bundle: true, + platform: localBuild ? 'node' : 'browser', + target: localBuild ? 
undefined : 'chrome95', + minify: false, + outfile: __dirname + '/' + outfile, + treeShaking: true, + }); + + console.info(`Done, file: ${outfile}`); +})(); diff --git a/packages/services/cdn-worker/package.json b/packages/services/cdn-worker/package.json new file mode 100644 index 000000000..aa5fc17f0 --- /dev/null +++ b/packages/services/cdn-worker/package.json @@ -0,0 +1,25 @@ +{ + "name": "@hive/cdn-script", + "private": true, + "version": "0.0.0", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --watch --onSuccess \"node dist/dev.js\"", + "build-local": "BUILD_FOR_LOCAL=1 node build.mjs", + "build": "node build.mjs", + "typecheck": "tsc --noEmit" + }, + "peerDependencies": { + "graphql": "^16.0.0" + }, + "dependencies": { + "graphql": "16.5.0" + }, + "devDependencies": { + "fastify": "3.29.0", + "esbuild": "0.14.39", + "cross-undici-fetch": "0.1.27", + "@types/service-worker-mock": "2.0.1", + "@cloudflare/workers-types": "3.4.0" + } +} diff --git a/packages/services/cdn-worker/src/auth.ts b/packages/services/cdn-worker/src/auth.ts new file mode 100644 index 000000000..ccfb02f2d --- /dev/null +++ b/packages/services/cdn-worker/src/auth.ts @@ -0,0 +1,35 @@ +const encoder = new TextEncoder(); +const SECRET_KEY_DATA = encoder.encode(KEY_DATA); + +export function byteStringToUint8Array(byteString: string) { + const ui = new Uint8Array(byteString.length); + + for (let i = 0; i < byteString.length; ++i) { + ui[i] = byteString.charCodeAt(i); + } + + return ui; +} + +export async function isKeyValid( + targetId: string, + headerKey: string +): Promise { + const headerData = byteStringToUint8Array(atob(headerKey)); + const secretKey = await crypto.subtle.importKey( + 'raw', + SECRET_KEY_DATA, + { name: 'HMAC', hash: 'SHA-256' }, + false, + ['verify'] + ); + + const verified = await crypto.subtle.verify( + 'HMAC', + secretKey, + headerData, + encoder.encode(targetId) + ); + + return verified; +} diff --git 
a/packages/services/cdn-worker/src/dev-polyfill.ts b/packages/services/cdn-worker/src/dev-polyfill.ts new file mode 100644 index 000000000..b9a3a2ec0 --- /dev/null +++ b/packages/services/cdn-worker/src/dev-polyfill.ts @@ -0,0 +1,12 @@ +/* eslint-disable import/no-extraneous-dependencies */ +import { Response, Request, Headers, ReadableStream } from 'cross-undici-fetch'; + +globalThis.Response = Response; +globalThis.Request = Request; +globalThis.Headers = Headers; +globalThis.ReadableStream = ReadableStream; + +export const devStorage = new Map(); + +(globalThis as any).KEY_DATA = ''; +(globalThis as any).HIVE_DATA = devStorage; diff --git a/packages/services/cdn-worker/src/dev.ts b/packages/services/cdn-worker/src/dev.ts new file mode 100644 index 000000000..6c9f71e10 --- /dev/null +++ b/packages/services/cdn-worker/src/dev.ts @@ -0,0 +1,171 @@ +import './dev-polyfill'; +import type { FastifyRequest } from 'fastify'; +import { createServer } from '@hive/service-common'; +import { handleRequest } from './handler'; +import type { ServerResponse } from 'http'; +import { Readable } from 'stream'; +import { devStorage } from './dev-polyfill'; + +const PORT = process.env.PORT ? parseInt(process.env.PORT, 10) : 4010; + +function isReadable(responseBody: any): responseBody is Readable { + return !!responseBody.pipe; +} + +export interface AddressInfo { + protocol: 'http' | 'https'; + hostname: string; + endpoint: string; + port: number; +} + +export function sendNodeResponse( + { headers, status, statusText, body }: Response, + serverResponse: ServerResponse +): void { + headers.forEach((value, name) => { + serverResponse.setHeader(name, value); + }); + serverResponse.statusCode = status; + serverResponse.statusMessage = statusText; + // Some fetch implementations like `node-fetch`, return `Response.body` as Promise + if (body == null) { + serverResponse.end(); + } else { + const nodeStream = ( + isReadable(body) ? 
body : Readable.from(body) + ) as Readable; + nodeStream.pipe(serverResponse); + } +} + +function getRequestAddressInfo( + nodeRequest: FastifyRequest, + defaultAddressInfo: AddressInfo +): AddressInfo { + const hostnameWithPort = + nodeRequest.hostname ?? + nodeRequest.headers.host ?? + defaultAddressInfo.hostname; + const [hostname = nodeRequest.hostname, port = defaultAddressInfo.port] = + hostnameWithPort.split(':'); + return { + protocol: nodeRequest.protocol ?? defaultAddressInfo.protocol, + hostname, + endpoint: nodeRequest.url ?? defaultAddressInfo.endpoint, + port, + } as AddressInfo; +} + +function buildFullUrl(addressInfo: AddressInfo) { + return `${addressInfo.protocol}://${addressInfo.hostname}:${addressInfo.port}${addressInfo.endpoint}`; +} + +export async function getNodeRequest( + nodeRequest: FastifyRequest, + defaultAddressInfo: AddressInfo +): Promise { + const addressInfo = getRequestAddressInfo(nodeRequest, defaultAddressInfo); + const fullUrl = buildFullUrl(addressInfo); + const baseRequestInit: RequestInit = { + method: nodeRequest.method, + headers: nodeRequest.headers, + }; + + if (nodeRequest.method !== 'POST') { + return new Request(fullUrl, baseRequestInit); + } + + const maybeParsedBody = nodeRequest.body; + if (maybeParsedBody) { + return new Request(fullUrl, { + ...baseRequestInit, + body: + typeof maybeParsedBody === 'string' + ? 
maybeParsedBody + : JSON.stringify(maybeParsedBody), + }); + } + + const rawRequest = nodeRequest.raw || nodeRequest.req || nodeRequest; + return new Request(fullUrl, { + headers: nodeRequest.headers, + method: nodeRequest.method, + body: rawRequest as any, + }); +} + +async function main() { + const server = createServer({ + tracing: false, + name: 'local_cdn', + }); + + server.route<{ + Params: { + accountId: string; + namespaceId: string; + key: string; + }; + }>({ + url: '/:accountId/storage/kv/namespaces/:namespaceId/values/:key', + method: 'PUT', + handler: async (request) => { + if (!request.params.key) { + throw new Error(`Missing key`); + } + + if (!request.body) { + throw new Error(`Missing body value`); + } + + console.log( + `Writing to ephermal storage: ${request.params.key}, value: ${request.body}` + ); + + devStorage.set(request.params.key, request.body as string); + + return { + success: true, + }; + }, + }); + + server.route({ + url: '/dump', + method: 'GET', + handler: async () => { + return Object.fromEntries(devStorage.entries()); + }, + }); + + server.route({ + url: '/_readiness', + method: 'GET', + handler: async (_, res) => { + res.status(200).send(); + }, + }); + + server.route({ + url: '*', + method: ['GET'], + handler: async (req, reply) => { + const response = await handleRequest( + await getNodeRequest(req, { + hostname: 'localhost', + port: PORT, + protocol: 'http', + endpoint: '/', + }), + async () => true + ); + + sendNodeResponse(response, reply.raw); + }, + }); + + await server.listen(PORT, '0.0.0.0'); +} + +main().catch((e) => console.error(e)); diff --git a/packages/services/cdn-worker/src/errors.ts b/packages/services/cdn-worker/src/errors.ts new file mode 100644 index 000000000..5f95816e2 --- /dev/null +++ b/packages/services/cdn-worker/src/errors.ts @@ -0,0 +1,106 @@ +const description = `Please refer to the documentation for more details: https://docs.graphql-hive.com/features/registry-usage`; + +export class 
MissingTargetIDErrorResponse extends Response { + constructor() { + super( + JSON.stringify({ + code: 'MISSING_TARGET_ID', + error: `Missing Hive target ID in request params.`, + description, + }), + { + status: 400, + } + ); + } +} + +export class InvalidArtifactTypeResponse extends Response { + constructor(artifactType: string) { + super( + JSON.stringify({ + code: 'INVALID_ARTIFACT_TYPE', + error: `Invalid artifact type: "${artifactType}"`, + description, + }), + { + status: 400, + } + ); + } +} + +export class MissingAuthKey extends Response { + constructor() { + super( + JSON.stringify({ + code: 'MISSING_AUTH_KEY', + error: `Hive CDN authentication key is missing`, + description, + }), + { + status: 400, + } + ); + } +} + +export class InvalidAuthKey extends Response { + constructor() { + super( + JSON.stringify({ + code: 'INVALID_AUTH_KEY', + error: `Hive CDN authentication key is invalid, or it does not match the requested target ID.`, + description, + }), + { + status: 403, + } + ); + } +} + +export class CDNArtifactNotFound extends Response { + constructor(artifactType: string, targetId: string) { + super( + JSON.stringify({ + code: 'NOT_FOUND', + error: `Hive CDN was unable to find an artifact of type "${artifactType}" for target "${targetId}"`, + description, + }), + { + status: 404, + } + ); + } +} + +export class InvalidArtifactMatch extends Response { + constructor(artifactType: string, targetId: string) { + super( + JSON.stringify({ + code: 'INVALID_ARTIFACT_MATCH', + error: `Target "${targetId}" does not support the artifact type "${artifactType}"`, + description, + }), + { + status: 400, + } + ); + } +} + +export class UnexpectedError extends Response { + constructor() { + super( + JSON.stringify({ + code: 'UNEXPECTED_ERROR', + error: `Please try again later, or contact Hive support if the problem persists.`, + description, + }), + { + status: 500, + } + ); + } +} diff --git a/packages/services/cdn-worker/src/global.d.ts 
b/packages/services/cdn-worker/src/global.d.ts new file mode 100644 index 000000000..ca9d87a82 --- /dev/null +++ b/packages/services/cdn-worker/src/global.d.ts @@ -0,0 +1,12 @@ +export {}; + +declare global { + /** + * KV Storage for the CDN + */ + let HIVE_DATA: KVNamespace; + /** + * Secret used to sign the CDN keys + */ + let KEY_DATA: string; +} diff --git a/packages/services/cdn-worker/src/handler.ts b/packages/services/cdn-worker/src/handler.ts new file mode 100644 index 000000000..2b0a66876 --- /dev/null +++ b/packages/services/cdn-worker/src/handler.ts @@ -0,0 +1,167 @@ +import { + CDNArtifactNotFound, + InvalidArtifactMatch, + InvalidArtifactTypeResponse, + InvalidAuthKey, + MissingAuthKey, + MissingTargetIDErrorResponse, +} from './errors'; +import { isKeyValid } from './auth'; +import { buildSchema, introspectionFromSchema } from 'graphql'; + +type SchemaArtifact = { + sdl: string; + url?: string; + name?: string; + date?: string; +}; + +const artifactTypesHandlers = { + /** + * Returns SchemaArtifact or SchemaArtifact[], same way as it's stored in the storage + */ + schema: (targetId: string, artifactType: string, rawValue: string) => + new Response(rawValue, { status: 200 }), + /** + * Returns Federation Supergraph, we store it as-is. 
+ */ + supergraph: (targetId: string, artifactType: string, rawValue: string) => + new Response(rawValue, { status: 200 }), + sdl: (targetId: string, artifactType: string, rawValue: string) => { + if (rawValue.startsWith('[')) { + return new InvalidArtifactMatch(artifactType, targetId); + } + + const parsed = JSON.parse(rawValue) as SchemaArtifact; + + return new Response(parsed.sdl, { status: 200 }); + }, + /** + * Returns Metadata same way as it's stored in the storage + */ + metadata: (targetId: string, artifactType: string, rawValue: string) => + new Response(rawValue, { + status: 200, + headers: { + 'Content-Type': 'application/json', + }, + }), + introspection: (targetId: string, artifactType: string, rawValue: string) => { + if (rawValue.startsWith('[')) { + return new InvalidArtifactMatch(artifactType, targetId); + } + + const parsed = JSON.parse(rawValue) as SchemaArtifact; + const rawSdl = parsed.sdl; + const schema = buildSchema(rawSdl); + const introspection = introspectionFromSchema(schema); + + return new Response(JSON.stringify(introspection), { + status: 200, + headers: { + 'Content-Type': 'application/json', + }, + }); + }, +}; + +const VALID_ARTIFACT_TYPES = Object.keys(artifactTypesHandlers); +const AUTH_HEADER_NAME = 'x-hive-cdn-key'; + +function parseIncomingRequest( + request: Request, + keyValidator: typeof isKeyValid +): + | { error: Response } + | { + targetId: string; + artifactType: keyof typeof artifactTypesHandlers; + storageKeyType: string; + } { + const params = new URL(request.url).pathname + .replace(/^\/+/, '/') + .split('/') + .filter(Boolean); + const targetId = params[0]; + + if (!targetId) { + return { + error: new MissingTargetIDErrorResponse(), + }; + } + + const artifactType = (params[1] || + 'schema') as keyof typeof artifactTypesHandlers; + + if (!VALID_ARTIFACT_TYPES.includes(artifactType)) { + return { error: new InvalidArtifactTypeResponse(artifactType) }; + } + + const headerKey = request.headers.get(AUTH_HEADER_NAME); 
+ + if (!headerKey) { + return { error: new MissingAuthKey() }; + } + + if (!keyValidator(targetId, headerKey)) { + return { + error: new InvalidAuthKey(), + }; + } + + return { + targetId, + artifactType, + storageKeyType: + artifactType === 'sdl' || + artifactType === 'introspection' || + artifactType === 'schema' + ? 'schema' + : artifactType, + }; +} + +export async function handleRequest( + request: Request, + keyValidator: typeof isKeyValid +) { + const parsedRequest = parseIncomingRequest(request, keyValidator); + + if ('error' in parsedRequest) { + return parsedRequest.error; + } + + const { targetId, artifactType, storageKeyType } = parsedRequest; + + const kvStorageKey = `target:${targetId}:${storageKeyType}`; + const rawValue = await HIVE_DATA.get(kvStorageKey); + + if (rawValue) { + switch (artifactType) { + case 'schema': + return artifactTypesHandlers.schema(targetId, artifactType, rawValue); + case 'supergraph': + return artifactTypesHandlers.supergraph( + targetId, + artifactType, + rawValue + ); + case 'sdl': + return artifactTypesHandlers.sdl(targetId, artifactType, rawValue); + case 'introspection': + return artifactTypesHandlers.introspection( + targetId, + artifactType, + rawValue + ); + case 'metadata': + return artifactTypesHandlers.metadata(targetId, artifactType, rawValue); + default: + return new Response(null, { + status: 500, + }); + } + } else { + return new CDNArtifactNotFound(artifactType, targetId); + } +} diff --git a/packages/services/cdn-worker/src/index.ts b/packages/services/cdn-worker/src/index.ts new file mode 100644 index 000000000..655dbb541 --- /dev/null +++ b/packages/services/cdn-worker/src/index.ts @@ -0,0 +1,11 @@ +import { isKeyValid } from './auth'; +import { UnexpectedError } from './errors'; +import { handleRequest } from './handler'; + +self.addEventListener('fetch', (event) => { + try { + event.respondWith(handleRequest(event.request, isKeyValid)); + } catch (e) { + event.respondWith(new UnexpectedError()); + } 
+}); diff --git a/packages/services/cdn-worker/tsconfig.json b/packages/services/cdn-worker/tsconfig.json new file mode 100644 index 000000000..9713b95b0 --- /dev/null +++ b/packages/services/cdn-worker/tsconfig.json @@ -0,0 +1,25 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "module": "commonjs", + "target": "esnext", + "lib": ["esnext"], + "skipLibCheck": true, + "alwaysStrict": true, + "strict": true, + "preserveConstEnums": true, + "moduleResolution": "node", + "sourceMap": true, + "esModuleInterop": true, + "types": ["@cloudflare/workers-types", "@types/service-worker-mock"] + }, + "include": ["src"], + "exclude": [ + "node_modules", + "dist", + "test", + "src/dev.ts", + "src/dev-polyfill.ts" + ] +} diff --git a/packages/services/police-worker/.gitignore b/packages/services/police-worker/.gitignore new file mode 100644 index 000000000..b5decdf66 --- /dev/null +++ b/packages/services/police-worker/.gitignore @@ -0,0 +1,10 @@ +*-debug.log +*-error.log +/.nyc_output +/dist +/lib +/package-lock.json +/tmp +node_modules +src/sdk.ts +schema.graphql \ No newline at end of file diff --git a/packages/services/police-worker/.npmignore b/packages/services/police-worker/.npmignore new file mode 100644 index 000000000..9458ec818 --- /dev/null +++ b/packages/services/police-worker/.npmignore @@ -0,0 +1,2 @@ +src +examples diff --git a/packages/services/police-worker/README.md b/packages/services/police-worker/README.md new file mode 100644 index 000000000..fe37c349f --- /dev/null +++ b/packages/services/police-worker/README.md @@ -0,0 +1,19 @@ +## Hive Police Worker + +1. Hive code, or Hive support teams can now create and maintain a list of rules that needs to be blocked on CF. +2. List of rules is defined in CloudFlare KV (as the `K`). +3. A CF Worker is running every X minutes (defined in Pulumi code), and syncs the records in KV into a CloudFlare WAF Rule. +4. 
When synced correctly, CF will make sure to block all matching requests and prevent traffic from getting to Hive servers. + +> You can also trigger the scheduled worker manually from CloudFlare dashboard if you need to speed things up. + +## Available Rules + +- Block missing/empty header: `header:HEADER_NAME:empty` +- Block by header value: `header:HEADER_NAME:SOME_VALUE` (or, with method: `header:HEADER_NAME:SOME_VALUE:POST`, or with method and path: `header:HEADER_NAME:SOME_VALUE:POST:/usage`) +- Block by IP: `ip:123.123.123.123` + +### Useful Links + +- CloudFlare List of KVs: https://dash.cloudflare.com/6d5bc18cd8d13babe7ed321adba3d8ae/workers/kv/namespaces (we use `hive-police-ENV`) +- CF Expressions documentation: https://developers.cloudflare.com/ruleset-engine/rules-language/ diff --git a/packages/services/police-worker/build.mjs b/packages/services/police-worker/build.mjs new file mode 100644 index 000000000..bc9c365d2 --- /dev/null +++ b/packages/services/police-worker/build.mjs @@ -0,0 +1,20 @@ +import { build } from 'esbuild'; +import { fileURLToPath } from 'url'; +import { dirname } from 'path'; + +(async function main() { + const __dirname = dirname(fileURLToPath(import.meta.url)); + const outfile = '/dist/worker.js'; + + await build({ + entryPoints: [__dirname + '/src/index.ts'], + bundle: true, + platform: 'browser', + target: 'chrome95', + minify: false, + outfile: __dirname + '/' + outfile, + treeShaking: true, + }); + + console.info(`Done, file: ${outfile}`); +})(); diff --git a/packages/services/police-worker/package.json b/packages/services/police-worker/package.json new file mode 100644 index 000000000..552fe250f --- /dev/null +++ b/packages/services/police-worker/package.json @@ -0,0 +1,15 @@ +{ + "name": "@hive/police-script", + "private": true, + "version": "0.0.0", + "license": "MIT", + "scripts": { + "build": "node build.mjs", + "typecheck": "tsc --noEmit" + }, + "devDependencies": { + "esbuild": "0.14.39", + 
"@types/service-worker-mock": "2.0.1", + "@cloudflare/workers-types": "3.4.0" + } +} diff --git a/packages/services/police-worker/src/global.d.ts b/packages/services/police-worker/src/global.d.ts new file mode 100644 index 000000000..003f6b24f --- /dev/null +++ b/packages/services/police-worker/src/global.d.ts @@ -0,0 +1,12 @@ +export {}; + +declare global { + /** + * KV Storage for the Police records + */ + let HIVE_POLICE: KVNamespace; + let ZONE_IDENTIFIER: string; + let CF_BEARER_TOKEN: string; + let HOSTNAMES: string; + let WAF_RULE_NAME: string; +} diff --git a/packages/services/police-worker/src/index.ts b/packages/services/police-worker/src/index.ts new file mode 100644 index 000000000..25a80e535 --- /dev/null +++ b/packages/services/police-worker/src/index.ts @@ -0,0 +1,126 @@ +const CF_BASE_URL = 'https://api.cloudflare.com/client/v4'; + +addEventListener('scheduled', (event) => { + event.waitUntil(handleSchedule()); +}); + +async function execute( + url: string, + options: Request | RequestInit = {} +): Promise { + const config = { + headers: { + Authorization: `Bearer ${CF_BEARER_TOKEN}`, + 'Content-type': 'application/json', + Accept: 'application/json', + ...(options.headers || {}), + }, + ...options, + }; + + return await fetch( + `${CF_BASE_URL}/zones/${ZONE_IDENTIFIER}${url}`, + config + ).then((r) => r.json()); +} + +async function handleSchedule() { + const data = await HIVE_POLICE.list(); + const rulesArr = data.keys.map((key) => { + const [type, value, ...rest] = key.name.split(':'); + + switch (type) { + // based on https://developers.cloudflare.com/ruleset-engine/rules-language/ + case 'ip': { + return `ip.src == ${value}`; + } + case 'header': { + const headerValue = rest[0]; + const method = rest[1]; + const path = rest[2]; + let rule: string | null = null; + + if (headerValue === 'empty' || headerValue === 'undefined') { + rule = `not any(lower(http.request.headers.names[*])[*] contains "${value}")`; + } else { + rule = 
`any(http.request.headers["${value}"][*] contains "${headerValue}")`; + } + + if (method) { + rule = `${rule} and http.request.method == "${method}"`; + } + + if (path) { + rule = `${rule} and http.request.uri.path == "${path}"`; + } + + return rule; + } + default: { + return null; + } + } + }); + + if (rulesArr.length === 0) { + console.warn(`No rules in expression, nothing to enforce yet.`); + + return; + } + + let rulesExpression = rulesArr + .filter(Boolean) + .map((v) => `(${v})`) + .join(' or '); + + rulesExpression = `http.host in { ${HOSTNAMES.split(',') + .map((v) => `"${v}"`) + .join(' ')} } and ${rulesExpression}`; + + console.log(`Calculated WAF Expression:`, rulesExpression); + + const firewallRules = await execute(`/firewall/rules`); + let rule = firewallRules.result.find((v: any) => v.ref === WAF_RULE_NAME); + + console.log('found rule:', rule); + + if (!rule) { + const response = await execute(`/firewall/rules`, { + method: 'POST', + body: JSON.stringify([ + { + ref: WAF_RULE_NAME, + action: 'block', + description: WAF_RULE_NAME, + filter: { + paused: true, + expression: rulesExpression, + ref: `${WAF_RULE_NAME}-filter`, + }, + }, + ]), + }); + + console.log(`Create response: `, response); + rule = response.result[0]; + } + + if (!rule) { + console.warn(`rule is empty`); + + return; + } + + const updateResponse = await execute(`/filters`, { + method: 'PUT', + body: JSON.stringify([ + { + id: rule.filter.id, + ref: rule.filter.ref, + paused: false, + expression: rulesExpression, + }, + ]), + }); + console.log(`Update response: `, updateResponse.result); +} diff --git a/packages/services/police-worker/tsconfig.json b/packages/services/police-worker/tsconfig.json new file mode 100644 index 000000000..903e5020d --- /dev/null +++ b/packages/services/police-worker/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "outDir": "./dist", + "module": "commonjs", + "target": "esnext", + "lib": ["esnext"], + "skipLibCheck": true, + "alwaysStrict": true, 
+ "strict": true, + "preserveConstEnums": true, + "moduleResolution": "node", + "sourceMap": true, + "esModuleInterop": true, + "types": ["@cloudflare/workers-types", "@types/service-worker-mock"] + }, + "include": ["src"], + "exclude": ["node_modules", "dist", "test"] +} diff --git a/packages/services/rate-limit/.env.template b/packages/services/rate-limit/.env.template new file mode 100644 index 000000000..cd44a2ec9 --- /dev/null +++ b/packages/services/rate-limit/.env.template @@ -0,0 +1,7 @@ +PORT=4012 +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=registry +USAGE_ESTIMATOR_ENDPOINT=http://localhost:4011 \ No newline at end of file diff --git a/packages/services/rate-limit/package.json b/packages/services/rate-limit/package.json new file mode 100644 index 000000000..415a9805e --- /dev/null +++ b/packages/services/rate-limit/package.json @@ -0,0 +1,40 @@ +{ + "private": true, + "type": "module", + "name": "@hive/rate-limit", + "description": "A microservice for Hive SaaS, that exposes information about rate limits per given org/target.", + "version": "0.0.1", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --watch --format esm --target node16 --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "cross-undici-fetch": "0.4.3", + "zod": "3.15.1", + "@trpc/server": "9.23.2", + "@trpc/client": "9.23.2", + "reflect-metadata": "0.1.13", + "@graphql-hive/core": "0.2.0", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "date-fns": "2.28.0", + "got": "12.0.4" + }, + "devDependencies": { + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git 
a/packages/services/rate-limit/src/api.ts b/packages/services/rate-limit/src/api.ts new file mode 100644 index 000000000..54a711eec --- /dev/null +++ b/packages/services/rate-limit/src/api.ts @@ -0,0 +1,44 @@ +import * as trpc from '@trpc/server'; +import type { Limiter } from './limiter'; +import { z } from 'zod'; +import { inferProcedureInput, inferProcedureOutput } from '@trpc/server'; + +export type RateLimitInput = z.infer; + +const VALIDATION = z + .object({ + id: z.string().nonempty(), + entityType: z.enum(['organization', 'target']), + type: z.enum(['schema-push', 'operations-reporting']), + /** + * Token is optional, and used only when an additional blocking (WAF) process is needed. + */ + token: z.string().nullish(), + }) + .required(); + +export const rateLimitApiRouter = trpc + .router() + .query('getRetention', { + input: z + .object({ + targetId: z.string().nonempty(), + }) + .required(), + async resolve({ ctx, input }) { + return ctx.getRetention(input.targetId); + }, + }) + .query('checkRateLimit', { + input: VALIDATION, + async resolve({ ctx, input }) { + return ctx.checkLimit(input); + }, + }); + +export type RateLimitApi = typeof rateLimitApiRouter; +export type RateLimitApiQuery = keyof RateLimitApi['_def']['queries']; +export type RateLimitQueryOutput = + inferProcedureOutput; +export type RateLimitQueryInput = + inferProcedureInput; diff --git a/packages/services/rate-limit/src/dev.ts b/packages/services/rate-limit/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/rate-limit/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/rate-limit/src/index.ts b/packages/services/rate-limit/src/index.ts new file mode 100644 index 000000000..98f081920 --- /dev/null +++ b/packages/services/rate-limit/src/index.ts @@ -0,0 +1,100 @@ +#!/usr/bin/env node +import 'reflect-metadata'; +import * as Sentry from 
'@sentry/node'; +import { + createServer, + startMetrics, + ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createRateLimiter } from './limiter'; +import { createConnectionString } from '@hive/storage'; +import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify/dist/trpc-server-adapters-fastify.cjs.js'; +import { rateLimitApiRouter } from './api'; + +const LIMIT_CACHE_UPDATE_INTERVAL_MS = process.env + .LIMIT_CACHE_UPDATE_INTERVAL_MS + ? parseInt(process.env.LIMIT_CACHE_UPDATE_INTERVAL_MS as string) + : 1 * 60_000; // default is every 1m + +async function main() { + Sentry.init({ + serverName: 'rate-limit', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'rate-limit', + tracing: false, + }); + + try { + const ctx = createRateLimiter({ + logger: server.log, + rateLimitConfig: { + interval: LIMIT_CACHE_UPDATE_INTERVAL_MS, + }, + rateEstimator: { + endpoint: ensureEnv('USAGE_ESTIMATOR_ENDPOINT', 'string'), + }, + storage: { + connectionString: createConnectionString(process.env as any), + }, + }); + + server.register(fastifyTRPCPlugin, { + prefix: '/trpc', + trpcOptions: { + router: rateLimitApiRouter, + createContext: () => ctx, + }, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([stop(), server.close()]); + }, + }); + + const port = process.env.PORT || 5000; + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(ctx.readiness() ? 
200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await ctx.start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/rate-limit/src/limiter.ts b/packages/services/rate-limit/src/limiter.ts new file mode 100644 index 000000000..6217b9730 --- /dev/null +++ b/packages/services/rate-limit/src/limiter.ts @@ -0,0 +1,217 @@ +import { fetch } from 'cross-undici-fetch'; +import type { FastifyLoggerInstance } from '@hive/service-common'; +import { createStorage as createPostgreSQLStorage } from '@hive/storage'; + +import { startOfMonth, endOfMonth } from 'date-fns'; +import * as Sentry from '@sentry/node'; +import { createTRPCClient } from '@trpc/client'; +import type { UsageEstimatorApi } from '@hive/usage-estimator'; +import type { RateLimitInput } from './api'; +import { + rateLimitOperationsEventOrg, + rateLimitSchemaEventOrg, +} from './metrics'; + +export type RateLimitCheckResponse = { + limited: boolean; + quota?: number; + current?: number; +}; + +const DEFAULT_RETENTION = 30; // days + +export type Limiter = ReturnType; + +export function createRateLimiter(config: { + logger: FastifyLoggerInstance; + rateLimitConfig: { + interval: number; + }; + rateEstimator: { + endpoint: string; + }; + storage: { + connectionString: string; + }; +}) { + const rateEstimator = createTRPCClient({ + url: `${config.rateEstimator.endpoint}/trpc`, + fetch, + }); + + const { logger } = config; + const postgres$ = createPostgreSQLStorage(config.storage.connectionString); + let initialized = false; + let intervalHandle: ReturnType | null = null; + let targetIdToRateLimitStatus = { + orgToTargetIdMap: new Map(), + retention: new Map(), + 
operationsReporting: new Map(), + schemaPushes: new Map(), + }; + + async function fetchAndCalculateUsageInformation() { + const now = new Date(); + const window = { + startTime: startOfMonth(now).toUTCString(), + endTime: endOfMonth(now).toUTCString(), + }; + config.logger.info( + `Calculating rate-limit information based on window: ${window.startTime} -> ${window.endTime}` + ); + const storage = await postgres$; + const newMap: typeof targetIdToRateLimitStatus = { + orgToTargetIdMap: new Map(), + retention: new Map(), + operationsReporting: new Map(), + schemaPushes: new Map(), + }; + + const [records, operations, pushes] = await Promise.all([ + storage.getGetOrganizationsAndTargetPairsWithLimitInfo(), + rateEstimator.query('estimateOperationsForAllTargets', window), + rateEstimator.query('estiamteSchemaPushesForAllTargets', window), + ]); + + logger.debug( + `Fetched total of ${Object.keys(records).length} targets from the DB` + ); + logger.debug( + `Fetched total of ${ + Object.keys(operations).length + } targets with usage information` + ); + logger.debug( + `Fetched total of ${ + Object.keys(pushes).length + } targets with schema push information` + ); + + for (const record of records) { + newMap.orgToTargetIdMap.set(record.organization, record.target); + const currentOperations = operations[record.target] || 0; + const operationsLimited = + record.limit_operations_monthly === 0 + ? false + : record.limit_operations_monthly < currentOperations; + + newMap.retention.set(record.target, record.limit_retention_days); + + newMap.operationsReporting.set(record.target, { + current: currentOperations, + quota: record.limit_operations_monthly, + limited: operationsLimited, + }); + + const currentPushes = pushes[record.target] || 0; + const pushLimited = + record.limit_schema_push_monthly === 0 + ? 
false + : record.limit_schema_push_monthly < currentPushes; + newMap.schemaPushes.set(record.target, { + current: currentPushes, + quota: record.limit_schema_push_monthly, + limited: pushLimited, + }); + + if (operationsLimited) { + rateLimitOperationsEventOrg + .labels({ + orgId: record.organization, + }) + .inc(); + logger.info( + `Target="${record.target}" (org="${record.organization}") is now being rate-limited for operations (${currentOperations}/${record.limit_operations_monthly})` + ); + } + + if (pushLimited) { + rateLimitSchemaEventOrg + .labels({ + orgId: record.organization, + }) + .inc(); + logger.info( + `Target="${record.target}" (org="${record.organization}") is now being rate-limited for schema pushes (${currentPushes}/${record.limit_schema_push_monthly})` + ); + } + } + + targetIdToRateLimitStatus = newMap; + } + + return { + readiness() { + return initialized; + }, + getRetention(targetId: string) { + const map = targetIdToRateLimitStatus.retention; + + if (map.has(targetId)) { + return map.get(targetId)!; + } else { + // In case we don't have any knowledge on that target id, to use the default. + return DEFAULT_RETENTION; + } + }, + checkLimit(input: RateLimitInput): RateLimitCheckResponse { + const map = + input.type === 'operations-reporting' + ? targetIdToRateLimitStatus.operationsReporting + : targetIdToRateLimitStatus.schemaPushes; + + const entityId = + input.entityType === 'target' + ? 
input.id + : targetIdToRateLimitStatus.orgToTargetIdMap.get(input.id); + + if (!entityId) { + logger.warn( + `Failed to resolve/find rate limit information for entityId=${entityId} (type=${input.entityType})` + ); + + return { + limited: false, + }; + } + + if (map.has(entityId)) { + return map.get(entityId)!; + } else { + // In case we don't have any knowledge on that target id, we allow it to run + return { + limited: false, + }; + } + }, + async start() { + logger.info( + `Rate Limiter starting, will update rate-limit information every ${config.rateLimitConfig.interval}ms` + ); + await fetchAndCalculateUsageInformation(); + initialized = true; + intervalHandle = setInterval(async () => { + logger.info( + `Interval triggered, updating internval rate-limit cache...` + ); + + try { + await fetchAndCalculateUsageInformation(); + } catch (error) { + logger.error(error, `Failed to update rate-limit cache`); + Sentry.captureException(error, { + level: Sentry.Severity.Error, + }); + } + }, config.rateLimitConfig.interval); + }, + async stop() { + initialized = false; // to make readiness check == false + if (intervalHandle) { + clearInterval(intervalHandle); + intervalHandle = null; + } + logger.info('Rate Limiter stopped'); + }, + }; +} diff --git a/packages/services/rate-limit/src/metrics.ts b/packages/services/rate-limit/src/metrics.ts new file mode 100644 index 000000000..7d0f5c6a4 --- /dev/null +++ b/packages/services/rate-limit/src/metrics.ts @@ -0,0 +1,13 @@ +import { metrics } from '@hive/service-common'; + +export const rateLimitSchemaEventOrg = new metrics.Counter({ + name: 'rate_limited_schema_events_count', + help: 'Rate limit events per org id, for schema pushses.', + labelNames: ['orgId'], +}); + +export const rateLimitOperationsEventOrg = new metrics.Counter({ + name: 'rate_limited_operations_events_count', + help: 'Rate limit events per org id, for operations.', + labelNames: ['orgId'], +}); diff --git a/packages/services/rate-limit/tsconfig.json 
b/packages/services/rate-limit/tsconfig.json new file mode 100644 index 000000000..62f6d54e8 --- /dev/null +++ b/packages/services/rate-limit/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "skipLibCheck": true, + "rootDir": "../.." + }, + "files": ["src/index.ts"] +} diff --git a/packages/services/schema/.env.template b/packages/services/schema/.env.template new file mode 100644 index 000000000..0d4defd38 --- /dev/null +++ b/packages/services/schema/.env.template @@ -0,0 +1,3 @@ +REDIS_HOST="localhost" +REDIS_PORT="6379" +REDIS_PASSWORD="" \ No newline at end of file diff --git a/packages/services/schema/.gitignore b/packages/services/schema/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/schema/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/schema/CHANGELOG.md b/packages/services/schema/CHANGELOG.md new file mode 100644 index 000000000..e4ed9f42b --- /dev/null +++ b/packages/services/schema/CHANGELOG.md @@ -0,0 +1,106 @@ +# @hive/schema + +## 0.2.1 + +### Patch Changes + +- e204afe4: Controller concurrency + +## 0.2.0 + +### Minor Changes + +- 845e0880: Trim descriptions + +## 0.1.14 + +### Patch Changes + +- 1623aca5: Upgrade sentry + +## 0.1.13 + +### Patch Changes + +- ffb6feb6: Add Redis word to redis logging + +## 0.1.12 + +### Patch Changes + +- Updated dependencies [3a435baa] + - @hive/service-common@0.1.3 + +## 0.1.11 + +### Patch Changes + +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 + +## 0.1.10 + +### Patch Changes + +- d4a4c464: Use throng + +## 0.1.9 + +### Patch Changes + +- 4b85bf48: Seems like calculateDelay owns the retry limit + +## 0.1.8 + +### Patch Changes + +- 4e27e93e: Scrape schema and webhooks services + +## 0.1.7 + +### Patch Changes + +- 0ea9cf3e: Adjust connection to ClickHouse + +## 0.1.6 + +### Patch Changes + +- 
953794f2: Fix #945 + +## 0.1.5 + +### Patch Changes + +- a8485a06: Use Pino logger +- Updated dependencies [a8485a06] + - @hive/service-common@0.1.2 + +## 0.1.4 + +### Patch Changes + +- 7a6f4e6f: Logs and printing + +## 0.1.3 + +### Patch Changes + +- e08d7691: Share stitchSchemas across validate and build phases + +## 0.1.2 + +### Patch Changes + +- 2e513bed: Smaller payload + +## 0.1.1 + +### Patch Changes + +- 79129085: Bump + +## 0.1.0 + +### Minor Changes + +- 747fccdb: Introduces Schema service to validate and build GraphQL schemas diff --git a/packages/services/schema/LICENSE b/packages/services/schema/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/schema/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/packages/services/schema/package.json b/packages/services/schema/package.json new file mode 100644 index 000000000..08e4b02eb --- /dev/null +++ b/packages/services/schema/package.json @@ -0,0 +1,36 @@ +{ + "name": "@hive/schema", + "private": true, + "type": "module", + "version": "0.2.1", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@apollo/federation": "0.36.1", + "zod": "3.15.1", + "@trpc/server": "9.23.2", + "@graphql-tools/stitch": "8.3.1", + "@graphql-tools/stitching-directives": "2.1.1", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "graphql": "16.5.0", + "ioredis": "4.28.3" + }, + "devDependencies": { + "@types/ioredis": "4.28.7", + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/schema/src/api.ts b/packages/services/schema/src/api.ts new file mode 100644 index 000000000..3a3321294 --- /dev/null +++ b/packages/services/schema/src/api.ts @@ -0,0 +1,132 @@ +import * as trpc from '@trpc/server'; +import { inferProcedureInput } from '@trpc/server'; +import type { FastifyLoggerInstance } from 'fastify'; +import { z } from 'zod'; +import { buildCounter, supergraphCounter, validateCounter } from './metrics'; +import { pickOrchestrator } from './orchestrators'; +import { Redis } from 'ioredis'; +import { createErrorHandler } from '@hive/service-common'; + +const TYPE_VALIDATION = z.enum(['single', 'federation', 'stitching']); +const SCHEMA_OBJECT_VALIDATION = { + raw: z.string().nonempty(), + source: z.string().nonempty(), +}; +const SCHEMAS_VALIDATION = z.array(z.object(SCHEMA_OBJECT_VALIDATION)); + 
+export const schemaBuilderApiRouter = trpc + .router<{ + logger: FastifyLoggerInstance; + redis: Redis; + errorHandler: ReturnType; + }>() + .mutation('supergraph', { + input: z + .object({ + type: TYPE_VALIDATION, + schemas: z.array( + z + .object({ + ...SCHEMA_OBJECT_VALIDATION, + url: z.string().nullish(), + }) + .required() + ), + }) + .required(), + async resolve({ ctx, input }) { + supergraphCounter + .labels({ + type: input.type, + }) + .inc(); + try { + const orchestrator = pickOrchestrator( + input.type, + ctx.redis, + ctx.logger + ); + + return await orchestrator.supergraph(input.schemas); + } catch (error) { + ctx.errorHandler( + 'Failed to build a supergraph', + error as Error, + ctx.logger + ); + + throw error; + } + }, + }) + .mutation('validate', { + input: z + .object({ + type: TYPE_VALIDATION, + schemas: SCHEMAS_VALIDATION, + }) + .required(), + async resolve({ ctx, input }) { + validateCounter + .labels({ + type: input.type, + }) + .inc(); + try { + const orchestrator = pickOrchestrator( + input.type, + ctx.redis, + ctx.logger + ); + return await orchestrator.validate(input.schemas); + } catch (error) { + ctx.errorHandler( + 'Failed to validate a schema', + error as Error, + ctx.logger + ); + + throw error; + } + }, + }) + .mutation('build', { + input: z + .object({ + type: TYPE_VALIDATION, + schemas: SCHEMAS_VALIDATION, + }) + .required(), + async resolve({ ctx, input }) { + buildCounter + .labels({ + type: input.type, + }) + .inc(); + try { + const orchestrator = pickOrchestrator( + input.type, + ctx.redis, + ctx.logger + ); + + return await orchestrator.build(input.schemas); + } catch (error) { + ctx.errorHandler( + 'Failed to build a schema', + error as Error, + ctx.logger + ); + + throw error; + } + }, + }); + +export type SchemaBuilderApi = typeof schemaBuilderApiRouter; +export type SchemaBuilderApiMutate = + keyof SchemaBuilderApi['_def']['mutations']; + +export type SchemaBuilderMutationInput< + TRouteKey extends SchemaBuilderApiMutate 
+> = inferProcedureInput; diff --git a/packages/services/schema/src/dev.ts b/packages/services/schema/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/schema/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/schema/src/index.ts b/packages/services/schema/src/index.ts new file mode 100644 index 000000000..59d08de23 --- /dev/null +++ b/packages/services/schema/src/index.ts @@ -0,0 +1,121 @@ +#!/usr/bin/env node +import { + createServer, + createErrorHandler, + ensureEnv, + startMetrics, + registerShutdown, +} from '@hive/service-common'; +import * as Sentry from '@sentry/node'; +import Redis from 'ioredis'; +import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify/dist/trpc-server-adapters-fastify.cjs.js'; +import { schemaBuilderApiRouter } from './api'; + +async function main() { + Sentry.init({ + serverName: 'schema', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'schema', + tracing: false, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([server.close(), redis.disconnect(false)]); + }, + }); + + const errorHandler = createErrorHandler(server); + + const redis = new Redis({ + host: ensureEnv('REDIS_HOST'), + port: ensureEnv('REDIS_PORT', 'number'), + password: ensureEnv('REDIS_PASSWORD'), + retryStrategy(times) { + return Math.min(times * 500, 2000); + }, + reconnectOnError(error) { + server.log.warn('Redis reconnectOnError', error); + return 1; + }, + db: 0, + maxRetriesPerRequest: null, + enableReadyCheck: false, + }); + + try { + redis.on('error', (err) => { + errorHandler('Redis error', err); + }); + + redis.on('connect', () => { + server.log.debug('Redis connection established'); + }); + + 
redis.on('ready', () => { + server.log.info('Redis connection ready'); + }); + + redis.on('close', () => { + server.log.info('Redis connection closed'); + }); + + redis.on('reconnecting', (timeToReconnect) => { + server.log.info('Redis reconnecting in %s', timeToReconnect); + }); + + const port = process.env.PORT || 6500; + const context = { + redis, + logger: server.log, + errorHandler, + }; + + server.register(fastifyTRPCPlugin, { + prefix: '/trpc', + trpcOptions: { + router: schemaBuilderApiRouter, + createContext: () => context, + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(200).send(); + }, + }); + + await server.listen(port, '0.0.0.0'); + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + } catch (error) { + server.log.fatal(error); + throw error; + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/schema/src/metrics.ts b/packages/services/schema/src/metrics.ts new file mode 100644 index 000000000..9efe4e449 --- /dev/null +++ b/packages/services/schema/src/metrics.ts @@ -0,0 +1,19 @@ +import { metrics } from '@hive/service-common'; + +export const validateCounter = new metrics.Counter({ + name: 'schema_validate_total', + help: 'Number of call to validate schema', + labelNames: ['type'], +}); + +export const buildCounter = new metrics.Counter({ + name: 'schema_build_total', + help: 'Number of call to build schema', + labelNames: ['type'], +}); + +export const supergraphCounter = new metrics.Counter({ + name: 'schema_supergraph_total', + help: 'Number of call to build supergraph', + labelNames: ['type'], +}); diff --git a/packages/services/schema/src/orchestrators.ts b/packages/services/schema/src/orchestrators.ts new file 
mode 100644 index 000000000..c96ca99a2 --- /dev/null +++ b/packages/services/schema/src/orchestrators.ts @@ -0,0 +1,409 @@ +import type { Redis as RedisInstance } from 'ioredis'; +import type { FastifyLoggerInstance } from '@hive/service-common'; +import { createHash } from 'crypto'; +import { printSchema, parse, concatAST, visit, print, ASTNode } from 'graphql'; +import type { DocumentNode } from 'graphql'; +import { validateSDL } from 'graphql/validation/validate.js'; +import { composeAndValidate, compositionHasErrors } from '@apollo/federation'; +import { stitchSchemas } from '@graphql-tools/stitch'; +import { stitchingDirectives } from '@graphql-tools/stitching-directives'; +import type { + SchemaType, + BuildInput, + BuildOutput, + ValidationInput, + ValidationOutput, + SupergraphInput, + SupergraphOutput, +} from './types'; + +function trimDescriptions(doc: DocumentNode): DocumentNode { + function trim(node: T): T { + if (node && 'description' in node && node.description) { + (node.description as any).value = node.description.value.trim(); + } + + return node; + } + + return visit(doc, { + SchemaDefinition: trim, + ObjectTypeDefinition: trim, + ObjectTypeExtension: trim, + InterfaceTypeExtension: trim, + UnionTypeExtension: trim, + InputObjectTypeExtension: trim, + EnumTypeExtension: trim, + SchemaExtension: trim, + ScalarTypeExtension: trim, + FieldDefinition: trim, + InputValueDefinition: trim, + InterfaceTypeDefinition: trim, + UnionTypeDefinition: trim, + EnumTypeDefinition: trim, + EnumValueDefinition: trim, + InputObjectTypeDefinition: trim, + DirectiveDefinition: trim, + }); +} + +const emptySource = '*'; + +function toValidationError(error: any) { + if (error instanceof Error) { + return { + message: error.message, + }; + } + + return { + message: error as string, + }; +} + +interface Orchestrator { + validate(schemas: ValidationInput): Promise; + build(schemas: BuildInput): Promise; + supergraph(schemas: SupergraphInput): Promise; +} + +interface 
CompositionSuccess { + type: 'success'; + result: { + supergraphSdl: string; + raw: string; + }; +} + +interface CompositionFailure { + type: 'failure'; + result: { + errors: Array<{ + message: string; + }>; + }; +} + +const createFederation: ( + redis: RedisInstance, + logger: FastifyLoggerInstance +) => Orchestrator = (redis, logger) => { + const compose = reuse< + ValidationInput, + CompositionSuccess | CompositionFailure + >( + async (schemas) => { + const result = composeAndValidate( + schemas.map((schema) => { + return { + typeDefs: trimDescriptions(parse(schema.raw)), + name: schema.source, + }; + }) + ); + + if (compositionHasErrors(result)) { + return { + type: 'failure', + result: { + errors: result.errors.map(toValidationError), + }, + }; + } + + return { + type: 'success', + result: { + supergraphSdl: result.supergraphSdl, + raw: printSchema(result.schema), + }, + }; + }, + 'federation', + redis, + logger + ); + + return { + async validate(schemas) { + const result = await compose(schemas); + + if (result.type === 'failure') { + return { + errors: result.result.errors, + }; + } + + return { + errors: [], + }; + }, + async build(schemas) { + const result = await compose(schemas); + + if (result.type === 'failure') { + throw new Error( + [ + `Schemas couldn't be merged:`, + result.result.errors.map((error) => `\t - ${error.message}`), + ].join('\n') + ); + } + + return { + raw: result.result.raw, + source: emptySource, + }; + }, + async supergraph(schemas) { + const result = await compose(schemas); + + return { + supergraph: + 'supergraphSdl' in result.result ? 
result.result.supergraphSdl : null, + }; + }, + }; +}; + +const single: Orchestrator = { + async validate(schemas) { + const schema = schemas[0]; + const errors = validateSDL(parse(schema.raw)).map(toValidationError); + + return { + errors, + }; + }, + async build(schemas) { + const schema = schemas[0]; + + return { + source: schema.source, + raw: print(trimDescriptions(parse(schema.raw))), + }; + }, + async supergraph() { + throw new Error('Single schema orchestrator does not support supergraph'); + }, +}; + +const createStitching: ( + redis: RedisInstance, + logger: FastifyLoggerInstance +) => Orchestrator = (redis, logger) => { + const stitchAndPrint = reuse( + async (schemas: ValidationInput) => { + return printSchema( + stitchSchemas({ + typeDefs: schemas.map((schema) => + trimDescriptions(parse(schema.raw)) + ), + }) + ); + }, + 'stitching', + redis, + logger + ); + + return { + async validate(schemas) { + const parsed = schemas.map((s) => parse(s.raw)); + + const errors = parsed + .map((schema) => validateStitchedSchema(schema)) + .flat(); + + try { + await stitchAndPrint(schemas); + } catch (error) { + errors.push(toValidationError(error)); + } + + return { + errors, + }; + }, + async build(schemas) { + const raw = await stitchAndPrint(schemas); + + return { + raw, + source: emptySource, + }; + }, + async supergraph() { + throw new Error( + 'Stitching schema orchestrator does not support supergraph' + ); + }, + }; +}; + +function validateStitchedSchema(doc: DocumentNode) { + const { allStitchingDirectivesTypeDefs } = stitchingDirectives(); + + return validateSDL( + concatAST([parse(allStitchingDirectivesTypeDefs), doc]) + ).map(toValidationError); +} + +export function pickOrchestrator( + type: SchemaType, + redis: RedisInstance, + logger: FastifyLoggerInstance +) { + switch (type) { + case 'federation': + logger.debug('Using federation orchestrator'); + return createFederation(redis, logger); + case 'single': + logger.debug('Using single orchestrator'); + 
return single; + case 'stitching': + logger.debug('Using stitching orchestrator'); + return createStitching(redis, logger); + default: + throw new Error(`Unknown schema type: ${type}`); + } +} + +interface ActionStarted { + status: 'started'; +} + +interface ActionCompleted { + status: 'completed'; + result: T; +} + +function createChecksum(input: TInput, uniqueKey: string): string { + return createHash('sha256') + .update(JSON.stringify(input)) + .update(`key:${uniqueKey}`) + .digest('hex'); +} + +async function readAction( + checksum: string, + redis: RedisInstance +): Promise | null> { + const action = await redis.get(`schema-service:${checksum}`); + + if (action) { + return JSON.parse(action); + } + + return null; +} + +async function startAction( + checksum: string, + redis: RedisInstance, + logger: FastifyLoggerInstance +): Promise { + const key = `schema-service:${checksum}`; + logger.debug('Starting action (checksum=%s)', checksum); + // Set and lock + expire + const inserted = await redis.setnx( + key, + JSON.stringify({ status: 'started' }) + ); + + if (inserted) { + logger.debug('Started action (checksum=%s)', checksum); + await redis.expire(key, 60); + return true; + } + + logger.debug('Action already started (checksum=%s)', checksum); + + return false; +} + +async function completeAction( + checksum: string, + data: O, + redis: RedisInstance, + logger: FastifyLoggerInstance +): Promise { + const key = `schema-service:${checksum}`; + logger.debug('Completing action (checksum=%s)', checksum); + await redis.setex( + key, + 60, + JSON.stringify({ + status: 'completed', + result: data, + }) + ); +} + +async function removeAction( + checksum: string, + redis: RedisInstance, + logger: FastifyLoggerInstance +): Promise { + logger.debug('Removing action (checksum=%s)', checksum); + const key = `schema-service:${checksum}`; + await redis.del(key); +} + +function reuse( + factory: (input: I) => Promise, + key: string, + redis: RedisInstance, + logger: 
FastifyLoggerInstance +): (input: I) => Promise { + async function reuseFactory(input: I, attempt = 0): Promise { + const checksum = createChecksum(input, key); + + if (attempt === 3) { + await removeAction(checksum, redis, logger); + throw new Error('Tried too many times'); + } + + let cached = await readAction(checksum, redis); + + if (!cached) { + const started = await startAction(checksum, redis, logger); + + if (!started) { + return reuseFactory(input, attempt + 1); + } + + const result = await factory(input).catch(async (error) => { + await removeAction(checksum, redis, logger); + return Promise.reject(error); + }); + await completeAction(checksum, result, redis, logger); + + return result; + } + + const startedAt = Date.now(); + while (cached && cached.status !== 'completed') { + logger.debug( + 'Waiting action to complete (checksum=%s, time=%s)', + checksum, + Date.now() - startedAt + ); + await new Promise((resolve) => setTimeout(resolve, 500)); + cached = await readAction(checksum, redis); + + if (Date.now() - startedAt > 30_000) { + await removeAction(checksum, redis, logger); + throw new Error('Timeout after 30s'); + } + } + + if (!cached) { + return reuseFactory(input, attempt + 1); + } + + return cached.result; + } + + return reuseFactory; +} diff --git a/packages/services/schema/src/types.ts b/packages/services/schema/src/types.ts new file mode 100644 index 000000000..355f00ff1 --- /dev/null +++ b/packages/services/schema/src/types.ts @@ -0,0 +1,32 @@ +export type SchemaType = 'single' | 'federation' | 'stitching'; + +export type BuildInput = Array<{ + raw: string; + source: string; +}>; + +export interface BuildOutput { + source: string; + raw: string; +} + +export type ValidationInput = Array<{ + raw: string; + source: string; +}>; + +export interface ValidationOutput { + errors: Array<{ + message: string; + }>; +} + +export type SupergraphInput = Array<{ + raw: string; + source: string; + url?: string | null; +}>; + +export type SupergraphOutput = 
{ + supergraph: string | null; +}; diff --git a/packages/services/schema/tsconfig.json b/packages/services/schema/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/schema/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." + }, + "files": ["src/index.ts"] +} diff --git a/packages/services/server/.env.template b/packages/services/server/.env.template new file mode 100644 index 000000000..bcdf5437c --- /dev/null +++ b/packages/services/server/.env.template @@ -0,0 +1,38 @@ +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=registry +ROARR_LOG=false +AUTH0_SECRET="" +AUTH0_DOMAIN="" +AUTH0_CLIENT_ID="" +AUTH0_CLIENT_SECRET="" +AUTH0_SCOPE="openid profile offline_access" +AUTH0_AUDIENCE="" +AUTH0_CONNECTION="" +CLICKHOUSE_PROTOCOL="http" +CLICKHOUSE_HOST="localhost" +CLICKHOUSE_PORT="8123" +CLICKHOUSE_USERNAME="test" +CLICKHOUSE_PASSWORD="test" +TOKENS_ENDPOINT="http://localhost:6001" +SCHEMA_ENDPOINT="http://localhost:6500" +USAGE_ESTIMATOR_ENDPOINT="http://localhost:4011" +RATE_LIMIT_ENDPOINT="http://localhost:4012" +BILLING_ENDPOINT="http://localhost:4013" +WEBHOOKS_ENDPOINT="http://localhost:6250" +REDIS_HOST="localhost" +REDIS_PORT="6379" +REDIS_PASSWORD="" +GITHUB_APP_ID="" +GITHUB_APP_PRIVATE_KEY="" +CF_BASE_PATH=http://localhost:4010 +CF_ACCOUNT_ID=103df45224310d669213971ce28b5b70 +CF_AUTH_TOKEN=85e20c26c03759603c0f45884824a1c3 +CF_NAMESPACE_ID=33b1e3bbb4a4707d05ea0307cbb55c79 +CDN_AUTH_PRIVATE_KEY=1e1064ef9cda8bf38936b77317e90dc3 +CDN_BASE_URL="http://localhost:4010" +ENCRYPTION_SECRET="97e4094d2463e71a981913cca4e56788" +FEEDBACK_SLACK_TOKEN="" +FEEDBACK_SLACK_CHANNEL="#hive" \ No newline at end of file diff --git a/packages/services/server/CHANGELOG.md b/packages/services/server/CHANGELOG.md new file mode 100644 index 
000000000..bea586088 --- /dev/null +++ b/packages/services/server/CHANGELOG.md @@ -0,0 +1,1381 @@ +# @hive/server + +## 0.27.8 + +### Patch Changes + +- Updated dependencies [a33cdcef] + - @graphql-hive/client@0.15.2 + +## 0.27.7 + +### Patch Changes + +- Updated dependencies [cd998fab] + - @graphql-hive/client@0.15.1 + +## 0.27.6 + +### Patch Changes + +- Updated dependencies [ac9b868c] + - @graphql-hive/client@0.15.0 + +## 0.27.5 + +### Patch Changes + +- Updated dependencies [903edf84] + - @graphql-hive/client@0.14.2 + +## 0.27.4 + +### Patch Changes + +- Updated dependencies [ff82bd75] +- Updated dependencies [ccb93298] + - @graphql-hive/client@0.14.1 + +## 0.27.3 + +### Patch Changes + +- Updated dependencies [fe2b5dbc] + - @graphql-hive/client@0.14.0 + +## 0.27.2 + +### Patch Changes + +- fef049c0: Capture exceptions in Urql + +## 0.27.1 + +### Patch Changes + +- 95f93830: New service url undefined !== null + +## 0.27.0 + +### Minor Changes + +- 845e0880: Trim descriptions + +## 0.26.2 + +### Patch Changes + +- c1db1c60: Fix token accessing another token's data in usage checking phase + +## 0.26.1 + +### Patch Changes + +- 1623aca5: Upgrade sentry + +## 0.26.0 + +### Minor Changes + +- 919fff93: Store schema coordinates in a separate table + +## 0.25.0 + +### Minor Changes + +- ffb6feb6: Allow to check usage from multiple targets + +## 0.24.4 + +### Patch Changes + +- 3a435baa: Show one value of x-request-id +- Updated dependencies [3a435baa] + - @hive/service-common@0.1.3 + +## 0.24.3 + +### Patch Changes + +- 158a33e6: Pass span to schemaPublish and describe CH spans as CH + queryId + +## 0.24.2 + +### Patch Changes + +- 96caa261: fix(deps): update dependency agentkeepalive to v4.2.0 +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 +- 79d4b4c2: fix(deps): update envelop monorepo +- 8a38ced6: Pass error as second or first +- Updated dependencies [79d4b4c2] +- Updated dependencies [607a4fe2] + - @graphql-hive/client@0.13.0 + +## 0.24.1 + 
+### Patch Changes + +- 1e433ef0: bump + +## 0.24.0 + +### Minor Changes + +- 74f8187b: Add failure rate column to operations list + +## 0.23.0 + +### Minor Changes + +- b12a7254: Introduce Webhooks service + +## 0.22.6 + +### Patch Changes + +- 4e452fd2: Delay only by 250ms minimum + +## 0.22.5 + +### Patch Changes + +- 26d6545d: Backoff + +## 0.22.4 + +### Patch Changes + +- e3a96531: Increase the overall timeout and bump retry to 6 in CH + +## 0.22.3 + +### Patch Changes + +- Updated dependencies [a8485a06] + - @hive/service-common@0.1.2 + +## 0.22.2 + +### Patch Changes + +- 2e513bed: Smaller payload + +## 0.22.1 + +### Patch Changes + +- 79129085: Bump + +## 0.22.0 + +### Minor Changes + +- 747fccdb: Introduces Schema service to validate and build GraphQL schemas + +## 0.21.2 + +### Patch Changes + +- c5618186: Send noop post http request to CF worker to test Altinity issue + +## 0.21.1 + +### Patch Changes + +- 391b7f47: Use "per target" instead of "per project" message + +## 0.21.0 + +### Minor Changes + +- 78f3fd29: Share publication of schema - allows to scale up +- 016dd92c: fix: return SchemaPublishMissingServiceError from Mutation.schemaPublish if the operation selects it and the service name parameter is missing + +### Patch Changes + +- b3e54d5a: Fixed 'only one service schema...' + +## 0.20.6 + +### Patch Changes + +- 82b40d1: Bump to 1s + +## 0.20.5 + +### Patch Changes + +- 5fec981: Wait 500ms for lookup, connect and secureConnect before retrying the http request + +## 0.20.4 + +### Patch Changes + +- 10b7fff: Try out agent keepalive + +## 0.20.3 + +### Patch Changes + +- 7fa2f1c: Multiply timeout by retry number + +## 0.20.2 + +### Patch Changes + +- 33fbb5e: Bump + +## 0.20.1 + +### Patch Changes + +- 5798314: Single x-request-id header + +## 0.20.0 + +### Minor Changes + +- dc8fb96: Introduce base schema + +### Patch Changes + +- bf78c16: Bump + +## 0.19.0 + +### Minor Changes + +- b5966ab: Replace undici with got +- b5966ab: Say hi to TSUP! 
+ +### Patch Changes + +- Updated dependencies [b5966ab] + - @graphql-hive/client@0.12.0 + +## 0.18.6 + +### Patch Changes + +- 5d393f4: Allow to access slack token based on context + +## 0.18.5 + +### Patch Changes + +- 31beac1: Dummy bump + +## 0.18.4 + +### Patch Changes + +- d0f2a63: Dummy update to test deployment + +## 0.18.3 + +### Patch Changes + +- 02b00f0: Update undici, sentry, bullmq +- Updated dependencies [02b00f0] + - @graphql-hive/client@0.11.1 + +## 0.18.2 + +### Patch Changes + +- 34d93e9: Mutation.createCdnToken should be accessed with target:registry:read + +## 0.18.1 + +### Patch Changes + +- 7549a38: Fix startup + +## 0.18.0 + +### Minor Changes + +- 7eca7f0: Introduce access scopes + +### Patch Changes + +- Updated dependencies [7eca7f0] + - @graphql-hive/client@0.11.0 + +## 0.17.3 + +### Patch Changes + +- 52c15e5: Fixes for timeouts? + +## 0.17.2 + +### Patch Changes + +- 2f7bc32: Collect default metrics + +## 0.17.1 + +### Patch Changes + +- 86909ba: Unmask masked errors that should not be masked + +## 0.17.0 + +### Minor Changes + +- 14494fd: Notify user when slack is added + +## 0.16.10 + +### Patch Changes + +- d5854ee: Use Inspector@3.0.1 + +## 0.16.9 + +### Patch Changes + +- 19d4cd5: Bump + +## 0.16.8 + +### Patch Changes + +- cc9aa01: Update dependencies +- cc9aa01: Use latest Inspector + +## 0.16.7 + +### Patch Changes + +- ac69fb8: Fix #742 + +## 0.16.6 + +### Patch Changes + +- 65b687e: Batch getSchema calls + +## 0.16.5 + +### Patch Changes + +- 9723cc8: Fix patches + +## 0.16.4 + +### Patch Changes + +- 4dbb8f0: Fix sentry + +## 0.16.3 + +### Patch Changes + +- a3fd1bb: More nested spans + +## 0.16.2 + +### Patch Changes + +- db0fe9a: Pass Span to functions wrapped with @sentry decorator + +## 0.16.1 + +### Patch Changes + +- 04a18ac: Set lower timeout for ClickHouse queries and set it per query +- 81d8cbc: Create a span for every clickhouse http call attempt + +## 0.16.0 + +### Minor Changes + +- 91a6957: Allow to update url 
of a service + +## 0.15.2 + +### Patch Changes + +- c21a099: Share publication of schema between multiple exact same requests + +## 0.15.1 + +### Patch Changes + +- Updated dependencies [d67d3e8] + - @graphql-hive/client@0.10.0 + +## 0.15.0 + +### Minor Changes + +- 6b62d63: No more ElasticSearch + +## 0.14.3 + +### Patch Changes + +- 3d828f4: Use latest Sentry and Sentry NextJS integration +- efd8648: Better handling of x-request-id (also for local env) +- efd8648: Mask errors and doesn't expose any information + +## 0.14.2 + +### Patch Changes + +- b15020b: Fixes for double-reporting to Sentry in case of alert error + +## 0.14.1 + +### Patch Changes + +- f9b545f: Use missing when only version is not available +- Updated dependencies [f9b545f] + - @graphql-hive/client@0.9.1 + +## 0.14.0 + +### Minor Changes + +- 6f204be: Display token info + +### Patch Changes + +- Updated dependencies [6f204be] + - @graphql-hive/client@0.9.0 + +## 0.13.22 + +### Patch Changes + +- 2d8c2ce: Use stable sentry plugin + +## 0.13.21 + +### Patch Changes + +- c7a3d24: Gracefully handle Schema Change Notification errors +- b502e9e: Fix issue with conflicting name on target + +## 0.13.20 + +### Patch Changes + +- ce155d4: Use orchestrators to build and print schema in Lab +- ce155d4: Skip Sentry for graphql readiness check + +## 0.13.19 + +### Patch Changes + +- 9fb90bc: Bump server + +## 0.13.18 + +### Patch Changes + +- 07c654b: Do not track readiness introspection queries + +## 0.13.17 + +### Patch Changes + +- 34fe260: Logs in project and organization access dataloader + +## 0.13.16 + +### Patch Changes + +- eacffea: Exclude readiness check from Sentry transactions + +## 0.13.15 + +### Patch Changes + +- 73782cb: Print stringified period object instead of [object Object] + +## 0.13.14 + +### Patch Changes + +- 1747d29: Track authorizationa and x-api-token headers in transaction data + +## 0.13.13 + +### Patch Changes + +- df6c501: Make Query.lab nullable + +## 0.13.12 + +### Patch 
Changes + +- b358e0a: Allow requests with up to 11mb body size + +## 0.13.11 + +### Patch Changes + +- aff0857: Add requestId to logger +- aff0857: Pass x-request-id to responses + +## 0.13.10 + +### Patch Changes + +- 080cf71: Fix missing filter + +## 0.13.9 + +### Patch Changes + +- 7c5c710: Show stats for client versions + +## 0.13.8 + +### Patch Changes + +- c1dd4e6: Stop Sentry from hiding user id + +## 0.13.7 + +### Patch Changes + +- 0f8d7b7: Make No Access error more descriptive + +## 0.13.6 + +### Patch Changes + +- 249e484: Use json() + +## 0.13.5 + +### Patch Changes + +- 05d0140: Use @theguild/buddy +- 052fc32: Use replicas + +## 0.13.4 + +### Patch Changes + +- 5f99c67: Batch getOrganizationOwner calls (homemade dataloader) + +## 0.13.3 + +### Patch Changes + +- 88fe4b6: Show more data in admin stats + +## 0.13.2 + +### Patch Changes + +- 7f2b355: Parse total number + +## 0.13.1 + +### Patch Changes + +- 4ee9a3b: Fix operations count + +## 0.13.0 + +### Minor Changes + +- efd7b74: Admin panel + +## 0.12.3 + +### Patch Changes + +- 49ccd19: Fix field stats + +## 0.12.2 + +### Patch Changes + +- fa58c17: Months are zero-based... 
+ +## 0.12.1 + +### Patch Changes + +- a835491: Fix conditions in field stats + +## 0.12.0 + +### Minor Changes + +- 54f5870: Use ClickHouse already to show data from recent 20 days + +## 0.11.0 + +### Minor Changes + +- 5a46b7e: Collect data from operations_new and materialized views + +## 0.10.6 + +### Patch Changes + +- bd24700: Add elapsed metric + +## 0.10.5 + +### Patch Changes + +- 8434d44: Use Histogram + +## 0.10.4 + +### Patch Changes + +- 45c30d0: Fix metrics + +## 0.10.3 + +### Patch Changes + +- 23fc805: Fix missing comma :) + +## 0.10.2 + +### Patch Changes + +- 51f54f3: Fix missing query label + +## 0.10.1 + +### Patch Changes + +- 066824a: Log ClickHouse read latency to Prom + +## 0.10.0 + +### Minor Changes + +- 889368b: Bump + +## 0.9.0 + +### Minor Changes + +- 11e6800: Allow multiple auth providers and add displayName and fullName to profiles + +## 0.8.1 + +### Patch Changes + +- ea7b7f9: Do not track resolvers in Sentry +- ea7b7f9: Use compression + +## 0.8.0 + +### Minor Changes + +- 0527e3c: Support Envelop 1.0 + +### Patch Changes + +- 4647d25: Dynamically calculate windows for operations data based on resolution +- 0527e3c: Update Sentry +- a111e68: Update link to a commit in Slack notification +- 0527e3c: Add serverName tag to Sentry.init +- 0527e3c: Update undici +- Updated dependencies [0527e3c] +- Updated dependencies [0527e3c] + - @graphql-hive/client@0.8.0 + +## 0.7.23 + +### Patch Changes + +- bde9548: Introduce Query.schemaVersion + +## 0.7.22 + +### Patch Changes + +- Updated dependencies [0e712c7] + - @graphql-hive/client@0.7.0 + +## 0.7.21 + +### Patch Changes + +- Updated dependencies [e09f95a] + - @graphql-hive/client@0.6.3 + +## 0.7.20 + +### Patch Changes + +- Updated dependencies [074c052] + - @graphql-hive/client@0.6.2 + +## 0.7.19 + +### Patch Changes + +- Updated dependencies [38bfd02] + - @graphql-hive/client@0.6.1 + +## 0.7.18 + +### Patch Changes + +- 23636de: No longer require target selector when fetching latest 
schema version (we use API Token there anyway) +- 23636de: Store single schema and multiple schemas in CDN (with details) +- 23636de: Remove Identifier from the CDN +- 23636de: Store supergraph in CDN +- Updated dependencies [23636de] +- Updated dependencies [23636de] +- Updated dependencies [23636de] +- Updated dependencies [23636de] + - @graphql-hive/client@0.6.0 + +## 0.7.17 + +### Patch Changes + +- 3d4852c: Update urql and codegen + +## 0.7.16 + +### Patch Changes + +- 9295075: Bump + +## 0.7.15 + +### Patch Changes + +- 1ac74a4: Fix timezone mismatch between App, Api and ClickHouse + +## 0.7.14 + +### Patch Changes + +- aa4e661: Bump Undici +- Updated dependencies [aa4e661] + - @graphql-hive/client@0.5.3 + +## 0.7.13 + +### Patch Changes + +- fb3efda: Use defer in general operations stats query + +## 0.7.12 + +### Patch Changes + +- 34fd1f0: Fix syntax error in failuresOverTime query +- 1cb18e5: Track queries in Sentry + +## 0.7.11 + +### Patch Changes + +- 6e75bd1: Fix incorrect number of errors over time + +## 0.7.10 + +### Patch Changes + +- 2fbbf66: Fix timeouts + +## 0.7.9 + +### Patch Changes + +- 9006b6e: Fix missing json parsing in TokensStorage + +## 0.7.8 + +### Patch Changes + +- 36baac7: Fix JSON parsing of non-json responses + +## 0.7.7 + +### Patch Changes + +- 356760f: Make Sentry capture exception only on last attempt + +## 0.7.6 + +### Patch Changes + +- 48d482e: Retry ClickHouse on error + +## 0.7.5 + +### Patch Changes + +- e0a47fb: Use Undici instead of Got and Agentkeepalive +- Updated dependencies [e0a47fb] + - @graphql-hive/client@0.5.2 + +## 0.7.4 + +### Patch Changes + +- fb9575f: Track more error details and set timeouts + +## 0.7.3 + +### Patch Changes + +- 5ff2e7a: Bump +- 8627a9e: Fix fastify hooks + +## 0.7.2 + +### Patch Changes + +- 8f62c26: Update fastify + +## 0.7.1 + +### Patch Changes + +- 8a9fdd7: The has method returns true on staled values - tiny-lru +- Updated dependencies [8a9fdd7] + - @graphql-hive/client@0.5.1 + +## 
0.7.0 + +### Minor Changes + +- d7348a3: Hide literals and remove aliases +- d7348a3: Use ClickHouse next to ElasticSearch + +### Patch Changes + +- d7348a3: Check only once every 30 days if target has collected ops +- d7348a3: Set dynamic TTL based on expires_at column +- d7348a3: Reuse TCP connections +- Updated dependencies [d7348a3] + - @graphql-hive/client@0.5.0 + +## 0.6.17 + +### Patch Changes + +- b010137: Update Sentry to 6.10.0 + +## 0.6.16 + +### Patch Changes + +- 7e944f2: Use less filters +- abd3d3e: Use p75, p90, p95 and p99 only + +## 0.6.15 + +### Patch Changes + +- 0bfe9c1: Improve percentiles calculation by 70% using HDR algorithm + +## 0.6.14 + +### Patch Changes + +- 0c59f14: use composite aggregation to show all clients +- 0c59f14: Use filters instead of must conditions + +## 0.6.13 + +### Patch Changes + +- 6214042: Better error when invalid token is provided + +## 0.6.12 + +### Patch Changes + +- 11b3eb9: Added CDN using CF + +## 0.6.11 + +### Patch Changes + +- Updated dependencies [db2c1c3] +- Updated dependencies [4e9f0aa] + - @hive/service-common@0.1.1 + +## 0.6.10 + +### Patch Changes + +- Updated dependencies [6ed9bf2] +- Updated dependencies [588285c] + - @hive/service-common@0.1.0 + +## 0.6.9 + +### Patch Changes + +- cdbb7b1: Collect ElasticSearch query in case of an error +- 2576e63: Simplify OperationsCollector.countFields and make it faster by 50%" + +## 0.6.8 + +### Patch Changes + +- Updated dependencies [ee6b82b] + - @graphql-hive/client@0.4.5 + +## 0.6.7 + +### Patch Changes + +- dae2b90: Add operations filter to operations stats page + +## 0.6.6 + +### Patch Changes + +- e7fe3df: @cache related fixes + +## 0.6.5 + +### Patch Changes + +- bda322c: Use schema:check in our CI + +## 0.6.4 + +### Patch Changes + +- 4bc83be: Use HEAD and GET for healthchecks +- 4bc83be: Node 16 + +## 0.6.3 + +### Patch Changes + +- a73e5cb: Expose GraphQL API healthcheck +- Updated dependencies [a73e5cb] + - @graphql-hive/client@0.4.4 + +## 0.6.2 + 
+### Patch Changes + +- 9b1425f: Send alerts only for relevant targets +- 93674cf: Update Sentry to 6.7.0 +- 3e16adb: Attach originalError to captured expection by Sentry and set sql and values from Slonik + +## 0.6.1 + +### Patch Changes + +- 5aa5e93: Bump +- Updated dependencies [5aa5e93] + - @graphql-hive/client@0.4.3 + +## 0.6.0 + +### Minor Changes + +- 87e3d2e: Alerts, yay! + +### Patch Changes + +- 968614d: Fix persisting the same query twice +- Updated dependencies [968614d] + - @graphql-hive/client@0.4.2 + +## 0.5.4 + +### Patch Changes + +- 1a16360: Collect GraphQL Client name and version +- Updated dependencies [1a16360] + - @graphql-hive/client@0.4.1 + +## 0.5.3 + +### Patch Changes + +- 41a9117: Fix an issue when publishing a schema for the first time + +## 0.5.2 + +### Patch Changes + +- 203c563: Use "experiment" as the default branch instead of "development" + +## 0.5.1 + +### Patch Changes + +- 4224cb9: Add info with a link to documentation on missing data +- c6ef3d2: Bob update +- Updated dependencies [c6ef3d2] +- Updated dependencies [4224cb9] + - @graphql-hive/client@0.4.0 + +## 0.5.0 + +### Minor Changes + +- 143fa32: Added Schema Laboratory + +## 0.4.6 + +### Patch Changes + +- e65b9cc: Do not set \$created when updating user profile + +## 0.4.5 + +### Patch Changes + +- 26dc80e: Fix issues with proxy setup + +## 0.4.4 + +### Patch Changes + +- 148b294: Fix issues with undici headers timeout +- Updated dependencies [148b294] + - @graphql-hive/client@0.3.3 + +## 0.4.3 + +### Patch Changes + +- 2ebac11: Use externalAuthUserId when creating a user + +## 0.4.2 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes +- Updated dependencies [85b85d4] + - @graphql-hive/client@0.3.2 + +## 0.4.1 + +### Patch Changes + +- 9b14d18: Bump + +## 0.4.0 + +### Minor Changes + +- 36097a6: Add mixpanel + +## 0.3.7 + +### Patch Changes + +- Updated dependencies [a19fef4] + - @graphql-hive/client@0.3.1 + +## 0.3.6 + +### Patch Changes + +- 
Updated dependencies [1fe62bb] + - @graphql-hive/client@0.3.0 + +## 0.3.5 + +### Patch Changes + +- 4a7c569: Move operation hashing to Usage service +- Updated dependencies [4a7c569] + - @graphql-hive/client@0.2.2 + +## 0.3.4 + +### Patch Changes + +- 6d528a3: Use composite aggregation to show more than 10 records + +## 0.3.3 + +### Patch Changes + +- dbcfa69: Split operations stats query into smaller queries (looks WAY better) + +## 0.3.2 + +### Patch Changes + +- 824a403: Duration over time stats +- Updated dependencies [5ca6a06] +- Updated dependencies [f96cfc9] + - @graphql-hive/client@0.2.1 + +## 0.3.1 + +### Patch Changes + +- bb5b3c4: Preparations for persisted operations in Lance +- Updated dependencies [30da7e7] +- Updated dependencies [bb5b3c4] + - @graphql-hive/client@0.2.0 + +## 0.3.0 + +### Minor Changes + +- acab74b: Added support for persisted operations - Changes made in API, APP, CLI, Server, Storage + +## 0.2.1 + +### Patch Changes + +- 0873fba: Use logarithim scale in latency histogram + +## 0.2.0 + +### Minor Changes + +- c507159: Redesign, fixes, different structure of components and RPM over time + +## 0.1.35 + +### Patch Changes + +- c591b5b: Distribution of latency +- ba5f690: Show requests per minute + +## 0.1.34 + +### Patch Changes + +- 3c72c34: Percentiles per operation + +## 0.1.33 + +### Patch Changes + +- ec400f8: Show failures over time +- e62e498: Fix conditional breaking changes +- a471c88: Support percentiles of request duration + +## 0.1.32 + +### Patch Changes + +- @graphql-hive/client@0.1.3 + +## 0.1.31 + +### Patch Changes + +- Updated dependencies [e1f9e1e] +- Updated dependencies [02322e7] +- Updated dependencies [8585fd8] + - @graphql-hive/client@0.1.2 + +## 0.1.30 + +### Patch Changes + +- 4a1de8c: Change windows and add min/max to xAxis + +## 0.1.29 + +### Patch Changes + +- f6d2ca6: bump + +## 0.1.28 + +### Patch Changes + +- 6e68e25: More stats + +## 0.1.27 + +### Patch Changes + +- 23e19fe: Add Requests Over Time plot 
+ +## 0.1.26 + +### Patch Changes + +- ed8b326: Show simple stats + +## 0.1.25 + +### Patch Changes + +- b33bf11: List of collected operations + +## 0.1.24 + +### Patch Changes + +- 67660b1: Bump +- c083cb6: Use SENTRY_DSN + +## 0.1.23 + +### Patch Changes + +- Updated dependencies [5135307] + - @graphql-hive/client@0.1.1 + +## 0.1.22 + +### Patch Changes + +- 7113a0e: Update Sentry to 6.3.5 +- Updated dependencies [7113a0e] +- Updated dependencies [7113a0e] +- Updated dependencies [078e758] +- Updated dependencies [65cc5b5] + - @graphql-hive/client@0.1.0 + +## 0.1.21 + +### Patch Changes + +- 7dca692: No longer generate x-request-id + +## 0.1.20 + +### Patch Changes + +- d485371: Use trustProxy + +## 0.1.19 + +### Patch Changes + +- e43375f: Test deployment + +## 0.1.18 + +### Patch Changes + +- Updated dependencies [fd38851] +- Updated dependencies [32f198b] + - @graphql-hive/client@0.0.8 + +## 0.1.17 + +### Patch Changes + +- Updated dependencies [eedbad6] + - @graphql-hive/client@0.0.7 + +## 0.1.16 + +### Patch Changes + +- ab5c204: Collect more with Sentry +- Updated dependencies [ab5c204] + - @graphql-hive/client@0.0.6 + +## 0.1.15 + +### Patch Changes + +- 3a03b35: Fix release id and LOG_LEVEL debug +- cbae6ce: Capture errors on resolver-level + +## 0.1.14 + +### Patch Changes + +- 6633f2b: Share x-request-id with tokens + +## 0.1.13 + +### Patch Changes + +- df4abcb: Enable Sentry only in prod + +## 0.1.12 + +### Patch Changes + +- 741c92f: just test for triggering deployment ci + +## 0.1.11 + +### Patch Changes + +- 932de99: Do not track health and readiness checks + +## 0.1.10 + +### Patch Changes + +- 93fbf26: Use Sentry Tracing + +## 0.1.9 + +### Patch Changes + +- a4970d0: Fix ENVIRONMENT + +## 0.1.8 + +### Patch Changes + +- 7bfdb93: Use Sentry to track performance + +## 0.1.7 + +### Patch Changes + +- 2269c61: No extra calls to Auth0 +- Updated dependencies [2269c61] + - @graphql-hive/client@0.0.5 + +## 0.1.6 + +### Patch Changes + +- Updated 
dependencies [d64a3c5] + - @graphql-hive/client@0.0.4 + +## 0.1.5 + +### Patch Changes + +- c1e705a: bump + +## 0.1.4 + +### Patch Changes + +- 7e88e71: bump +- Updated dependencies [7e88e71] + - @graphql-hive/client@0.0.3 + +## 0.1.3 + +### Patch Changes + +- b2d686e: bump +- Updated dependencies [b2d686e] + - @graphql-hive/client@0.0.2 + +## 0.1.2 + +### Patch Changes + +- 9da6738: bump + +## 0.1.1 + +### Patch Changes + +- e8cb071: fix issues with ncc packages diff --git a/packages/services/server/github-app.pem b/packages/services/server/github-app.pem new file mode 100644 index 000000000..ec9b92b1b --- /dev/null +++ b/packages/services/server/github-app.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEArqnNoaF8AutuQ8U/5jwD+n/EHBqme/dGqJ2IXfsMtm6RRGt0 +PgIXQ6MAvzVT1itCcez7vIwgIfiNf/ameYfUsRcboOql5zsHtaYkiSXuttg5Ag0U +3+MVoEk6TM/Hh3/5AJyIk7OWkWkGOehg+KgItrDLLBoNDulA/gooyEAi3JfUd09T +srpUISkHFKtkSOKonrqxGQL8UbKVEujMTsafhWYgF04x5ePvTEc/EPdnYluGFoiv +y8E3fCDSdWVfk6eROkvMeBEDOpVnt/Qxy725NMJRKZvcDsQLFzelvlMXNyArJhN9 +ua8419CVnPncM5jOI4yh9s5TjBW1u8b/QTcSwwIDAQABAoIBAQCYY2gYfAbxmS0k +DcW5f2VNOgoFJy9X+x9337HppS6rQH6XvlS2AScBCQeOy90wf5/CPCxKjcs/OFdk +bPNEF0F3nJyrYe6LdxI75w/7ZNAIUVAzvOwYhk0JArrN537DduDuKqRykX5tsb8P +6PIqIJkxo8Av4y6/cr0UqBJ63JJN+Oy+MyLfuuQ3m0FL4OhyODWtVljNjzCHu43f +O+oeioxOVmOZafbigd8ZBqAhPi+Ch9DSveWh9q5fory4WEZp8TV2VVXrf6qWsBpK +YW2m0UNOOessYZYRXg4jDHRD7ksYB9+bifZJFxJ8iUjH7aIPKqbfNnUNJa3CIAQL +ivc5UjDhAoGBANzcefPb/HSA1fJ8DIeTxc8TBJlNP9b57UkbFnHVBO4EosCxYMC8 +EkVSQt0SGkRlq5UVt8D2oavyOFNInmaYgfVVlsakfIPyUubT4S80uRtn3YN4R0TD +9UMSH0yF7/yhWPTxGqoUQBjB2ffdSUMpdSJmZin2Y+hIE7NXMR9oe65TAoGBAMpz +t45cl5gPnIPxV/uN+Y6GHDm5bsut0AIGKuD+m+8BoFtkWDQI0sfSZ4Ug5LMgVBvP +tjmWokycVfGsc+HEFXr6sysHLB25IpKJqeCKOiY8QydObSe6+B3kOuSXwLZd3zYD +NvPlIRA2JXEFW3EvS/FcBEHtm7sgM1HzHtq6WRvRAoGBAJC2UZ3L7U10wTpht1/6 +HikBIAlGiWbVWXmGKx2o4tMF7JHvZyLB2ALd1Hu/icB5EYTctwrTTrk4O1mugkNc ++AIE84bybbeSxmkCOVDR7ducIZMr8x6GeyRYMnPUapuQKJZNiBgarqjKE4SJ1KAS 
+APLqP1SNYYbn+yiOH9yhfDCLAoGBAJqpACcfkSe3dWhOrVodQfKhfVqT0JKE/A0v +zP4JQ/K4mVp4M/G8c9n3YOIRGOXG4rpEWf9wlhhUrktr5TMp6wcCEnHijs8+Ka0s +wbwpnPasvth+T2E0smuTeXI8hBpNaEII/qgf+Lw7wSqwXwT+jtOZ6h67eGbh2JLY +Wk/aYEyhAoGAYdkY603ytt28vvNEuZESvkgpOzHI65LX69mEZHv0qcP8H2eyE6E5 +qlZRwLYMzQOf7ixseQ+nFhGonhomvN4/Yqq27lx2dVImf6oYlSjkolfi2PlAvFv6 +HVNajPGXxc+LWYpjpvoVWSDv4qnEODy4+ys10I0ziPyyG0Fx6J1zQus= +-----END RSA PRIVATE KEY----- diff --git a/packages/services/server/package.json b/packages/services/server/package.json new file mode 100644 index 000000000..788a365b9 --- /dev/null +++ b/packages/services/server/package.json @@ -0,0 +1,43 @@ +{ + "name": "@hive/server", + "type": "module", + "private": true, + "bin": "index.js", + "version": "0.27.8", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@envelop/graphql-modules": "3.3.1", + "@envelop/auth0": "3.3.1", + "@envelop/sentry": "3.4.0", + "@envelop/types": "2.2.0", + "@graphql-hive/client": "0.15.2", + "@graphql-yoga/node": "2.5.0", + "@sentry/node": "6.19.7", + "@sentry/integrations": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "got": "12.0.4", + "graphql": "16.5.0", + "reflect-metadata": "0.1.13" + }, + "devDependencies": { + "pino-pretty": "6.0.0", + "@swc/core": "1.2.185" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/server/src/async-storage.ts b/packages/services/server/src/async-storage.ts new file mode 100644 index 000000000..39cf5734b --- /dev/null +++ b/packages/services/server/src/async-storage.ts @@ -0,0 +1,5 @@ +import { AsyncLocalStorage } from 'async_hooks'; + +export const asyncStorage = new 
AsyncLocalStorage<{ + requestId?: string; +}>(); diff --git a/packages/services/server/src/dev.ts b/packages/services/server/src/dev.ts new file mode 100644 index 000000000..9259b49a8 --- /dev/null +++ b/packages/services/server/src/dev.ts @@ -0,0 +1,15 @@ +import { config } from 'dotenv'; +import { readFileSync, existsSync } from 'fs'; +import { join } from 'path'; + +config({ + debug: true, +}); + +const privateKeyFile = join(process.cwd(), 'github-app.pem'); + +if (existsSync(privateKeyFile)) { + process.env.GITHUB_APP_PRIVATE_KEY = readFileSync(privateKeyFile, 'utf8'); +} + +await import('./index'); diff --git a/packages/services/server/src/graphql-handler.ts b/packages/services/server/src/graphql-handler.ts new file mode 100644 index 000000000..8e2da2a8c --- /dev/null +++ b/packages/services/server/src/graphql-handler.ts @@ -0,0 +1,196 @@ +import type { RouteHandlerMethod, FastifyRequest, FastifyReply } from 'fastify'; +import { Registry } from '@hive/api'; +import { cleanRequestId } from '@hive/service-common'; +import { createServer } from '@graphql-yoga/node'; +import { + GraphQLError, + ValidationContext, + ValidationRule, + Kind, + OperationDefinitionNode, + print, +} from 'graphql'; +import { useGraphQLModules } from '@envelop/graphql-modules'; +import { useAuth0 } from '@envelop/auth0'; +import { useSentry } from '@envelop/sentry'; +import { asyncStorage } from './async-storage'; +import { useSentryUser, extractUserId } from './use-sentry-user'; +import { useHive } from '@graphql-hive/client'; +import { useErrorHandler, Plugin } from '@graphql-yoga/node'; + +export interface GraphQLHandlerOptions { + graphiqlEndpoint: string; + registry: Registry; + onError: (e: Error) => void; + signature: string; +} + +const NoIntrospection: ValidationRule = (context: ValidationContext) => ({ + Field(node) { + if (node.name.value === '__schema' || node.name.value === '__type') { + context.reportError( + new GraphQLError('GraphQL introspection is not allowed', [node]) 
+ ); + } + }, +}); + +const isNonProductionEnvironment = process.env.ENVIRONMENT !== 'prod'; + +function useNoIntrospection(params: { + signature: string; +}): Plugin<{ req: FastifyRequest }> { + return { + onValidate({ context, addValidationRule }) { + const isReadinessCheck = + context.req.headers['x-signature'] === params.signature; + if (isReadinessCheck || isNonProductionEnvironment) { + return; + } + addValidationRule(NoIntrospection); + }, + }; +} + +export const graphqlHandler = ( + options: GraphQLHandlerOptions +): RouteHandlerMethod => { + const additionalPlugins: Plugin[] = []; + + if (process.env.ENVIRONMENT === 'prod') { + additionalPlugins.push( + useSentry({ + startTransaction: false, + renameTransaction: true, + operationName: () => 'graphql', + transactionName(args) { + const rootOperation = args.document.definitions.find( + (o) => o.kind === Kind.OPERATION_DEFINITION + ) as OperationDefinitionNode; + const operationType = rootOperation.operation; + const opName = + args.operationName || rootOperation.name?.value || 'anonymous'; + + return `${operationType}.${opName}`; + }, + includeRawResult: false, + includeResolverArgs: false, + includeExecuteVariables: true, + configureScope(args, scope) { + scope.setContext('Extra Info', { + variables: JSON.stringify(args.variableValues), + operationName: args.operationName, + operation: print(args.document), + userId: extractUserId(args.contextValue as any), + }); + }, + trackResolvers: false, + appendTags: ({ contextValue }) => { + const auth0_user_id = extractUserId(contextValue as any); + const request_id = cleanRequestId( + (contextValue as any).req.headers['x-request-id'] + ); + + return { auth0_user_id, request_id }; + }, + skip(args) { + // It's the readiness check + return args.operationName === 'readiness'; + }, + }), + useSentryUser(), + useErrorHandler((errors) => { + errors?.map((e) => server.logger.error(e)); + + for (const error of errors) { + options.onError(error); + } + }) + ); + } + + const 
server = createServer<{ + req: FastifyRequest; + reply: FastifyReply; + headers: Record; + }>({ + maskedErrors: + process.env.ENVIRONMENT === 'prod' || + process.env.ENVIRONMENT === 'staging', + plugins: [ + ...additionalPlugins, + useAuth0({ + onError() {}, + domain: process.env.AUTH0_DOMAIN!, + audience: process.env.AUTH0_AUDIENCE!, + extendContextField: 'user', + headerName: 'authorization', + preventUnauthenticatedAccess: true, + tokenType: 'Bearer', + }), + useHive({ + debug: true, + enabled: + process.env.ENVIRONMENT === 'prod' || + process.env.ENVIRONMENT === 'staging', + token: process.env.HIVE_API_TOKEN!, + usage: { + endpoint: process.env.HIVE_USAGE_ENDPOINT, + clientInfo(ctx: { req: FastifyRequest; reply: FastifyReply }) { + const name = ctx.req.headers['graphql-client-name'] as string; + const version = + (ctx.req.headers['graphql-client-version'] as string) ?? + 'missing'; + + if (name) { + return { name, version }; + } + + return null; + }, + exclude: ['readiness'], + }, + reporting: { + endpoint: process.env.HIVE_REPORTING_ENDPOINT, + author: 'Hive API', + commit: process.env.RELEASE ?? 'local', + }, + }), + useGraphQLModules(options.registry), + useNoIntrospection({ signature: options.signature }), + ], + graphiql: (request) => + isNonProductionEnvironment + ? { endpoint: request.headers.get('x-use-proxy') ?? 
request.url } + : false, + }); + + return async (req, reply) => { + const requestIdHeader = req.headers['x-request-id']; + const requestId = cleanRequestId(requestIdHeader); + + await asyncStorage.run( + { + requestId, + }, + async () => { + const response = await server.handleIncomingMessage(req, { + req, + reply, + headers: req.headers, + }); + + response.headers.forEach((value, key) => { + reply.header(key, value); + }); + + if (!reply.hasHeader('x-request-id')) { + reply.header('x-request-id', requestId || ''); + } + + reply.status(response.status); + reply.send(response.body); + } + ); + }; +}; diff --git a/packages/services/server/src/index.ts b/packages/services/server/src/index.ts new file mode 100644 index 000000000..fb92e7416 --- /dev/null +++ b/packages/services/server/src/index.ts @@ -0,0 +1,301 @@ +#!/usr/bin/env node + +import 'reflect-metadata'; +import { + createServer, + startMetrics, + ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createRegistry, LogFn, Logger } from '@hive/api'; +import { + createStorage as createPostgreSQLStorage, + createConnectionString, +} from '@hive/storage'; +import got from 'got'; +import { stripIgnoredCharacters } from 'graphql'; +import * as Sentry from '@sentry/node'; +import { ExtraErrorData, Dedupe } from '@sentry/integrations'; +import { asyncStorage } from './async-storage'; +import { graphqlHandler } from './graphql-handler'; +import { clickHouseReadDuration, clickHouseElapsedDuration } from './metrics'; + +export async function main() { + Sentry.init({ + serverName: 'api', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + tracesSampleRate: 1.0, + release: process.env.RELEASE || 'local', + integrations: [ + new Sentry.Integrations.Http({ tracing: true }), + new Sentry.Integrations.ContextLines(), + new Sentry.Integrations.LinkedErrors(), + new ExtraErrorData({ + depth: 2, + }), + new Dedupe(), + ], + 
defaultIntegrations: false, + autoSessionTracking: false, + }); + + const server = createServer({ + name: 'graphql-api', + tracing: true, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await server.close(); + }, + }); + + function createErrorHandler(level: Sentry.Severity): LogFn { + return (error: any, errorLike?: any, ...args: any[]) => { + server.log.error(error, errorLike, ...args); + + const errorObj = + error instanceof Error + ? error + : errorLike instanceof Error + ? errorLike + : null; + + if (errorObj instanceof Error) { + console.log('createErrorHandler', errorObj); + Sentry.captureException(errorObj, { + level, + extra: { + error, + errorLike, + rest: args, + }, + }); + } + }; + } + + function getRequestId() { + const store = asyncStorage.getStore(); + + return store?.requestId; + } + + function wrapLogFn(fn: LogFn): LogFn { + return (msg, ...args) => { + const requestId = getRequestId(); + + if (requestId) { + fn(msg + ` - (requestId=${requestId})`, ...args); + } else { + fn(msg, ...args); + } + }; + } + + try { + const errorHandler = createErrorHandler(Sentry.Severity.Error); + const fatalHandler = createErrorHandler(Sentry.Severity.Fatal); + + // eslint-disable-next-line no-inner-declarations + function createGraphQLLogger(binds: Record = {}): Logger { + return { + error: wrapLogFn(errorHandler), + fatal: wrapLogFn(fatalHandler), + info: wrapLogFn(server.log.info.bind(server.log)), + warn: wrapLogFn(server.log.warn.bind(server.log)), + trace: wrapLogFn(server.log.trace.bind(server.log)), + debug: wrapLogFn(server.log.debug.bind(server.log)), + child(bindings) { + return createGraphQLLogger({ + ...binds, + ...bindings, + requestId: getRequestId(), + }); + }, + }; + } + + const graphqlLogger = createGraphQLLogger(); + + const registry = createRegistry({ + tokens: { + endpoint: ensureEnv('TOKENS_ENDPOINT'), + }, + billing: { + endpoint: process.env.BILLING_ENDPOINT + ? 
ensureEnv('BILLING_ENDPOINT').replace(/\/$/g, '') + : null, + }, + webhooks: { + endpoint: ensureEnv('WEBHOOKS_ENDPOINT').replace(/\/$/g, ''), + }, + schemaService: { + endpoint: ensureEnv('SCHEMA_ENDPOINT').replace(/\/$/g, ''), + }, + usageEstimationService: { + endpoint: process.env.USAGE_ESTIMATOR_ENDPOINT + ? ensureEnv('USAGE_ESTIMATOR_ENDPOINT').replace(/\/$/g, '') + : null, + }, + rateLimitService: { + endpoint: process.env.RATE_LIMIT_ENDPOINT + ? ensureEnv('RATE_LIMIT_ENDPOINT').replace(/\/$/g, '') + : null, + }, + logger: graphqlLogger, + storage: await createPostgreSQLStorage( + createConnectionString(process.env as any) + ), + redis: { + host: ensureEnv('REDIS_HOST'), + port: ensureEnv('REDIS_PORT', 'number'), + password: ensureEnv('REDIS_PASSWORD'), + }, + githubApp: { + appId: ensureEnv('GITHUB_APP_ID', 'number'), + privateKey: ensureEnv('GITHUB_APP_PRIVATE_KEY'), + }, + clickHouse: { + protocol: ensureEnv('CLICKHOUSE_PROTOCOL'), + host: ensureEnv('CLICKHOUSE_HOST'), + port: ensureEnv('CLICKHOUSE_PORT', 'number'), + username: ensureEnv('CLICKHOUSE_USERNAME'), + password: ensureEnv('CLICKHOUSE_PASSWORD'), + onReadEnd(query, timings) { + clickHouseReadDuration + .labels({ query }) + .observe(timings.totalSeconds); + clickHouseElapsedDuration + .labels({ query }) + .observe(timings.elapsedSeconds); + }, + }, + cdn: { + authPrivateKey: ensureEnv('CDN_AUTH_PRIVATE_KEY'), + baseUrl: ensureEnv('CDN_BASE_URL'), + cloudflare: { + basePath: ensureEnv('CF_BASE_PATH'), + accountId: ensureEnv('CF_ACCOUNT_ID'), + authToken: ensureEnv('CF_AUTH_TOKEN'), + namespaceId: ensureEnv('CF_NAMESPACE_ID'), + }, + }, + encryptionSecret: ensureEnv('ENCRYPTION_SECRET'), + feedback: { + token: ensureEnv('FEEDBACK_SLACK_TOKEN'), + channel: ensureEnv('FEEDBACK_SLACK_CHANNEL'), + }, + }); + const graphqlPath = '/graphql'; + const port = process.env.PORT || 4000; + const signature = Math.random().toString(16).substr(2); + const graphql = graphqlHandler({ + graphiqlEndpoint: 
graphqlPath, + registry, + signature, + onError(error) { + console.log('graphqlHandler.onError', error); + Sentry.captureException(error, { + extra: { + error, + originalError: (error as any).originalError, + }, + level: Sentry.Severity.Error, + }); + graphqlLogger.error(error.message); + return graphqlLogger.error(`GraphQL execution failed`, error); + }, + }); + + server.route({ + method: ['GET', 'POST'], + url: graphqlPath, + handler: graphql, + }); + + const introspection = JSON.stringify({ + query: stripIgnoredCharacters(` + query readiness { + __schema { + queryType { + name + } + } + } + `), + operationName: 'readiness', + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + async handler(req, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: 'GET', + url: '/lab/:org/:project/:target', + async handler(req, res) { + res.status(200).send({ + ok: true, + }); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + async handler(req, res) { + try { + const response = await got.post( + `http://0.0.0.0:${port}${graphqlPath}`, + { + method: 'POST', + body: introspection, + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + 'x-signature': signature, + }, + } + ); + + if (response.statusCode >= 200 && response.statusCode < 300) { + if (response.body.includes('"__schema"')) { + res.status(200).send(); + return; + } + } + console.error(response.statusCode, response.body); + } catch (error) { + console.error(error); + } + + res.status(500).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + + await server.listen(port, '0.0.0.0'); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + process.exit(1); + } +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/packages/services/server/src/metrics.ts 
b/packages/services/server/src/metrics.ts new file mode 100644 index 000000000..cb76cccbb --- /dev/null +++ b/packages/services/server/src/metrics.ts @@ -0,0 +1,13 @@ +import { metrics } from '@hive/service-common'; + +export const clickHouseReadDuration = new metrics.Histogram({ + name: 'api_clickhouse_read_duration', + help: 'Read duration - ClickHouse', + labelNames: ['query'], +}); + +export const clickHouseElapsedDuration = new metrics.Histogram({ + name: 'api_clickhouse_elapsed_duration', + help: 'Read elapsed - ClickHouse', + labelNames: ['query'], +}); diff --git a/packages/services/server/src/use-sentry-user.ts b/packages/services/server/src/use-sentry-user.ts new file mode 100644 index 000000000..36e520337 --- /dev/null +++ b/packages/services/server/src/use-sentry-user.ts @@ -0,0 +1,36 @@ +import type { Plugin } from '@envelop/types'; +import * as Sentry from '@sentry/node'; + +export function extractUserId(context?: { user?: { sub: string } }) { + const sub = context?.user?.sub; + + if (sub) { + const [provider, id] = sub.split('|'); + const maxLen = 10; + + // Why? Sentry hides a user id when it looks similar to an api key (long hash) + return `${provider}|${ + id.length > maxLen ? id.substr(0, maxLen) + '...' 
: id + }`; + } + + return null; +} + +export const useSentryUser = (): Plugin<{ + user: any; +}> => { + return { + onExecute({ args }) { + const id = extractUserId(args.contextValue); + + if (id) { + Sentry.configureScope((scope) => { + scope.setUser({ + id, + }); + }); + } + }, + }; +}; diff --git a/packages/services/server/tsconfig.json b/packages/services/server/tsconfig.json new file mode 100644 index 000000000..2e828b7e2 --- /dev/null +++ b/packages/services/server/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "skipDefaultLibCheck": true, + "rootDir": "../..", + "experimentalDecorators": true, + "emitDecoratorMetadata": true + }, + "files": ["src/index.ts"] +} diff --git a/packages/services/service-common/.gitignore b/packages/services/service-common/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/service-common/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/service-common/CHANGELOG.md b/packages/services/service-common/CHANGELOG.md new file mode 100644 index 000000000..32dd10aa5 --- /dev/null +++ b/packages/services/service-common/CHANGELOG.md @@ -0,0 +1,33 @@ +# @hive/service-common + +## 0.1.3 + +### Patch Changes + +- 3a435baa: Show one value of x-request-id + +## 0.1.2 + +### Patch Changes + +- a8485a06: Use Pino logger + +## 0.1.1 + +### Patch Changes + +- db2c1c3: Remove console.log +- 4e9f0aa: Mark every transaction containing GraphQL errors as failure + +## 0.1.0 + +### Minor Changes + +- 6ed9bf2: Add prometheus metrics +- 588285c: Reuse or generate request id in fastify logger + +## 0.0.2 + +### Patch Changes + +- 6b9dfc7: Add referer diff --git a/packages/services/service-common/LICENSE b/packages/services/service-common/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/service-common/LICENSE 
@@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/packages/services/service-common/package.json b/packages/services/service-common/package.json new file mode 100644 index 000000000..cd74053a1 --- /dev/null +++ b/packages/services/service-common/package.json @@ -0,0 +1,16 @@ +{ + "private": true, + "type": "module", + "name": "@hive/service-common", + "version": "0.1.3", + "license": "MIT", + "dependencies": { + "fastify": "3.29.0", + "fastify-cors": "6.0.2", + "prom-client": "14.0.1" + }, + "peerDependencies": { + "@sentry/node": "^6.0.0", + "@sentry/tracing": "^6.0.0" + } +} diff --git a/packages/services/service-common/src/env.ts b/packages/services/service-common/src/env.ts new file mode 100644 index 000000000..20d52d8b5 --- /dev/null +++ b/packages/services/service-common/src/env.ts @@ -0,0 +1,29 @@ +import { invariant } from './helpers'; + +type ValueType = 'string' | 'number' | 'boolean'; + +export function ensureEnv(key: string): string; +export function ensureEnv(key: string, valueType: 'string'): string; +export function ensureEnv(key: string, valueType: 'number'): number; +export function ensureEnv(key: string, valueType: 'boolean'): boolean; +export function ensureEnv(key: string, valueType?: ValueType) { + let value = process.env[key]; + + if (value === '') { + value = undefined; + } + + invariant( + typeof value === 'string', + `Missing "${key}" environment varariable` + ); + + switch (valueType) { + case 'number': + return parseInt(value, 10); + case 'boolean': + return value === 'true'; + default: + return value; + } +} diff --git a/packages/services/service-common/src/errors.ts b/packages/services/service-common/src/errors.ts new file mode 100644 index 000000000..8b0d75942 --- /dev/null +++ b/packages/services/service-common/src/errors.ts @@ -0,0 +1,18 @@ +import type { FastifyInstance, FastifyLoggerInstance } from 'fastify'; +import * as Sentry from '@sentry/node'; + +export function createErrorHandler(server: FastifyInstance) { + return function 
errorHandler( + message: string, + error: Error, + logger?: FastifyLoggerInstance + ) { + console.log('createErrorHandler', message, error); + Sentry.captureException(error); + if (logger) { + logger.error(message + ' (error=%s)', error); + } else { + server.log.error(message + ' (error=%s)', error); + } + }; +} diff --git a/packages/services/service-common/src/fastify.ts b/packages/services/service-common/src/fastify.ts new file mode 100644 index 000000000..ba8db5065 --- /dev/null +++ b/packages/services/service-common/src/fastify.ts @@ -0,0 +1,43 @@ +import { fastify } from 'fastify'; +import cors from 'fastify-cors'; +import * as Sentry from '@sentry/node'; +import { useSentryTracing } from './sentry'; + +export type { FastifyLoggerInstance } from 'fastify'; + +export function createServer(options: { tracing: boolean; name: string }) { + const server = fastify({ + disableRequestLogging: true, + bodyLimit: 11e6, // 11 mb + logger: { + level: 'debug', + }, + maxParamLength: 5000, + requestIdHeader: 'x-request-id', + trustProxy: true, + }); + + server.addHook('onReady', async () => { + server.log.info(`Service "${options.name}" is ready`); + }); + + process + .on('unhandledRejection', (reason, p) => { + console.log('unhandledRejection', reason); + Sentry.captureException(reason); + server.log.error(reason as any, 'Unhandled Rejection at Promise', p); + }) + .on('uncaughtException', (err) => { + console.log('uncaughtException', err); + Sentry.captureException(err); + server.log.error(err as any, 'Uncaught Exception thrown'); + }); + + if (options.tracing) { + useSentryTracing(server); + } + + server.register(cors); + + return server; +} diff --git a/packages/services/service-common/src/graceful-shutdown.ts b/packages/services/service-common/src/graceful-shutdown.ts new file mode 100644 index 000000000..a963c67e6 --- /dev/null +++ b/packages/services/service-common/src/graceful-shutdown.ts @@ -0,0 +1,52 @@ +import type { FastifyLoggerInstance } from 'fastify'; + 
+const errorTypes = ['unhandledRejection', 'uncaughtException']; +const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2']; + +export function registerShutdown(config: { + logger: FastifyLoggerInstance; + onShutdown(): void | Promise; + noExit?: boolean; +}) { + let exited = false; + + const shouldExit = !config.noExit; + + async function shutdown() { + if (exited) { + return; + } + config.logger.info('Shutting down...'); + exited = true; + await config.onShutdown(); + } + + errorTypes.map((type) => { + process.on(type, async (e) => { + try { + config.logger.info(`process.on ${type}`); + config.logger.error(e); + await shutdown(); + if (shouldExit) { + process.exit(0); + } + } catch (_) { + if (shouldExit) { + process.exit(1); + } + } + }); + }); + + signalTraps.map((type) => { + process.once(type, async () => { + try { + await shutdown(); + } finally { + if (shouldExit) { + process.kill(process.pid, type); + } + } + }); + }); +} diff --git a/packages/services/service-common/src/helpers.ts b/packages/services/service-common/src/helpers.ts new file mode 100644 index 000000000..085a00a05 --- /dev/null +++ b/packages/services/service-common/src/helpers.ts @@ -0,0 +1,46 @@ +export function cleanRequestId( + requestId?: string | string[] | null +): string | undefined { + if (requestId) { + if (Array.isArray(requestId)) { + return cleanRequestId(requestId[0]); + } + + return requestId.split(',')[0].trim(); + } +} + +const isProduction: boolean = process.env.NODE_ENV === 'production'; +const prefix = 'Invariant failed'; + +// Throw an error if the condition fails +// Strip out error messages for production +// > Not providing an inline default argument for message as the result is smaller +export function invariant( + condition: any, + // Can provide a string, or a function that returns a string for cases where + // the message takes a fair amount of effort to compute + message?: string | (() => string) +): asserts condition { + if (condition) { + return; + } + // Condition 
not passed + + // In production we strip the message but still throw + if (isProduction) { + throw new Error(prefix); + } + + // When not in production we allow the message to pass through + // *This block will be removed in production builds* + + const provided: string | undefined = + typeof message === 'function' ? message() : message; + + // Options: + // 1. message provided: `${prefix}: ${provided}` + // 2. message not provided: prefix + const value: string = provided ? `${prefix}: ${provided}` : prefix; + throw new Error(value); +} diff --git a/packages/services/service-common/src/index.ts b/packages/services/service-common/src/index.ts new file mode 100644 index 000000000..824e5feda --- /dev/null +++ b/packages/services/service-common/src/index.ts @@ -0,0 +1,7 @@ +export { createServer } from './fastify'; +export type { FastifyLoggerInstance } from './fastify'; +export * from './errors'; +export * from './metrics'; +export * from './env'; +export { registerShutdown } from './graceful-shutdown'; +export { cleanRequestId } from './helpers'; diff --git a/packages/services/service-common/src/metrics.ts b/packages/services/service-common/src/metrics.ts new file mode 100644 index 000000000..eafa7e55b --- /dev/null +++ b/packages/services/service-common/src/metrics.ts @@ -0,0 +1,36 @@ +import { fastify } from 'fastify'; +import cors from 'fastify-cors'; +import promClient from 'prom-client'; + +export { promClient as metrics }; + +export function startMetrics() { + promClient.collectDefaultMetrics({ + labels: { instance: process.env.POD_NAME }, + }); + + const server = fastify({ + disableRequestLogging: true, + trustProxy: true, + }); + + server.route({ + method: 'GET', + url: '/metrics', + async handler(req, res) { + try { + res.header('Content-Type', promClient.register.contentType); + const result = await promClient.register.metrics(); + + res.send(result); + } catch (error) { + console.log('metrics error', error); + res.status(500).send(error); + } + }, + }); + 
+ server.register(cors); + + return server.listen(10254, '0.0.0.0'); +} diff --git a/packages/services/service-common/src/sentry.ts b/packages/services/service-common/src/sentry.ts new file mode 100644 index 000000000..69bfaf328 --- /dev/null +++ b/packages/services/service-common/src/sentry.ts @@ -0,0 +1,134 @@ +import * as Sentry from '@sentry/node'; +import '@sentry/tracing'; +import { Transaction } from '@sentry/tracing'; +import type { FastifyInstance, onRequestHookHandler } from 'fastify'; +import { cleanRequestId } from './helpers'; + +export function useSentryTracing(server: FastifyInstance) { + const requestHandler = Sentry.Handlers.requestHandler(); + const tracingHandler = Sentry.Handlers.tracingHandler(); + + function filter(hook: onRequestHookHandler): onRequestHookHandler { + return function _hook(req, res, done) { + if (req.routerPath === '/_health' || req.routerPath === '/_readiness') { + return done(); + } + + if (req.routerPath === '/graphql' && req.headers['x-signature']) { + // x-signature means it's an introspection query (from readiness check) + return done(); + } + + hook.call(this, req, res, done); + }; + } + + server.addHook( + 'onRequest', + filter((req, res, next) => { + const requestId = cleanRequestId(req.headers['x-request-id']); + if (requestId) { + Sentry.configureScope((scope) => { + scope.setTag('request_id', requestId as string); + if (req.headers.referer) { + scope.setTag('referer', req.headers.referer); + } + }); + } + + const transaction: Transaction | undefined = (res.raw as any) + ?.__sentry_transaction; + + if (transaction) { + transaction.setData( + 'authorization', + replaceAuthorization(req.headers.authorization) + ); + transaction.setData( + 'x-api-token', + replaceAuthorization(req.headers['x-api-token'] as any) + ); + } + + requestHandler(req.raw, res.raw, next); + }) + ); + + server.addHook( + 'onRequest', + filter((req, res, next) => { + tracingHandler(req.raw, res.raw, next); + }) + ); + + 
server.setErrorHandler((err, req, reply) => { + Sentry.withScope((scope) => { + scope.setUser({ + ip_address: req.ip, + }); + + const requestId = cleanRequestId(req.headers['x-request-id']); + + if (requestId) { + scope.setTag('request_id', requestId as string); + } + + const referer = req.headers.referer; + + if (referer) { + scope.setTag('referer', referer); + } + + scope.setTag('path', req.raw.url); + scope.setTag('method', req.raw.method); + console.log('fastify.setErrorHandler error', err); + Sentry.captureException(err); + + reply.send({ + error: 500, + message: 'Internal Server Error', + }); + }); + }); +} + +function replaceString(value: string) { + const jwt = value.split('.'); + if (jwt.length === 3) { + return `${jwt[0]}.${jwt[1]}.`; + } + + value = value.trim(); + + // Mask the token + if (value.length === 32) { + return ( + value.substring(0, 3) + + '•'.repeat(value.length - 6) + + value.substring(value.length - 3) + ); + } + + return `string(${value.trim().length})`; +} + +function replaceAuthorization(): string; +function replaceAuthorization(value?: string): string; +function replaceAuthorization(value?: string[]): string[]; +function replaceAuthorization(value?: string | string[]): string | string[] { + if (typeof value === 'string') { + const bearer = 'Bearer '; + + if (value.startsWith(bearer)) { + return `${bearer}${replaceString(value.replace(bearer, ''))}`; + } + + return replaceString(value); + } + + if (Array.isArray(value)) { + return value.map((v) => replaceAuthorization(v)); + } + + return ''; +} diff --git a/packages/services/service-common/tsconfig.json b/packages/services/service-common/tsconfig.json new file mode 100644 index 000000000..9b376c2b1 --- /dev/null +++ b/packages/services/service-common/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "../../../tsconfig.json", + "include": ["src"] +} diff --git a/packages/services/storage/.env.template b/packages/services/storage/.env.template new file mode 100644 index 000000000..2bc319496 
--- /dev/null +++ b/packages/services/storage/.env.template @@ -0,0 +1,7 @@ +MIGRATOR="up" +CLICKHOUSE_MIGRATOR="up" +CLICKHOUSE_PROTOCOL="http" +CLICKHOUSE_HOST="localhost" +CLICKHOUSE_PORT="8123" +CLICKHOUSE_USERNAME="test" +CLICKHOUSE_PASSWORD="test" \ No newline at end of file diff --git a/packages/services/storage/.gitignore b/packages/services/storage/.gitignore new file mode 100644 index 000000000..a6e6a7788 --- /dev/null +++ b/packages/services/storage/.gitignore @@ -0,0 +1,5 @@ +*.log +.DS_Store +node_modules +dist +volumes \ No newline at end of file diff --git a/packages/services/storage/CHANGELOG.md b/packages/services/storage/CHANGELOG.md new file mode 100644 index 000000000..2a8cdfdb1 --- /dev/null +++ b/packages/services/storage/CHANGELOG.md @@ -0,0 +1,321 @@ +# @hive/storage + +## 0.14.1 + +### Patch Changes + +- 1623aca5: Upgrade sentry + +## 0.14.0 + +### Minor Changes + +- ffb6feb6: Allow to check usage from multiple targets + +## 0.13.3 + +### Patch Changes + +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 + +## 0.13.2 + +### Patch Changes + +- b3e54d5a: Fixed 'only one service schema...' + +## 0.13.1 + +### Patch Changes + +- 33fbb5e: Bump + +## 0.13.0 + +### Minor Changes + +- dc8fb96: Introduce base schema + +### Patch Changes + +- bf78c16: Bump + +## 0.12.0 + +### Minor Changes + +- b5966ab: Say hi to TSUP! 
+ +## 0.11.2 + +### Patch Changes + +- 02b00f0: Update undici, sentry, bullmq + +## 0.11.1 + +### Patch Changes + +- 7549a38: Fix startup + +## 0.11.0 + +### Minor Changes + +- 7eca7f0: Introduce access scopes + +## 0.10.3 + +### Patch Changes + +- 19d4cd5: Bump + +## 0.10.2 + +### Patch Changes + +- cc9aa01: Update dependencies + +## 0.10.1 + +### Patch Changes + +- 65b687e: Batch getSchema calls + +## 0.10.0 + +### Minor Changes + +- 94f45a5: Do not remove tokens, mark as deleted + +## 0.9.0 + +### Minor Changes + +- 91a6957: Allow to update url of a service + +## 0.8.7 + +### Patch Changes + +- 3d828f4: Use latest Sentry and Sentry NextJS integration + +## 0.8.6 + +### Patch Changes + +- b502e9e: Fix issue with conflicting name on target + +## 0.8.5 + +### Patch Changes + +- 9fb90bc: Fix issues with fetching project by name, and use org_id as well + +## 0.8.4 + +### Patch Changes + +- df6c501: Make Query.lab nullable + +## 0.8.3 + +### Patch Changes + +- 05d0140: Use @theguild/buddy + +## 0.8.2 + +### Patch Changes + +- 5f99c67: Batch getOrganizationOwner calls (homemade dataloader) + +## 0.8.1 + +### Patch Changes + +- 4ee9a3b: Fix operations count + +## 0.8.0 + +### Minor Changes + +- efd7b74: Admin panel + +## 0.7.0 + +### Minor Changes + +- 889368b: Bump + +## 0.6.0 + +### Minor Changes + +- 11e6800: Allow multiple auth providers and add displayName and fullName to profiles + +## 0.5.6 + +### Patch Changes + +- 0527e3c: Update Sentry + +## 0.5.5 + +### Patch Changes + +- 23636de: Fix missing URLs + +## 0.5.4 + +### Patch Changes + +- b010137: Update Sentry to 6.10.0 + +## 0.5.3 + +### Patch Changes + +- 4bc83be: Node 16 + +## 0.5.2 + +### Patch Changes + +- 93674cf: Update Sentry to 6.7.0 +- 3e16adb: Attach originalError to captured exception by Sentry and set sql and values from Slonik + +## 0.5.1 + +### Patch Changes + +- 5aa5e93: Bump + +## 0.5.0 + +### Minor Changes + +- 87e3d2e: Alerts, yay! 
+ +### Patch Changes + +- 968614d: Fix persisting the same query twice + +## 0.4.0 + +### Minor Changes + +- 143fa32: Added Schema Laboratory + +## 0.3.3 + +### Patch Changes + +- 148b294: Fix issues with undici headers timeout + +## 0.3.2 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes + +## 0.3.1 + +### Patch Changes + +- 9b14d18: Bump + +## 0.3.0 + +### Minor Changes + +- 36097a6: Add mixpanel + +## 0.2.3 + +### Patch Changes + +- 127e1fb: Bump storage + +## 0.2.2 + +### Patch Changes + +- f6d2ca6: bump + +## 0.2.1 + +### Patch Changes + +- 8f3e43c: Track usage of tokens + +## 0.2.0 + +### Minor Changes + +- 60cd35d: Consider Usage in Inspector +- 078e758: Token per Target + +### Patch Changes + +- 7113a0e: Update Sentry to 6.3.5 + +## 0.1.10 + +### Patch Changes + +- 8d06fd4: Eh console.log + +## 0.1.9 + +### Patch Changes + +- 6a344d3: Fix invitations, expose only organization.name + +## 0.1.8 + +### Patch Changes + +- e688d94: Use native bidings, improve perf by disabling error stack traces + +## 0.1.7 + +### Patch Changes + +- 4793381: Prevent some organization names + +## 0.1.6 + +### Patch Changes + +- c1e705a: bump + +## 0.1.5 + +### Patch Changes + +- 54b60f6: Fix issues with build artifact + +## 0.1.4 + +### Patch Changes + +- 7e88e71: bump + +## 0.1.3 + +### Patch Changes + +- b2d686e: bump + +## 0.1.2 + +### Patch Changes + +- 9da6738: bump + +## 0.1.1 + +### Patch Changes + +- e8cb071: fix issues with ncc packages diff --git a/packages/services/storage/LICENSE b/packages/services/storage/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/storage/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/services/storage/configs/clickhouse/config.xml b/packages/services/storage/configs/clickhouse/config.xml new file mode 100644 index 000000000..763a9ce54 --- /dev/null +++ b/packages/services/storage/configs/clickhouse/config.xml @@ -0,0 +1,20 @@ + + + + + 1 + + 4 + + 2000000 + + 1000 + + 0 + + + \ No newline at end of file diff --git a/packages/services/storage/docker-compose.yml b/packages/services/storage/docker-compose.yml new file mode 100644 index 000000000..2f1bfbc9e --- /dev/null +++ b/packages/services/storage/docker-compose.yml @@ -0,0 +1,124 @@ +version: '3.8' +services: + db: + image: postgres:13.4-alpine + networks: + - 'stack' + healthcheck: + test: ['CMD-SHELL', 'pg_isready'] + interval: 10s + timeout: 5s + retries: 5 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + PGDATA: /var/lib/postgresql/data + volumes: + - ./volumes/postgresql/db:/var/lib/postgresql/data + ports: + - '5432:5432' + + redis: + image: docker.io/bitnami/redis:6.2 + networks: + - 'stack' + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] + interval: 30s + timeout: 10s + retries: 10 + start_period: 30s + environment: + - ALLOW_EMPTY_PASSWORD=yes + - 
REDIS_DISABLE_COMMANDS=FLUSHDB,FLUSHALL + ports: + - '6379:6379' + volumes: + - './volumes/redis/db:/bitnami/redis/data' + + clickhouse: + image: clickhouse/clickhouse-server:22.3.5.5-alpine + environment: + CLICKHOUSE_USER: test + CLICKHOUSE_PASSWORD: test + healthcheck: + test: ['CMD', 'wget', '--spider', '-q', 'localhost:8123/ping'] + interval: 30s + timeout: 5s + retries: 3 + volumes: + - ./volumes/clickhouse/logs:/var/log/clickhouse-server + - ./volumes/clickhouse/db:/var/lib/clickhouse + - ./configs/clickhouse:/etc/clickhouse-server/conf.d + ports: + - '8123:8123' + networks: + - 'stack' + + zookeeper: + image: confluentinc/cp-zookeeper:6.2.2 + hostname: zookeeper + networks: + - 'stack' + ports: + - '2181:2181' + environment: + BUMP: 1 + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + healthcheck: + test: ['CMD', 'cub', 'zk-ready', '127.0.0.1:2181', '10'] + interval: 30s + timeout: 10s + retries: 10 + start_period: 30s + volumes: + - ./volumes/zookeeper/db:/var/lib/zookeeper/data + - ./volumes/zookeeper/log:/var/lib/zookeeper/log + + broker: + image: confluentinc/cp-kafka:6.2.2 + hostname: borker + depends_on: + zookeeper: + condition: service_healthy + networks: + - 'stack' + ports: + - '29092:29092' + - '9092:9092' + healthcheck: + test: + [ + 'CMD', + 'cub', + 'kafka-ready', + '1', + '5', + '-b', + '127.0.0.1:9092', + '-c', + '/etc/kafka/kafka.properties', + ] + interval: 30s + timeout: 10s + retries: 10 + start_period: 30s + ulimits: + nofile: + soft: 16384 + hard: 16384 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + volumes: + - 
./volumes/broker/db:/var/lib/kafka/data + +networks: + stack: {} diff --git a/packages/services/storage/migrations/actions/2021-03-05T19-06-23.initial.sql b/packages/services/storage/migrations/actions/2021-03-05T19-06-23.initial.sql new file mode 100644 index 000000000..f7f98a44f --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-03-05T19-06-23.initial.sql @@ -0,0 +1,93 @@ +--initial (up) + +-- Extensions +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + +--- Custom Types +CREATE TYPE organization_type AS ENUM ('PERSONAL', 'REGULAR'); + +CREATE DOMAIN url AS text CHECK (VALUE ~ 'https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#()?&//=]*)'); +COMMENT ON DOMAIN url IS 'match URLs (http or https)'; + +CREATE DOMAIN slug AS text CHECK (VALUE ~ '[a-z0-9]+(?:-[a-z0-9]+)*'); +COMMENT ON DOMAIN slug IS 'valid slug'; + +--- Tables +CREATE TABLE public.users ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email VARCHAR(320) NOT NULL UNIQUE, + external_auth_user_id VARCHAR(50) NOT NULL UNIQUE, + created_at timestamp with time zone NOT NULL DEFAULT NOW() +); + +CREATE TABLE public.organizations ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + clean_id slug NOT NULL, + name text NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + invite_code varchar(10) NOT NULL UNIQUE DEFAULT substr(md5(random()::text), 0, 10), + user_id uuid NOT NULL REFERENCES public.users(id), + type organization_type NOT NULL +); + +CREATE TABLE public.projects ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + clean_id slug NOT NULL, + name varchar(200) NOT NULL, + type varchar(50) NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + build_url url, + validation_url url, + org_id uuid NOT NULL REFERENCES public.organizations(id) ON DELETE CASCADE +); + +CREATE TABLE public.targets ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + clean_id slug NOT NULL, + name text NOT NULL, + project_id uuid NOT 
NULL REFERENCES public.projects(id) ON DELETE CASCADE, + created_at timestamp with time zone NOT NULL DEFAULT NOW() +); + +CREATE TABLE public.commits ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + author text NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + service text, + content text NOT NULL, + commit text NOT NULL +); + +CREATE TABLE public.versions ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + valid boolean NOT NULL, + target_id uuid NOT NULL REFERENCES public.targets(id) ON DELETE CASCADE, + commit_id uuid NOT NULL REFERENCES public.commits(id) ON DELETE CASCADE +); + +CREATE TABLE public.version_commit ( + version_id uuid NOT NULL REFERENCES public.versions(id) ON DELETE CASCADE, + commit_id uuid NOT NULL REFERENCES public.commits(id) ON DELETE CASCADE, + url url, + PRIMARY KEY(version_id, commit_id) +); + +CREATE TABLE public.tokens ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + token varchar(32) NOT NULL DEFAULT md5(random()::text), + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + project_id uuid NOT NULL REFERENCES public.projects(id) ON DELETE CASCADE, + name text NOT NULL, + last_used_at timestamp with time zone +); + +CREATE TABLE public.organization_member ( + organization_id uuid NOT NULL REFERENCES public.organizations(id) ON DELETE CASCADE, + user_id uuid NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, + PRIMARY KEY (organization_id, user_id) +); + +--- Indices +CREATE INDEX email_idx ON public.users USING btree(email); +CREATE INDEX external_auth_user_id_idx ON public.users USING btree(external_auth_user_id); diff --git a/packages/services/storage/migrations/actions/2021-03-08T11-02-26.urls.sql b/packages/services/storage/migrations/actions/2021-03-08T11-02-26.urls.sql new file mode 100644 index 000000000..f8e588c96 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-03-08T11-02-26.urls.sql @@ -0,0 
+1,5 @@ +--urls (up) + +ALTER TABLE public.projects +ALTER COLUMN build_url TYPE text, +ALTER COLUMN validation_url TYPE text; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-03-09T10-30-35.roles.sql b/packages/services/storage/migrations/actions/2021-03-09T10-30-35.roles.sql new file mode 100644 index 000000000..3a2f6dcf2 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-03-09T10-30-35.roles.sql @@ -0,0 +1,13 @@ +--roles (up) + +CREATE TYPE user_role AS ENUM ('ADMIN', 'MEMBER'); + +ALTER TABLE public.organization_member +ADD COLUMN role user_role NOT NULL DEFAULT 'MEMBER'; + +UPDATE public.organization_member as om SET role = 'ADMIN' +WHERE +( + SELECT o.user_id + FROM public.organizations as o WHERE o.id = om.organization_id AND o.user_id = om.user_id +) IS NOT NULL; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-03-09T14-02-34.activities.sql b/packages/services/storage/migrations/actions/2021-03-09T14-02-34.activities.sql new file mode 100644 index 000000000..06c81accc --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-03-09T14-02-34.activities.sql @@ -0,0 +1,12 @@ +--activities (up) + +CREATE TABLE public.activities ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + user_id uuid NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, + organization_id uuid NOT NULL REFERENCES public.organizations(id) ON DELETE CASCADE, + project_id uuid REFERENCES public.projects(id) ON DELETE CASCADE, + target_id uuid REFERENCES public.targets(id) ON DELETE CASCADE, + activity_type VARCHAR(30) NOT NULL, + activity_metadata JSONB NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW() +); \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-03-15T19-32-01.commit-project-id.sql b/packages/services/storage/migrations/actions/2021-03-15T19-32-01.commit-project-id.sql new file mode 100644 index 
000000000..bbb611491 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-03-15T19-32-01.commit-project-id.sql @@ -0,0 +1,11 @@ +--commit-project-id (up) + +ALTER TABLE public.commits +ADD COLUMN project_id uuid REFERENCES public.projects(id) ON DELETE CASCADE; + +UPDATE public.commits as c SET project_id = t.project_id +FROM public.versions as v, public.targets as t +WHERE v.commit_id = c.id AND t.id = v.target_id; + +ALTER TABLE public.commits +ALTER COLUMN project_id SET NOT NULL; diff --git a/packages/services/storage/migrations/actions/2021-04-20T11-30-30.tokens.sql b/packages/services/storage/migrations/actions/2021-04-20T11-30-30.tokens.sql new file mode 100644 index 000000000..82c08d07a --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-04-20T11-30-30.tokens.sql @@ -0,0 +1,3 @@ +--tokens (up) + +ALTER TABLE public.tokens DROP COLUMN last_used_at; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-04-30T07-01-57.token-per-target.sql b/packages/services/storage/migrations/actions/2021-04-30T07-01-57.token-per-target.sql new file mode 100644 index 000000000..db4bdaa71 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-04-30T07-01-57.token-per-target.sql @@ -0,0 +1,4 @@ +--token-per-target (up) + +ALTER TABLE public.tokens ADD COLUMN target_id uuid NOT NULL REFERENCES public.targets(id) ON DELETE CASCADE; +ALTER TABLE public.tokens ADD COLUMN organization_id uuid NOT NULL REFERENCES public.organizations(id) ON DELETE CASCADE; diff --git a/packages/services/storage/migrations/actions/2021-04-30T11-47-26.validation.sql b/packages/services/storage/migrations/actions/2021-04-30T11-47-26.validation.sql new file mode 100644 index 000000000..76fe430c2 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-04-30T11-47-26.validation.sql @@ -0,0 +1,5 @@ +--validation (up) + +ALTER TABLE public.targets ADD COLUMN validation_enabled boolean NOT NULL DEFAULT FALSE; 
+ALTER TABLE public.targets ADD COLUMN validation_period smallint NOT NULL DEFAULT 30; +ALTER TABLE public.targets ADD COLUMN validation_percentage float NOT NULL DEFAULT 0.00; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-04-30T18-30-00.persisted-operations.sql b/packages/services/storage/migrations/actions/2021-04-30T18-30-00.persisted-operations.sql new file mode 100644 index 000000000..ae7405d43 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-04-30T18-30-00.persisted-operations.sql @@ -0,0 +1,14 @@ +--persisted_operations (up) + +CREATE TYPE operation_kind AS ENUM ('query', 'mutation', 'subscription'); + +CREATE TABLE public.persisted_operations ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + operation_hash VARCHAR(600) NOT NULL, + operation_name text NOT NULL, + operation_kind operation_kind NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + content text NOT NULL, + project_id uuid NOT NULL REFERENCES public.projects(id) ON DELETE CASCADE, + UNIQUE (operation_hash, project_id) +); \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-05-07T07-28-07.token-last-used-at.sql b/packages/services/storage/migrations/actions/2021-05-07T07-28-07.token-last-used-at.sql new file mode 100644 index 000000000..3f59fbfe7 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-05-07T07-28-07.token-last-used-at.sql @@ -0,0 +1,3 @@ +--token-last-used-at (up) + +ALTER TABLE public.tokens ADD COLUMN last_used_at timestamp with time zone; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-06-11T10-46-24.slack-integration.sql b/packages/services/storage/migrations/actions/2021-06-11T10-46-24.slack-integration.sql new file mode 100644 index 000000000..d696547d2 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-06-11T10-46-24.slack-integration.sql @@ -0,0 +1,3 @@ 
+--slack-integration (up) + +ALTER TABLE public.organizations ADD COLUMN slack_token text; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-06-11T15-38-28.alerts.sql b/packages/services/storage/migrations/actions/2021-06-11T15-38-28.alerts.sql new file mode 100644 index 000000000..88a4f57eb --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-06-11T15-38-28.alerts.sql @@ -0,0 +1,23 @@ +--alerts (up) + +CREATE TYPE alert_channel_type AS ENUM ('SLACK', 'WEBHOOK'); +CREATE TYPE alert_type AS ENUM ('SCHEMA_CHANGE_NOTIFICATIONS'); + +CREATE TABLE public.alert_channels ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + type alert_channel_type NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + name text NOT NULL, + slack_channel text, + webhook_endpoint text, + project_id uuid NOT NULL REFERENCES public.projects(id) ON DELETE CASCADE +); + +CREATE TABLE public.alerts ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + type alert_type NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT NOW(), + alert_channel_id uuid NOT NULL REFERENCES public.alert_channels(id) ON DELETE CASCADE, + project_id uuid NOT NULL REFERENCES public.projects(id) ON DELETE CASCADE, + target_id uuid NOT NULL REFERENCES public.targets(id) ON DELETE CASCADE +); \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021-08-18T13-20-45.urls.sql b/packages/services/storage/migrations/actions/2021-08-18T13-20-45.urls.sql new file mode 100644 index 000000000..248211c22 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-08-18T13-20-45.urls.sql @@ -0,0 +1,10 @@ +--urls (up) + +ALTER TABLE public.version_commit + ALTER COLUMN url TYPE text; + +ALTER TABLE public.projects + ALTER COLUMN build_url TYPE text; + +ALTER TABLE public.projects + ALTER COLUMN validation_url TYPE text; \ No newline at end of file diff --git 
a/packages/services/storage/migrations/actions/2021-08-27T14-19-48.non-unique-emails.sql b/packages/services/storage/migrations/actions/2021-08-27T14-19-48.non-unique-emails.sql new file mode 100644 index 000000000..7ce557df5 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021-08-27T14-19-48.non-unique-emails.sql @@ -0,0 +1,13 @@ +--non-unique-emails (up) + +DROP INDEX email_idx; +ALTER TABLE public.users DROP CONSTRAINT users_email_key; + +ALTER TABLE public.users ADD CONSTRAINT users_external_email_key UNIQUE (email, external_auth_user_id); +ALTER TABLE public.users ADD display_name VARCHAR(300); +ALTER TABLE public.users ADD full_name VARCHAR(300); + +UPDATE public.users SET display_name = split_part(email, '@', 1), full_name = split_part(email, '@', 1); + +ALTER TABLE public.users ALTER COLUMN display_name SET NOT NULL; +ALTER TABLE public.users ALTER COLUMN full_name SET NOT NULL; diff --git a/packages/services/storage/migrations/actions/2021.09.17T14.45.36.token-deleted.sql b/packages/services/storage/migrations/actions/2021.09.17T14.45.36.token-deleted.sql new file mode 100644 index 000000000..a60b24d87 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021.09.17T14.45.36.token-deleted.sql @@ -0,0 +1 @@ +ALTER TABLE public.tokens ADD COLUMN deleted_at timestamp with time zone DEFAULT NULL; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021.10.07T12.11.13.access-scopes.sql b/packages/services/storage/migrations/actions/2021.10.07T12.11.13.access-scopes.sql new file mode 100644 index 000000000..663ae1039 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021.10.07T12.11.13.access-scopes.sql @@ -0,0 +1,25 @@ +-- Adds scopes to tokens +ALTER TABLE public.tokens ADD COLUMN scopes text[] DEFAULT NULL; + +-- Adds scopes to organization_member +ALTER TABLE public.organization_member ADD COLUMN scopes text[] DEFAULT NULL; + +-- Adds scopes to existing regular members +UPDATE + 
public.organization_member +SET + scopes = ARRAY['organization:read', 'project:read', 'project:operations-store:read', 'target:read', 'target:registry:read'] +WHERE role = 'MEMBER'; + +-- Adds scopes to existing admin members +UPDATE + public.organization_member +SET + scopes = ARRAY['organization:read', 'organization:delete', 'organization:settings', 'organization:integrations', 'organization:members', 'project:read', 'project:delete', 'project:settings', 'project:alerts', 'project:operations-store:read', 'project:operations-store:write', 'target:read', 'target:delete', 'target:settings', 'target:registry:read', 'target:registry:write', 'target:tokens:read', 'target:tokens:write'] +WHERE role = 'ADMIN'; + +-- Adds scopes to existing tokens +UPDATE + public.tokens +SET + scopes = ARRAY['organization:read', 'project:read', 'project:operations-store:read', 'project:operations-store:write', 'target:read', 'target:registry:read', 'target:registry:write'] \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021.11.22T11.23.44.base-schema.sql b/packages/services/storage/migrations/actions/2021.11.22T11.23.44.base-schema.sql new file mode 100644 index 000000000..91c180940 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021.11.22T11.23.44.base-schema.sql @@ -0,0 +1,3 @@ +-- Adds a base schema column in target table and versions table +ALTER TABLE public.targets ADD base_schema text; +ALTER TABLE public.versions ADD base_schema text; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2021.12.20T14.05.30.commits-with-targets.sql b/packages/services/storage/migrations/actions/2021.12.20T14.05.30.commits-with-targets.sql new file mode 100644 index 000000000..9fb586314 --- /dev/null +++ b/packages/services/storage/migrations/actions/2021.12.20T14.05.30.commits-with-targets.sql @@ -0,0 +1,12 @@ +--creates and fills a target_id column on public.commits + +ALTER TABLE public.commits +ADD COLUMN 
target_id uuid REFERENCES public.targets(id) ON DELETE CASCADE; + +UPDATE public.commits as c SET target_id = v.target_id +FROM public.versions as v +WHERE v.commit_id = c.id; + +ALTER TABLE public.commits +ALTER COLUMN target_id SET NOT NULL; + diff --git a/packages/services/storage/migrations/actions/2022.01.21T12.34.46.validation-targets.sql b/packages/services/storage/migrations/actions/2022.01.21T12.34.46.validation-targets.sql new file mode 100644 index 000000000..a6df6fb15 --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.01.21T12.34.46.validation-targets.sql @@ -0,0 +1,7 @@ +CREATE TABLE public.target_validation ( + target_id uuid NOT NULL REFERENCES public.targets(id) ON DELETE CASCADE, + destination_target_id uuid NOT NULL REFERENCES public.targets(id) ON DELETE CASCADE, + PRIMARY KEY (target_id, destination_target_id) +); + +INSERT INTO public.target_validation (target_id, destination_target_id) (SELECT id as target_id, id as destination_target_id FROM public.targets WHERE validation_enabled IS TRUE); \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2022.03.28T10.31.26.github-integration.sql b/packages/services/storage/migrations/actions/2022.03.28T10.31.26.github-integration.sql new file mode 100644 index 000000000..52db0c216 --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.03.28T10.31.26.github-integration.sql @@ -0,0 +1,4 @@ +--slack-integration (up) + +ALTER TABLE public.organizations ADD COLUMN github_app_installation_id text; +ALTER TABLE public.projects ADD COLUMN git_repository text; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2022.04.15T14.24.17.hash-tokens.sql b/packages/services/storage/migrations/actions/2022.04.15T14.24.17.hash-tokens.sql new file mode 100644 index 000000000..f92d0d744 --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.04.15T14.24.17.hash-tokens.sql @@ -0,0 +1,3 @@ +ALTER TABLE 
public.tokens ADD COLUMN token_alias varchar(64) NOT NULL DEFAULT repeat('*', 64); +ALTER TABLE public.tokens ALTER COLUMN token TYPE varchar(64); +UPDATE public.tokens SET token_alias = concat(substring(token from 1 for 3), repeat('*', 26), substring(token from 30 for 3)), token = encode(sha256(token::bytea), 'hex'); diff --git a/packages/services/storage/migrations/actions/2022.05.03T15.58.13.org_rate_limits.sql b/packages/services/storage/migrations/actions/2022.05.03T15.58.13.org_rate_limits.sql new file mode 100644 index 000000000..69377a2c3 --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.05.03T15.58.13.org_rate_limits.sql @@ -0,0 +1,3 @@ +ALTER table public.organizations ADD COLUMN limit_operations_monthly BIGINT NOT NULL DEFAULT 1000000; -- HOBBY plan is default +ALTER table public.organizations ADD COLUMN limit_schema_push_monthly BIGINT NOT NULL DEFAULT 50; -- HOBBY plan is default +ALTER table public.organizations ADD COLUMN limit_retention_days BIGINT NOT NULL DEFAULT 3; -- HOBBY plan is default \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/2022.05.04T11.01.22.billing_plans.sql b/packages/services/storage/migrations/actions/2022.05.04T11.01.22.billing_plans.sql new file mode 100644 index 000000000..8569ef32f --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.05.04T11.01.22.billing_plans.sql @@ -0,0 +1,8 @@ +CREATE TABLE public.organizations_billing ( + organization_id uuid NOT NULL REFERENCES public.organizations(id) ON DELETE CASCADE, -- org id + external_billing_reference_id VARCHAR(255) NOT NULL, -- stripe customer id + billing_email_address VARCHAR(255), + PRIMARY KEY (organization_id) +); + +ALTER table public.organizations ADD COLUMN plan_name VARCHAR(50) NOT NULL DEFAULT 'HOBBY'; diff --git a/packages/services/storage/migrations/actions/2022.05.05T08.05.35.commits-metadata.sql b/packages/services/storage/migrations/actions/2022.05.05T08.05.35.commits-metadata.sql new 
file mode 100644 index 000000000..f40b8fb54 --- /dev/null +++ b/packages/services/storage/migrations/actions/2022.05.05T08.05.35.commits-metadata.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.commits + ADD COLUMN "metadata" text +; diff --git a/packages/services/storage/migrations/actions/down/2021-03-05T19-06-23.initial.sql b/packages/services/storage/migrations/actions/down/2021-03-05T19-06-23.initial.sql new file mode 100644 index 000000000..676a0cc79 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-03-05T19-06-23.initial.sql @@ -0,0 +1,19 @@ +--initial (down) + +DROP TABLE IF EXISTS version_commit; +DROP TABLE IF EXISTS versions; +DROP TABLE IF EXISTS tokens; +DROP TABLE IF EXISTS targets; +DROP TABLE IF EXISTS commits; + +DROP TABLE IF EXISTS projects; +DROP TABLE IF EXISTS organization_member; +DROP TABLE IF EXISTS organizations; +DROP TABLE IF EXISTS users; + +DROP INDEX IF EXISTS email_idx; +DROP INDEX IF EXISTS external_auth_user_id_idx; + +DROP TYPE IF EXISTS organization_type; +DROP DOMAIN IF EXISTS url; +DROP DOMAIN IF EXISTS slug; diff --git a/packages/services/storage/migrations/actions/down/2021-03-08T11-02-26.urls.sql b/packages/services/storage/migrations/actions/down/2021-03-08T11-02-26.urls.sql new file mode 100644 index 000000000..cde4f08d1 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-03-08T11-02-26.urls.sql @@ -0,0 +1,5 @@ +--urls (down) + +ALTER TABLE public.projects +ALTER COLUMN build_url TYPE url, +ALTER COLUMN validation_url TYPE url; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-03-09T10-30-35.roles.sql b/packages/services/storage/migrations/actions/down/2021-03-09T10-30-35.roles.sql new file mode 100644 index 000000000..5c21c9694 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-03-09T10-30-35.roles.sql @@ -0,0 +1,4 @@ +--roles (down) + +ALTER TABLE public.organization_member DROP COLUMN role; +DROP TYPE IF EXISTS 
user_role; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-03-09T14-02-34.activities.sql b/packages/services/storage/migrations/actions/down/2021-03-09T14-02-34.activities.sql new file mode 100644 index 000000000..b7178e255 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-03-09T14-02-34.activities.sql @@ -0,0 +1,3 @@ +--activities (down) + +DROP TABLE IF EXISTS activities; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-03-15T19-32-01.commit-project-id.sql b/packages/services/storage/migrations/actions/down/2021-03-15T19-32-01.commit-project-id.sql new file mode 100644 index 000000000..c395902a3 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-03-15T19-32-01.commit-project-id.sql @@ -0,0 +1,3 @@ +--commit-project-id (down) + +ALTER TABLE public.commits DROP COLUMN project_id; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-04-20T11-30-30.tokens.sql b/packages/services/storage/migrations/actions/down/2021-04-20T11-30-30.tokens.sql new file mode 100644 index 000000000..204480a5f --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-04-20T11-30-30.tokens.sql @@ -0,0 +1,3 @@ +--tokens (down) + +ALTER TABLE public.tokens ADD COLUMN last_used_at timestamp with time zone; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-04-30T07-01-57.token-per-target.sql b/packages/services/storage/migrations/actions/down/2021-04-30T07-01-57.token-per-target.sql new file mode 100644 index 000000000..b8adcfd98 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-04-30T07-01-57.token-per-target.sql @@ -0,0 +1,4 @@ +--token-per-target (down) + +ALTER TABLE public.tokens DROP COLUMN target_id; +ALTER TABLE public.tokens DROP COLUMN organization_id; \ No newline at end of file diff --git 
a/packages/services/storage/migrations/actions/down/2021-04-30T11-47-26.validation.sql b/packages/services/storage/migrations/actions/down/2021-04-30T11-47-26.validation.sql new file mode 100644 index 000000000..93b97e1dd --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-04-30T11-47-26.validation.sql @@ -0,0 +1,5 @@ +--target-settings (down) + +ALTER TABLE public.targets DROP COLUMN validation_enabled; +ALTER TABLE public.targets DROP COLUMN validation_period; +ALTER TABLE public.targets DROP COLUMN validation_percentage; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-04-30T18-30-00.persisted-operations.sql b/packages/services/storage/migrations/actions/down/2021-04-30T18-30-00.persisted-operations.sql new file mode 100644 index 000000000..be3a9cde4 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-04-30T18-30-00.persisted-operations.sql @@ -0,0 +1,4 @@ +--persisted_operations (down) + +DROP TABLE IF EXISTS public.persisted_operations; +DROP TYPE IF EXISTS operation_kind; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-05-07T07-28-07.token-last-used-at.sql b/packages/services/storage/migrations/actions/down/2021-05-07T07-28-07.token-last-used-at.sql new file mode 100644 index 000000000..f7256afbe --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-05-07T07-28-07.token-last-used-at.sql @@ -0,0 +1,3 @@ +--token-last-used-at (down) + +ALTER TABLE public.tokens DROP COLUMN last_used_at; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-06-11T10-46-24.slack-integration.sql b/packages/services/storage/migrations/actions/down/2021-06-11T10-46-24.slack-integration.sql new file mode 100644 index 000000000..eba42e65e --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-06-11T10-46-24.slack-integration.sql @@ -0,0 +1,3 @@ +--slack-integration 
(down) + +ALTER TABLE public.organizations DROP COLUMN slack_token; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-06-11T15-38-28.alerts.sql b/packages/services/storage/migrations/actions/down/2021-06-11T15-38-28.alerts.sql new file mode 100644 index 000000000..92f211412 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-06-11T15-38-28.alerts.sql @@ -0,0 +1,6 @@ +--alerts (down) + +DROP TYPE IF EXISTS alert_channel_type; +DROP TYPE IF EXISTS alert_type; +DROP TABLE IF EXISTS public.alert_channels; +DROP TABLE IF EXISTS public.alerts; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-08-18T13-20-45.urls.sql b/packages/services/storage/migrations/actions/down/2021-08-18T13-20-45.urls.sql new file mode 100644 index 000000000..4d785082b --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-08-18T13-20-45.urls.sql @@ -0,0 +1,10 @@ +--urls (down) + +ALTER TABLE public.version_commit + ALTER COLUMN url TYPE url; + +ALTER TABLE public.projects + ALTER COLUMN build_url TYPE url + +ALTER TABLE public.projects + ALTER COLUMN validation_url TYPE url; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021-08-27T14-19-48.non-unique-emails.sql b/packages/services/storage/migrations/actions/down/2021-08-27T14-19-48.non-unique-emails.sql new file mode 100644 index 000000000..f55f080e7 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021-08-27T14-19-48.non-unique-emails.sql @@ -0,0 +1,4 @@ +--non-unique-emails (down) + +ALTER TABLE public.users ADD CONSTRAINT users_email_key UNIQUE (email); +ALTER TABLE public.users DROP CONSTRAINT users_external_email_key; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021.09.17T14.45.36.token-deleted.sql b/packages/services/storage/migrations/actions/down/2021.09.17T14.45.36.token-deleted.sql new file 
mode 100644 index 000000000..eebec683e --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021.09.17T14.45.36.token-deleted.sql @@ -0,0 +1 @@ +DELETE FROM public.tokens WHERE deleted_at IS NOT NULL; diff --git a/packages/services/storage/migrations/actions/down/2021.10.07T12.11.13.access-scopes.sql b/packages/services/storage/migrations/actions/down/2021.10.07T12.11.13.access-scopes.sql new file mode 100644 index 000000000..9f77d39cb --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021.10.07T12.11.13.access-scopes.sql @@ -0,0 +1,5 @@ +-- Adds scopes to tokens +ALTER TABLE public.tokens DROP COLUMN scopes; + +-- Adds scopes to organization_member +ALTER TABLE public.organization_member DROP COLUMN scopes; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2021.11.22T11.23.44.base-schema.sql b/packages/services/storage/migrations/actions/down/2021.11.22T11.23.44.base-schema.sql new file mode 100644 index 000000000..b6583c9b3 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021.11.22T11.23.44.base-schema.sql @@ -0,0 +1,3 @@ +-- Adds a base schema column in target table and versions table +ALTER TABLE public.targets DROP COLUMN base_schema; +ALTER TABLE public.versions DROP COLUMN base_schema; diff --git a/packages/services/storage/migrations/actions/down/2021.12.20T14.05.30.commits-with-targets.sql b/packages/services/storage/migrations/actions/down/2021.12.20T14.05.30.commits-with-targets.sql new file mode 100644 index 000000000..d20c27080 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2021.12.20T14.05.30.commits-with-targets.sql @@ -0,0 +1,3 @@ +--creates and fills a target_id column on public.commits + +ALTER TABLE public.commits DROP COLUMN target_id; diff --git a/packages/services/storage/migrations/actions/down/2022.01.21T12.34.46.validation-targets.sql 
b/packages/services/storage/migrations/actions/down/2022.01.21T12.34.46.validation-targets.sql new file mode 100644 index 000000000..3238dbaae --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.01.21T12.34.46.validation-targets.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS public.target_validation; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2022.03.28T10.31.26.github-integration.sql b/packages/services/storage/migrations/actions/down/2022.03.28T10.31.26.github-integration.sql new file mode 100644 index 000000000..85fe1db11 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.03.28T10.31.26.github-integration.sql @@ -0,0 +1,4 @@ +--github-integration (down) + +ALTER TABLE public.organizations DROP COLUMN github_app_installation_id; +ALTER TABLE public.projects DROP COLUMN git_repository; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2022.04.15T14.24.17.hash-tokens.sql b/packages/services/storage/migrations/actions/down/2022.04.15T14.24.17.hash-tokens.sql new file mode 100644 index 000000000..a290d2907 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.04.15T14.24.17.hash-tokens.sql @@ -0,0 +1 @@ +ALTER TABLE public.tokens DROP COLUMN token_alias; \ No newline at end of file diff --git a/packages/services/storage/migrations/actions/down/2022.05.03T15.58.13.org_rate_limits.sql b/packages/services/storage/migrations/actions/down/2022.05.03T15.58.13.org_rate_limits.sql new file mode 100644 index 000000000..63c557536 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.05.03T15.58.13.org_rate_limits.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.organizations DROP COLUMN limit_operations_monthly; +ALTER TABLE public.organizations DROP COLUMN limit_schema_push_monthly; +ALTER TABLE public.organizations DROP COLUMN limit_retention_days; diff --git 
a/packages/services/storage/migrations/actions/down/2022.05.04T11.01.22.billing_plans.sql b/packages/services/storage/migrations/actions/down/2022.05.04T11.01.22.billing_plans.sql new file mode 100644 index 000000000..6b53a5b89 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.05.04T11.01.22.billing_plans.sql @@ -0,0 +1,2 @@ +DROP TABLE public.organizations_billing; +ALTER TABLE public.organizations DROP COLUMN plan_name; diff --git a/packages/services/storage/migrations/actions/down/2022.05.05T08.05.35.commits-metadata.sql b/packages/services/storage/migrations/actions/down/2022.05.05T08.05.35.commits-metadata.sql new file mode 100644 index 000000000..ce175ca25 --- /dev/null +++ b/packages/services/storage/migrations/actions/down/2022.05.05T08.05.35.commits-metadata.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.commits + DROP COLUMN "metadata" +; diff --git a/packages/services/storage/migrations/clickhouse.ts b/packages/services/storage/migrations/clickhouse.ts new file mode 100644 index 000000000..84ea56a32 --- /dev/null +++ b/packages/services/storage/migrations/clickhouse.ts @@ -0,0 +1,182 @@ +import { got } from 'got'; +import { config } from '../src/env'; + +const create_operations_new_query = /* SQL */ ` + CREATE TABLE IF NOT EXISTS default.operations_new + ( + target LowCardinality(String) CODEC(ZSTD(1)), + timestamp DateTime('UTC'), + expires_at DateTime('UTC'), + hash String CODEC(ZSTD(1)), + ok UInt8 CODEC(ZSTD(1)), + errors UInt16 CODEC(ZSTD(1)), + duration UInt64 CODEC(ZSTD(1)), + schema Array(String) CODEC(ZSTD(1)), + client_name LowCardinality(String) CODEC(ZSTD(1)), + client_version String CODEC(ZSTD(1)), + INDEX idx_schema schema TYPE bloom_filter(0.01) GRANULARITY 3, + INDEX idx_ok ok TYPE minmax GRANULARITY 1, + INDEX idx_errors errors TYPE minmax GRANULARITY 1 + ) + ENGINE = MergeTree + PARTITION BY toYYYYMMDD(timestamp) + PRIMARY KEY (target, hash, timestamp) + ORDER BY (target, hash, timestamp) + TTL expires_at + SETTINGS 
index_granularity = 8192 +`; + +const create_operations_new_hourly_mv_query = /* SQL */ ` + CREATE MATERIALIZED VIEW IF NOT EXISTS default.operations_new_hourly_mv + ( + target LowCardinality(String) CODEC(ZSTD(1)), + timestamp DateTime('UTC'), + hash String CODEC(ZSTD(1)), + total UInt32 CODEC(ZSTD(1)), + total_ok UInt32 CODEC(ZSTD(1)), + duration_avg AggregateFunction(avg, UInt64) CODEC(ZSTD(1)), + duration_quantiles AggregateFunction(quantiles(0.75, 0.9, 0.95, 0.99), UInt64) CODEC(ZSTD(1)) + ) + ENGINE = SummingMergeTree + PARTITION BY toYYYYMMDD(timestamp) + PRIMARY KEY (target, hash, timestamp) + ORDER BY (target, hash, timestamp) + SETTINGS index_granularity = 8192 AS + SELECT + target, + toStartOfHour(timestamp) AS timestamp, + hash, + count() AS total, + sum(ok) AS total_ok, + avgState(duration) AS duration_avg, + quantilesState(0.75, 0.9, 0.95, 0.99)(duration) AS duration_quantiles + FROM default.operations_new + GROUP BY + target, + hash, + timestamp +`; + +const create_operations_registry_query = /* SQL */ ` + CREATE TABLE IF NOT EXISTS default.operations_registry + ( + target LowCardinality(String), + hash String, + name String, + body String, + operation String, + inserted_at DateTime('UTC') DEFAULT toDateTime(0) + ) + ENGINE = ReplacingMergeTree(inserted_at) + PARTITION BY target + ORDER BY (target, hash) + SETTINGS index_granularity = 8192 +`; + +const create_schema_coordinates_daily_query = ` + CREATE MATERIALIZED VIEW IF NOT EXISTS default.schema_coordinates_daily + ( + target LowCardinality(String) CODEC(ZSTD(1)), + hash String CODEC(ZSTD(1)), + timestamp DateTime('UTC'), + total UInt32 CODEC(ZSTD(1)), + coordinate String CODEC(ZSTD(1)) + ) + ENGINE = SummingMergeTree + PARTITION BY toYYYYMMDD(timestamp) + PRIMARY KEY (target, coordinate, hash) + ORDER BY (target, coordinate, hash) + SETTINGS index_granularity = 8192 + AS + SELECT + target, + hash, + toStartOfDay(timestamp) AS timestamp, + count() AS total, + coordinate + FROM 
default.operations_new + ARRAY JOIN schema as coordinate + GROUP BY + target, + coordinate, + hash, + timestamp +`; + +const create_client_names_daily_query = ` + CREATE MATERIALIZED VIEW IF NOT EXISTS default.client_names_daily + ( + target LowCardinality(String) CODEC(ZSTD(1)), + client_name String CODEC(ZSTD(1)), + hash String CODEC(ZSTD(1)), + timestamp DateTime('UTC'), + total UInt32 CODEC(ZSTD(1)) + ) + ENGINE = SummingMergeTree + PARTITION BY toYYYYMMDD(timestamp) + PRIMARY KEY (target, client_name, hash) + ORDER BY (target, client_name, hash) + SETTINGS index_granularity = 8192 + AS + SELECT + target, + client_name, + hash, + toStartOfDay(timestamp) AS timestamp, + count() AS total + FROM default.operations_new + GROUP BY + target, + client_name, + hash, + timestamp +`; + +export async function migrateClickHouse() { + if (process.env.CLICKHOUSE_MIGRATOR !== 'up') { + console.log('Skipping ClickHouse migration'); + return; + } + + const endpoint = `${config.clickhouse.protocol}://${config.clickhouse.host}:${config.clickhouse.port}`; + + console.log('Migrating ClickHouse'); + console.log('Endpoint:', endpoint); + console.log('Username:', config.clickhouse.username); + console.log('Password:', config.clickhouse.password?.length); + + const queries = [ + create_operations_registry_query, + create_operations_new_query, + create_operations_new_hourly_mv_query, + create_schema_coordinates_daily_query, + create_client_names_daily_query, + ]; + + for await (const query of queries) { + await got + .post(endpoint, { + body: query, + searchParams: { + default_format: 'JSON', + wait_end_of_query: '1', + }, + timeout: { + request: 10_000, + }, + headers: { + Accept: 'text/plain', + }, + username: config.clickhouse.username, + password: config.clickhouse.password, + }) + .catch((error) => { + const body = error?.response?.body; + if (body) { + console.error(body); + } + + return Promise.reject(error); + }); + } +} diff --git 
a/packages/services/storage/migrations/migrate.ts b/packages/services/storage/migrations/migrate.ts new file mode 100644 index 000000000..008b6b140 --- /dev/null +++ b/packages/services/storage/migrations/migrate.ts @@ -0,0 +1,48 @@ +#!/usr/bin/env node +import url from 'url'; +import { sep } from 'path'; +// eslint-disable-next-line import/no-extraneous-dependencies +import { SlonikMigrator } from '@slonik/migrator'; +// eslint-disable-next-line import/no-extraneous-dependencies +import { createPool } from 'slonik'; +import { config } from '../src/env'; +import { migrateClickHouse } from './clickhouse'; + +const [, , cmd] = process.argv; +const slonik = createPool(config.postgresConnectionString); + +const actionsDirectory = __dirname + sep + 'actions'; +console.log('Actions in:', actionsDirectory); + +const migrator = new SlonikMigrator({ + migrationsPath: actionsDirectory, + slonik, + migrationTableName: 'migration', + logger: console, +}); + +// Why? We don't want to run the 'create' and 'down' commands programmatically, it should run from CLI. +const isCreateCommand = cmd === 'create'; +const isDownCommand = cmd === 'down'; + +// This is used by production build of this package. +// We are building a "cli" out of the package, so we need a workaround to pass the command to run. 
+if (process.env.MIGRATOR === 'up' && !isCreateCommand && !isDownCommand) { + console.log('Running the UP migrations'); + migrator + .up() + .then(() => migrateClickHouse()) + .then(() => { + process.exit(0); + }) + .catch((error) => { + console.error(error); + process.exit(1); + }); +} else if ( + import.meta.url === url.pathToFileURL(process.argv[1]).href || + require.main === module +) { + console.log('Running as a CLI'); + migrator.runAsCLI(); +} diff --git a/packages/services/storage/migrations/package.json b/packages/services/storage/migrations/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/packages/services/storage/migrations/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/packages/services/storage/package.json b/packages/services/storage/package.json new file mode 100644 index 000000000..f80c43f84 --- /dev/null +++ b/packages/services/storage/package.json @@ -0,0 +1,52 @@ +{ + "name": "@hive/storage", + "type": "module", + "private": true, + "version": "0.14.1", + "license": "MIT", + "scripts": { + "setup": "yarn db:start && yarn db", + "db": "yarn db:create && yarn migration:run", + "db:create": "node tools/create-db.mjs", + "db:migrator": "node -r ./tools/register.cjs migrations/migrate.ts", + "migration:create": "yarn db:migrator create", + "migration:run": "yarn db:migrator up", + "migration:rollback": "yarn db:migrator down", + "db:generate": "schemats generate --config schemats.cjs -o src/db/types.ts", + "db:start": "docker-compose up -d --remove-orphans", + "db:dev": "docker compose up --remove-orphans", + "build": "bob runify --single", + "postbuild": "copyfiles -f \"migrations/actions/*.sql\" dist/actions && copyfiles -f \"migrations/actions/down/*.sql\" dist/actions/down" + }, + "dependencies": { + "@sentry/node": "6.19.7", + "@slonik/migrator": "0.8.5", + "@theguild/buddy": "0.1.0", + "dotenv": "10.0.0", + "got": "12.0.4", + "slonik": "24.1.2", + "slonik-interceptor-query-logging": "1.3.9" + 
}, + "devDependencies": { + "@tgriesser/schemats": "7.0.0", + "@types/pg": "8.6.5", + "esbuild-register": "3.3.2", + "pg-promise": "10.11.1", + "ts-node": "10.7.0" + }, + "engines": { + "node": ">=12" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "bin": "migrations/migrate.ts", + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/storage/schemats.cjs b/packages/services/storage/schemats.cjs new file mode 100644 index 000000000..0620598fa --- /dev/null +++ b/packages/services/storage/schemats.cjs @@ -0,0 +1,12 @@ +const { register } = require('esbuild-register/dist/node'); + +register({ + format: 'cjs', +}); + +const cn = require('./tools/db-connection-string.mjs').default; + +module.exports = { + conn: cn('registry'), + prettier: true, +}; diff --git a/packages/services/storage/src/db/index.ts b/packages/services/storage/src/db/index.ts new file mode 100644 index 000000000..d727cf34a --- /dev/null +++ b/packages/services/storage/src/db/index.ts @@ -0,0 +1,3 @@ +export * from './types'; +export * from './pool'; +export * from './utils'; diff --git a/packages/services/storage/src/db/pool.ts b/packages/services/storage/src/db/pool.ts new file mode 100644 index 000000000..d57bba3bc --- /dev/null +++ b/packages/services/storage/src/db/pool.ts @@ -0,0 +1,47 @@ +/// +import { + createPool, + TaggedTemplateLiteralInvocationType, + QueryResultRowColumnType, + CommonQueryMethodsType, +} from 'slonik'; +import { createQueryLoggingInterceptor } from 'slonik-interceptor-query-logging'; +import { createSentryInterceptor } from './sentry'; + +const dbInterceptors = [ + createQueryLoggingInterceptor(), + createSentryInterceptor(), +]; + +export function getPool(connection: string) { + const pool = createPool(connection, { + interceptors: dbInterceptors, + captureStackTrace: false, + }); + + function interceptError( + methodName: K + ) { + const original: 
CommonQueryMethodsType[K] = pool[methodName]; + + function interceptor( + this: any, + sql: TaggedTemplateLiteralInvocationType, + values?: QueryResultRowColumnType[] + ): any { + return (original as any).call(this, sql, values).catch((error: any) => { + error.sql = sql.sql; + error.values = sql.values || values; + + return Promise.reject(error); + }); + } + + pool[methodName] = interceptor; + } + + interceptError('one'); + interceptError('many'); + + return pool; +} diff --git a/packages/services/storage/src/db/sentry.ts b/packages/services/storage/src/db/sentry.ts new file mode 100644 index 000000000..035fca42b --- /dev/null +++ b/packages/services/storage/src/db/sentry.ts @@ -0,0 +1,70 @@ +import { InterceptorType } from 'slonik'; +import { getCurrentHub, captureException } from '@sentry/node'; +import type { Span } from '@sentry/types'; + +export const createSentryInterceptor = (): InterceptorType => { + const connections: Record> = {}; + + return { + afterPoolConnection(context) { + connections[context.connectionId] = {}; + + return null; + }, + async beforeQueryResult(context) { + if (!connections[context.connectionId]) { + return null; + } + + const span = connections[context.connectionId][context.queryId]; + + if (span) { + span.finish(); + } + + return null; + }, + beforePoolConnectionRelease(context) { + if (!connections[context.connectionId]) { + return null; + } + + delete connections[context.connectionId]; + + return null; + }, + async beforeQueryExecution(context) { + if (!connections[context.connectionId]) { + return null; + } + + const scope = getCurrentHub().getScope(); + const parentSpan = scope?.getSpan(); + const span = parentSpan?.startChild({ + description: context.originalQuery.sql, + op: 'db', + }); + + if (span) { + connections[context.connectionId][context.queryId] = span; + } + + return null; + }, + queryExecutionError(context, _, error) { + if (!connections[context.connectionId]) { + return null; + } + + console.log('Sentry interceptor 
error', error); + captureException(error, { + extra: { + query: context.originalQuery.sql, + values: context.originalQuery.values, + }, + }); + + return null; + }, + }; +}; diff --git a/packages/services/storage/src/db/types.ts b/packages/services/storage/src/db/types.ts new file mode 100644 index 000000000..b99eb3652 --- /dev/null +++ b/packages/services/storage/src/db/types.ts @@ -0,0 +1,169 @@ +/* tslint:disable */ + +/** + * AUTO-GENERATED FILE @ 2022-05-16 17:20:59 - DO NOT EDIT! + * + * This file was automatically generated by schemats v.7.0.0 + * $ schemats generate -c postgres://username:password@localhost:5432/registry?sslmode=disable -t activities -t alert_channels -t alerts -t commits -t migration -t organization_member -t organizations -t organizations_billing -t persisted_operations -t projects -t target_validation -t targets -t tokens -t users -t version_commit -t versions -s public + * + */ + +export type alert_channel_type = 'SLACK' | 'WEBHOOK'; +export type alert_type = 'SCHEMA_CHANGE_NOTIFICATIONS'; +export type operation_kind = 'mutation' | 'query' | 'subscription'; +export type organization_type = 'PERSONAL' | 'REGULAR'; +export type user_role = 'ADMIN' | 'MEMBER'; + +export interface activities { + activity_metadata: any; + activity_type: string; + created_at: Date; + id: string; + organization_id: string; + project_id: string | null; + target_id: string | null; + user_id: string; +} + +export interface alert_channels { + created_at: Date; + id: string; + name: string; + project_id: string; + slack_channel: string | null; + type: alert_channel_type; + webhook_endpoint: string | null; +} + +export interface alerts { + alert_channel_id: string; + created_at: Date; + id: string; + project_id: string; + target_id: string; + type: alert_type; +} + +export interface commits { + author: string; + commit: string; + content: string; + created_at: Date; + id: string; + metadata: string | null; + project_id: string; + service: string | null; + target_id: 
string; +} + +export interface migration { + date: Date; + hash: string; + name: string; +} + +export interface organization_member { + organization_id: string; + role: user_role; + scopes: Array | null; + user_id: string; +} + +export interface organizations { + clean_id: string; + created_at: Date; + github_app_installation_id: string | null; + id: string; + invite_code: string; + limit_operations_monthly: string; + limit_retention_days: string; + limit_schema_push_monthly: string; + name: string; + plan_name: string; + slack_token: string | null; + type: organization_type; + user_id: string; +} + +export interface organizations_billing { + billing_email_address: string | null; + external_billing_reference_id: string; + organization_id: string; +} + +export interface persisted_operations { + content: string; + created_at: Date; + id: string; + operation_hash: string; + operation_kind: operation_kind; + operation_name: string; + project_id: string; +} + +export interface projects { + build_url: string | null; + clean_id: string; + created_at: Date; + git_repository: string | null; + id: string; + name: string; + org_id: string; + type: string; + validation_url: string | null; +} + +export interface target_validation { + destination_target_id: string; + target_id: string; +} + +export interface targets { + base_schema: string | null; + clean_id: string; + created_at: Date; + id: string; + name: string; + project_id: string; + validation_enabled: boolean; + validation_percentage: number; + validation_period: number; +} + +export interface tokens { + created_at: Date; + deleted_at: Date | null; + id: string; + last_used_at: Date | null; + name: string; + organization_id: string; + project_id: string; + scopes: Array | null; + target_id: string; + token: string; + token_alias: string; +} + +export interface users { + created_at: Date; + display_name: string; + email: string; + external_auth_user_id: string; + full_name: string; + id: string; +} + +export interface 
version_commit { + commit_id: string; + url: string | null; + version_id: string; +} + +export interface versions { + base_schema: string | null; + commit_id: string; + created_at: Date; + id: string; + target_id: string; + valid: boolean; +} diff --git a/packages/services/storage/src/db/utils.ts b/packages/services/storage/src/db/utils.ts new file mode 100644 index 000000000..c28e54756 --- /dev/null +++ b/packages/services/storage/src/db/utils.ts @@ -0,0 +1,66 @@ +import { sql } from 'slonik'; + +export function createConnectionString(env: { + POSTGRES_HOST: string; + POSTGRES_PORT: string; + POSTGRES_PASSWORD: string; + POSTGRES_USER: string; + POSTGRES_DB: string; + POSTGRES_CONNECTION_STRING?: string; + POSTGRES_ENABLE_SSL?: boolean; +}) { + const { + POSTGRES_HOST, + POSTGRES_PORT, + POSTGRES_PASSWORD, + POSTGRES_USER, + POSTGRES_DB, + POSTGRES_ENABLE_SSL = null, + POSTGRES_CONNECTION_STRING = null, + } = env; + + return ( + POSTGRES_CONNECTION_STRING || + `postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}${ + POSTGRES_ENABLE_SSL ? 
'' : '?sslmode=disable' + }` + ); +} + +export function objectToParams>( + obj: T, + transformArray?: (key: K, value: T[K]) => any +) { + const identifiers = sql.join( + Object.keys(obj).map((k) => sql.identifier([k])), + sql`, ` + ); + + const values = sql.join( + Object.keys(obj).map((key) => { + if (obj[key] === undefined || obj[key] === null) { + return null; + } else if (Array.isArray(obj[key])) { + return transformArray!(key, obj[key]); + } else if (typeof obj[key] === 'object') { + return sql.json(obj[key]); + } else { + return obj[key]; + } + }), + sql`, ` + ); + + return { identifiers, values }; +} + +export function objectToUpdateParams(obj: Record) { + return sql.join( + Object.keys(obj).map((key) => sql`${sql.identifier([key])} = ${obj[key]}`), + sql`, ` + ); +} + +export function toDate(date: Date) { + return sql`to_timestamp(${date.getTime() / 1000})`; +} diff --git a/packages/services/storage/src/env.ts b/packages/services/storage/src/env.ts new file mode 100644 index 000000000..4b2f2fce8 --- /dev/null +++ b/packages/services/storage/src/env.ts @@ -0,0 +1,30 @@ +import { config as dotenv } from 'dotenv'; + +dotenv({ + debug: true, +}); + +import cn from '../tools/db-connection-string.mjs'; + +function ensureVarIf(key: string, condition: boolean) { + if (condition) { + if (!process.env[key]) { + throw new Error(`Missing env var "${key}"`); + } + + return process.env[key]; + } +} + +const isClickHouseMigration = process.env.CLICKHOUSE_MIGRATOR === 'up'; + +export const config = { + postgresConnectionString: cn('registry'), + clickhouse: { + protocol: ensureVarIf('CLICKHOUSE_PROTOCOL', isClickHouseMigration), + host: ensureVarIf('CLICKHOUSE_HOST', isClickHouseMigration), + port: ensureVarIf('CLICKHOUSE_PORT', isClickHouseMigration), + username: ensureVarIf('CLICKHOUSE_USERNAME', isClickHouseMigration), + password: ensureVarIf('CLICKHOUSE_PASSWORD', isClickHouseMigration), + }, +}; diff --git a/packages/services/storage/src/index.ts 
b/packages/services/storage/src/index.ts new file mode 100644 index 000000000..8ea236e2d --- /dev/null +++ b/packages/services/storage/src/index.ts @@ -0,0 +1,1966 @@ +import type { + User, + Organization, + Project, + Target, + Schema, + SchemaVersion, + Member, + ActivityObject, + TargetSettings, + PersistedOperation, + AlertChannel, + Alert, + AuthProvider, + OrganizationBilling, +} from '@hive/api'; +import { Storage, ProjectType, OrganizationType } from '@hive/api'; +import { sql, TaggedTemplateLiteralInvocationType } from 'slonik'; +import { + commits, + getPool, + organizations, + organization_member, + projects, + targets, + target_validation, + users, + versions, + version_commit, + objectToParams, + activities, + persisted_operations, + alert_channels, + alerts, + organizations_billing, +} from './db'; +import { batch } from '@theguild/buddy'; +import type { Slonik } from './shared'; + +export { createConnectionString } from './db/utils'; +export { createTokenStorage } from './tokens'; +export type { tokens } from './db/types'; + +export type WithUrl = T & Pick; +export type WithMaybeMetadata = T & { + metadata?: string | null; +}; + +function getProviderBasedOnExternalId(externalId: string): AuthProvider { + if (externalId.startsWith('github')) { + return 'GITHUB'; + } + + if (externalId.startsWith('google')) { + return 'GOOGLE'; + } + + return 'AUTH0'; +} + +function dateToSqlTimestamp(input: Date) { + return sql`TO_TIMESTAMP(${input.getTime()} / 1000.0)`; +} + +export async function createStorage(connection: string): Promise { + const pool = getPool(connection); + + function transformUser(user: users): User { + return { + id: user.id, + email: user.email, + externalAuthUserId: user.external_auth_user_id, + provider: getProviderBasedOnExternalId(user.external_auth_user_id), + fullName: user.full_name, + displayName: user.display_name, + }; + } + + function transformMember( + user: users & Pick + ): Member { + return { + id: user.id, + user: 
transformUser(user), + scopes: (user.scopes as Member['scopes']) || [], + organization: user.organization_id, + }; + } + + function transformOrganization(organization: organizations): Organization { + return { + id: organization.id, + cleanId: organization.clean_id, + name: organization.name, + inviteCode: organization.invite_code, + monthlyRateLimit: { + retentionInDays: parseInt(organization.limit_retention_days), + operations: parseInt(organization.limit_operations_monthly), + schemaPush: parseInt(organization.limit_schema_push_monthly), + }, + billingPlan: organization.plan_name, + type: + organization.type === 'PERSONAL' + ? OrganizationType.PERSONAL + : OrganizationType.REGULAR, + }; + } + + function transformProject(project: projects): Project { + return { + id: project.id, + cleanId: project.clean_id, + orgId: project.org_id, + name: project.name, + type: project.type as ProjectType, + buildUrl: project.build_url, + validationUrl: project.validation_url, + gitRepository: project.git_repository, + }; + } + + function transformTarget(target: targets, orgId: string): Target { + return { + id: target.id, + cleanId: target.clean_id, + name: target.name, + projectId: target.project_id, + orgId, + }; + } + + function transformSchema( + schema: WithUrl< + WithMaybeMetadata< + Pick< + commits, + | 'id' + | 'commit' + | 'author' + | 'content' + | 'created_at' + | 'project_id' + | 'service' + | 'target_id' + > + > + > + ): Schema { + const record: Schema = { + id: schema.id, + author: schema.author, + source: schema.content, + commit: schema.commit, + date: schema.created_at as any, + service: schema.service, + url: schema.url, + target: schema.target_id, + }; + if (schema.metadata != null) { + record.metadata = JSON.parse(schema.metadata); + } + + return record; + } + + function transformSchemaVersion(version: versions): SchemaVersion { + return { + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: 
version.base_schema, + }; + } + + function transformActivity(row: { + activity: [activities]; + target: [targets]; + project: [projects]; + organization: [organizations]; + user: [users]; + }): ActivityObject { + const activity = row.activity[0]; + const target = row.target[0]; + const project = row.project[0]; + const organization = row.organization[0]; + const user = row.user[0]; + + return { + id: activity.id, + type: activity.activity_type, + meta: activity.activity_metadata, + createdAt: activity.created_at, + target: target ? transformTarget(target, organization.id) : undefined, + project: project ? transformProject(project) : undefined, + organization: transformOrganization(organization), + user: user ? transformUser(user) : undefined, + }; + } + + function transformTargetSettings( + row: Pick< + targets, + 'validation_enabled' | 'validation_percentage' | 'validation_period' + > & { + targets: target_validation['destination_target_id'][] | null; + } + ): TargetSettings { + return { + validation: { + enabled: row.validation_enabled, + percentage: row.validation_percentage, + period: row.validation_period, + targets: Array.isArray(row.targets) + ? 
row.targets.filter(isDefined) + : [], + }, + }; + } + + function transformPersistedOperation( + operation: persisted_operations + ): PersistedOperation { + return { + id: operation.id, + operationHash: operation.operation_hash, + name: operation.operation_name, + kind: operation.operation_kind as any, + project: operation.project_id, + content: operation.content, + date: operation.created_at as any, + }; + } + + function transformOrganizationBilling( + orgBilling: organizations_billing + ): OrganizationBilling { + return { + organizationId: orgBilling.organization_id, + externalBillingReference: orgBilling.external_billing_reference_id, + billingEmailAddress: orgBilling.billing_email_address, + }; + } + + function transformAlertChannel(channel: alert_channels): AlertChannel { + return { + id: channel.id, + projectId: channel.project_id, + name: channel.name, + type: channel.type, + createdAt: channel.created_at as any, + slackChannel: channel.slack_channel, + webhookEndpoint: channel.webhook_endpoint, + }; + } + + function transformAlert(alert: alerts, organization: string): Alert { + return { + id: alert.id, + type: alert.type, + createdAt: alert.created_at as any, + channelId: alert.alert_channel_id, + organizationId: organization, + projectId: alert.project_id, + targetId: alert.target_id, + }; + } + + const storage: Storage = { + async getUserByExternalId({ external }) { + const user = await pool.maybeOne>( + sql`SELECT * FROM public.users WHERE external_auth_user_id = ${external} LIMIT 1` + ); + + if (user) { + return transformUser(user); + } + + return null; + }, + async getUserById({ id }) { + const user = await pool.maybeOne>( + sql`SELECT * FROM public.users WHERE id = ${id} LIMIT 1` + ); + + if (user) { + return transformUser(user); + } + + return null; + }, + async createUser({ external, email }) { + const name = email.split('@')[0]; + return transformUser( + await pool.one>( + sql` + INSERT INTO public.users + ("email", "external_auth_user_id", 
"full_name", "display_name") + VALUES + (${email}, ${external}, ${name}, ${name}) + RETURNING * + ` + ) + ); + }, + async updateUser({ id, displayName, fullName }) { + return transformUser( + await pool.one>(sql` + UPDATE public.users + SET display_name = ${displayName}, full_name = ${fullName} + WHERE id = ${id} + RETURNING * + `) + ); + }, + async createOrganization({ name, cleanId, type, user, scopes }) { + const org = transformOrganization( + await pool.one>( + sql` + INSERT INTO public.organizations + ("name", "clean_id", "type", "user_id") + VALUES + (${name}, ${cleanId}, ${type}, ${user}) + RETURNING * + ` + ) + ); + + await pool.query>( + sql` + INSERT INTO public.organization_member + ("organization_id", "user_id", "scopes") + VALUES + (${org.id}, ${user}, ${sql.array(scopes, 'text')}) + ` + ); + + return org; + }, + async deleteOrganization({ organization }) { + const result = transformOrganization( + await pool.one>( + sql` + DELETE FROM public.organizations + WHERE id = ${organization} + RETURNING * + ` + ) + ); + + return result; + }, + async createProject({ + name, + organization, + cleanId, + type, + buildUrl = null, + validationUrl = null, + }) { + return transformProject( + await pool.one>( + sql` + INSERT INTO public.projects + ("name", "clean_id", "type", "org_id", "build_url", "validation_url") + VALUES + (${name}, ${cleanId}, ${type}, ${organization}, ${buildUrl}, ${validationUrl}) + RETURNING * + ` + ) + ); + }, + async getOrganizationId({ organization }) { + // Based on clean_id, resolve id + const result = await pool.one>( + sql`SELECT id FROM public.organizations WHERE clean_id = ${organization} LIMIT 1` + ); + + return result.id as string; + }, + getOrganizationOwner: batch(async (selectors) => { + const organizations = selectors.map((s) => s.organization); + const owners = await pool.query< + Slonik> + >( + sql` + SELECT u.*, om.scopes, om.organization_id FROM public.organizations as o + LEFT JOIN public.users as u ON (u.id = o.user_id) + 
LEFT JOIN public.organization_member as om ON (om.user_id = u.id AND om.organization_id = o.id) + WHERE o.id IN (${sql.join(organizations, sql`, `)})` + ); + + return organizations.map((organization) => { + const owner = owners.rows.find( + (row) => row.organization_id === organization + ); + + if (owner) { + return Promise.resolve(transformMember(owner)); + } + + return Promise.reject( + new Error(`Owner not found (organization=${organization})`) + ); + }); + }), + async getOrganizationMembers({ organization }) { + const results = await pool.many< + Slonik> + >( + sql` + SELECT u.*, om.scopes, om.organization_id FROM public.organization_member as om + LEFT JOIN public.users as u ON (u.id = om.user_id) + WHERE om.organization_id = ${organization} ORDER BY u.created_at DESC` + ); + + return results.map(transformMember); + }, + async getOrganizationMember({ organization, user }) { + const member = await pool.one< + Slonik> + >( + sql` + SELECT u.*, om.scopes, om.organization_id FROM public.organization_member as om + LEFT JOIN public.users as u ON (u.id = om.user_id) + WHERE om.organization_id = ${organization} AND om.user_id = ${user} ORDER BY u.created_at DESC LIMIT 1` + ); + + return transformMember(member); + }, + async getOrganizationMemberAccessPairs(pairs) { + const results = await pool.query< + Slonik< + Pick + > + >( + sql` + SELECT organization_id, user_id, scopes + FROM public.organization_member + WHERE (organization_id, user_id) IN ((${sql.join( + pairs.map((p) => sql`${p.organization}, ${p.user}`), + sql`), (` + )})) + ` + ); + + return pairs.map(({ organization, user }) => { + return (results.rows.find( + (row) => row.organization_id === organization && row.user_id === user + )?.scopes || []) as Member['scopes']; + }); + }, + async hasOrganizationMemberPairs(pairs) { + const results = await pool.query>( + sql` + SELECT organization_id, user_id + FROM public.organization_member + WHERE (organization_id, user_id) IN ((${sql.join( + pairs.map((p) => 
sql`${p.organization}, ${p.user}`), + sql`), (` + )})) + ` + ); + + return pairs.map(({ organization, user }) => + results.rows.some( + (row) => row.organization_id === organization && row.user_id === user + ) + ); + }, + async hasOrganizationProjectMemberPairs(pairs) { + const results = await pool.query< + Slonik + >( + sql` + SELECT om.organization_id, om.user_id, p.id AS project_id + FROM public.projects as p + LEFT JOIN public.organization_member as om ON (p.org_id = om.organization_id) + WHERE (om.organization_id, om.user_id, p.id) IN ((${sql.join( + pairs.map((p) => sql`${p.organization}, ${p.user}, ${p.project}`), + sql`), (` + )})) + ` + ); + + return pairs.map(({ organization, user, project }) => + results.rows.some( + (row) => + row.organization_id === organization && + row.project_id === project && + row.user_id === user + ) + ); + }, + async updateOrganizationName({ name, organization }) { + return transformOrganization( + await pool.one>(sql` + UPDATE public.organizations + SET name = ${name} + WHERE id = ${organization} + RETURNING * + `) + ); + }, + async updateOrganizationPlan({ billingPlan, organization }) { + return transformOrganization( + await pool.one>(sql` + UPDATE public.organizations + SET plan_name = ${billingPlan} + WHERE id = ${organization} + RETURNING * + `) + ); + }, + async updateOrganizationRateLimits({ monthlyRateLimit, organization }) { + return transformOrganization( + await pool.one>(sql` + UPDATE public.organizations + SET limit_operations_monthly = ${monthlyRateLimit.operations}, limit_schema_push_monthly = ${monthlyRateLimit.schemaPush}, limit_retention_days = ${monthlyRateLimit.retentionInDays} + WHERE id = ${organization} + RETURNING * + `) + ); + }, + async updateOrganizationInviteCode({ organization, inviteCode }) { + return transformOrganization( + await pool.one>(sql` + UPDATE public.organizations + SET invite_code = ${inviteCode} + WHERE id = ${organization} + RETURNING * + `) + ); + }, + async addOrganizationMember({ 
user, organization, scopes }) { + await pool.one>( + sql` + INSERT INTO public.organization_member + (organization_id, user_id, scopes) + VALUES + (${organization}, ${user}, ${sql.array(scopes, 'text')}) + RETURNING * + ` + ); + }, + async deleteOrganizationMembers({ users, organization }) { + await pool.query>( + sql` + DELETE FROM public.organization_member + WHERE organization_id = ${organization} AND user_id IN (${sql.join( + users, + sql`, ` + )}) + ` + ); + }, + async updateOrganizationMemberAccess({ user, organization, scopes }) { + await pool.query>( + sql` + UPDATE public.organization_member + SET scopes = ${sql.array(scopes, 'text')} + WHERE organization_id = ${organization} AND user_id = ${user} + ` + ); + }, + async getProjectId({ project, organization }) { + // Based on project's clean_id and organization's clean_id, resolve the actual uuid of the project + const result = await pool.one>( + sql`SELECT p.id as id + FROM public.projects as p + LEFT JOIN public.organizations as org ON (p.org_id = org.id) + WHERE p.clean_id = ${project} AND org.clean_id = ${organization} LIMIT 1` + ); + + return result.id as string; + }, + async getTargetId({ project, target, organization, useIds }) { + if (useIds) { + const result = await pool.one>( + sql` + SELECT t.id FROM public.targets as t + LEFT JOIN public.projects AS p ON (p.id = t.project_id) + LEFT JOIN public.organizations AS o ON (o.id = p.org_id) + WHERE t.clean_id = ${target} AND p.id = ${project} AND o.id = ${organization} + LIMIT 1` + ); + + return result.id as string; + } + + // Based on clean_id, resolve id + const result = await pool.one>( + sql` + SELECT t.id FROM public.targets as t + LEFT JOIN public.projects AS p ON (p.id = t.project_id) + LEFT JOIN public.organizations AS o ON (o.id = p.org_id) + WHERE t.clean_id = ${target} AND p.clean_id = ${project} AND o.clean_id = ${organization} + LIMIT 1` + ); + + return result.id as string; + }, + async getPersistedOperationId({ project, operation }) { + 
const result = await pool.one>( + sql` + SELECT po.id FROM public.persisted_operations as po + LEFT JOIN public.projects AS p ON (p.id = po.project_id) + WHERE po.operation_hash = ${operation} AND p.clean_id = ${project} + LIMIT 1` + ); + + return result.id; + }, + async getOrganization({ organization }) { + return transformOrganization( + await pool.one>( + sql`SELECT * FROM public.organizations WHERE id = ${organization} LIMIT 1` + ) + ); + }, + async getMyOrganization({ user }) { + const org = await pool.maybeOne>( + sql`SELECT * FROM public.organizations WHERE user_id = ${user} AND type = ${OrganizationType.PERSONAL} LIMIT 1` + ); + + return org ? transformOrganization(org) : null; + }, + async getOrganizations({ user }) { + const results = await pool.many>( + sql` + SELECT o.* + FROM public.organizations as o + LEFT JOIN public.organization_member as om ON (om.organization_id = o.id) + WHERE om.user_id = ${user} + ORDER BY o.created_at DESC + ` + ); + return results.map(transformOrganization); + }, + async getOrganizationByInviteCode({ inviteCode }) { + const result = await pool.maybeOne>( + sql` + SELECT * FROM public.organizations + WHERE invite_code = ${inviteCode} + LIMIT 1 + ` + ); + + if (result) { + return transformOrganization(result); + } + + return null; + }, + async getOrganizationByCleanId({ cleanId }) { + const result = await pool.maybeOne>( + sql`SELECT * FROM public.organizations WHERE clean_id = ${cleanId} LIMIT 1` + ); + + if (!result) { + return null; + } + + return transformOrganization(result); + }, + async getOrganizationByGitHubInstallationId({ installationId }) { + const result = await pool.maybeOne>( + sql` + SELECT * FROM public.organizations + WHERE github_app_installation_id = ${installationId} + LIMIT 1 + ` + ); + + if (result) { + return transformOrganization(result); + } + + return null; + }, + async getProject({ project }) { + return transformProject( + await pool.one>( + sql`SELECT * FROM public.projects WHERE id = ${project} 
LIMIT 1` + ) + ); + }, + async getProjectByCleanId({ cleanId, organization }) { + const result = await pool.maybeOne>( + sql`SELECT * FROM public.projects WHERE clean_id = ${cleanId} AND org_id = ${organization} LIMIT 1` + ); + + if (!result) { + return null; + } + + return transformProject(result); + }, + async getProjects({ organization }) { + const result = await pool.query>( + sql`SELECT * FROM public.projects WHERE org_id = ${organization} ORDER BY created_at DESC` + ); + + return result.rows.map(transformProject); + }, + async updateProjectName({ name, organization, project }) { + return transformProject( + await pool.one>(sql` + UPDATE public.projects + SET name = ${name} + WHERE id = ${project} AND org_id = ${organization} + RETURNING * + `) + ); + }, + async updateProjectGitRepository({ gitRepository, organization, project }) { + return transformProject( + await pool.one>(sql` + UPDATE public.projects + SET git_repository = ${gitRepository ?? null} + WHERE id = ${project} AND org_id = ${organization} + RETURNING * + `) + ); + }, + async deleteProject({ organization, project }) { + const result = transformProject( + await pool.one>( + sql` + DELETE FROM public.projects + WHERE id = ${project} AND org_id = ${organization} + RETURNING * + ` + ) + ); + + return result; + }, + async createTarget({ organization, project, name, cleanId }) { + return transformTarget( + await pool.one>( + sql` + INSERT INTO public.targets + (name, clean_id, project_id) + VALUES + (${name}, ${cleanId}, ${project}) + RETURNING * + ` + ), + organization + ); + }, + async updateTargetName({ organization, project, target, name }) { + return transformTarget( + await pool.one>(sql` + UPDATE public.targets + SET name = ${name} + WHERE id = ${target} AND project_id = ${project} + RETURNING * + `), + organization + ); + }, + async deleteTarget({ organization, project, target }) { + const result = transformTarget( + await pool.one>( + sql` + DELETE FROM public.targets + WHERE id = ${target} 
AND project_id = ${project} + RETURNING * + ` + ), + organization + ); + + await pool.query( + sql`DELETE FROM public.versions WHERE target_id = ${target}` + ); + + return result; + }, + async getTarget({ organization, project, target }) { + return transformTarget( + await pool.one>( + sql`SELECT * FROM public.targets WHERE id = ${target} AND project_id = ${project} LIMIT 1` + ), + organization + ); + }, + async getTargetByCleanId({ organization, project, cleanId }) { + const result = await pool.maybeOne>( + sql`SELECT * FROM public.targets WHERE clean_id = ${cleanId} AND project_id = ${project} LIMIT 1` + ); + + if (!result) { + return null; + } + + return transformTarget(result, organization); + }, + async getTargets({ organization, project }) { + const results = await pool.query>( + sql`SELECT * FROM public.targets WHERE project_id = ${project} ORDER BY created_at DESC` + ); + + return results.rows.map((r) => transformTarget(r, organization)); + }, + async getTargetSettings({ target, project }) { + const row = await pool.one< + Pick< + targets, + 'validation_enabled' | 'validation_percentage' | 'validation_period' + > & { + targets: target_validation['destination_target_id'][]; + } + >(sql` + SELECT + t.validation_enabled, + t.validation_percentage, + t.validation_period, + array_agg(tv.destination_target_id) as targets + FROM public.targets AS t + LEFT JOIN public.target_validation AS tv ON (tv.target_id = t.id) + WHERE t.id = ${target} AND t.project_id = ${project} + GROUP BY t.id + LIMIT 1 + `); + + return transformTargetSettings(row); + }, + async setTargetValidation({ target, project, enabled }) { + return transformTargetSettings( + await pool.transaction(async (trx) => { + const targetValidationRowExists = await trx.exists(sql` + SELECT 1 FROM target_validation WHERE target_id = ${target} + `); + + if (!targetValidationRowExists) { + await trx.query(sql` + INSERT INTO target_validation (target_id, destination_target_id) VALUES (${target}, ${target}) + `); 
+ } + + return trx.one< + Pick< + targets, + | 'validation_enabled' + | 'validation_percentage' + | 'validation_period' + > & { + targets: target_validation['destination_target_id'][]; + } + >(sql` + UPDATE public.targets as t + SET validation_enabled = ${enabled} + FROM + ( + SELECT + it.id, + array_agg(tv.destination_target_id) as targets + FROM public.targets AS it + LEFT JOIN public.target_validation AS tv ON (tv.target_id = it.id) + WHERE it.id = ${target} AND it.project_id = ${project} + GROUP BY it.id + LIMIT 1 + ) ret + WHERE t.id = ret.id + RETURNING ret.id, t.validation_enabled, t.validation_percentage, t.validation_period, ret.targets + `); + }) + ).validation; + }, + async updateTargetValidationSettings({ + target, + project, + percentage, + period, + targets, + }) { + return transformTargetSettings( + await pool.transaction(async (trx) => { + await trx.query(sql` + DELETE + FROM public.target_validation + WHERE destination_target_id NOT IN (${sql.join(targets, sql`, `)}) + AND target_id = ${target} + `); + + await trx.query(sql` + INSERT INTO public.target_validation + (target_id, destination_target_id) + VALUES + ( + ${sql.join( + targets.map((dest) => sql.join([target, dest], sql`, `)), + sql`), (` + )} + ) + ON CONFLICT (target_id, destination_target_id) DO NOTHING + `); + + return trx.one(sql` + UPDATE public.targets as t + SET validation_percentage = ${percentage}, validation_period = ${period} + FROM ( + SELECT + it.id, + array_agg(tv.destination_target_id) as targets + FROM public.targets AS it + LEFT JOIN public.target_validation AS tv ON (tv.target_id = it.id) + WHERE it.id = ${target} AND it.project_id = ${project} + GROUP BY it.id + LIMIT 1 + ) ret + WHERE t.id = ret.id + RETURNING t.id, t.validation_enabled, t.validation_percentage, t.validation_period, ret.targets; + `); + }) + ).validation; + }, + async hasSchema({ target }) { + return pool.exists( + sql` + SELECT 1 FROM public.versions as v WHERE v.target_id = ${target} LIMIT 1 + ` + ); 
+ }, + async getMaybeLatestValidVersion({ target }) { + const version = await pool.maybeOne< + Slonik> + >( + sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + WHERE v.target_id = ${target} AND v.valid IS TRUE + ORDER BY v.created_at DESC + LIMIT 1 + ` + ); + + if (!version) { + return null; + } + + return { + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: version.base_schema, + }; + }, + async getLatestValidVersion({ target }) { + const version = await pool.one< + Slonik> + >( + sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + WHERE v.target_id = ${target} AND v.valid IS TRUE + ORDER BY v.created_at DESC + LIMIT 1 + ` + ); + + return { + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: version.base_schema, + }; + }, + async getLatestVersion({ project, target }) { + const version = await pool.one< + Slonik< + versions & + Pick + > + >( + sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE v.target_id = ${target} AND t.project_id = ${project} + ORDER BY v.created_at DESC + LIMIT 1 + ` + ); + + return { + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: version.base_schema, + }; + }, + + async getMaybeLatestVersion({ project, target }) { + const version = await pool.maybeOne< + Slonik< + versions & + Pick + > + >( + sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE v.target_id = ${target} AND t.project_id = 
${project} + ORDER BY v.created_at DESC + LIMIT 1 + ` + ); + + if (!version) { + return null; + } + + return { + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: version.base_schema, + }; + }, + async getLatestSchemas({ organization, project, target }) { + const latest = await pool.maybeOne>(sql` + SELECT v.id FROM public.versions as v + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE t.id = ${target} AND t.project_id = ${project} + ORDER BY v.created_at DESC + LIMIT 1 + `); + + if (!latest) { + return { + schemas: [], + }; + } + + const schemas = await storage.getSchemasOfVersion({ + version: latest.id, + organization, + project, + target, + }); + + return { + version: latest.id, + schemas, + }; + }, + async getSchemasOfVersion({ version, includeMetadata = false }) { + const results = await pool.many< + Slonik< + WithUrl< + WithMaybeMetadata< + Pick< + commits, + | 'id' + | 'commit' + | 'author' + | 'content' + | 'created_at' + | 'project_id' + | 'service' + | 'target_id' + > + > + > + > + >( + sql` + SELECT + c.id, + c.commit, + c.author, + c.content, + c.created_at, + c.project_id, + c.service, + c.target_id, + ${includeMetadata ? 
sql`c.metadata,` : sql``} + vc.url + FROM + public.version_commit AS vc + LEFT JOIN + public.commits AS c + ON c.id = vc.commit_id + WHERE + vc.version_id = ${version} + ORDER BY + c.created_at DESC + ` + ); + + return results.map(transformSchema); + }, + async getSchemasOfPreviousVersion({ version, target }) { + const results = await pool.query>>( + sql` + SELECT c.*, vc.url FROM public.version_commit as vc + LEFT JOIN public.commits as c ON (c.id = vc.commit_id) + WHERE vc.version_id = ( + SELECT v.id FROM public.versions as v WHERE v.created_at < ( + SELECT vi.created_at FROM public.versions as vi WHERE vi.id = ${version} + ) AND v.target_id = ${target} ORDER BY v.created_at DESC LIMIT 1 + ) + ORDER BY c.created_at DESC + ` + ); + + return results.rows.map(transformSchema); + }, + async updateSchemaUrlOfVersion({ version, commit, url }) { + await pool.query( + sql` + UPDATE public.version_commit + SET url = ${url ?? null} + WHERE version_id = ${version} AND commit_id = ${commit} + ` + ); + }, + + async updateServiceName({ commit, name }) { + await pool.query( + sql` + UPDATE public.commits + SET service = ${name ?? 
null} + WHERE id = ${commit} + ` + ); + }, + + async getVersion({ project, target, version }) { + const result = await pool.one< + Slonik< + versions & + Pick + > + >(sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE v.target_id = ${target} AND t.project_id = ${project} AND v.id = ${version} LIMIT 1 + `); + + return { + id: result.id, + valid: result.valid, + date: result.created_at as any, + commit: result.commit_id, + base_schema: result.base_schema, + author: result.author, + service: result.service, + }; + }, + + async getVersions({ project, target, after, limit }) { + const query = sql` + SELECT v.*, c.author, c.service, c.commit FROM public.versions as v + LEFT JOIN public.commits as c ON (c.id = v.commit_id) + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE v.target_id = ${target} AND t.project_id = ${project} AND v.created_at < ${ + after + ? 
sql`(SELECT va.created_at FROM public.versions as va WHERE va.id = ${after})` + : sql`NOW()` + } + ORDER BY v.created_at DESC + LIMIT ${limit + 1} + `; + const result = await pool.query< + Slonik< + versions & + Pick + > + >(query); + + const hasMore = result.rows.length > limit; + + const versions = result.rows.slice(0, limit).map((version) => ({ + id: version.id, + valid: version.valid, + date: version.created_at as any, + commit: version.commit_id, + base_schema: version.base_schema, + })); + + return { + versions, + hasMore, + }; + }, + async insertSchema({ + schema, + commit, + author, + project, + target, + service = null, + url = null, + metadata, + }) { + const result = await pool.one>(sql` + INSERT INTO public.commits + ( + author, + service, + commit, + content, + project_id, + target_id, + metadata + ) + VALUES + ( + ${author}, + ${service}::text, + ${commit}::text, + ${schema}::text, + ${project}, + ${target}, + ${metadata} + ) + RETURNING * + `); + + return transformSchema({ ...result, url }); + }, + async createVersion(input) { + // look for latest version in order to fetch urls of commits associated with that version + const previousVersion = await pool.maybeOne>(sql` + SELECT v.id FROM public.versions as v + LEFT JOIN public.targets as t ON (t.id = v.target_id) + WHERE t.id = ${input.target} AND t.project_id = ${input.project} + ORDER BY v.created_at DESC + LIMIT 1 + `); + // creates a new version + const newVersion = await pool.one< + Slonik> + >(sql` + INSERT INTO public.versions + ( + valid, + target_id, + commit_id, + base_schema + ) + VALUES + ( + ${input.valid}, + ${input.target}, + ${input.commit}, + ${input.base_schema} + ) + RETURNING + id, + created_at + `); + + // we want to write new url, so fill up the array with provided data + let commits: Array<{ commit_id: string; url?: string | null }> = [ + { + commit_id: input.commit, + url: input.url, + }, + ]; + + if (previousVersion?.id) { + const vid = previousVersion.id; + // fetch the rest 
of commits + const otherCommits = await pool.many< + Pick + >( + sql`SELECT commit_id, url FROM public.version_commit WHERE version_id = ${vid} AND commit_id != ${input.commit}` + ); + + commits = commits.concat(otherCommits); + } + + await Promise.all( + input.commits.map(async (cid) => { + await pool.query(sql` + INSERT INTO public.version_commit + (version_id, commit_id, url) + VALUES + (${newVersion.id}, ${cid}, ${ + commits.find((c) => c.commit_id === cid)?.url || null + }) + `); + }) + ); + + return { + id: newVersion.id, + date: newVersion.created_at as any, + url: input.url, + valid: input.valid, + commit: input.commit, + base_schema: input.base_schema, + }; + }, + + async updateVersionStatus({ version, valid }) { + return transformSchemaVersion( + await pool.one>(sql` + UPDATE public.versions + SET valid = ${valid} + WHERE id = ${version} + RETURNING * + `) + ); + }, + + getSchemaPushCount: async (selector) => { + if (selector.targetIds.length === 0) { + return 0; + } + + const result = await pool.query< + Slonik<{ + total: number; + }> + >(sql` + SELECT count(*) as total FROM public.versions WHERE target_id IN (${sql.join( + selector.targetIds, + sql`, ` + )}) AND created_at BETWEEN ${dateToSqlTimestamp( + selector.startTime + )} AND ${dateToSqlTimestamp(selector.endTime)}; + `); + + return result.rows[0].total; + }, + + getAllSchemaPushesGrouped: async (selector) => { + const result = await pool.query< + Slonik<{ + total: number; + target: string; + }> + >(sql` + SELECT target_id as target, count(*) as total FROM public.versions WHERE created_at BETWEEN ${dateToSqlTimestamp( + selector.startTime + )} AND ${dateToSqlTimestamp(selector.endTime)} GROUP BY target_id; + `); + + return [...result.rows]; + }, + + getSchema: batch(async (selectors) => { + const rows = await pool.many>>( + sql` + SELECT c.* + FROM public.commits as c + WHERE (c.id, c.target_id) IN ((${sql.join( + selectors.map((s) => sql`${s.commit}, ${s.target}`), + sql`), (` + )})) + ` + ); + 
const schemas = rows.map(transformSchema); + + return selectors.map((selector) => { + const schema = schemas.find( + (row) => row.id === selector.commit && row.target === selector.target + ); + + if (schema) { + return Promise.resolve(schema); + } + + return Promise.reject( + new Error( + `Schema not found (commit=${selector.commit}, target=${selector.target})` + ) + ); + }); + }), + + async getMaybeSchema({ commit, service, project, target }) { + const result = await pool.maybeOne>>( + sql` + SELECT c.* FROM public.commits as c + LEFT JOIN public.projects as p ON (p.id = c.project_id) + WHERE + c.commit = ${commit} + AND c.project_id = ${project} + AND c.target_id = ${target} + AND c.service = ${service ?? null}` + ); + + if (!result) { + return null; + } + + return transformSchema(result); + }, + async createActivity({ organization, project, target, user, type, meta }) { + const { identifiers, values } = objectToParams< + Omit + >({ + activity_metadata: meta, + activity_type: type, + organization_id: organization, + project_id: project ?? null, + target_id: target ?? null, + user_id: user ?? 
null, + }); + + await pool.query>( + sql`INSERT INTO public.activities (${identifiers}) VALUES (${values}) RETURNING *;` + ); + }, + async getActivities(selector) { + let query: TaggedTemplateLiteralInvocationType; + if ('target' in selector) { + query = sql` + SELECT + jsonb_agg(a.*) as activity, + jsonb_agg(t.*) as target, + jsonb_agg(p.*) as project, + jsonb_agg(o.*) as organization, + jsonb_agg(u.*) as user + FROM public.activities as a + LEFT JOIN public.targets as t ON (t.id = a.target_id) + LEFT JOIN public.projects as p ON (p.id = a.project_id) + LEFT JOIN public.organizations as o ON (o.id = a.organization_id) + LEFT JOIN public.users as u ON (u.id = a.user_id) + WHERE + a.target_id = ${selector.target} + AND a.project_id = ${selector.project} + AND a.organization_id = ${selector.organization} + GROUP BY a.created_at + ORDER BY a.created_at DESC LIMIT ${selector.limit} + `; + } else if ('project' in selector) { + query = sql` + SELECT + jsonb_agg(a.*) as activity, + jsonb_agg(t.*) as target, + jsonb_agg(p.*) as project, + jsonb_agg(o.*) as organization, + jsonb_agg(u.*) as user + FROM public.activities as a + LEFT JOIN public.targets as t ON (t.id = a.target_id) + LEFT JOIN public.projects as p ON (p.id = a.project_id) + LEFT JOIN public.organizations as o ON (o.id = a.organization_id) + LEFT JOIN public.users as u ON (u.id = a.user_id) + WHERE + a.project_id = ${selector.project} + AND a.organization_id = ${selector.organization} + GROUP BY a.created_at + ORDER BY a.created_at DESC LIMIT ${selector.limit} + `; + } else { + query = sql` + SELECT + jsonb_agg(a.*) as activity, + jsonb_agg(t.*) as target, + jsonb_agg(p.*) as project, + jsonb_agg(o.*) as organization, + jsonb_agg(u.*) as user + FROM public.activities as a + LEFT JOIN public.targets as t ON (t.id = a.target_id) + LEFT JOIN public.projects as p ON (p.id = a.project_id) + LEFT JOIN public.organizations as o ON (o.id = a.organization_id) + LEFT JOIN public.users as u ON (u.id = a.user_id) + WHERE 
a.organization_id = ${selector.organization} + GROUP BY a.created_at + ORDER BY a.created_at DESC LIMIT ${selector.limit} + `; + } + + const result = await pool.query< + Slonik<{ + activity: [activities]; + target: [targets]; + project: [projects]; + organization: [organizations]; + user: [users]; + }> + >(query); + + return result.rows.map(transformActivity); + }, + async insertPersistedOperation({ + operationHash, + project, + name, + kind, + content, + }) { + return transformPersistedOperation( + await pool.one>(sql` + INSERT INTO public.persisted_operations + (operation_hash, operation_name, operation_kind, content, project_id) + VALUES + (${operationHash}, ${name}, ${kind}, ${content}, ${project}) + RETURNING * + `) + ); + }, + async getPersistedOperations({ project }) { + const results = await pool.query>( + sql` + SELECT * FROM public.persisted_operations + WHERE project_id = ${project} + ORDER BY created_at DESC` + ); + + return results.rows.map(transformPersistedOperation); + }, + async getSelectedPersistedOperations({ project, hashes }) { + const results = await pool.query>( + sql` + SELECT * FROM public.persisted_operations + WHERE project_id = ${project} AND operation_hash IN (${sql.join( + hashes, + sql`, ` + )}) + ORDER BY created_at DESC` + ); + + return results.rows.map(transformPersistedOperation); + }, + async getPersistedOperation({ operation, project }) { + return transformPersistedOperation( + await pool.one>( + sql` + SELECT c.* FROM public.persisted_operations as c + WHERE c.id = ${operation} AND project_id = ${project}` + ) + ); + }, + async comparePersistedOperations({ project, hashes }) { + const results = await pool.query< + Pick + >( + sql` + SELECT operation_hash FROM public.persisted_operations + WHERE project_id = ${project} AND operation_hash IN (${sql.join( + hashes, + sql`, ` + )}) + ORDER BY created_at DESC` + ); + + return hashes.filter( + (hash) => !results.rows.some((row) => row.operation_hash === hash) + ); + }, + async 
deletePersistedOperation({ project, operation }) { + const result = transformPersistedOperation( + await pool.one>( + sql` + DELETE FROM public.persisted_operations + WHERE id = ${operation} AND project_id = ${project} + RETURNING * + ` + ) + ); + + return result; + }, + async addSlackIntegration({ organization, token }) { + await pool.query>( + sql` + UPDATE public.organizations + SET slack_token = ${token} + WHERE id = ${organization} + ` + ); + }, + async deleteSlackIntegration({ organization }) { + await pool.query>( + sql` + UPDATE public.organizations + SET slack_token = NULL + WHERE id = ${organization} + ` + ); + }, + async getSlackIntegrationToken({ organization }) { + const result = await pool.maybeOne>( + sql` + SELECT slack_token + FROM public.organizations + WHERE id = ${organization} + ` + ); + + return result?.slack_token; + }, + async addGitHubIntegration({ organization, installationId }) { + await pool.query>( + sql` + UPDATE public.organizations + SET github_app_installation_id = ${installationId} + WHERE id = ${organization} + ` + ); + }, + async deleteGitHubIntegration({ organization }) { + await pool.query>( + sql` + UPDATE public.organizations + SET github_app_installation_id = NULL + WHERE id = ${organization} + ` + ); + await pool.query>( + sql` + UPDATE public.projects + SET git_repository = NULL + WHERE org_id = ${organization} + ` + ); + }, + async getGitHubIntegrationInstallationId({ organization }) { + const result = await pool.maybeOne< + Pick + >( + sql` + SELECT github_app_installation_id + FROM public.organizations + WHERE id = ${organization} + ` + ); + + return result?.github_app_installation_id; + }, + async addAlertChannel({ project, name, type, slack, webhook }) { + return transformAlertChannel( + await pool.one>( + sql` + INSERT INTO public.alert_channels + ("name", "type", "project_id", "slack_channel", "webhook_endpoint") + VALUES + (${name}, ${type}, ${project}, ${slack?.channel ?? null}, ${ + webhook?.endpoint ?? 
null + }) + RETURNING * + ` + ) + ); + }, + async deleteAlertChannels({ project, channels }) { + const result = await pool.query>( + sql` + DELETE FROM public.alert_channels + WHERE + project_id = ${project} AND + id IN (${sql.join(channels, sql`, `)}) + RETURNING * + ` + ); + + return result.rows.map(transformAlertChannel); + }, + async getAlertChannels({ project }) { + const result = await pool.query>( + sql`SELECT * FROM public.alert_channels WHERE project_id = ${project} ORDER BY created_at DESC` + ); + + return result.rows.map(transformAlertChannel); + }, + + async addAlert({ organization, project, target, channel, type }) { + return transformAlert( + await pool.one>( + sql` + INSERT INTO public.alerts + ("type", "alert_channel_id", "target_id", "project_id") + VALUES + (${type}, ${channel}, ${target}, ${project}) + RETURNING * + ` + ), + organization + ); + }, + async deleteAlerts({ organization, project, alerts }) { + const result = await pool.query>( + sql` + DELETE FROM public.alerts + WHERE + project_id = ${project} AND + id IN (${sql.join(alerts, sql`, `)}) + RETURNING * + ` + ); + + return result.rows.map((row) => transformAlert(row, organization)); + }, + async getAlerts({ organization, project }) { + const result = await pool.query>( + sql`SELECT * FROM public.alerts WHERE project_id = ${project} ORDER BY created_at DESC` + ); + + return result.rows.map((row) => transformAlert(row, organization)); + }, + async adminGetOrganizationsTargetPairs() { + const results = await pool.query< + Slonik<{ + organization: string; + target: string; + }> + >( + sql` + SELECT + o.id as organization, + t.id as target + FROM public.targets AS t + LEFT JOIN public.projects AS p ON (p.id = t.project_id) + LEFT JOIN public.organizations AS o ON (o.id = p.org_id) + ` + ); + return results.rows; + }, + async getGetOrganizationsAndTargetPairsWithLimitInfo() { + const results = await pool.query< + Slonik<{ + organization: string; + target: string; + limit_operations_monthly: 
number; + limit_schema_push_monthly: number; + limit_retention_days: number; + }> + >( + sql` + SELECT + o.id as organization, + o.limit_operations_monthly, + o.limit_schema_push_monthly, + o.limit_retention_days, + t.id as target + FROM public.targets AS t + LEFT JOIN public.projects AS p ON (p.id = t.project_id) + LEFT JOIN public.organizations AS o ON (o.id = p.org_id) + ` + ); + return results.rows; + }, + async adminGetStats(daysLimit?: number | null) { + // count schema versions by organization + const versionsResult = pool.query< + Slonik< + Pick & { + total: number; + } + > + >(sql` + SELECT + COUNT(*) as total, + o.id + FROM versions AS v + LEFT JOIN targets AS t ON (t.id = v.target_id) + LEFT JOIN projects AS p ON (p.id = t.project_id) + LEFT JOIN organizations AS o ON (o.id = p.org_id) + ${ + daysLimit + ? sql`WHERE v.created_at > NOW() - (INTERVAL '1 days' * ${daysLimit})` + : sql`` + } + GROUP by o.id + `); + + // count users by organization + const usersResult = pool.query< + Slonik< + Pick & { + total: number; + } + > + >(sql` + SELECT + COUNT(*) as total, + o.id + FROM organization_member AS om + LEFT JOIN organizations AS o ON (o.id = om.organization_id) + GROUP by o.id + `); + + // count projects by organization + const projectsResult = pool.query< + Slonik< + Pick & { + total: number; + } + > + >(sql` + SELECT + COUNT(*) as total, + o.id + FROM projects AS p + LEFT JOIN organizations AS o ON (o.id = p.org_id) + GROUP by o.id + `); + + // count targets by organization + const targetsResult = pool.query< + Slonik< + Pick & { + total: number; + } + > + >(sql` + SELECT + COUNT(*) as total, + o.id + FROM targets AS t + LEFT JOIN projects AS p ON (p.id = t.project_id) + LEFT JOIN organizations AS o ON (o.id = p.org_id) + GROUP by o.id + `); + + // count persisted operations by organization + const persistedOperationsResult = pool.query< + Slonik< + Pick & { + total: number; + } + > + >(sql` + SELECT + COUNT(*) as total, + o.id + FROM 
persisted_operations AS po + LEFT JOIN projects AS p ON (p.id = po.project_id) + LEFT JOIN organizations AS o ON (o.id = p.org_id) + ${ + daysLimit + ? sql`WHERE po.created_at > NOW() - (INTERVAL '1 days' * ${daysLimit})` + : sql`` + } + GROUP by o.id + `); + + // get organizations data + const organizationsResult = pool.query>(sql` + SELECT * FROM organizations + `); + + const [ + versions, + users, + projects, + targets, + persistedOperations, + organizations, + ] = await Promise.all([ + versionsResult, + usersResult, + projectsResult, + targetsResult, + persistedOperationsResult, + organizationsResult, + ]); + + const rows: Array<{ + organization: Organization; + versions: number; + users: number; + projects: number; + targets: number; + persistedOperations: number; + daysLimit?: number | null; + }> = []; + + function extractTotal< + T extends { + total: number; + id: string; + } + >(nodes: readonly T[], id: string) { + return nodes.find((node) => node.id === id)?.total ?? 0; + } + + for (const organization of organizations.rows) { + rows.push({ + organization: transformOrganization(organization), + versions: extractTotal(versions.rows, organization.id), + users: extractTotal(users.rows, organization.id), + projects: extractTotal(projects.rows, organization.id), + targets: extractTotal(targets.rows, organization.id), + persistedOperations: extractTotal( + persistedOperations.rows, + organization.id + ), + daysLimit, + }); + } + + return rows; + }, + async getBaseSchema({ project, target }) { + const data = await pool.maybeOne>( + sql`SELECT base_schema FROM public.targets WHERE id=${target} AND project_id=${project}` + ); + return data!.base_schema; + }, + async updateBaseSchema({ project, target }, base) { + if (base) { + await pool.query( + sql`UPDATE public.targets SET base_schema = ${base} WHERE id = ${target} AND project_id = ${project}` + ); + } else { + await pool.query( + sql`UPDATE public.targets SET base_schema = null WHERE id = ${target} AND 
project_id = ${project}` + ); + } + }, + async getBillingParticipants() { + const results = await pool.query>( + sql`SELECT * FROM public.organizations_billing` + ); + + return results.rows.map(transformOrganizationBilling); + }, + async getOrganizationBilling(selector) { + const results = await pool.query>( + sql`SELECT * FROM public.organizations_billing WHERE organization_id = ${selector.organization}` + ); + + const mapped = results.rows.map(transformOrganizationBilling); + + return mapped[0] || null; + }, + async deleteOrganizationBilling(selector) { + await pool.query>( + sql`DELETE FROM public.organizations_billing + WHERE organization_id = ${selector.organization}` + ); + }, + async createOrganizationBilling({ + billingEmailAddress, + organizationId, + externalBillingReference, + }) { + return transformOrganizationBilling( + await pool.one>( + sql` + INSERT INTO public.organizations_billing + ("organization_id", "external_billing_reference_id", "billing_email_address") + VALUES + (${organizationId}, ${externalBillingReference}, ${ + billingEmailAddress || null + }) + RETURNING * + ` + ) + ); + }, + }; + + return storage; +} + +function isDefined(val: T | undefined | null): val is T { + return val !== undefined && val !== null; +} diff --git a/packages/services/storage/src/module.d.ts b/packages/services/storage/src/module.d.ts new file mode 100644 index 000000000..56c141c9c --- /dev/null +++ b/packages/services/storage/src/module.d.ts @@ -0,0 +1 @@ +declare module 'slonik-interceptor-query-logging'; diff --git a/packages/services/storage/src/shared.ts b/packages/services/storage/src/shared.ts new file mode 100644 index 000000000..2ed9d235e --- /dev/null +++ b/packages/services/storage/src/shared.ts @@ -0,0 +1,8 @@ +/** + * Slonik 23.8.X requires an index signature in types (which is weird) + */ +export type Slonik = { + [K in keyof T]: T[K]; +} & { + [key: string]: null; +}; diff --git a/packages/services/storage/src/tokens.ts 
b/packages/services/storage/src/tokens.ts new file mode 100644 index 000000000..509693e52 --- /dev/null +++ b/packages/services/storage/src/tokens.ts @@ -0,0 +1,96 @@ +/// +import { sql } from 'slonik'; +import { getPool, tokens, toDate } from './db'; +import type { Slonik } from './shared'; + +export async function createTokenStorage(connection: string) { + const pool = getPool(connection); + + return { + async getTokens({ target }: { target: string }) { + const result = await pool.query< + Slonik< + tokens & { + organization_id: string; + } + > + >( + sql` + SELECT * + FROM public.tokens + WHERE + target_id = ${target} + AND deleted_at IS NULL + ORDER BY created_at DESC + ` + ); + + return result.rows; + }, + async getToken({ token }: { token: string }) { + return pool.one>( + sql` + SELECT * + FROM public.tokens + WHERE token = ${token} AND deleted_at IS NULL + LIMIT 1 + ` + ); + }, + createToken({ + token, + tokenAlias, + target, + project, + organization, + name, + scopes, + }: { + token: string; + tokenAlias: string; + name: string; + target: string; + project: string; + organization: string; + scopes: readonly string[]; + }) { + return pool.one>( + sql` + INSERT INTO public.tokens + (name, token, token_alias, target_id, project_id, organization_id, scopes) + VALUES + (${name}, ${token}, ${tokenAlias}, ${target}, ${project}, ${organization}, ${sql.array( + scopes, + 'text' + )}) + RETURNING * + ` + ); + }, + async deleteToken({ token }: { token: string }) { + await pool.query( + sql` + UPDATE public.tokens SET deleted_at = NOW() WHERE token = ${token} + ` + ); + }, + async touchTokens({ + tokens, + }: { + tokens: Array<{ token: string; date: Date }>; + }) { + await pool.query(sql` + UPDATE public.tokens as t + SET last_used_at = c.last_used_at + FROM ( + VALUES + (${sql.join( + tokens.map((t) => sql`${t.token}, ${toDate(t.date)}`), + sql`), (` + )}) + ) as c(token, last_used_at) + WHERE c.token = t.token; + `); + }, + }; +} diff --git 
a/packages/services/storage/tools/create-db.mjs b/packages/services/storage/tools/create-db.mjs new file mode 100644 index 000000000..4f06848ad --- /dev/null +++ b/packages/services/storage/tools/create-db.mjs @@ -0,0 +1,44 @@ +// @ts-check +import cn from './db-connection-string.mjs'; +import pgpFactory from 'pg-promise'; + +const pgp = pgpFactory(); +const db = pgp(cn('postgres')); + +const dbName = 'registry'; + +probe().then(() => + db + .query(`SELECT 1 FROM pg_database WHERE datname = '${dbName}'`) + .then((result) => { + if (!result.length) { + console.log(`Creating "${dbName}" database`); + return db.query(`CREATE DATABASE ${dbName}`); + } + + console.log(`Database "${dbName}" already exists`); + }) + .then(() => { + process.exit(0); + }) + .catch((error) => { + console.error(error); + process.exit(1); + }) +); + +/** + * + * @param {number} numberOfRetries + * @returns {any} + */ +function probe(numberOfRetries = 0) { + return db.any(`SELECT 1`).catch(async (err) => { + if (numberOfRetries === 15) { + throw new Error('Database not ready after 15 retries. Exiting.'); + } + console.log('Database not ready. Retry in 1000ms\nReason:\n' + err); + await new Promise((res) => setTimeout(res, 1000)); + return probe(numberOfRetries + 1); + }); +} diff --git a/packages/services/storage/tools/db-connection-string.mjs b/packages/services/storage/tools/db-connection-string.mjs new file mode 100644 index 000000000..a6e5967f9 --- /dev/null +++ b/packages/services/storage/tools/db-connection-string.mjs @@ -0,0 +1,17 @@ +const { + POSTGRES_USER = 'postgres', + POSTGRES_PASSWORD = 'postgres', + POSTGRES_HOST = 'localhost', + POSTGRES_PORT = 5432, + POSTGRES_DB = 'registry', + POSTGRES_ENABLE_SSL = null, + POSTGRES_CONNECTION_STRING = null, +} = process.env; + +const cn = (dbName = POSTGRES_DB) => + POSTGRES_CONNECTION_STRING || + `postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${dbName}${ + POSTGRES_ENABLE_SSL ? 
'' : '?sslmode=disable' + }`; + +export default cn; diff --git a/packages/services/storage/tools/register.cjs b/packages/services/storage/tools/register.cjs new file mode 100644 index 000000000..395ffec1e --- /dev/null +++ b/packages/services/storage/tools/register.cjs @@ -0,0 +1,3 @@ +const { register } = require('esbuild-register/dist/node'); + +register({ extensions: ['.mjs', '.ts'], format: 'cjs' }); diff --git a/packages/services/storage/tsconfig.json b/packages/services/storage/tsconfig.json new file mode 100644 index 000000000..3572993a8 --- /dev/null +++ b/packages/services/storage/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "rootDir": "../.." + }, + "include": ["src", "migrations"] +} diff --git a/packages/services/stripe-billing/.env.template b/packages/services/stripe-billing/.env.template new file mode 100644 index 000000000..e2f7752f1 --- /dev/null +++ b/packages/services/stripe-billing/.env.template @@ -0,0 +1,8 @@ +PORT=4013 +USAGE_ESTIMATOR_ENDPOINT=http://localhost:4011 +STRIPE_SECRET_KEY="" +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=registry \ No newline at end of file diff --git a/packages/services/stripe-billing/package.json b/packages/services/stripe-billing/package.json new file mode 100644 index 000000000..549275ce4 --- /dev/null +++ b/packages/services/stripe-billing/package.json @@ -0,0 +1,41 @@ +{ + "private": true, + "type": "module", + "name": "@hive/stripe-billing", + "description": "A microservice for Hive SaaS, that syncs usage information to Stripe (metered billing)", + "version": "0.0.1", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { 
+ "cross-undici-fetch": "0.4.3", + "@trpc/client": "9.23.2", + "@trpc/server": "9.23.2", + "zod": "3.15.1", + "stripe": "8.220.0", + "reflect-metadata": "0.1.13", + "@graphql-hive/core": "0.2.0", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "date-fns": "2.28.0", + "got": "12.0.4" + }, + "devDependencies": { + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/stripe-billing/src/api.ts b/packages/services/stripe-billing/src/api.ts new file mode 100644 index 000000000..940e91cce --- /dev/null +++ b/packages/services/stripe-billing/src/api.ts @@ -0,0 +1,354 @@ +import * as trpc from '@trpc/server'; +import { z } from 'zod'; +import { inferProcedureInput, inferProcedureOutput } from '@trpc/server'; +import { createStorage } from '@hive/storage'; +import { Stripe } from 'stripe'; +import { addMonths, startOfMonth } from 'date-fns'; + +export type Context = { + storage$: ReturnType; + stripe: Stripe; + stripeData$: Promise<{ + schemaPushesPrice: Stripe.Price; + operationsPrice: Stripe.Price; + basePrice: Stripe.Price; + }>; +}; + +export { Stripe as StripeTypes }; + +export const stripeBillingApiRouter = trpc + .router() + .query('availablePrices', { + async resolve({ ctx }) { + return await ctx.stripeData$; + }, + }) + .query('invoices', { + input: z.object({ + organizationId: z.string().nonempty(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + const organizationBillingRecord = await storage.getOrganizationBilling({ + organization: input.organizationId, + }); + + if (!organizationBillingRecord) { + throw new Error(`Organization does not have a subscription record!`); + } + + const invoices = await ctx.stripe.invoices.list({ + customer: organizationBillingRecord.externalBillingReference, + }); + + return invoices.data; + }, + }) 
+ .query('upcomingInvoice', { + input: z.object({ + organizationId: z.string().nonempty(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + const organizationBillingRecord = await storage.getOrganizationBilling({ + organization: input.organizationId, + }); + + if (!organizationBillingRecord) { + throw new Error(`Organization does not have a subscription record!`); + } + + try { + const upcomingInvoice = await ctx.stripe.invoices.retrieveUpcoming({ + customer: organizationBillingRecord.externalBillingReference, + }); + + return upcomingInvoice; + } catch (e) { + return null; + } + }, + }) + .query('activeSubscription', { + input: z.object({ + organizationId: z.string().nonempty(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + const organizationBillingRecord = await storage.getOrganizationBilling({ + organization: input.organizationId, + }); + + if (!organizationBillingRecord) { + throw new Error(`Organization does not have a subscription record!`); + } + + const customer = await ctx.stripe.customers.retrieve( + organizationBillingRecord.externalBillingReference + ); + + if (customer.deleted === true) { + await storage.deleteOrganizationBilling({ + organization: input.organizationId, + }); + + return null; + } + + const subscriptions = await ctx.stripe.subscriptions + .list({ + customer: organizationBillingRecord.externalBillingReference, + }) + .then((v) => v.data.filter((r) => r.metadata?.hive_subscription)); + + const actualSubscription = subscriptions[0] || null; + + const paymentMethod = await ctx.stripe.paymentMethods.list({ + customer: customer.id, + type: 'card', + }); + + return { + paymentMethod: paymentMethod.data[0] || null, + subscription: actualSubscription, + }; + }, + }) + .mutation('syncOrganizationToStripe', { + input: z.object({ + organizationId: z.string().nonempty(), + reserved: z + .object({ + /** in millions, value 1 is actually 1_000_000 */ + operations: z.number().nonnegative(), 
+ schemaPushes: z.number().nonnegative(), + }) + .required(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + const [organizationBillingRecord, organization, stripePrices] = + await Promise.all([ + storage.getOrganizationBilling({ + organization: input.organizationId, + }), + storage.getOrganization({ + organization: input.organizationId, + }), + ctx.stripeData$, + ]); + + if (organizationBillingRecord && organization) { + const allSubscriptions = await ctx.stripe.subscriptions.list({ + customer: organizationBillingRecord.externalBillingReference, + }); + + const actualSubscription = allSubscriptions.data.find( + (r) => r.metadata?.hive_subscription + ); + + if (actualSubscription) { + for (const item of actualSubscription.items.data) { + if (item.plan.id === stripePrices.operationsPrice.id) { + await ctx.stripe.subscriptionItems.update(item.id, { + quantity: input.reserved.operations, + }); + } + if (item.plan.id === stripePrices.schemaPushesPrice.id) { + await ctx.stripe.subscriptionItems.update(item.id, { + quantity: input.reserved.schemaPushes, + }); + } + } + } + + const updateParams: Stripe.CustomerUpdateParams = {}; + + if (organizationBillingRecord.billingEmailAddress) { + updateParams.email = organizationBillingRecord.billingEmailAddress; + } + + if (Object.keys(updateParams).length > 0) { + await ctx.stripe.customers.update( + organizationBillingRecord.externalBillingReference, + updateParams + ); + } + } else { + throw new Error( + `Failed to sync subscription for organization: failed to find find active record` + ); + } + }, + }) + .mutation('cancelSubscriptionForOrganization', { + input: z.object({ + organizationId: z.string().nonempty(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + const organizationBillingRecord = await storage.getOrganizationBilling({ + organization: input.organizationId, + }); + + if (organizationBillingRecord === null) { + throw new Error( + `Failed to cancel 
subscription for organization: no existing participant record` + ); + } + + const subscriptions = await ctx.stripe.subscriptions + .list({ + customer: organizationBillingRecord.externalBillingReference, + }) + .then((v) => v.data.filter((r) => r.metadata?.hive_subscription)); + + if (subscriptions.length === 0) { + throw new Error( + `Failed to cancel subscription for organization: failed to find linked Stripe subscriptions` + ); + } + + const actualSubscription = subscriptions[0]; + const response = await ctx.stripe.subscriptions.del( + actualSubscription.id, + { + prorate: true, + } + ); + + return response; + }, + }) + .mutation('createSubscriptionForOrganization', { + input: z.object({ + paymentMethodId: z.string().nullish(), + organizationId: z.string().nonempty(), + couponCode: z.string().nullish(), + reserved: z + .object({ + /** in millions, value 1 is actually 1_000_000 */ + operations: z.number().nonnegative(), + schemaPushes: z.number().nonnegative(), + }) + .required(), + }), + async resolve({ ctx, input }) { + const storage = await ctx.storage$; + let organizationBillingRecord = await storage.getOrganizationBilling({ + organization: input.organizationId, + }); + const organization = await storage.getOrganization({ + organization: input.organizationId, + }); + + const orgOwner = await storage.getOrganizationOwner({ + organization: input.organizationId, + }); + + const customerId = organizationBillingRecord?.externalBillingReference + ? 
organizationBillingRecord.externalBillingReference + : await ctx.stripe.customers + .create({ + metadata: { + external_reference_id: input.organizationId, + }, + email: orgOwner.user.email, + name: organization.name, + }) + .then((r) => r.id); + + if (!organizationBillingRecord) { + organizationBillingRecord = await storage.createOrganizationBilling({ + externalBillingReference: customerId, + organizationId: input.organizationId, + billingEmailAddress: orgOwner.user.email, + }); + } + + const existingPaymentMethods = ( + await ctx.stripe.paymentMethods.list({ + customer: customerId, + type: 'card', + }) + ).data; + + let paymentMethodId: string | null = null; + + if (input.paymentMethodId) { + const paymentMethodConfiguredAlready = existingPaymentMethods.find( + (v) => v.id === input.paymentMethodId + ); + + if (paymentMethodConfiguredAlready) { + paymentMethodId = paymentMethodConfiguredAlready.id; + } else { + paymentMethodId = ( + await ctx.stripe.paymentMethods.attach(input.paymentMethodId, { + customer: customerId, + }) + ).id; + } + } else { + paymentMethodId = existingPaymentMethods[0]?.id || null; + } + + if (!paymentMethodId) { + throw new Error( + `Payment method is not specified, and customer does not have it configured.` + ); + } + + const stripePrices = await ctx.stripeData$; + + const subscription = await ctx.stripe.subscriptions.create({ + trial_period_days: 14, + metadata: { + hive_subscription: 'true', + }, + coupon: input.couponCode || undefined, + customer: customerId, + default_payment_method: paymentMethodId, + billing_cycle_anchor: + startOfMonth(addMonths(new Date(), 1)).getTime() / 1000, + items: [ + { + price: stripePrices.basePrice.id, + quantity: 1, + }, + { + price: stripePrices.operationsPrice.id, + quantity: input.reserved.operations, + }, + { + price: stripePrices.schemaPushesPrice.id, + quantity: input.reserved.schemaPushes, + }, + ], + }); + + return { + organizationBilling: organizationBillingRecord, + stripeCustomer: customerId, + 
stripeSubscription: subscription, + }; + }, + }); + +export type StripeBillingApi = typeof stripeBillingApiRouter; + +export type StripeBillingApiQuery = keyof StripeBillingApi['_def']['queries']; +export type StripeBillingQueryOutput = + inferProcedureOutput; +export type StripeBillingQueryInput = + inferProcedureInput; + +export type StripeBillingApiMutation = + keyof StripeBillingApi['_def']['mutations']; +export type StripeBillingMutationOutput< + TRouteKey extends StripeBillingApiMutation +> = inferProcedureOutput; +export type StripeBillingMutationInput< + TRouteKey extends StripeBillingApiMutation +> = inferProcedureInput; diff --git a/packages/services/stripe-billing/src/billing-sync.ts b/packages/services/stripe-billing/src/billing-sync.ts new file mode 100644 index 000000000..2264b1faa --- /dev/null +++ b/packages/services/stripe-billing/src/billing-sync.ts @@ -0,0 +1,228 @@ +import { FastifyLoggerInstance } from '@hive/service-common'; +import { createStorage as createPostgreSQLStorage } from '@hive/storage'; +import { Stripe } from 'stripe'; + +export function createStripeBilling(config: { + logger: FastifyLoggerInstance; + rateEstimator: { + endpoint: string; + }; + storage: { + connectionString: string; + }; + stripe: { + token: string; + syncIntervalMs: number; + }; +}) { + const logger = config.logger; + const postgres$ = createPostgreSQLStorage(config.storage.connectionString); + let intervalHandle: null | ReturnType = null; + // feat(metered-usage) + // const estimationApi = createTRPCClient({ + // url: `${config.rateEstimator.endpoint}/trpc`, + // fetch, + // }); + const stripeApi = new Stripe(config.stripe.token, { + apiVersion: '2020-08-27', + typescript: true, + }); + const loadStripeData$ = ensureStripeProducts(); + + async function ensureStripeProducts(): Promise<{ + schemaPushesPrice: Stripe.Price; + operationsPrice: Stripe.Price; + basePrice: Stripe.Price; + }> { + const relevantProducts = await stripeApi.products + .list({ + active: true, 
+ type: 'service', + }) + .then((r) => r.data.filter((v) => v.metadata?.hive_plan)); + + if (relevantProducts.length !== 1) { + throw new Error( + `Invalid count of Hive products configured in Stripe: ${relevantProducts.length}` + ); + } + + const prices = (await stripeApi.prices.list({ + product: relevantProducts[0].id, + active: true, + expand: ['data.tiers'], + })) as Stripe.Response< + Stripe.ApiList + >; + + const schemaPushesPrice = prices.data.find( + (v) => v.metadata?.hive_usage === 'schema_pushes' + ); + + if (!schemaPushesPrice) { + throw new Error( + `Failed to find Stripe price ID with Hive metadata for schema-pushes` + ); + } + + const operationsPrice = prices.data.find( + (v) => v.metadata?.hive_usage === 'operations' + ); + + if (!operationsPrice) { + throw new Error( + `Failed to find Stripe price ID with Hive metadata for operations` + ); + } + + const basePrice = prices.data.find( + (v) => v.metadata?.hive_usage === 'base' + ); + + if (!basePrice) { + throw new Error( + `Failed to find Stripe price ID with Hive metadata for base price` + ); + } + + return { + operationsPrice, + schemaPushesPrice, + basePrice, + }; + } + + // feat(metered-usage) + // This is needed only if we want to enable real-time usage-based billing, and not by reserved quota.
+ // async function estimateAndReport() { + // const stripePrices = await loadStripeData$; + // const now = new Date(); + // const window = { + // startTime: startOfMonth(now).toUTCString(), + // endTime: endOfMonth(now).toUTCString(), + // }; + // config.logger.info( + // `Calculating billing usage information based on window: ${window.startTime} -> ${window.endTime}` + // ); + // const storage = await postgres$; + + // const [participants, pairs, operations, pushes] = await Promise.all([ + // storage.getBillingParticipants(), + // storage.adminGetOrganizationsTargetPairs(), + // estimationApi.query('estimateOperationsForAllTargets', window), + // estimationApi.query('estiamteSchemaPushesForAllTargets', window), + // ]); + + // logger.debug( + // `Fetched total of ${ + // Object.keys(participants).length + // } participants from the DB` + // ); + // logger.debug( + // `Fetched total of ${ + // Object.keys(operations).length + // } targets with usage information` + // ); + // logger.debug( + // `Fetched total of ${ + // Object.keys(pushes).length + // } targets with schema push information` + // ); + + // await Promise.all( + // participants.map(async (participant) => { + // const relevantTargetIds = pairs + // .filter((v) => v.organization === participant.organizationId) + // .map((v) => v.target); + + // if (relevantTargetIds.length === 0) { + // return; + // } + + // const totalSchemaPushes = relevantTargetIds.reduce( + // (prev, targetId) => prev + (pushes[targetId] || 0), + // 0 + // ); + // const totalOperations = relevantTargetIds.reduce( + // (prev, targetId) => prev + (operations[targetId] || 0), + // 0 + // ); + + // const subscriptions = await stripeApi.subscriptions + // .list({ + // customer: participant.externalBillingReference, + // }) + // .then((v) => v.data.filter((r) => r.metadata?.hive_subscription)); + + // if (subscriptions.length === 0) { + // return; + // } + + // const actualSubscription = subscriptions[0]; + // const subscriptionItems = 
actualSubscription.items.data; + + // for (const item of subscriptionItems) { + // if (item.plan.id === stripePrices.operationsPrice.id) { + // const asThausands = Math.floor(totalOperations / 1000); + // logger.info( + // `Reported total of ${asThausands}K operations for org ${participant.organizationId}` + // ); + // await stripeApi.subscriptionItems.createUsageRecord(item.id, { + // action: 'set', + // quantity: asThausands, + // }); + // } else if (item.plan.id === stripePrices.schemaPushesPrice.id) { + // logger.info( + // `Reported total of ${totalSchemaPushes} schema pushes for org ${participant.organizationId}` + // ); + // await stripeApi.subscriptionItems.createUsageRecord(item.id, { + // action: 'set', + // quantity: totalSchemaPushes, + // }); + // } + // } + // }) + // ); + // } + + return { + postgres$, + loadStripeData$, + stripeApi, + readiness() { + return true; + }, + async start() { + logger.info( + `Stripe Billing Sync starting, will sync Stripe every ${config.stripe.syncIntervalMs}ms...` + ); + + const stripeData = await loadStripeData$; + logger.info( + `Stripe is configured correctly, prices info: %o`, + stripeData + ); + + // feat(metered-usage) + // await estimateAndReport().catch((e) => { + // logger.error(e, `Failed to estimate and report`); + // }); + + // intervalHandle = setInterval(async () => { + // try { + // await estimateAndReport(); + // } catch (e) { + // logger.error(e, `Failed to estimate and report`); + // } + // }, config.stripe.syncIntervalMs); + }, + async stop() { + if (intervalHandle) { + clearInterval(intervalHandle); + intervalHandle = null; + } + + logger.info(`Stripe Billing Sync stopped...`); + }, + }; +} diff --git a/packages/services/stripe-billing/src/dev.ts b/packages/services/stripe-billing/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/stripe-billing/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await 
import('./index'); diff --git a/packages/services/stripe-billing/src/index.ts b/packages/services/stripe-billing/src/index.ts new file mode 100644 index 000000000..85dca2d11 --- /dev/null +++ b/packages/services/stripe-billing/src/index.ts @@ -0,0 +1,107 @@ +#!/usr/bin/env node +import 'reflect-metadata'; +import * as Sentry from '@sentry/node'; +import { + createServer, + startMetrics, + ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createConnectionString } from '@hive/storage'; +import { createStripeBilling } from './billing-sync'; +import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify/dist/trpc-server-adapters-fastify.cjs.js'; +import { stripeBillingApiRouter, Context } from './api'; + +const STRIPE_SYNC_INTERVAL_MS = process.env.STRIPE_SYNC_INTERVAL_MS + ? parseInt(process.env.STRIPE_SYNC_INTERVAL_MS as string) + : 10 * 60_000; // default is every 10m + +async function main() { + Sentry.init({ + serverName: 'stripe-billing', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'stripe-billing', + tracing: false, + }); + + try { + const { readiness, start, stop, stripeApi, postgres$, loadStripeData$ } = + createStripeBilling({ + logger: server.log, + stripe: { + token: ensureEnv('STRIPE_SECRET_KEY', 'string'), + syncIntervalMs: STRIPE_SYNC_INTERVAL_MS, + }, + rateEstimator: { + endpoint: ensureEnv('USAGE_ESTIMATOR_ENDPOINT', 'string'), + }, + storage: { + connectionString: createConnectionString(process.env as any), + }, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([stop(), server.close()]); + }, + }); + + const port = process.env.PORT || 4013; + + const context: Context = { + storage$: postgres$, + stripe: stripeApi, + stripeData$: loadStripeData$, + }; + + server.register(fastifyTRPCPlugin, { + prefix: '/trpc', + 
trpcOptions: { + router: stripeBillingApiRouter, + createContext: () => context, + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(readiness() ? 200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/stripe-billing/src/types.ts b/packages/services/stripe-billing/src/types.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/services/stripe-billing/tsconfig.json b/packages/services/stripe-billing/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/stripe-billing/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." 
+ }, + "files": ["src/index.ts"] +} diff --git a/packages/services/tokens/.env.template b/packages/services/tokens/.env.template new file mode 100644 index 000000000..f69ff48a7 --- /dev/null +++ b/packages/services/tokens/.env.template @@ -0,0 +1,6 @@ +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=registry +PORT=6001 \ No newline at end of file diff --git a/packages/services/tokens/.gitignore b/packages/services/tokens/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/tokens/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/tokens/CHANGELOG.md b/packages/services/tokens/CHANGELOG.md new file mode 100644 index 000000000..0ca8f5236 --- /dev/null +++ b/packages/services/tokens/CHANGELOG.md @@ -0,0 +1,345 @@ +# @hive/tokens + +## 0.6.8 + +### Patch Changes + +- 1623aca5: Upgrade sentry +- Updated dependencies [1623aca5] + - @hive/storage@0.14.1 + +## 0.6.7 + +### Patch Changes + +- Updated dependencies [ffb6feb6] + - @hive/storage@0.14.0 + +## 0.6.6 + +### Patch Changes + +- Updated dependencies [3a435baa] + - @hive/service-common@0.1.3 + +## 0.6.5 + +### Patch Changes + +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 +- Updated dependencies [689610ac] + - @hive/storage@0.13.3 + +## 0.6.4 + +### Patch Changes + +- Updated dependencies [a8485a06] + - @hive/service-common@0.1.2 + +## 0.6.3 + +### Patch Changes + +- Updated dependencies [b3e54d5a] + - @hive/storage@0.13.2 + +## 0.6.2 + +### Patch Changes + +- 33fbb5e: Bump +- Updated dependencies [33fbb5e] + - @hive/storage@0.13.1 + +## 0.6.1 + +### Patch Changes + +- bf78c16: Bump +- Updated dependencies [bf78c16] +- Updated dependencies [dc8fb96] + - @hive/storage@0.13.0 + +## 0.6.0 + +### Minor Changes + +- b5966ab: Replace undici with got +- b5966ab: Say hi to TSUP! 
+ +### Patch Changes + +- Updated dependencies [b5966ab] + - @hive/storage@0.12.0 + +## 0.5.2 + +### Patch Changes + +- 02b00f0: Update undici, sentry, bullmq + +## 0.5.1 + +### Patch Changes + +- 7549a38: Fix startup + +## 0.5.0 + +### Minor Changes + +- 7eca7f0: Introduce access scopes + +## 0.4.3 + +### Patch Changes + +- 2f7bc32: Collect default metrics + +## 0.4.2 + +### Patch Changes + +- 19d4cd5: Bump + +## 0.4.1 + +### Patch Changes + +- cc9aa01: Update dependencies + +## 0.4.0 + +### Minor Changes + +- 94f45a5: Do not remove tokens, mark as deleted + +## 0.3.3 + +### Patch Changes + +- 3d828f4: Use latest Sentry and Sentry NextJS integration + +## 0.3.2 + +### Patch Changes + +- 542b9b9: Better health/readiness in tokens service + +## 0.3.1 + +### Patch Changes + +- 207890c: Purge target cache when creating a token + +## 0.3.0 + +### Minor Changes + +- a835491: Use 404 when token is not found + +## 0.2.19 + +### Patch Changes + +- 0527e3c: Update Sentry +- 0527e3c: Add serverName tag to Sentry.init + +## 0.2.18 + +### Patch Changes + +- c28ebdf: Improve tiny-lru in tokens + +## 0.2.17 + +### Patch Changes + +- e0a47fb: Update tsconfig target + +## 0.2.16 + +### Patch Changes + +- 5ff2e7a: Bump +- 8627a9e: Fix fastify hooks + +## 0.2.15 + +### Patch Changes + +- 8f62c26: Update fastify + +## 0.2.14 + +### Patch Changes + +- 292b30f: Display token + +## 0.2.13 + +### Patch Changes + +- b010137: Update Sentry to 6.10.0 + +## 0.2.12 + +### Patch Changes + +- bfbc724: Handle token read errors on route level and capture exceptions only once per 10m + +## 0.2.11 + +### Patch Changes + +- 455c033: Cache failed GET /:token requests for 10 minutes + +## 0.2.10 + +### Patch Changes + +- Updated dependencies [db2c1c3] +- Updated dependencies [4e9f0aa] + - @hive/service-common@0.1.1 + +## 0.2.9 + +### Patch Changes + +- Updated dependencies [6ed9bf2] +- Updated dependencies [588285c] + - @hive/service-common@0.1.0 + +## 0.2.8 + +### Patch Changes + +- 4bc83be: Use HEAD 
and GET for healthchecks +- 4bc83be: Node 16 + +## 0.2.7 + +### Patch Changes + +- 93674cf: Update Sentry to 6.7.0 + +## 0.2.6 + +### Patch Changes + +- c6ef3d2: Bob update + +## 0.2.5 + +### Patch Changes + +- 148b294: Fix issues with undici headers timeout + +## 0.2.4 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes + +## 0.2.3 + +### Patch Changes + +- f6d2ca6: bump + +## 0.2.2 + +### Patch Changes + +- 8f3e43c: Track usage of tokens + +## 0.2.1 + +### Patch Changes + +- 67660b1: Bump +- c083cb6: Use SENTRY_DSN + +## 0.2.0 + +### Minor Changes + +- 078e758: Token per Target + +### Patch Changes + +- 7113a0e: Update Sentry to 6.3.5 + +## 0.1.12 + +### Patch Changes + +- d485371: Use trustProxy + +## 0.1.11 + +### Patch Changes + +- 22d5d6e: Add more data to Http requests in Sentry + +## 0.1.10 + +### Patch Changes + +- 3a03b35: Fix release id and LOG_LEVEL debug + +## 0.1.9 + +### Patch Changes + +- df4abcb: Enable Sentry only in prod + +## 0.1.8 + +### Patch Changes + +- 93fbf26: Use Sentry Tracing +- 28124b9: X-Cache HIT MISS on tokens + +## 0.1.7 + +### Patch Changes + +- 7bfdb93: Use Sentry to track performance + +## 0.1.6 + +### Patch Changes + +- c1e705a: bump + +## 0.1.5 + +### Patch Changes + +- efc1fbd: Fix build issues + +## 0.1.4 + +### Patch Changes + +- 7e88e71: bump + +## 0.1.3 + +### Patch Changes + +- b2d686e: bump + +## 0.1.2 + +### Patch Changes + +- 9da6738: bump + +## 0.1.1 + +### Patch Changes + +- e8cb071: fix issues with ncc packages diff --git a/packages/services/tokens/LICENSE b/packages/services/tokens/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/tokens/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, 
copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/services/tokens/package.json b/packages/services/tokens/package.json new file mode 100644 index 000000000..bc8c18852 --- /dev/null +++ b/packages/services/tokens/package.json @@ -0,0 +1,38 @@ +{ + "name": "@hive/tokens", + "type": "module", + "private": true, + "version": "0.6.8", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "zod": "3.15.1", + "@trpc/server": "9.23.2", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "ms": "2.1.3", + "p-timeout": "5.0.2", + "tiny-lru": "8.0.2", + "reflect-metadata": "0.1.13" + }, + "devDependencies": { + "@types/ms": "0.7.31", + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/tokens/src/api.ts b/packages/services/tokens/src/api.ts new file mode 100644 
index 000000000..7a811244f --- /dev/null +++ b/packages/services/tokens/src/api.ts @@ -0,0 +1,246 @@ +import { createErrorHandler } from '@hive/service-common'; +import * as trpc from '@trpc/server'; +import { inferProcedureInput, inferProcedureOutput } from '@trpc/server'; +import type { FastifyLoggerInstance } from 'fastify'; +import { z } from 'zod'; +import { useCache } from './cache'; +import { createHash } from 'crypto'; +import { Lru as LruType } from 'tiny-lru'; + +const TARGET_VALIDATION = z + .object({ + targetId: z.string().nonempty(), + }) + .required(); +const PROJECT_VALIDATION = z + .object({ + projectId: z.string().nonempty(), + }) + .required(); +const ORG_VALIDATION = z + .object({ + organizationId: z.string().nonempty(), + }) + .required(); +const TOKEN_VALIDATION = z + .object({ + token: z.string().nonempty(), + }) + .required(); + +function hashToken(token: string) { + return createHash('sha256').update(token).digest('hex'); +} + +function maskToken(token: string) { + return ( + token.substring(0, 3) + + '•'.repeat(token.length - 6) + + token.substring(token.length - 3) + ); +} + +function generateToken() { + const token = createHash('md5') + .update(Math.random() + '') + .update(Date.now() + '') + .digest('hex'); + + const hash = hashToken(token); + const alias = maskToken(token); + + return { + secret: token, + hash, + alias, + }; +} + +export type Context = { + logger: FastifyLoggerInstance; + errorHandler: ReturnType; + getStorage: ReturnType['getStorage']; + tokenReadFailuresCache: LruType<{ + error: string; + checkAt: number; + }>; + errorCachingInterval: number; +}; + +export const tokensApiRouter = trpc + .router() + .query('targetTokens', { + input: TARGET_VALIDATION, + async resolve({ ctx, input }) { + try { + const storage = await ctx.getStorage(); + + return await storage.readTarget(input.targetId); + } catch (error) { + ctx.errorHandler('Failed to get tokens of a target', error as Error); + + throw error; + } + }, + }) + 
.mutation('invalidateTokenByTarget', { + input: TARGET_VALIDATION, + async resolve({ ctx, input }) { + try { + const storage = await ctx.getStorage(); + storage.invalidateTarget(input.targetId); + + return true; + } catch (error) { + ctx.errorHandler( + 'Failed to invalidate tokens of a target', + error as Error + ); + + throw error; + } + }, + }) + .mutation('invalidateTokenByProject', { + input: PROJECT_VALIDATION, + async resolve({ ctx, input }) { + try { + const storage = await ctx.getStorage(); + storage.invalidateProject(input.projectId); + + return true; + } catch (error) { + ctx.errorHandler( + 'Failed to invalidate tokens of a project', + error as Error + ); + + throw error; + } + }, + }) + .mutation('invalidateTokenByOrganization', { + input: ORG_VALIDATION, + async resolve({ ctx, input }) { + try { + const storage = await ctx.getStorage(); + storage.invalidateOrganization(input.organizationId); + + return true; + } catch (error) { + ctx.errorHandler( + 'Failed to invalidate tokens of a org', + error as Error + ); + + throw error; + } + }, + }) + .mutation('createToken', { + input: z + .object({ + name: z.string().nonempty(), + target: z.string().nonempty(), + project: z.string().nonempty(), + organization: z.string().nonempty(), + scopes: z.array(z.string().nonempty()), + }) + .required(), + async resolve({ ctx, input }) { + try { + const { target, project, organization, name, scopes } = input; + const storage = await ctx.getStorage(); + const token = generateToken(); + const result = await storage.writeToken({ + name, + target, + project, + organization, + scopes, + token: token.hash, + tokenAlias: token.alias, + }); + + return { + ...result, + secret: token.secret, + }; + } catch (error) { + ctx.errorHandler('Failed to create a token', error as Error); + + throw error; + } + }, + }) + .mutation('deleteToken', { + input: TOKEN_VALIDATION, + async resolve({ ctx, input }) { + try { + const hashed_token = input.token; + const storage = await ctx.getStorage(); +
await storage.deleteToken(hashed_token); + + return true; + } catch (error) { + ctx.errorHandler('Failed to delete a token', error as Error); + + throw error; + } + }, + }) + .query('getToken', { + input: TOKEN_VALIDATION, + async resolve({ ctx, input }) { + const hash = hashToken(input.token); + const alias = maskToken(input.token); + + // In case the token was not found (or we failed to fetch it) + const failedRead = ctx.tokenReadFailuresCache.get(hash); + + if (failedRead) { + // let's re-throw the same error + if (failedRead.checkAt >= Date.now()) { + throw new Error(failedRead.error); + } + // or look for it again if last time we checked was 10 minutes ago + } + + try { + const storage = await ctx.getStorage(); + const result = await storage.readToken(hash); + + // removes the token from the failures cache + ctx.tokenReadFailuresCache.delete(hash); + + return result; + } catch (error) { + ctx.errorHandler( + `Failed to get a token "${alias}"`, + error as Error, + ctx.logger + ); + + // set token read as failure + ctx.tokenReadFailuresCache.set(hash, { + error: (error as Error).message, + checkAt: Date.now() + ctx.errorCachingInterval, + }); + + throw error; + } + }, + }); + +export type TokensApi = typeof tokensApiRouter; +export type TokensApiMutate = keyof TokensApi['_def']['mutations']; +export type TokensApiQuery = keyof TokensApi['_def']['queries']; + +export type TokensMutationInput = + inferProcedureInput; + +export type TokensQueryInput = + inferProcedureInput; + +export type TokensQueryOutput = + inferProcedureOutput; diff --git a/packages/services/tokens/src/cache.ts b/packages/services/tokens/src/cache.ts new file mode 100644 index 000000000..82fde763f --- /dev/null +++ b/packages/services/tokens/src/cache.ts @@ -0,0 +1,308 @@ +import type { FastifyLoggerInstance } from 'fastify'; +import LRU from 'tiny-lru'; +import ms from 'ms'; +import { metrics } from '@hive/service-common'; +import type { Storage, StorageItem } from './storage'; +import { 
atomic, useActionTracker, until } from './helpers'; + +const cacheHits = new metrics.Counter({ + name: 'tokens_cache_hits', + help: 'Number of cache hits', +}); + +const cacheMisses = new metrics.Counter({ + name: 'tokens_cache_misses', + help: 'Number of cache misses', +}); + +const cacheFillups = new metrics.Counter({ + name: 'tokens_cache_fillups', + help: 'Number of cache fill ups', +}); + +const cacheInvalidations = new metrics.Counter({ + name: 'tokens_cache_invalidations', + help: 'Number of cache invalidations', +}); + +// share "promises" to allow reduce the number of requests even more + +interface CacheStorage extends Omit { + invalidateTarget(target: string): void; + invalidateProject(project: string): void; + invalidateOrganization(organization: string): void; +} + +export function useCache( + storagePromise: Promise, + logger: FastifyLoggerInstance +): { + start(): Promise; + stop(): Promise; + readiness(): boolean; + getStorage(): Promise; +} { + let started = false; + let cachedStoragePromise: Promise | null = null; + + function getStorage() { + if (!cachedStoragePromise) { + cachedStoragePromise = create(); + } + + return cachedStoragePromise; + } + + const tracker = useActionTracker(); + + async function create() { + const storage = await storagePromise; + const cache = LRU(100); + const relations = useRelations(); + const touch = useTokenTouchScheduler(storage, logger, updateLastUsedAt); + + function updateLastUsedAt(token: string, date: Date) { + const targetIds = cache.keys(); + + for (const target of targetIds) { + const items = cache.get(target); + + if (items) { + const item = items.find((p) => p.token === token); + + if (item) { + item.lastUsedAt = date.getTime() as any; + break; + } + } + } + } + + function invalidate(target: string): void { + logger.debug('Invalidating (target=%s)', target); + cacheInvalidations.inc(1); + cache.delete(target); + } + + const readAndFill = atomic(async function _readAndFill(target: string) { + const result 
= await storage.readTarget(target); + + logger.debug('Cache Fill (target=%s)', target); + cacheFillups.inc(1); + + if (result.length) { + const organization = result[0].organization; + const project = result[0].project; + + relations.ensureOrganizationProject(organization, project); + relations.ensureProjectTarget(project, target); + } + + cache.set(target, result); + + return result; + }); + + const readToken = atomic(async function _readToken(token: string) { + return storage.readToken(token); + }); + + const cachedStorage: CacheStorage = { + async readTarget(target, res) { + const cachedValue = cache.get(target); + if (cachedValue) { + res?.header('x-cache', 'HIT'); + cacheHits.inc(1); + return cachedValue; + } + + cacheMisses.inc(1); + res?.header('x-cache', 'MISS'); + + return readAndFill(target); + }, + invalidateTarget(target) { + invalidate(target); + }, + invalidateProject(project) { + relations + .getTargetsOfProject(project) + .forEach((target) => cachedStorage.invalidateTarget(target)); + }, + invalidateOrganization(organization) { + relations + .getTargetsOfOrganization(organization) + .forEach((target) => cachedStorage.invalidateTarget(target)); + }, + async readToken(hashed_token, res) { + const targetIds = cache.keys(); + + for (const target of targetIds) { + const items = cache.get(target); + + if (items) { + const item = items.find((p) => p.token === hashed_token); + + if (item) { + cacheHits.inc(1); + res?.header('x-cache', 'HIT'); + touch.schedule(hashed_token); // mark as used + return item; + } + } + } + + const item = await readToken(hashed_token); + await readAndFill(item.target).catch(() => {}); + cacheMisses.inc(1); + res?.header('x-cache', 'MISS'); + + touch.schedule(hashed_token); // mark as used + + return item; + }, + writeToken: tracker.wrap(async (item) => { + logger.debug('Writing token (target=%s)', item.target); + const result = await storage.writeToken(item); + invalidate(item.target); + + return result; + }), + deleteToken: 
tracker.wrap(async (hashed_token) => { + const item = await cachedStorage.readToken(hashed_token); + invalidate(item.target); + + return storage.deleteToken(hashed_token); + }), + }; + + started = true; + + return cachedStorage; + } + + async function start() { + await getStorage(); + } + + async function stop() { + logger.info('Started Tokens shutdown...'); + started = false; + + await until(tracker.idle, 10_000).catch((error) => { + logger.error('Failed to wait for tokens being idle', error); + }); + + process.exit(0); + } + + function readiness() { + return started; + } + + return { + start, + stop, + readiness, + getStorage, + }; +} + +function useRelations() { + const organizationToProjects = new Map>(); + const projectToTokens = new Map>(); + + function getTargetsOfProject(project: string): Set { + return projectToTokens.get(project) ?? new Set(); + } + + function getProjectsOfOrganization(organization: string): Set { + return organizationToProjects.get(organization) ?? new Set(); + } + + function getTargetsOfOrganization(organization: string): Set { + const targets = new Set(); + + getProjectsOfOrganization(organization).forEach((project) => { + getTargetsOfProject(project).forEach((target) => { + targets.add(target); + }); + }); + + return targets; + } + + function ensureRelation( + from: string, + to: string, + mapSet: Map> + ) { + if (!mapSet.has(from)) { + mapSet.set(from, new Set()); + } + + mapSet.get(from)!.add(to); + } + + function ensureOrganizationProject(organization: string, project: string) { + ensureRelation(organization, project, organizationToProjects); + } + + function ensureProjectTarget(project: string, target: string) { + ensureRelation(project, target, projectToTokens); + } + + return { + ensureOrganizationProject, + ensureProjectTarget, + getTargetsOfProject, + getTargetsOfOrganization, + }; +} + +function useTokenTouchScheduler( + storage: Storage, + logger: FastifyLoggerInstance, + onTouch: (token: string, date: Date) => void +) { + 
const scheduledTokens = new Map(); + + /** + * Mark token as used + */ + function schedule(hashed_token: string): void { + const now = new Date(); + scheduledTokens.set(hashed_token, now); + onTouch(hashed_token, now); + } + + // updated every 10m + const interval = setInterval(() => { + if (!scheduledTokens.size) { + return; + } + + const tokens = Array.from(scheduledTokens.entries()).map( + ([token, date]) => ({ + token, + date, + }) + ); + scheduledTokens.clear(); + + logger.debug(`Touch ${tokens.length} tokens`); + tokens.forEach(({ token, date }) => onTouch(token, date)); + storage.touchTokens(tokens).catch((error) => { + logger.error(error); + }); + }, ms('10m')); + + function dispose() { + clearInterval(interval); + } + + return { + schedule, + dispose, + }; +} diff --git a/packages/services/tokens/src/dev.ts b/packages/services/tokens/src/dev.ts new file mode 100644 index 000000000..ac50ff1a4 --- /dev/null +++ b/packages/services/tokens/src/dev.ts @@ -0,0 +1,8 @@ +import 'reflect-metadata'; +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/tokens/src/helpers.ts b/packages/services/tokens/src/helpers.ts new file mode 100644 index 000000000..c023c91cc --- /dev/null +++ b/packages/services/tokens/src/helpers.ts @@ -0,0 +1,58 @@ +import pTimeout from 'p-timeout'; + +const requestsInFlight = new Map>(); + +export function atomic( + fn: (arg: A) => Promise +): (arg: A) => Promise { + return function atomicWrapper(arg) { + if (requestsInFlight.has(arg)) { + return requestsInFlight.get(arg)!; + } + + const promise = fn(arg); + requestsInFlight.set(arg, promise); + + return promise.finally(() => { + requestsInFlight.delete(arg); + }); + }; +} + +export function useActionTracker() { + let actionsInProgress = 0; + + function done() { + --actionsInProgress; + } + + function started() { + ++actionsInProgress; + } + + return { + wrap(fn: (arg: A) => Promise) { + return (arg: A) => { + started(); + 
return fn(arg).finally(done); + }; + }, + idle() { + return actionsInProgress === 0; + }, + }; +} + +export function until(fn: () => boolean, timeout: number): Promise { + return pTimeout( + new Promise((resolve) => { + const interval = setInterval(() => { + if (fn()) { + clearInterval(interval); + resolve(); + } + }, 200); + }), + timeout + ); +} diff --git a/packages/services/tokens/src/index.ts b/packages/services/tokens/src/index.ts new file mode 100644 index 000000000..9faa1ddda --- /dev/null +++ b/packages/services/tokens/src/index.ts @@ -0,0 +1,106 @@ +#!/usr/bin/env node +import 'reflect-metadata'; +import { + createServer, + createErrorHandler, + startMetrics, + registerShutdown, +} from '@hive/service-common'; +import * as Sentry from '@sentry/node'; +import LRU from 'tiny-lru'; +import ms from 'ms'; +import { createStorage } from './storage'; +import { useCache } from './cache'; +import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify/dist/trpc-server-adapters-fastify.cjs.js'; +import { Context, tokensApiRouter } from './api'; + +export async function main() { + Sentry.init({ + serverName: 'tokens', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'tokens', + tracing: false, + }); + + const errorHandler = createErrorHandler(server); + + try { + const { start, stop, readiness, getStorage } = useCache( + createStorage(), + server.log + ); + const tokenReadFailuresCache = LRU<{ + error: string; + checkAt: number; + }>(50); + const errorCachingInterval = ms('10m'); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await server.close(); + await stop(); + }, + }); + + const port = process.env.PORT || 6001; + + const context: Context = { + errorCachingInterval, + logger: server.log, + errorHandler, + getStorage, + tokenReadFailuresCache, + }; + + 
server.register(fastifyTRPCPlugin, { + prefix: '/trpc', + trpcOptions: { + router: tokensApiRouter, + createContext: () => context, + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(req, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(readiness() ? 200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + process.exit(1); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/tokens/src/storage.ts b/packages/services/tokens/src/storage.ts new file mode 100644 index 000000000..791bc6974 --- /dev/null +++ b/packages/services/tokens/src/storage.ts @@ -0,0 +1,71 @@ +import { + createTokenStorage, + createConnectionString, + tokens, +} from '@hive/storage'; +import type { FastifyReply } from 'fastify'; + +export interface StorageItem { + token: string; + name: string; + tokenAlias: string; + date: string; + lastUsedAt: string; + organization: string; + project: string; + target: string; + scopes: readonly string[]; +} + +export interface Storage { + readTarget(targetId: string, res?: FastifyReply): Promise; + readToken(token: string, res?: FastifyReply): Promise; + writeToken( + item: Omit + ): Promise; + deleteToken(token: string): Promise; + touchTokens(tokens: Array<{ token: string; date: Date }>): Promise; +} + +export async function createStorage(): Promise { + const connectionString = createConnectionString(process.env as any); + const db = await createTokenStorage(connectionString); + + function transformToken(item: tokens): StorageItem { + return { + token: item.token, + tokenAlias: 
item.token_alias, + name: item.name, + date: item.created_at as any, + lastUsedAt: item.last_used_at as any, + organization: item.organization_id, + project: item.project_id, + target: item.target_id, + scopes: item.scopes || [], + }; + } + + return { + async readTarget(target) { + const tokens = await db.getTokens({ target }); + + return tokens.map(transformToken); + }, + async readToken(hashed_token) { + const result = await db.getToken({ token: hashed_token }); + + return transformToken(result); + }, + async writeToken(item) { + const result = await db.createToken(item); + + return transformToken(result); + }, + async deleteToken(hashed_token) { + return db.deleteToken({ token: hashed_token }); + }, + touchTokens(tokens) { + return db.touchTokens({ tokens }); + }, + }; +} diff --git a/packages/services/tokens/tsconfig.json b/packages/services/tokens/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/tokens/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." 
+ }, + "files": ["src/index.ts"] +} diff --git a/packages/services/usage-common/package.json b/packages/services/usage-common/package.json new file mode 100644 index 000000000..6d9234680 --- /dev/null +++ b/packages/services/usage-common/package.json @@ -0,0 +1,9 @@ +{ + "name": "@hive/usage-common", + "private": true, + "version": "0.0.1", + "license": "MIT", + "dependencies": { + "graphql": "16.5.0" + } +} diff --git a/packages/services/usage-common/src/compression.ts b/packages/services/usage-common/src/compression.ts new file mode 100644 index 000000000..2058a9596 --- /dev/null +++ b/packages/services/usage-common/src/compression.ts @@ -0,0 +1,25 @@ +import { gzip, gunzip } from 'node:zlib'; + +export async function compress(data: string): Promise { + return new Promise((resolve, reject) => { + gzip(data, (error, buffer) => { + if (error) { + reject(error); + } else { + resolve(buffer); + } + }); + }); +} + +export async function decompress(buffer: Buffer): Promise { + return new Promise((resolve, reject) => { + gunzip(buffer, (error, data) => { + if (error) { + reject(error); + } else { + resolve(data); + } + }); + }); +} diff --git a/packages/services/usage-common/src/index.ts b/packages/services/usage-common/src/index.ts new file mode 100644 index 000000000..b6b0e818a --- /dev/null +++ b/packages/services/usage-common/src/index.ts @@ -0,0 +1,3 @@ +export * from './processed'; +export * from './raw'; +export * from './compression'; diff --git a/packages/services/usage-common/src/processed.ts b/packages/services/usage-common/src/processed.ts new file mode 100644 index 000000000..f92b4bb92 --- /dev/null +++ b/packages/services/usage-common/src/processed.ts @@ -0,0 +1,26 @@ +import type { OperationTypeNode } from 'graphql'; +import type { RawOperation } from './raw'; + +export type ProcessedReport = ProcessedOperation[]; + +export interface ProcessedOperation { + target: string; + document: string; + operationName?: string | null; + operationHash: string; + 
operationType: OperationTypeNode; + fields: string[]; + timestamp: number; + expiresAt: number; + execution: RawOperation['execution']; + metadata?: RawOperation['metadata']; +} + +export interface ProcessedRegistryRecord { + target: string; + hash: string; + name?: string | null; + body: string; + operation: string; + inserted_at: number; +} diff --git a/packages/services/usage-common/src/raw.ts b/packages/services/usage-common/src/raw.ts new file mode 100644 index 000000000..02f031af7 --- /dev/null +++ b/packages/services/usage-common/src/raw.ts @@ -0,0 +1,35 @@ +export interface RawReport { + id: string; + size: number; + target: string; + map: RawOperationMap; + operations: RawOperation[]; +} + +export interface RawOperation { + operationMapKey: string; + timestamp: number; + expiresAt?: number; + execution: { + ok: boolean; + duration: number; + errorsTotal: number; + }; + metadata?: { + client?: { + name?: string; + version?: string; + }; + }; +} + +export interface RawOperationMapRecord { + key: string; + operation: string; + operationName?: string | null; + fields: string[]; +} + +export interface RawOperationMap { + [key: string]: RawOperationMapRecord; +} diff --git a/packages/services/usage-common/tsconfig.json b/packages/services/usage-common/tsconfig.json new file mode 100644 index 000000000..9b376c2b1 --- /dev/null +++ b/packages/services/usage-common/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "../../../tsconfig.json", + "include": ["src"] +} diff --git a/packages/services/usage-estimator/.env.template b/packages/services/usage-estimator/.env.template new file mode 100644 index 000000000..180e7e145 --- /dev/null +++ b/packages/services/usage-estimator/.env.template @@ -0,0 +1,11 @@ +CLICKHOUSE_PROTOCOL="http" +CLICKHOUSE_HOST="localhost" +CLICKHOUSE_PORT="8123" +CLICKHOUSE_USERNAME="test" +CLICKHOUSE_PASSWORD="test" +PORT=4011 +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=registry \ No 
newline at end of file diff --git a/packages/services/usage-estimator/package.json b/packages/services/usage-estimator/package.json new file mode 100644 index 000000000..aa8dd3257 --- /dev/null +++ b/packages/services/usage-estimator/package.json @@ -0,0 +1,38 @@ +{ + "private": true, + "type": "module", + "name": "@hive/usage-estimator", + "description": "A microservice for Hive SaaS, that calculates and exposes usage information.", + "version": "0.0.1", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "cross-undici-fetch": "0.4.3", + "zod": "3.15.1", + "@trpc/server": "9.23.2", + "reflect-metadata": "0.1.13", + "@graphql-hive/core": "0.2.0", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "dotenv": "10.0.0", + "got": "12.0.4" + }, + "devDependencies": { + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "external": [ + "pg-native" + ], + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/usage-estimator/src/api.ts b/packages/services/usage-estimator/src/api.ts new file mode 100644 index 000000000..7806c101c --- /dev/null +++ b/packages/services/usage-estimator/src/api.ts @@ -0,0 +1,92 @@ +import * as trpc from '@trpc/server'; +import type { Estimator } from './estimator'; +import { z } from 'zod'; +import { inferProcedureInput, inferProcedureOutput } from '@trpc/server'; + +const DATE_RANGE_VALIDATION = { + startTime: z.string().nonempty(), + endTime: z.string().nonempty(), +}; + +const TARGET_BASED_FILTER = { + targetIds: z.array(z.string().nonempty()), +}; + +export const usageEstimatorApiRouter = trpc + .router() + .query('estimateSchemaPushesForTarget', { + input: z + .object({ + ...DATE_RANGE_VALIDATION, + 
...TARGET_BASED_FILTER, + }) + .required(), + async resolve({ ctx, input }) { + const estimationResponse = await ctx.estimateSchemaPushesForTargets({ + targets: input.targetIds, + startTime: new Date(input.startTime), + endTime: new Date(input.endTime), + }); + + return { + totalSchemaPushes: estimationResponse.count, + }; + }, + }) + .query('estiamteSchemaPushesForAllTargets', { + input: z.object(DATE_RANGE_VALIDATION).required(), + async resolve({ ctx, input }) { + const estimationResponse = await ctx.estimateSchemaPushesForAllTargets({ + startTime: new Date(input.startTime), + endTime: new Date(input.endTime), + }); + + return Object.fromEntries( + estimationResponse.map((item) => [item.target, item.total]) + ); + }, + }) + .query('estimateOperationsForTarget', { + input: z + .object({ + ...DATE_RANGE_VALIDATION, + ...TARGET_BASED_FILTER, + }) + .required(), + async resolve({ ctx, input }) { + const estimationResponse = + await ctx.estimateCollectedOperationsForTargets({ + targets: input.targetIds, + startTime: new Date(input.startTime), + endTime: new Date(input.endTime), + }); + + return { + totalOperations: parseInt(estimationResponse.data[0].total), + }; + }, + }) + .query('estimateOperationsForAllTargets', { + input: z.object(DATE_RANGE_VALIDATION).required(), + async resolve({ ctx, input }) { + const estimationResponse = await ctx.estimateOperationsForAllTargets({ + startTime: new Date(input.startTime), + endTime: new Date(input.endTime), + }); + + return Object.fromEntries( + estimationResponse.data.map((item) => [ + item.target, + parseInt(item.total), + ]) + ); + }, + }); + +export type UsageEstimatorApi = typeof usageEstimatorApiRouter; +export type UsageEstimatorApiQuery = keyof UsageEstimatorApi['_def']['queries']; +export type UsageEstimatorQueryOutput< + TRouteKey extends UsageEstimatorApiQuery +> = inferProcedureOutput; +export type UsageEstimatorQueryInput = + inferProcedureInput; diff --git a/packages/services/usage-estimator/src/dev.ts 
b/packages/services/usage-estimator/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/usage-estimator/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/usage-estimator/src/estimator.ts b/packages/services/usage-estimator/src/estimator.ts new file mode 100644 index 000000000..3d5a184bb --- /dev/null +++ b/packages/services/usage-estimator/src/estimator.ts @@ -0,0 +1,129 @@ +import type { FastifyLoggerInstance } from '@hive/service-common'; +import { createStorage as createPostgreSQLStorage } from '@hive/storage'; +import { HttpClient, ClickHouse, OperationsReader } from '@hive/api'; + +export type Estimator = ReturnType; + +export function createEstimator(config: { + logger: FastifyLoggerInstance; + clickhouse: { + protocol: string; + host: string; + port: number; + username: string; + password: string; + onReadEnd?: ( + label: string, + timings: { + totalSeconds: number; + elapsedSeconds: number; + } + ) => void; + }; + storage: { + connectionString: string; + }; +}) { + const { logger } = config; + const postgres$ = createPostgreSQLStorage(config.storage.connectionString); + const httpClient = new HttpClient(); + const clickhouse = new ClickHouse( + config.clickhouse, + httpClient, + config.logger + ); + const operationsReader = new OperationsReader(clickhouse); + + return { + readiness() { + return true; + }, + async start() { + logger.info('Usage Estimator starting'); + }, + async stop() { + logger.info('Usage Reported stopped'); + }, + async estimateSchemaPushesForTargets(input: { + targets: string[]; + startTime: Date; + endTime: Date; + }): Promise<{ count: number }> { + const storage = await postgres$; + const response = await storage.getSchemaPushCount({ + targetIds: input.targets, + startTime: input.startTime, + endTime: input.endTime, + }); + + return { + count: response, + }; + }, + async 
estimateSchemaPushesForAllTargets(input: { + startTime: Date; + endTime: Date; + }) { + const storage = await postgres$; + const response = await storage.getAllSchemaPushesGrouped({ + startTime: input.startTime, + endTime: input.endTime, + }); + + return response; + }, + async estimateOperationsForAllTargets(input: { + startTime: Date; + endTime: Date; + }) { + const filter = operationsReader.createFilter({ + period: { + from: input.startTime, + to: input.endTime, + }, + }); + + return await clickhouse.query<{ + total: string; + target: string; + }>({ + query: ` + SELECT + target, + sum(total) as total + FROM operations_new_hourly_mv + ${filter} + GROUP BY target + `, + queryId: 'usage_estimator_count_operations_all', + timeout: 60_000, + }); + }, + async estimateCollectedOperationsForTargets(input: { + targets: string[]; + startTime: Date; + endTime: Date; + }) { + const filter = operationsReader.createFilter({ + target: input.targets, + period: { + from: input.startTime, + to: input.endTime, + }, + }); + + return await clickhouse.query<{ + total: string; + }>({ + query: ` + SELECT + sum(total) as total + FROM operations_new_hourly_mv + ${filter} + `, + queryId: 'usage_estimator_count_operations', + timeout: 15_000, + }); + }, + }; +} diff --git a/packages/services/usage-estimator/src/index.ts b/packages/services/usage-estimator/src/index.ts new file mode 100644 index 000000000..9ccf6289f --- /dev/null +++ b/packages/services/usage-estimator/src/index.ts @@ -0,0 +1,105 @@ +#!/usr/bin/env node +import 'reflect-metadata'; +import * as Sentry from '@sentry/node'; +import { + createServer, + startMetrics, + ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createEstimator } from './estimator'; +import { createConnectionString } from '@hive/storage'; +import { fastifyTRPCPlugin } from '@trpc/server/adapters/fastify/dist/trpc-server-adapters-fastify.cjs.js'; +import { usageEstimatorApiRouter } from './api'; +import { clickHouseElapsedDuration, 
clickHouseReadDuration } from './metrics'; + +async function main() { + Sentry.init({ + serverName: 'usage-reporter', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'usage-estimator', + tracing: false, + }); + + try { + const context = createEstimator({ + logger: server.log, + clickhouse: { + protocol: ensureEnv('CLICKHOUSE_PROTOCOL'), + host: ensureEnv('CLICKHOUSE_HOST'), + port: ensureEnv('CLICKHOUSE_PORT', 'number'), + username: ensureEnv('CLICKHOUSE_USERNAME'), + password: ensureEnv('CLICKHOUSE_PASSWORD'), + onReadEnd(query, timings) { + clickHouseReadDuration + .labels({ query }) + .observe(timings.totalSeconds); + clickHouseElapsedDuration + .labels({ query }) + .observe(timings.elapsedSeconds); + }, + }, + storage: { + connectionString: createConnectionString(process.env as any), + }, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([stop(), server.close()]); + }, + }); + + server.register(fastifyTRPCPlugin, { + prefix: '/trpc', + trpcOptions: { + router: usageEstimatorApiRouter, + createContext: () => context, + }, + }); + + const port = process.env.PORT || 5000; + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(context.readiness() ? 
200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await context.start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/usage-estimator/src/metrics.ts b/packages/services/usage-estimator/src/metrics.ts new file mode 100644 index 000000000..9839e3500 --- /dev/null +++ b/packages/services/usage-estimator/src/metrics.ts @@ -0,0 +1,13 @@ +import { metrics } from '@hive/service-common'; + +export const clickHouseElapsedDuration = new metrics.Histogram({ + name: 'usage_estimation_clickhouse_elapsed_duration', + help: 'Usage Estimation (ClickHouse/Read)', + labelNames: ['query'], +}); + +export const clickHouseReadDuration = new metrics.Histogram({ + name: 'usage_estimation_clickhouse_read_duration', + help: 'Usage Estimation (ClickHouse/Read Duration)', + labelNames: ['query'], +}); diff --git a/packages/services/usage-estimator/tsconfig.json b/packages/services/usage-estimator/tsconfig.json new file mode 100644 index 000000000..62f6d54e8 --- /dev/null +++ b/packages/services/usage-estimator/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "skipLibCheck": true, + "rootDir": "../.." 
+ }, + "files": ["src/index.ts"] +} diff --git a/packages/services/usage-ingestor/.env.template b/packages/services/usage-ingestor/.env.template new file mode 100644 index 000000000..584ec8cce --- /dev/null +++ b/packages/services/usage-ingestor/.env.template @@ -0,0 +1,9 @@ +KAFKA_CONNECTION_MODE="docker" +KAFKA_BROKER="localhost:9092" +KAFKA_CONCURRENCY="1" +CLICKHOUSE_PROTOCOL="http" +CLICKHOUSE_HOST="localhost" +CLICKHOUSE_PORT="8123" +CLICKHOUSE_USERNAME="test" +CLICKHOUSE_PASSWORD="test" +PORT=4002 \ No newline at end of file diff --git a/packages/services/usage-ingestor/.gitignore b/packages/services/usage-ingestor/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/usage-ingestor/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/usage-ingestor/CHANGELOG.md b/packages/services/usage-ingestor/CHANGELOG.md new file mode 100644 index 000000000..08d7c7814 --- /dev/null +++ b/packages/services/usage-ingestor/CHANGELOG.md @@ -0,0 +1,8 @@ +# @hive/usage-ingestor + +## 0.0.2 + +### Patch Changes + +- Updated dependencies [ac9b868c] + - @graphql-hive/core@0.2.0 diff --git a/packages/services/usage-ingestor/__tests__/serializer.spec.ts b/packages/services/usage-ingestor/__tests__/serializer.spec.ts new file mode 100644 index 000000000..0b5be76c7 --- /dev/null +++ b/packages/services/usage-ingestor/__tests__/serializer.spec.ts @@ -0,0 +1,123 @@ +import { + stringifyOperation, + stringifyRegistryRecord, + joinIntoSingleMessage, +} from '../src/serializer'; + +const timestamp = { + asNumber: 1643892203027, + asString: '2022-02-03 12:43:23', +}; + +test('stringify operation in correct format and order', () => { + const serialized = joinIntoSingleMessage( + [ + { + target: 'my-target', + timestamp: timestamp.asNumber, + expiresAt: timestamp.asNumber, + operationHash: 'my-hash', + fields: ['Query', 'Query.foo'], + execution: { + ok: true, + errorsTotal: 0, + duration: 230, + }, + 
document: `{ foo }`, + operationType: 'query' as any, + metadata: { + client: { + name: 'clientName', + version: 'clientVersion', + }, + }, + }, + { + target: 'my-target', + timestamp: timestamp.asNumber, + expiresAt: timestamp.asNumber, + operationHash: 'my-hash-1', + fields: ['Query', 'Query.foo'], + execution: { + ok: false, + errorsTotal: 1, + duration: 250, + }, + document: `{ foo }`, + operationType: 'query' as any, + // missing metadata, on purpose + }, + ].map(stringifyOperation) + ); + expect(serialized).toBe( + [ + [ + /* target */ `"my-target"`, + /* timestamp */ timestamp.asString, + /* expires_at */ timestamp.asString, + /* hash */ `"my-hash"`, + /* ok */ 1, + /* errors */ 0, + /* duration */ 230, + /* schema */ `"['Query','Query.foo']"`, + /* client_name */ `"clientName"`, + /* client_version */ `"clientVersion"`, + ].join(','), + [ + /* target */ `"my-target"`, + /* timestamp */ timestamp.asString, + /* expires_at */ timestamp.asString, + /* hash */ `"my-hash-1"`, + /* ok */ 0, + /* errors */ 1, + /* duration */ 250, + /* schema */ `"['Query','Query.foo']"`, + /* client_name */ `\\N`, + /* client_version */ `\\N`, + ].join(','), + ].join('\n') + ); +}); + +test('stringify registry records in correct format and order', () => { + const serialized = joinIntoSingleMessage( + [ + { + target: 'my-target', + inserted_at: timestamp.asNumber, + name: 'my-name', + hash: 'my-hash', + body: `{ foo }`, + operation: 'query', + }, + { + target: 'my-target', + inserted_at: timestamp.asNumber, + // missing name, on purpose + hash: 'my-hash-1', + body: `{ foo }`, + operation: 'query', + }, + ].map(stringifyRegistryRecord) + ); + expect(serialized).toBe( + [ + [ + /* target */ `"my-target"`, + /* hash */ `"my-hash"`, + /* name */ `"my-name"`, + /* body */ `"{ foo }"`, + /* operation */ `"query"`, + /* inserted_at */ timestamp.asString, + ].join(','), + [ + /* target */ `"my-target"`, + /* hash */ `"my-hash-1"`, + /* name */ `\\N`, + /* body */ `"{ foo }"`, + /* 
operation */ `"query"`, + /* inserted_at */ timestamp.asString, + ].join(','), + ].join('\n') + ); +}); diff --git a/packages/services/usage-ingestor/package.json b/packages/services/usage-ingestor/package.json new file mode 100644 index 000000000..900064b15 --- /dev/null +++ b/packages/services/usage-ingestor/package.json @@ -0,0 +1,36 @@ +{ + "private": true, + "type": "module", + "name": "@hive/usage-ingestor", + "version": "0.0.2", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --watch --format esm --target node16 --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@graphql-hive/core": "0.2.0", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "agentkeepalive": "4.2.0", + "date-fns": "2.25.0", + "date-fns-tz": "1.2.2", + "dotenv": "10.0.0", + "graphql": "16.5.0", + "got": "12.0.4", + "kafkajs": "2.0.0", + "tiny-lru": "8.0.2" + }, + "devDependencies": { + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/usage-ingestor/src/dev.ts b/packages/services/usage-ingestor/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/usage-ingestor/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/usage-ingestor/src/helpers.ts b/packages/services/usage-ingestor/src/helpers.ts new file mode 100644 index 000000000..61c317cd6 --- /dev/null +++ b/packages/services/usage-ingestor/src/helpers.ts @@ -0,0 +1,23 @@ +export function cache( + fn: (arg: A) => R, + cacheKeyFn: (arg: A) => K, + cacheMap: { + has(key: K): boolean; + set(key: K, value: R): void; + get(key: K): R | undefined; + } +) { + return (arg: A) => { + const key = 
cacheKeyFn(arg); + const cachedValue = cacheMap.get(key); + + if (cachedValue !== null && typeof cachedValue !== 'undefined') { + return { key, value: cachedValue }; + } + + const value = fn(arg); + cacheMap.set(key, value); + + return { key, value }; + }; +} diff --git a/packages/services/usage-ingestor/src/index.ts b/packages/services/usage-ingestor/src/index.ts new file mode 100644 index 000000000..b7b9b9584 --- /dev/null +++ b/packages/services/usage-ingestor/src/index.ts @@ -0,0 +1,97 @@ +#!/usr/bin/env node +import * as Sentry from '@sentry/node'; +import { + createServer, + startMetrics, + ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createIngestor } from './ingestor'; + +async function main() { + Sentry.init({ + serverName: 'usage-ingestor', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'usage-ingestor', + tracing: false, + }); + + try { + const { readiness, start, stop } = createIngestor({ + logger: server.log, + clickhouse: { + protocol: ensureEnv('CLICKHOUSE_PROTOCOL'), + host: ensureEnv('CLICKHOUSE_HOST'), + port: ensureEnv('CLICKHOUSE_PORT', 'number'), + username: ensureEnv('CLICKHOUSE_USERNAME'), + password: ensureEnv('CLICKHOUSE_PASSWORD'), + }, + kafka: { + topic: 'usage_reports_v2', + concurrency: ensureEnv('KAFKA_CONCURRENCY', 'number'), + connection: + ensureEnv('KAFKA_CONNECTION_MODE') == 'hosted' + ? 
{ + mode: 'hosted', + user: ensureEnv('KAFKA_USER'), + key: ensureEnv('KAFKA_KEY'), + broker: ensureEnv('KAFKA_BROKER'), + } + : { + mode: 'docker', + broker: ensureEnv('KAFKA_BROKER'), + }, + }, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([stop(), server.close()]); + }, + }); + + const port = process.env.PORT || 5000; + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(readiness() ? 200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/usage-ingestor/src/ingestor.ts b/packages/services/usage-ingestor/src/ingestor.ts new file mode 100644 index 000000000..8c3fd473f --- /dev/null +++ b/packages/services/usage-ingestor/src/ingestor.ts @@ -0,0 +1,226 @@ +import { Kafka, KafkaMessage, logLevel } from 'kafkajs'; +import { decompress } from '@hive/usage-common'; +import { + errors, + processTime, + reportMessageBytes, + ingestedOperationsWrites, + ingestedOperationsFailures, + ingestedOperationRegistryWrites, + ingestedOperationRegistryFailures, +} from './metrics'; +import { ClickHouseConfig, createWriter } from './writer'; +import { createProcessor } from './processor'; + +import type { FastifyLoggerInstance } from '@hive/service-common'; +import type { RawReport } from '@hive/usage-common'; + +enum Status { + Waiting, + Ready, + Stopped, +} + +const levelMap = { + [logLevel.NOTHING]: 'trace', + [logLevel.ERROR]: 'error', + [logLevel.WARN]: 'warn', 
+ [logLevel.INFO]: 'info', + [logLevel.DEBUG]: 'debug', +} as const; + +const retryOnFailureSymbol = Symbol.for('retry-on-failure'); + +function shouldRetryOnFailure(error: any) { + return error[retryOnFailureSymbol] === true; +} + +export function createIngestor(config: { + logger: FastifyLoggerInstance; + clickhouse: ClickHouseConfig; + kafka: { + topic: string; + concurrency: number; + connection: + | { + mode: 'hosted'; + key: string; + user: string; + broker: string; + } + | { + mode: 'docker'; + broker: string; + }; + }; +}) { + const { logger } = config; + + const kafka = new Kafka({ + clientId: 'usage-ingestor', + ...(config.kafka.connection.mode === 'hosted' + ? { + ssl: true, + sasl: { + mechanism: 'plain', + username: config.kafka.connection.user, + password: config.kafka.connection.key, + }, + brokers: [config.kafka.connection.broker], + } + : { + brokers: [config.kafka.connection.broker], + }), + logLevel: logLevel.INFO, + logCreator() { + return (entry) => { + logger[levelMap[entry.level]]({ + ...entry.log, + message: undefined, + timestamp: undefined, + msg: `[${entry.namespace}] ${entry.log.message}`, + time: new Date(entry.log.timestamp).getTime(), + }); + }; + }, + }); + const consumer = kafka.consumer({ + groupId: `usage-ingestor-v2`, + retry: { + retries: 2, + }, + // Recommended by Azure EventHub https://docs.microsoft.com/en-us/azure/event-hubs/apache-kafka-configurations + sessionTimeout: 30_000, + heartbeatInterval: 3_000, + metadataMaxAge: 180_000, + }); + + async function stop() { + logger.info('Started Usage Ingestor shutdown...'); + + status = Status.Stopped; + await consumer.disconnect(); + logger.info(`Consumer disconnected`); + + logger.info('Usage Ingestor stopped'); + } + + consumer.on('consumer.stop', async () => { + logger.info('Consumer stopped'); + if (status === Status.Stopped) { + return; + } + + await stop(); + await start(); + }); + + async function start() { + logger.info('Connecting Kafka Consumer'); + await 
consumer.connect(); + + logger.info('Subscribing to Kafka topic: %s', config.kafka.topic); + await consumer.subscribe({ + topic: config.kafka.topic, + fromBeginning: true, + }); + logger.info('Running consumer'); + await consumer.run({ + autoCommit: true, + autoCommitThreshold: 2, + partitionsConsumedConcurrently: config.kafka.concurrency, + eachMessage({ message }) { + const stopTimer = processTime.startTimer(); + return processMessage({ + message, + logger, + processor, + writer, + }) + .catch((error) => { + errors.inc(); + return Promise.reject(error); + }) + .finally(() => { + stopTimer(); + }); + }, + }); + logger.info('Kafka is ready'); + status = Status.Ready; + } + + const processor = createProcessor({ logger }); + const writer = createWriter({ + clickhouse: config.clickhouse, + }); + + let status: Status = Status.Waiting; + + return { + readiness() { + return status === Status.Ready; + }, + start, + stop, + }; +} + +async function processMessage({ + processor, + writer, + message, + logger, +}: { + processor: ReturnType; + writer: ReturnType; + message: KafkaMessage; + logger: FastifyLoggerInstance; +}) { + reportMessageBytes.observe(message.value!.byteLength); + // Decompress and parse the message to get a list of reports + const rawReports: RawReport[] = JSON.parse( + (await decompress(message.value!)).toString() + ); + + const { operations, registryRecords } = await processor.processReports( + rawReports + ); + + try { + // .then and .catch looks weird but async/await with try/catch and Promise.all is even weirder + await Promise.all([ + writer + .writeRegistry(registryRecords) + .then((value) => { + ingestedOperationRegistryWrites.inc(registryRecords.length); + return Promise.resolve(value); + }) + .catch((error) => { + ingestedOperationRegistryFailures.inc(registryRecords.length); + return Promise.reject(error); + }), + writer + .writeOperations(operations) + .then((value) => { + ingestedOperationsWrites.inc(operations.length); + return 
Promise.resolve(value); + }) + .catch((error) => { + ingestedOperationsFailures.inc(operations.length); + // We want to retry the kafka message only if the write to operations_new table fails. + // Why? Because if we retry the message for operation_registry, we will have duplicate. + // One write could succeed, the other one could fail. + // Let's stick to the operations_new table for now. + error[retryOnFailureSymbol] = true; + return Promise.reject(error); + }), + ]); + } catch (error) { + logger.error(error); + + if (shouldRetryOnFailure(error)) { + throw error; + } + } +} diff --git a/packages/services/usage-ingestor/src/metrics.ts b/packages/services/usage-ingestor/src/metrics.ts new file mode 100644 index 000000000..7a9ef791b --- /dev/null +++ b/packages/services/usage-ingestor/src/metrics.ts @@ -0,0 +1,61 @@ +import { metrics } from '@hive/service-common'; + +export const normalizeCacheMisses = new metrics.Counter({ + name: 'usage_ingestor_normalize_cache_misses', + help: 'Number of cache misses when normalizing operations', +}); + +export const schemaCoordinatesSize = new metrics.Summary({ + name: 'usage_ingestor_schema_coordinates_size', + help: 'Size of schema coordinates', +}); + +export const totalOperations = new metrics.Counter({ + name: 'usage_ingestor_operations_total', + help: 'Number of raw operations received by usage ingestor service', +}); + +export const processTime = new metrics.Summary({ + name: 'usage_ingestor_process_time', + help: 'Time spent processing and writing reports', +}); + +export const errors = new metrics.Counter({ + name: 'usage_ingestor_errors', + help: 'Number of errors occurred during processing and writing reports', +}); + +export const reportMessageBytes = new metrics.Summary({ + name: 'usage_ingestor_report_message_bytes', + help: 'Size (in bytes) of a "usage_reports" topic message received by ingestor service', +}); + +export const reportSize = new metrics.Summary({ + name: 'usage_ingestor_report_size', + help: 'Number 
of operations per report received by ingestor service', +}); + +export const reportMessageSize = new metrics.Summary({ + name: 'usage_ingestor_report_message_size', + help: 'Number of reports in the "usage_reports" message received by ingestor service', +}); + +export const ingestedOperationsWrites = new metrics.Counter({ + name: 'usage_ingested_operation_writes', + help: 'Number of successfully ingested operations', +}); + +export const ingestedOperationsFailures = new metrics.Counter({ + name: 'usage_ingested_operation_failures', + help: 'Number of failed to ingest operations', +}); + +export const ingestedOperationRegistryWrites = new metrics.Counter({ + name: 'usage_ingested_operation_registry_writes', + help: 'Number of successfully ingested registry records', +}); + +export const ingestedOperationRegistryFailures = new metrics.Counter({ + name: 'usage_ingested_operation_registry_failures', + help: 'Number of failed to ingest registry records', +}); diff --git a/packages/services/usage-ingestor/src/processor.ts b/packages/services/usage-ingestor/src/processor.ts new file mode 100644 index 000000000..56d0bab1b --- /dev/null +++ b/packages/services/usage-ingestor/src/processor.ts @@ -0,0 +1,177 @@ +import { normalizeOperation as coreNormalizeOperation } from '@graphql-hive/core'; +import { Kind, parse } from 'graphql'; +import LRU from 'tiny-lru'; +import { cache } from './helpers'; +import { + reportSize, + totalOperations, + reportMessageSize, + normalizeCacheMisses, + schemaCoordinatesSize, +} from './metrics'; +import { stringifyOperation, stringifyRegistryRecord } from './serializer'; + +import type { FastifyLoggerInstance } from '@hive/service-common'; +import type { + RawReport, + RawOperation, + RawOperationMap, + RawOperationMapRecord, + ProcessedOperation, +} from '@hive/usage-common'; +import type { + DefinitionNode, + DocumentNode, + OperationDefinitionNode, + OperationTypeNode, +} from 'graphql'; + +type NormalizeFunction = (arg: 
RawOperationMapRecord) => { + key: string; + value: { + type: OperationTypeNode; + result: string; + }; +}; + +const DAY_IN_MS = 86_400_000; + +export function createProcessor(config: { logger: FastifyLoggerInstance }) { + const { logger } = config; + const normalize = cache( + normalizeOperation, + cacheOperationKey, + LRU<{ + type: OperationTypeNode; + result: string; + }>(10_000, 1_800_000 /* 30 minutes */) + ); + + return { + async processReports(rawReports: RawReport[]) { + // Each report has `size` property that tells us the number of operations + const sizeOfAllReports = rawReports.reduce((acc, r) => acc + r.size, 0); + reportMessageSize.observe(sizeOfAllReports); + totalOperations.inc(sizeOfAllReports); + + logger.info( + `Processing (reports=%s, operations=%s)`, + rawReports.length, + sizeOfAllReports + ); + + // We do it to collect unique operations for the registry table + const processedRegistryKeys = new Set(); + const serializedOperations: string[] = []; + const serializedRegistryRecords: string[] = []; + + for (const rawReport of rawReports) { + logger.info(`Processing report (id=%s)`, rawReport.id ?? 
'missing'); + reportSize.observe(rawReport.size); + + for (const rawOperation of rawReport.operations) { + const processedOperation = processSingleOperation( + rawOperation, + rawReport.map, + rawReport.target, + normalize + ); + + serializedOperations.push(stringifyOperation(processedOperation)); + + const operationKey = `${processedOperation.operationHash}-${processedOperation.target}`; + + if (!processedRegistryKeys.has(operationKey)) { + processedRegistryKeys.add(operationKey); + serializedRegistryRecords.push( + stringifyRegistryRecord({ + target: processedOperation.target, + hash: processedOperation.operationHash, + name: processedOperation.operationName, + body: processedOperation.document, + operation: processedOperation.operationType, + inserted_at: processedOperation.timestamp, + }) + ); + } + } + } + + return { + operations: serializedOperations, + registryRecords: serializedRegistryRecords, + }; + }, + }; +} + +function processSingleOperation( + operation: RawOperation, + operationMap: RawOperationMap, + target: string, + normalize: NormalizeFunction +): ProcessedOperation { + const operationMapRecord = operationMap[operation.operationMapKey]; + const { operationName, fields } = operationMapRecord; + const { execution, metadata } = operation; + + const { key: hash, value: normalized } = normalize(operationMapRecord)!; + const operationHash = normalized.result ? hash : 'unknown'; + + const unique_fields = new Set(); + + for (const field of fields) { + unique_fields.add(field); + // `Query.foo` -> `Query` + const at = field.indexOf('.'); + if (at > -1) { + unique_fields.add(field.substring(0, at)); + } + } + + schemaCoordinatesSize.observe(unique_fields.size); + + const timestamp = + typeof operation.timestamp === 'string' + ? 
parseInt(operation.timestamp, 10) + : operation.timestamp; + + return { + document: normalized.result, + timestamp: timestamp, + expiresAt: operation.expiresAt || timestamp + 30 * DAY_IN_MS, + operationType: normalized.type, + fields: Array.from(unique_fields.keys()), + target, + execution, + metadata, + operationHash, + operationName, + }; +} + +function isOperationDef(def: DefinitionNode): def is OperationDefinitionNode { + return def.kind === Kind.OPERATION_DEFINITION; +} + +function getOperationType(operation: DocumentNode): OperationTypeNode { + return operation.definitions.find(isOperationDef)!.operation; +} + +function normalizeOperation(operation: RawOperationMapRecord) { + normalizeCacheMisses.inc(); + const parsed = parse(operation.operation); + + return { + type: getOperationType(parsed), + result: coreNormalizeOperation({ + document: parsed, + hideLiterals: true, + removeAliases: true, + }), + }; +} + +function cacheOperationKey(operation: RawOperationMapRecord) { + return operation.key; +} diff --git a/packages/services/usage-ingestor/src/serializer.ts b/packages/services/usage-ingestor/src/serializer.ts new file mode 100644 index 000000000..06ad78f40 --- /dev/null +++ b/packages/services/usage-ingestor/src/serializer.ts @@ -0,0 +1,121 @@ +import * as dateFnsTz from 'date-fns-tz'; +import LRU from 'tiny-lru'; +import { cache } from './helpers'; +import type { + ProcessedRegistryRecord, + ProcessedOperation, +} from '@hive/usage-common'; + +const delimiter = '\n'; +const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone; + +function formatDate(date: number): string { + return dateFnsTz.formatInTimeZone( + dateFnsTz.zonedTimeToUtc(date, timezone), + 'UTC', + 'yyyy-MM-dd HH:mm:ss' + ); +} + +function dateCacheKey(date: number): string { + return String(Math.floor(date / 1000) * 1000); +} + +const cachedFormatDate = cache(formatDate, dateCacheKey, LRU(50_000)); + +export const operationsOrder = [ + 'target', + 'timestamp', + 'expires_at', + 
'hash', + 'ok', + 'errors', + 'duration', + 'schema', + 'client_name', + 'client_version', +] as const; + +export const registryOrder = [ + 'target', + 'hash', + 'name', + 'body', + 'operation', + 'inserted_at', +] as const; + +export function joinIntoSingleMessage(items: string[]): string { + return items.join(delimiter); +} + +type KeysOfArray = T extends readonly (infer U)[] + ? U + : never; + +// Important, it has to be in the same order as columns in the table +export function stringifyOperation(operation: ProcessedOperation): string { + const mapper: Record, any> = { + target: castValue(operation.target), + timestamp: castDate(operation.timestamp), + expires_at: castDate(operation.expiresAt), + hash: castValue(operation.operationHash), + ok: castValue(operation.execution.ok), + errors: castValue(operation.execution.errorsTotal), + duration: castValue(operation.execution.duration), + schema: castValue(operation.fields), + client_name: castValue(operation.metadata?.client?.name), + client_version: castValue(operation.metadata?.client?.version), + }; + return Object.values(mapper).join(','); +} + +export function stringifyRegistryRecord( + record: ProcessedRegistryRecord +): string { + const mapper: Record, any> = { + target: castValue(record.target), + hash: castValue(record.hash), + name: castValue(record.name), + body: castValue(record.body), + operation: castValue(record.operation), + inserted_at: castDate(record.inserted_at), + }; + + return Object.values(mapper).join(','); +} + +function castDate(date: number): string { + return cachedFormatDate(date).value; +} + +function castValue(value: boolean): number; +function castValue(value: string): string; +function castValue(value: number): number; +function castValue(value: any[]): string; +function castValue(value?: any): string; +function castValue(value: undefined): string; +function castValue(value?: any) { + if (typeof value === 'boolean') { + return castValue(value ? 
1 : 0); + } + + if (typeof value === 'string') { + // According to https://datatracker.ietf.org/doc/html/rfc4180 + // if double-quotes are used to enclose fields, + // then a double-quote appearing inside a field + // must be escaped by preceding it with another double quote + return `"${value.replace(/"/g, '""')}"`; + } + + if (typeof value === 'number') { + return value; + } + + if (Array.isArray(value)) { + return `"[${value.map((val) => `'${val}'`).join(',')}]"`; + } + + return '\\N'; // NULL is \N + // Yes, it's ᴺᵁᴸᴸ not NULL :) This is what JSONStringsEachRow does for NULLs +} diff --git a/packages/services/usage-ingestor/src/writer.ts b/packages/services/usage-ingestor/src/writer.ts new file mode 100644 index 000000000..fb1c0a408 --- /dev/null +++ b/packages/services/usage-ingestor/src/writer.ts @@ -0,0 +1,120 @@ +import * as Sentry from '@sentry/node'; +import { got } from 'got'; +import Agent from 'agentkeepalive'; +import { compress } from '@hive/usage-common'; +import { + operationsOrder, + registryOrder, + joinIntoSingleMessage, +} from './serializer'; + +export interface ClickHouseConfig { + protocol: string; + host: string; + port: number; + username: string; + password: string; +} + +const operationsFields = operationsOrder.join(', '); +const registryFields = registryOrder.join(', '); + +const agentConfig: Agent.HttpOptions = { + // Keep sockets around in a pool to be used by other requests in the future + keepAlive: true, + // Sets the working socket to timeout after N ms of inactivity on the working socket + timeout: 60_000, + // Sets the free socket to timeout after N ms of inactivity on the free socket + freeSocketTimeout: 30_000, + // Sets the socket active time to live + socketActiveTTL: 60_000, + maxSockets: 35, + maxFreeSockets: 10, + scheduling: 'lifo', +}; + +export function createWriter({ clickhouse }: { clickhouse: ClickHouseConfig }) { + const httpAgent = new Agent(agentConfig); + const httpsAgent = new Agent.HttpsAgent(agentConfig); + + 
const agents = { + http: httpAgent, + https: httpsAgent, + }; + + return { + async writeOperations(operations: string[]) { + const csv = joinIntoSingleMessage(operations); + + await writeCsv( + clickhouse, + agents, + `INSERT INTO operations_new (${operationsFields}) FORMAT CSV`, + await compress(csv) + ); + }, + async writeRegistry(records: string[]) { + const csv = joinIntoSingleMessage(records); + await writeCsv( + clickhouse, + agents, + `INSERT INTO operations_registry (${registryFields}) FORMAT CSV`, + await compress(csv) + ); + }, + }; +} + +async function writeCsv( + config: ClickHouseConfig, + agents: { + http: Agent; + https: Agent.HttpsAgent; + }, + query: string, + body: Buffer +) { + return got + .post(`${config.protocol ?? 'https'}://${config.host}:${config.port}`, { + body, + searchParams: { + query, + }, + username: config.username, + password: config.password, + headers: { + Accept: 'application/json', + 'Content-Type': 'text/csv', + 'Content-Encoding': 'gzip', + }, + retry: { + calculateDelay(info) { + if (info.attemptCount >= 5) { + // After 5 retries, stop. + return 0; + } + + return info.attemptCount * 250; + }, + }, + timeout: { + lookup: 2000, + connect: 2000, + secureConnect: 2000, + request: 30_000, + }, + agent: { + http: agents.http, + https: agents.https, + }, + }) + .catch((error) => { + Sentry.captureException(error, { + level: Sentry.Severity.Critical, + extra: { + query, + }, + }); + return Promise.reject(error); + }); +} diff --git a/packages/services/usage-ingestor/tsconfig.json b/packages/services/usage-ingestor/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/usage-ingestor/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." 
+ }, + "files": ["src/index.ts"] +} diff --git a/packages/services/usage/.env.template b/packages/services/usage/.env.template new file mode 100644 index 000000000..ac38ecd63 --- /dev/null +++ b/packages/services/usage/.env.template @@ -0,0 +1,8 @@ +TOKENS_ENDPOINT="http://localhost:6001" +KAFKA_CONNECTION_MODE="docker" +KAFKA_BROKER="localhost:9092" +KAFKA_BUFFER_SIZE="10" +KAFKA_BUFFER_INTERVAL="5000" +KAFKA_BUFFER_DYNAMIC="true" +PORT=4001 +RATE_LIMIT_ENDPOINT="http://localhost:4012" \ No newline at end of file diff --git a/packages/services/usage/.gitignore b/packages/services/usage/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/usage/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/usage/LICENSE b/packages/services/usage/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/usage/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/services/usage/__tests__/buffer-estimator.spec.ts b/packages/services/usage/__tests__/buffer-estimator.spec.ts new file mode 100644 index 000000000..1f5ac2697 --- /dev/null +++ b/packages/services/usage/__tests__/buffer-estimator.spec.ts @@ -0,0 +1,148 @@ +import { createEstimator } from '../src/buffer'; + +const eventHubLimitInBytes = 900_000; +const bufferSize = 1200; +const defaultBytesPerUnit = eventHubLimitInBytes / bufferSize; + +function waitFor(time: number) { + return new Promise((resolve) => setTimeout(resolve, time)); +} + +test('default estimation should be a ratio of max size and the limit in bytes', () => { + const estimator = createEstimator({ + logger: console as any, + defaultBytesPerUnit, + resetAfter: 5000, + increaseBy() { + return 0; + }, + }); + + expect(estimator.estimate(bufferSize)).toBeCloseTo(eventHubLimitInBytes); +}); + +test('teach estimator that 1 op takes 100 bytes', () => { + const estimator = createEstimator({ + logger: console as any, + defaultBytesPerUnit, + resetAfter: 5000, + increaseBy() { + return 0; + }, + }); + + estimator.teach({ + bytes: 1000, + operations: 10, + }); + + expect(estimator.estimate(10)).toBeCloseTo(1000); +}); + +test('increasing the defaultBytesPerUnit', () => { + const estimator = createEstimator({ + defaultBytesPerUnit, + resetAfter: 5000, + logger: { + info: jest.fn(), + } as any, + increaseBy() { + return 0.25; + }, + }); + + expect(estimator.estimate(bufferSize)).toBeCloseTo(eventHubLimitInBytes); + + estimator.overflowed('test'); + + expect(estimator.estimate(bufferSize)).toBeCloseTo( + 1.25 * eventHubLimitInBytes + ); +}); + +test('increasing the defaultBytesPerUnit should not go over 50% of 
original estimation', () => { + const estimator = createEstimator({ + defaultBytesPerUnit, + resetAfter: 5000, + logger: { + info: jest.fn(), + } as any, + increaseBy() { + return 0.6; + }, + }); + + expect(estimator.estimate(bufferSize)).toBeCloseTo(eventHubLimitInBytes); + + estimator.overflowed('test'); + + expect(estimator.estimate(bufferSize)).toBeCloseTo( + 1.5 * eventHubLimitInBytes + ); +}); + +test('teach estimator multiple times', () => { + const estimator = createEstimator({ + defaultBytesPerUnit, + resetAfter: 5000, + logger: { + info: jest.fn(), + } as any, + increaseBy() { + return 0; + }, + }); + + estimator.teach({ + bytes: 1_000, + operations: 10, + }); + + estimator.teach({ + bytes: 500, + operations: 5, + }); + + estimator.teach({ + bytes: 10_000, + operations: 100, + }); + + expect(estimator.estimate(10)).toBeCloseTo(1000); +}); + +test('reset after N milliseconds', async () => { + const estimator = createEstimator({ + defaultBytesPerUnit, + resetAfter: 100, + logger: { + info: jest.fn(), + } as any, + increaseBy() { + return 0; + }, + }); + + estimator.teach({ + bytes: 1_000, + operations: 10, + }); + + expect(estimator.estimate(10)).toBeCloseTo(1000); + + await waitFor(100); + + // reached 15 ms, so reset + estimator.teach({ + bytes: 500, + operations: 10, + }); + + // teaching from start + estimator.teach({ + bytes: 500, + operations: 10, + }); + + expect(estimator.estimate(10)).toBeCloseTo(500); +}); diff --git a/packages/services/usage/__tests__/buffer.spec.ts b/packages/services/usage/__tests__/buffer.spec.ts new file mode 100644 index 000000000..dd87c513e --- /dev/null +++ b/packages/services/usage/__tests__/buffer.spec.ts @@ -0,0 +1,320 @@ +import { createKVBuffer, calculateChunkSize } from '../src/buffer'; + +function waitFor(time: number) { + return new Promise((resolve) => setTimeout(resolve, time)); +} + +const eventHubLimitInBytes = 900_000; +const bufferSize = 1200; +const defaultBytesPerUnit = eventHubLimitInBytes / bufferSize; + 
+test('increase the defaultBytesPerOperation estimation by 5% when over 100 calls were made and 10% of them failed', async () => { + const logger = { + // info: jest.fn(console.info), + // error: jest.fn(console.error), + info: jest.fn(), + error: jest.fn(), + }; + const flush = jest.fn(); + const onRetry = jest.fn(); + const interval = 200; + const size = { + successful: bufferSize / 2, + overflow: bufferSize, + error: bufferSize / 2 - 1, + }; + const bytesPerUnit = eventHubLimitInBytes / size.successful; + const buffer = createKVBuffer<{ + id: string; + size: number; + }>({ + logger: logger as any, + size: size.successful, + interval, + limitInBytes: eventHubLimitInBytes, + useEstimator: true, + onRetry, + calculateReportSize(report) { + return report.size; + }, + split(report, numOfChunks) { + const reports: Array<{ + id: string; + size: number; + }> = []; + for (let chunkIndex = 0; chunkIndex < numOfChunks; chunkIndex++) { + reports.push({ + id: `${report.id}-chunk-${chunkIndex}`, + size: calculateChunkSize(report.size, numOfChunks, chunkIndex), + }); + } + return reports; + }, + async sender(reports, _bytes, _batchId, validateSize) { + const receivedSize = reports.reduce( + (sum, report) => report.size + sum, + 0 + ); + flush(reports.map((r) => r.id).join(',')); + if (receivedSize === size.error) { + validateSize(size.error * bytesPerUnit); + throw new Error('Over the size limit!'); + } else { + validateSize(receivedSize * bytesPerUnit); + } + }, + }); + + buffer.start(); + + // make 100 calls + for (let i = 0; i < 100; i++) { + buffer.add({ + id: `good - ${i}`, + size: size.successful, + }); + } + + // Interval passes + await waitFor(interval + 50); + + expect(logger.info).not.toHaveBeenCalledWith( + expect.stringContaining('Increasing default bytes per unit') + ); + + expect(flush).toBeCalledTimes(100); + + // make 10 calls that fail + for (let i = 0; i < 12; i++) { + buffer.add({ + id: `bad - ${i}`, + size: size.error, + }); + } + + // Interval passes + 
await waitFor(interval + 50); + + expect(flush).toBeCalledTimes(112); + + expect(logger.info).not.toHaveBeenCalledWith( + expect.stringContaining('Increasing default bytes per unit') + ); + + await waitFor(1000); + + // make 1 call that fails + buffer.add({ + id: 'decider', + size: size.error, + }); + + // Interval passes + await waitFor(interval + 50); + + const newDefault = bytesPerUnit * 1.05; + expect(logger.info).toHaveBeenCalledWith( + expect.stringContaining( + 'Increasing default bytes per unit (ratio=%s, new=%s)' + ), + 0.05, + newDefault + ); + const flushedTimes = 114; + expect(flush).toHaveBeenCalledTimes(flushedTimes); + + // Buffer should split into two reports because the defaultBytesPerUnit estimation is increased + // which means that the buffer can hold less operations than before + buffer.add({ + id: 'new-reality', + size: Math.ceil(eventHubLimitInBytes / newDefault) + 1, + }); + // We reached the limit of bytes (according to the new estimations) + // No need to wait for the interval to pass + await waitFor(interval + 50); + expect(flush).toHaveBeenCalledTimes(flushedTimes + 1); + + await buffer.stop(); + expect(flush).toHaveBeenCalledTimes(flushedTimes + 1); +}); + +test('buffer should split the report into multiple reports when the estimated size is greater than the limit', async () => { + const logger = { + info: jest.fn(), + error: jest.fn(), + }; + const flush = jest.fn(); + const interval = 200; + const buffer = createKVBuffer<{ + id: string; + size: number; + operations: number[]; + }>({ + logger: logger as any, + size: bufferSize, + interval, + limitInBytes: eventHubLimitInBytes, + useEstimator: true, + calculateReportSize(report) { + return report.size; + }, + onRetry() {}, + split(report, numOfChunks) { + const reports: Array<{ + id: string; + size: number; + operations: number[]; + }> = []; + let endedAt = 0; + for (let i = 0; i < numOfChunks; i++) { + const chunkSize = calculateChunkSize(report.size, numOfChunks, i); + const start = 
endedAt; + const end = start + chunkSize; + endedAt = end; + + const operations = report.operations.slice(start, end); + reports.push({ + id: `${report.id}-${i}`, + size: operations.length, + operations, + }); + } + + return reports; + }, + async sender(reports, _bytes, _batchId, validateSize) { + const receivedSize = reports.reduce( + (sum, report) => report.size + sum, + 0 + ); + flush(reports.map((r) => r.id).join(','), receivedSize); + validateSize(receivedSize * defaultBytesPerUnit); + }, + }); + + buffer.start(); + + const bigBatchSize = bufferSize + 20; + // add a report bigger than the limit + buffer.add({ + id: 'big', + size: bigBatchSize, + operations: new Array(bigBatchSize).fill(0).map((_, i) => i), + }); + + // Interval passes + await waitFor(interval + 50); + + // Buffer should flush two reports, the big report splitted in half + expect(flush).toHaveBeenNthCalledWith(1, 'big-0', bigBatchSize / 2); + expect(flush).toHaveBeenNthCalledWith(2, 'big-1', bigBatchSize / 2); + + const biggerBatchSize = bufferSize + bufferSize + 30; + buffer.add({ + id: 'bigger', + size: biggerBatchSize, + operations: new Array(biggerBatchSize).fill(0).map((_, i) => i), + }); + + // Interval passes + await waitFor(interval + 50); + + expect(flush).toHaveBeenNthCalledWith(3, 'bigger-0', biggerBatchSize / 3); + expect(flush).toHaveBeenNthCalledWith(4, 'bigger-1', biggerBatchSize / 3); + expect(flush).toHaveBeenNthCalledWith(5, 'bigger-2', biggerBatchSize / 3); + + await buffer.stop(); +}); + +test('buffer create two chunks out of one buffer when actual buffer size is too big', async () => { + const logger = { + info: jest.fn(), + error: jest.fn(), + // info: jest.fn(console.info), + // error: jest.fn(console.error), + }; + const flush = jest.fn(); + const split = jest.fn((report, numOfChunks) => { + const reports: Array<{ + id: string; + size: number; + operations: number[]; + }> = []; + let endedAt = 0; + for (let i = 0; i < numOfChunks; i++) { + const chunkSize = 
calculateChunkSize(report.size, numOfChunks, i); + const start = endedAt; + const end = start + chunkSize; + endedAt = end; + + const operations = report.operations.slice(start, end); + reports.push({ + id: `${report.id}-${i}`, + size: operations.length, + operations, + }); + } + + return reports; + }); + const onRetry = jest.fn(); + const interval = 200; + + const buffer = createKVBuffer<{ + id: string; + size: number; + operations: number[]; + }>({ + logger: logger as any, + size: bufferSize, + interval, + limitInBytes: eventHubLimitInBytes, + useEstimator: true, + calculateReportSize(report) { + return report.size; + }, + onRetry, + split, + async sender(reports, _bytes, batchId, validateSize) { + const receivedSize = reports.reduce( + (sum, report) => report.size + sum, + 0 + ); + validateSize(receivedSize * 2 * defaultBytesPerUnit); + flush(reports.map((r) => r.id).join(','), receivedSize, batchId); + }, + }); + + buffer.start(); + + // add a report bigger than the limit + buffer.add({ + id: 'big', + size: bufferSize, + operations: new Array(bufferSize).fill(0).map((_, i) => i), + }); + + // Interval passes + await waitFor(interval + 50); + + // Reports should be split as well, just in case we have one or few big reports. 
+ // In our case it should be called once (1 report split into 2 reports) + expect(split).toHaveBeenCalledTimes(1); + // Flush should be retried because the buffer size was too big (twice as big) + expect(onRetry).toBeCalledTimes(1); + // Buffer should flush two reports, the big report splitted in half + expect(flush).toHaveBeenNthCalledWith( + 1, + 'big-0', + bufferSize / 2, + expect.stringContaining('--retry-chunk-0') + ); + expect(flush).toHaveBeenNthCalledWith( + 2, + 'big-1', + bufferSize / 2, + expect.stringContaining('--retry-chunk-1') + ); + + await buffer.stop(); +}); diff --git a/packages/services/usage/__tests__/usage.spec.ts b/packages/services/usage/__tests__/usage.spec.ts new file mode 100644 index 000000000..72f033843 --- /dev/null +++ b/packages/services/usage/__tests__/usage.spec.ts @@ -0,0 +1,91 @@ +import { splitReport } from '../src/usage'; +import type { RawReport } from '@hive/usage-common'; + +test('should split report based on operation map length', () => { + const now = Date.now(); + const op1 = { + operationMapKey: 'op1', + timestamp: now, + execution: { + ok: true, + errorsTotal: 0, + duration: 100, + }, + metadata: { + client: { + name: 'test-client', + version: 'test-version', + }, + }, + }; + const op2 = { + operationMapKey: 'op2', + timestamp: now, + execution: { + ok: true, + errorsTotal: 0, + duration: 100, + }, + metadata: { + client: { + name: 'test-client', + version: 'test-version', + }, + }, + }; + const op3 = { + operationMapKey: 'op3', + timestamp: now, + execution: { + ok: true, + errorsTotal: 0, + duration: 100, + }, + metadata: { + client: { + name: 'test-client', + version: 'test-version', + }, + }, + }; + const report: RawReport = { + id: 'test-id', + size: 5, + target: 'test-target', + map: { + op1: { + key: 'op1', + operation: 'test-operation-1', + fields: ['test-field-1'], + }, + op2: { + key: 'op2', + operation: 'test-operation-2', + fields: ['test-field-2'], + }, + op3: { + key: 'op3', + operation: 
'test-operation-3', + fields: ['test-field-3'], + }, + }, + operations: [op1, op1, op2, op3, op3], + }; + + const reports = splitReport(report, 3); + expect(reports).toHaveLength(3); + + expect(Object.keys(reports[0].map)).toEqual(['op1']); + expect(reports[0].size).toEqual(2); + expect(reports[0].operations[0].operationMapKey).toEqual('op1'); + expect(reports[0].operations[1].operationMapKey).toEqual('op1'); + + expect(Object.keys(reports[1].map)).toEqual(['op2']); + expect(reports[1].size).toEqual(1); + expect(reports[1].operations[0].operationMapKey).toEqual('op2'); + + expect(Object.keys(reports[2].map)).toEqual(['op3']); + expect(reports[2].size).toEqual(2); + expect(reports[2].operations[0].operationMapKey).toEqual('op3'); + expect(reports[2].operations[1].operationMapKey).toEqual('op3'); +}); diff --git a/packages/services/usage/__tests__/validation.spec.ts b/packages/services/usage/__tests__/validation.spec.ts new file mode 100644 index 000000000..4a21b43d8 --- /dev/null +++ b/packages/services/usage/__tests__/validation.spec.ts @@ -0,0 +1,404 @@ +import { + validateOperation, + validateOperationMapRecord, +} from '../src/validation'; + +test('correct operation should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + timestamp: Date.now(), + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: {}, + }, + { + a: { + operation: 'query foo { foo }', + operationName: 'foo', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with missing timestamp should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + }, + { + a: { + operation: 'query foo { foo }', + operationName: 'foo', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with missing operationName should be valid', () => { + expect( + validateOperation( + { + 
operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: {}, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with missing metadata should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: {}, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with empty metadata.client should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: { + client: {}, + }, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with empty metadata.client.name should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: { + client: { + name: 'asd', + }, + }, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with empty metadata.client.version should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 0, + }, + metadata: { + client: { + version: 'asd', + }, + }, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with empty list in metadata.client.errors should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 1, + errors: [], + } as any, + }, + { + a: { + operation: 
'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test('operation with empty metadata.client.errors.path should be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 1, + errors: [ + { + message: 'asd', + }, + ], + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual({ valid: true }); +}); + +test.skip('operation with empty metadata.client.errors.message should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 12405, + errorsTotal: 1, + errors: [{} as any], + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty in execution should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: {}, + } as any, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty in execution.ok should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + duration: 12405, + errorsTotal: 1, + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty execution.duration should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + errorsTotal: 1, + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty execution.errorsTotal should NOT be 
valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 1245, + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with non-boolean execution.ok should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: 1, + duration: 1245, + errorsTotal: 0, + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with non-number execution.duration should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: '1234', + errorsTotal: 0, + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with non-number execution.errorsTotal should NOT be valid', () => { + expect( + validateOperation( + { + operationMapKey: 'a', + execution: { + ok: true, + duration: 1234, + errorsTotal: '0', + } as any, + }, + { + a: { + operation: 'query foo { foo }', + fields: ['Query', 'Query.foo'], + }, + } + ) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +// + +test('operation with empty operation should NOT be valid', () => { + expect( + validateOperationMapRecord({ + fields: ['Query', 'Query.foo'], + } as any) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty in fields should NOT be valid', () => { + expect( + validateOperationMapRecord({ + operation: 'query foo { foo }', + } as any) + ).toEqual(expect.objectContaining({ valid: false })); +}); + +test('operation with empty fields should NOT be valid', () => { + expect( + validateOperationMapRecord({ operation: 'query foo { foo }', 
fields: [] }) + ).toEqual(expect.objectContaining({ valid: false })); +}); diff --git a/packages/services/usage/package.json b/packages/services/usage/package.json new file mode 100644 index 000000000..e351928e0 --- /dev/null +++ b/packages/services/usage/package.json @@ -0,0 +1,34 @@ +{ + "name": "@hive/usage", + "type": "module", + "private": true, + "version": "0.18.0", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "cross-undici-fetch": "0.4.3", + "@trpc/client": "9.23.2", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "ajv": "8.11.0", + "dotenv": "10.0.0", + "got": "12.0.4", + "kafkajs": "2.0.0", + "tiny-lru": "8.0.2" + }, + "devDependencies": { + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/usage/src/buffer.ts b/packages/services/usage/src/buffer.ts new file mode 100644 index 000000000..9e854f1e5 --- /dev/null +++ b/packages/services/usage/src/buffer.ts @@ -0,0 +1,347 @@ +import { randomUUID } from 'crypto'; +import type { FastifyLoggerInstance } from '@hive/service-common'; + +export class BufferTooBigError extends Error { + constructor(public bytes: number) { + super(`Buffer too big: ${bytes}`); + } +} + +export function isBufferTooBigError( + error: unknown +): error is BufferTooBigError { + return error instanceof BufferTooBigError; +} + +/** + * @param totalLength Number of all items in a list + * @param numOfChunks How many chunks to split the list into + * @param chunkIndex The index of the chunk to split the list into (0-based) + */ +export function calculateChunkSize( + totalLength: number, + numOfChunks: number, + chunkIndex: number +): number { + // If 
x % n == 0 then the minimum, difference is 0 and all numbers are x / n + if (totalLength % numOfChunks == 0) { + return totalLength / numOfChunks; + } + // upto n-(x % n) the values will be x / n + // after that the values will be x / n + 1 + const zp = numOfChunks - (totalLength % numOfChunks); + const pp = Math.floor(totalLength / numOfChunks); + + if (chunkIndex >= zp) { + return pp + 1; + } else { + return pp; + } +} + +export function createEstimator(config: { + defaultBytesPerUnit: number; + /** + * Reset estimations after this many milliseconds. + */ + resetAfter: number; + /** + * Increase the default bytes per operation by 0-1. + */ + increaseBy: (info: { calls: number; overflows: number }) => number; + logger: FastifyLoggerInstance; +}) { + let calls = 0; + let overflows = 0; + let sumOfBytes = 0; + let sumOfReportSizes = 0; + let lastReset = Date.now(); + let defaultBytesPerUnit = config.defaultBytesPerUnit; + + function reset(increaseBy: number) { + config.logger.info('Resetting buffer estimator'); + sumOfBytes = 0; + sumOfReportSizes = 0; + calls = 0; + overflows = 0; + lastReset = Date.now(); + + if (increaseBy) { + // increase the default estimation by X ratio + // but don't go higher than 50% of original estimation + defaultBytesPerUnit = Math.min( + (1 + increaseBy) * defaultBytesPerUnit, + 1.5 * config.defaultBytesPerUnit + ); + config.logger.info( + 'Increasing default bytes per unit (ratio=%s, new=%s)', + increaseBy, + defaultBytesPerUnit + ); + } + } + + return { + getDefaultBytesPerUnit() { + return defaultBytesPerUnit; + }, + /** + * Estimate the size in bytes based on the number of units + */ + estimate(size: number) { + if (sumOfBytes === 0 || sumOfReportSizes === 0) { + return defaultBytesPerUnit * size; + } + + return (sumOfBytes / sumOfReportSizes) * size; + }, + /** + * Increase the size of units and the size in bytes in the estimator. 
+ * It's needed for future estimations + */ + teach({ bytes, operations }: { bytes: number; operations: number }) { + calls++; + sumOfBytes += bytes; + sumOfReportSizes += operations; + + if (Date.now() - lastReset >= config.resetAfter) { + config.logger.info('Estimator reached the reset time'); + reset(0); + } + }, + overflowed(batchId: string) { + config.logger.info( + 'Payload was most likely bigger than expected (id=%s)', + batchId + ); + overflows++; + + const increaseBy = config.increaseBy({ + calls, + overflows, + }); + + if (increaseBy) { + reset(increaseBy); + } + }, + }; +} + +export function createKVBuffer(config: { + logger: FastifyLoggerInstance; + size: number; + interval: number; + limitInBytes: number; + useEstimator: boolean; + calculateReportSize(report: T): number; + split(report: T, numOfChunks: number): readonly T[]; + onRetry(reports: readonly T[]): void; + sender( + reports: readonly T[], + estimatedSizeInBytes: number, + batchId: string, + validateSize: (actualSizeInBytes: number) => void | never + ): Promise; +}) { + const { logger } = config; + let buffer: T[] = []; + let timeoutId: ReturnType | null = null; + const estimator = createEstimator({ + logger, + defaultBytesPerUnit: config.limitInBytes / config.size, + // Reset the estimator every 60s + resetAfter: 60_000, + increaseBy({ calls, overflows }) { + if (calls > 100 && overflows / calls > 0.1) { + return 0.05; + } + + return 0; + }, + }); + + function calculateBufferSize(reports: readonly T[]) { + return reports.reduce( + (sum, report) => sum + config.calculateReportSize(report), + 0 + ); + } + + function calculateBufferSizeInBytes(reports: readonly T[]) { + return estimator.estimate(calculateBufferSize(reports)); + } + + function sumOfOperationsSizeInBuffer() { + return buffer.reduce((sum, report) => sum + (report as any).size, 0); + } + + async function flushBuffer( + reports: readonly T[], + size: number, + batchId: string, + isRetry = false + ) { + logger.info( + `Flushing 
(reports=%s, bufferSize=%s, id=%s)`, + reports.length, + size, + batchId + ); + const estimatedSizeInBytes = estimator.estimate(size); + buffer = []; + await config + .sender( + reports, + estimatedSizeInBytes, + batchId, + function validateSize(bytes) { + if (!config.useEstimator) { + return; + } + + logger.info( + `Estimator (predicted=%s, actual=%s, errorRatio=%s, default=%s, id=%s)`, + estimatedSizeInBytes, + bytes, + (Math.abs(estimatedSizeInBytes - bytes) / bytes).toFixed(4), + estimator.getDefaultBytesPerUnit(), + batchId + ); + + estimator.teach({ + operations: size, + bytes, + }); + + if (bytes > config.limitInBytes) { + estimator.overflowed(batchId); + throw new BufferTooBigError(bytes); + } + } + ) + .catch((error) => { + if (!isRetry && isBufferTooBigError(error)) { + config.onRetry(reports); + logger.info( + `Retrying (reports=%s, bufferSize=%s, id=%s)`, + reports.length, + size, + batchId + ); + + const numOfChunks = Math.ceil(error.bytes / config.limitInBytes); + + // We split reports into chunks in case we have few big reports (or even single big report) + const newReports: T[] = []; + for (const report of reports) { + newReports.push(...config.split(report, numOfChunks)); + } + + const chunks: T[][] = []; + let endedAt = 0; + for (let chunkIndex = 0; chunkIndex < numOfChunks; chunkIndex++) { + const chunkSize = calculateChunkSize( + newReports.length, + numOfChunks, + chunkIndex + ); + const start = endedAt; + const end = start + chunkSize; + endedAt = end; + chunks.push(newReports.slice(start, end)); + } + + return Promise.all( + chunks.map((chunk, chunkIndex) => + flushBuffer( + chunk, + calculateBufferSize(chunk), + batchId + '--retry-chunk-' + chunkIndex, + true + ) + ) + ); + } + + return Promise.reject(error); + }); + } + + async function send(shouldSchedule = true): Promise { + if (timeoutId !== null) { + clearTimeout(timeoutId); + } + + if (buffer.length !== 0) { + const reports = buffer.slice(); + const size = calculateBufferSize(reports); 
+ const batchId = randomUUID(); + + try { + await flushBuffer(reports, size, batchId); + } catch (error) { + logger.error(error); + // the payload size was most likely too big + estimator.overflowed(batchId); + } + } + + if (shouldSchedule) { + schedule(); + } + } + + function schedule() { + timeoutId = setTimeout(() => send(true), config.interval); + } + + function add(report: T) { + if (config.useEstimator) { + const currentBufferSize = calculateBufferSizeInBytes(buffer); + const estimatedReportSize = estimator.estimate( + config.calculateReportSize(report) + ); + const estimatedBufferSize = currentBufferSize + estimatedReportSize; + + if ( + currentBufferSize >= config.limitInBytes || + estimatedBufferSize >= config.limitInBytes + ) { + send(true); + } + + if (estimatedReportSize > config.limitInBytes) { + const numOfChunks = Math.ceil( + estimatedReportSize / config.limitInBytes + ); + const reports = config.split(report, numOfChunks); + for (const report of reports) { + add(report); + } + } else { + buffer.push(report); + } + } else { + buffer.push(report); + if (sumOfOperationsSizeInBuffer() >= config.size) { + send(true); + } + } + } + + return { + add, + start() { + logger.info('Started buffer'); + schedule(); + }, + async stop() { + logger.info('Stopping buffer'); + if (timeoutId) { + clearTimeout(timeoutId); + } + await send(false); + }, + }; +} diff --git a/packages/services/usage/src/dev.ts b/packages/services/usage/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ b/packages/services/usage/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/usage/src/index.ts b/packages/services/usage/src/index.ts new file mode 100644 index 000000000..2af18a11c --- /dev/null +++ b/packages/services/usage/src/index.ts @@ -0,0 +1,181 @@ +#!/usr/bin/env node +import * as Sentry from '@sentry/node'; +import { + createServer, + startMetrics, + 
ensureEnv, + registerShutdown, +} from '@hive/service-common'; +import { createTokens } from './tokens'; +import { createUsage } from './usage'; +import { + httpRequests, + httpRequestsWithoutToken, + httpRequestsWithNonExistingToken, + httpRequestsWithNoAccess, + collectLatency, +} from './metrics'; +import type { IncomingLegacyReport, IncomingReport } from './types'; +import { createUsageRateLimit } from './rate-limit'; + +async function main() { + Sentry.init({ + serverName: 'usage', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'usage', + tracing: false, + }); + + try { + const { collect, readiness, start, stop } = createUsage({ + logger: server.log, + kafka: { + topic: 'usage_reports_v2', + buffer: { + size: ensureEnv('KAFKA_BUFFER_SIZE', 'number'), + interval: ensureEnv('KAFKA_BUFFER_INTERVAL', 'number'), + dynamic: ensureEnv('KAFKA_BUFFER_DYNAMIC', 'boolean'), + }, + connection: + ensureEnv('KAFKA_CONNECTION_MODE') == 'hosted' + ? { + mode: 'hosted', + key: ensureEnv('KAFKA_KEY'), + user: ensureEnv('KAFKA_USER'), + broker: ensureEnv('KAFKA_BROKER'), + } + : { + mode: 'docker', + broker: ensureEnv('KAFKA_BROKER'), + }, + }, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await Promise.all([stop(), server.close()]); + }, + }); + + const port = process.env.PORT || 5000; + + const tokens = createTokens({ + endpoint: ensureEnv('TOKENS_ENDPOINT'), + logger: server.log, + }); + + const rateLimit = process.env.RATE_LIMIT_ENDPOINT + ? 
createUsageRateLimit({ + endpoint: ensureEnv('RATE_LIMIT_ENDPOINT'), + logger: server.log, + }) + : null; + + server.route<{ + Body: IncomingReport | IncomingLegacyReport; + }>({ + method: 'POST', + url: '/', + async handler(req, res) { + httpRequests.inc(); + const token = req.headers['x-api-token'] as string; + + if (!token) { + res.status(400).send('Missing token'); + httpRequestsWithoutToken.inc(); + return; + } + + const tokenInfo = await tokens.fetch(token); + + if (tokens.isNotFound(tokenInfo)) { + httpRequestsWithNonExistingToken.inc(); + res.status(400).send('Missing token'); + return; + } + + // We treat collected operations as part of registry + if (tokens.isNoAccess(tokenInfo)) { + httpRequestsWithNoAccess.inc(); + server.log.info(`No access`); + res.status(403).send('No access'); + return; + } + + if ( + await rateLimit?.isRateLimited({ + id: tokenInfo.target, + type: 'operations-reporting', + token, + entityType: 'target', + }) + ) { + // TODO: We should trigger a call to update the KV in the WAF in case we want to make sure token is being blocked? + res.status(429).send(); + + return; + } + + const retentionInfo = + (await rateLimit?.getRetentionForTargetId?.(tokenInfo.target)) || + null; + + const stopTimer = collectLatency.startTimer(); + try { + await collect(req.body, tokenInfo, retentionInfo); + stopTimer(); + res.status(200).send(); + } catch (error) { + stopTimer(); + req.log.error(error, 'Failed to collect'); + Sentry.captureException(error, { + level: Sentry.Severity.Error, + }); + res.status(500).send(); + } + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(_, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(readiness() ? 
200 : 400).send(); + }, + }); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + await server.listen(port, '0.0.0.0'); + await start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/usage/src/metrics.ts b/packages/services/usage/src/metrics.ts new file mode 100644 index 000000000..60520971e --- /dev/null +++ b/packages/services/usage/src/metrics.ts @@ -0,0 +1,91 @@ +import { metrics } from '@hive/service-common'; + +export const tokenCacheHits = new metrics.Counter({ + name: 'usage_tokens_cache_hits', + help: 'Number of cache hits', +}); + +export const tokenRequests = new metrics.Counter({ + name: 'usage_tokens_requests', + help: 'Number of requests to Tokens service', +}); + +export const httpRequests = new metrics.Counter({ + name: 'usage_http_requests', + help: 'Number of http requests', +}); + +export const httpRequestsWithoutToken = new metrics.Counter({ + name: 'usage_http_requests_no_token', + help: 'Number of http requests without a token', +}); + +export const httpRequestsWithNonExistingToken = new metrics.Counter({ + name: 'usage_http_requests_invalid_token', + help: 'Number of http requests with a non existing token', +}); + +export const httpRequestsWithNoAccess = new metrics.Counter({ + name: 'usage_http_requests_no_access', + help: 'Number of http requests with a token with no access', +}); + +export const totalOperations = new metrics.Counter({ + name: 'usage_operations_total', + help: 'Number of operations received by usage service', +}); + +export const totalReports = new metrics.Counter({ + name: 'usage_reports_total', + help: 'Number of reports received by usage service', +}); + +export const totalLegacyReports = new metrics.Counter({ + name: 
'usage_reports_legacy_format_total', + help: 'Number of legacy-format reports received by usage service', +}); + +export const rawOperationWrites = new metrics.Counter({ + name: 'usage_operation_writes', + help: 'Number of raw operations successfully collected by usage service', +}); + +export const rawOperationFailures = new metrics.Gauge({ + name: 'usage_operation_failures', + help: 'Number of raw operations NOT collected by usage service', +}); + +export const invalidRawOperations = new metrics.Counter({ + name: 'usage_operations_invalid', + help: 'Number of invalid raw operations dropped by usage service', +}); + +export const collectLatency = new metrics.Summary({ + name: 'usage_raw_collect_latency', + help: 'Collect latency', +}); + +export const compressLatency = new metrics.Summary({ + name: 'usage_raw_compress_latency', + help: 'Compress latency', +}); + +export const kafkaLatency = new metrics.Summary({ + name: 'usage_raw_kafka_latency', + help: 'Kafka latency', +}); + +export const bufferFlushes = new metrics.Counter({ + name: 'usage_buffer_flushes', + help: 'Number of buffer flushes', +}); + +export const rawOperationsSize = new metrics.Summary({ + name: 'usage_raw_operation_size', + help: 'Size of a sent batch', +}); + +export const estimationError = new metrics.Summary({ + name: 'usage_size_estimation_error', + help: 'How far off the estimation was comparing to the actual size', +}); diff --git a/packages/services/usage/src/rate-limit.ts b/packages/services/usage/src/rate-limit.ts new file mode 100644 index 000000000..73189c501 --- /dev/null +++ b/packages/services/usage/src/rate-limit.ts @@ -0,0 +1,93 @@ +import { FastifyLoggerInstance } from '@hive/service-common'; +import LRU from 'tiny-lru'; +import type { + RateLimitApi, + RateLimitQueryInput, + RateLimitQueryOutput, +} from '@hive/rate-limit'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; + +export function createUsageRateLimit(config: { + 
endpoint: string | null; + logger: FastifyLoggerInstance; +}) { + const logger = config.logger; + + if (!config.endpoint) { + logger.warn( + `Usage service is not configured to use rate-limit (missing config)` + ); + + return { + async isRateLimited( + _input: RateLimitQueryInput<'checkRateLimit'> + ): Promise { + return false; + }, + }; + } + const endpoint = config.endpoint.replace(/\/$/, ''); + const rateLimit = createTRPCClient({ + url: `${endpoint}/trpc`, + fetch, + }); + const cache = LRU | null>>( + 1000, + 30_000 + ); + const retentionCache = LRU< + Promise | null> + >(1000, 30_000); + + async function fetchFreshRetentionInfo( + input: RateLimitQueryInput<'getRetention'> + ): Promise | null> { + return rateLimit.query('getRetention', input); + } + + async function fetchFreshLimitInfo( + input: RateLimitQueryInput<'checkRateLimit'> + ): Promise | null> { + return rateLimit.query('checkRateLimit', input); + } + + return { + async getRetentionForTargetId(targetId: string) { + const retentionResponse = await retentionCache.get(targetId); + + if (!retentionResponse) { + const result = fetchFreshRetentionInfo({ targetId }); + + if (result) { + retentionCache.set(targetId, result); + + return result; + } + + return null; + } + + return retentionResponse; + }, + async isRateLimited( + input: RateLimitQueryInput<'checkRateLimit'> + ): Promise { + const limitInfo = await cache.get(input.id); + + if (!limitInfo) { + const result = fetchFreshLimitInfo(input); + + if (result) { + cache.set(input.id, result); + + return result.then((r) => r !== null && r.limited); + } + + return false; + } + + return limitInfo.limited; + }, + }; +} diff --git a/packages/services/usage/src/tokens.ts b/packages/services/usage/src/tokens.ts new file mode 100644 index 000000000..cc62db0be --- /dev/null +++ b/packages/services/usage/src/tokens.ts @@ -0,0 +1,79 @@ +import { FastifyLoggerInstance } from '@hive/service-common'; +import LRU from 'tiny-lru'; + +import type { TokensApi } from 
'@hive/tokens'; +import { createTRPCClient } from '@trpc/client'; +import { fetch } from 'cross-undici-fetch'; +import { tokenCacheHits, tokenRequests } from './metrics'; + +export enum TokenStatus { + NotFound, + NoAccess, +} + +export type TokensResponse = { + organization: string; + project: string; + target: string; + scopes: readonly string[]; +}; + +type Token = TokensResponse | TokenStatus; + +export function createTokens(config: { + endpoint: string; + logger: FastifyLoggerInstance; +}) { + const endpoint = config.endpoint.replace(/\/$/, ''); + const tokens = LRU>(1000, 30_000); + const tokensApi = createTRPCClient({ + url: `${endpoint}/trpc`, + fetch, + }); + async function fetchFreshToken(token: string) { + try { + const info = await tokensApi.query('getToken', { + token, + }); + + if (info) { + const result = info.scopes.includes('target:registry:write') + ? { + target: info.target, + project: info.project, + organization: info.organization, + scopes: info.scopes, + } + : TokenStatus.NoAccess; + return result; + } else { + return TokenStatus.NotFound; + } + } catch (error) { + return TokenStatus.NotFound; + } + } + + return { + async fetch(token: string) { + tokenRequests.inc(); + const tokenInfo = await tokens.get(token); + + if (!tokenInfo) { + const result = fetchFreshToken(token); + tokens.set(token, result); + return result; + } + + tokenCacheHits.inc(); + + return tokenInfo ?? 
        TokenStatus.NotFound;
    },
    isNotFound(token: Token): token is TokenStatus.NotFound {
      return token === TokenStatus.NotFound;
    },
    isNoAccess(token: Token): token is TokenStatus.NoAccess {
      return token === TokenStatus.NoAccess;
    },
  };
}

// --- file: packages/services/usage/src/types.ts ---

// Shape of a single operation in the legacy (flat array) report format.
export interface LegacyIncomingOperation {
  timestamp?: number;
  operation: string;
  operationName?: string | null;
  fields: string[];
  execution: {
    ok: boolean;
    duration: number;
    errorsTotal: number;
    errors?: Array<{
      message: string;
      path?: string;
    }>;
  };
  metadata?: {
    client?: {
      name?: string;
      version?: string;
    };
  };
}

// A deduplicated operation document shared by many executions.
export interface OperationMapRecord {
  operation: string;
  operationName?: string | null;
  fields: string[];
}

export interface OperationMap {
  [key: string]: OperationMapRecord;
}

// Current report format: a map of unique operations plus execution records
// that reference map entries by key.
export interface IncomingReport {
  map: OperationMap;
  operations: IncomingOperation[];
}

export type IncomingLegacyReport = LegacyIncomingOperation[];

export interface IncomingOperation {
  operationMapKey: string;
  timestamp?: number;
  execution: {
    ok: boolean;
    duration: number;
    errorsTotal: number;
  };
  metadata?: {
    client?: {
      name?: string;
      version?: string;
    };
  };
}

// --- file: packages/services/usage/src/usage.ts ---

import { Kafka, CompressionTypes, logLevel, Partitioners } from 'kafkajs';
import { createHash, randomUUID } from 'crypto';
import { compress } from '@hive/usage-common';
import {
  rawOperationWrites,
  rawOperationFailures,
  rawOperationsSize,
  invalidRawOperations,
  totalOperations,
  totalReports,
  totalLegacyReports,
  kafkaLatency,
  compressLatency,
  bufferFlushes,
  estimationError,
} from './metrics';
import {
  createKVBuffer,
  calculateChunkSize,
  isBufferTooBigError,
} from './buffer';
import { validateOperation, validateOperationMapRecord } from './validation';
import type { FastifyLoggerInstance } from '@hive/service-common';
import type { RawReport, RawOperationMap } from '@hive/usage-common';
import type { IncomingReport, IncomingLegacyReport } from './types';
import type { TokensResponse } from './tokens';

const DAY_IN_MS = 86_400_000;

// Lifecycle of the usage collector: collect() only works while Ready.
enum Status {
  Waiting,
  Ready,
  Stopped,
}

// Maps kafkajs log levels onto the fastify logger's methods.
// NOTE(review): logLevel.NOTHING mapped to 'trace' looks surprising — confirm.
const levelMap = {
  [logLevel.NOTHING]: 'trace',
  [logLevel.ERROR]: 'error',
  [logLevel.WARN]: 'warn',
  [logLevel.INFO]: 'info',
  [logLevel.DEBUG]: 'debug',
} as const;

/**
 * Splits a report into `numOfChunks` smaller reports. The operation map is
 * partitioned into near-equal chunks (see calculateChunkSize) and each
 * operation follows its map entry into the chunk that holds its key.
 */
export function splitReport(report: RawReport, numOfChunks: number) {
  const reports: RawReport[] = [];
  const operationMapLength = Object.keys(report.map).length;

  // Remembers which chunk each operation-map key landed in.
  const keyReportIndexMap: {
    [operationMapKey: string]: number;
  } = {};
  const operationMapEntries = Object.entries(report.map);
  let endedAt = 0;
  for (let chunkIndex = 0; chunkIndex < numOfChunks; chunkIndex++) {
    const chunkSize = calculateChunkSize(
      operationMapLength,
      numOfChunks,
      chunkIndex
    );
    const start = endedAt;
    const end = start + chunkSize;
    endedAt = end;
    const chunk = operationMapEntries.slice(start, end);

    const operationMap: RawOperationMap = {};
    for (const [key, record] of chunk) {
      keyReportIndexMap[key] = chunkIndex;
      operationMap[key] = record;
    }

    reports.push({
      id: `${report.id}--chunk-${chunkIndex}`,
      size: 0,
      target: report.target,
      map: operationMap,
      operations: [],
    });
  }

  // Route each operation to the chunk that owns its map key.
  for (const op of report.operations) {
    const chunkIndex = keyReportIndexMap[op.operationMapKey];
    reports[chunkIndex].operations.push(op);
    reports[chunkIndex].size += 1;
  }

  return reports;
}

/**
 * Creates the usage collector: validates incoming reports, buffers them and
 * flushes compressed batches to Kafka (sized for Azure EventHub limits).
 */
export function createUsage(config: {
  logger: FastifyLoggerInstance;
  kafka: {
    topic: string;
    buffer: {
      /**
       * The maximum number of operations to buffer before flushing to Kafka.
       */
      size: number;
      /**
       * In milliseconds
       */
      interval: number;
      /**
       * Use smart estimator to estimate the buffer limit
       */
      dynamic: boolean;
    };
    connection:
      | {
          mode: 'hosted';
          key: string;
          user: string;
          broker: string;
        }
      | {
          mode: 'docker';
          broker: string;
        };
  };
}) {
  const { logger } = config;

  const kafka = new Kafka({
    clientId: 'usage',
    // Hosted mode (e.g. EventHub) authenticates with SASL/PLAIN over SSL.
    ...(config.kafka.connection.mode === 'hosted'
      ? {
          ssl: true,
          sasl: {
            mechanism: 'plain',
            username: config.kafka.connection.user,
            password: config.kafka.connection.key,
          },
          brokers: [config.kafka.connection.broker],
        }
      : {
          brokers: [config.kafka.connection.broker],
        }),
    logLevel: logLevel.INFO,
    // Bridge kafkajs logs into the fastify/pino logger.
    logCreator() {
      return (entry) => {
        logger[levelMap[entry.level]]({
          ...entry.log,
          message: undefined,
          timestamp: undefined,
          msg: `[${entry.namespace}] ${entry.log.message}`,
          time: new Date(entry.log.timestamp).getTime(),
        });
      };
    },
    // settings recommended by Azure EventHub https://docs.microsoft.com/en-us/azure/event-hubs/apache-kafka-configurations
    requestTimeout: 60_000, //
  });
  const producer = kafka.producer({
    idempotent: true,
    // settings recommended by Azure EventHub https://docs.microsoft.com/en-us/azure/event-hubs/apache-kafka-configurations
    metadataMaxAge: 180_000,
    createPartitioner: Partitioners.LegacyPartitioner,
  });
  const buffer = createKVBuffer<RawReport>({
    logger,
    size: config.kafka.buffer.size,
    interval: config.kafka.buffer.interval,
    limitInBytes: 990_000, // 1MB is the limit of a single request to EventHub, let's keep it below that
    useEstimator: config.kafka.buffer.dynamic,
    // A report's "size" unit is the number of distinct operations in its map.
    calculateReportSize(report) {
      return Object.keys(report.map).length;
    },
    split(report, numOfChunks) {
      logger.info('Split report (chunks=%s)', numOfChunks);
      return splitReport(report, numOfChunks);
    },
    onRetry(reports) {
      // Because we do a retry, we need to decrease the number of failures
      const numOfOperations = reports.reduce(
        (sum, report) => report.size + sum,
        0
      );
      rawOperationFailures.dec(numOfOperations);
    },
    // Compresses the batch, validates its real size against the limit and
    // sends it to Kafka; metrics track latency, failures and estimator error.
    async sender(reports, estimatedSizeInBytes, batchId, validateSize) {
      const numOfOperations = reports.reduce(
        (sum, report) => report.size + sum,
        0
      );
      try {
        const compressLatencyStop = compressLatency.startTimer();
        const value = await compress(JSON.stringify(reports)).finally(() => {
          compressLatencyStop();
        });
        const stopTimer = kafkaLatency.startTimer();

        estimationError.observe(
          Math.abs(estimatedSizeInBytes - value.byteLength) / value.byteLength
        );

        // Throws BufferTooBigError when over the limit (triggers retry).
        validateSize(value.byteLength);
        bufferFlushes.inc();
        const meta = await producer
          .send({
            topic: config.kafka.topic,
            compression: CompressionTypes.None, // Event Hubs doesn't support compression
            messages: [
              {
                value,
              },
            ],
          })
          .finally(() => {
            stopTimer();
          });
        if (meta[0].errorCode) {
          rawOperationFailures.inc(numOfOperations);
          logger.error(
            `Failed to flush (id=%s, errorCode=%s)`,
            batchId,
            meta[0].errorCode
          );
        } else {
          rawOperationWrites.inc(numOfOperations);
          logger.info(
            `Flushed (id=%s, operations=%s)`,
            batchId,
            numOfOperations
          );
        }
      } catch (error: any) {
        rawOperationFailures.inc(numOfOperations);

        if (isBufferTooBigError(error)) {
          logger.debug(
            'Buffer too big, retrying (id=%s, error=%s)',
            batchId,
            error.message
          );
        } else {
          logger.error(
            `Failed to flush (id=%s, error=%s)`,
            batchId,
            error.message
          );
        }

        throw error;
      }
    },
  });

  let status: Status = Status.Waiting;

  return {
    /**
     * Validates an incoming (new- or legacy-format) report and adds the
     * valid subset to the Kafka buffer. Invalid operations are counted and
     * dropped, not rejected.
     */
    async collect(
      incomingReport: IncomingReport | IncomingLegacyReport,
      token: TokensResponse,
      targetRetentionInDays: number | null
    ) {
      if (status !== Status.Ready) {
        throw new Error('Usage is not ready yet');
      }

      const now = Date.now();

      // ensureReportFormat converts legacy reports; defined later in this file.
      const incoming = ensureReportFormat(incomingReport);

      const size = incoming.operations.length;
      totalReports.inc();
      totalOperations.inc(size);
      rawOperationsSize.observe(size);

      let invalidOperationSize = 0;

      const outgoing: RawReport = {
        id: randomUUID(),
        target: token.target,
        size: 0,
        map: {},
        operations: [],
      };

      // Keep only valid operation-map records.
      for (const key in incoming.map) {
        const record = incoming.map[key];
        const validationResult = validateOperationMapRecord(record);

        if (validationResult.valid) {
          outgoing.map[key] = {
            key,
            operation: record.operation,
            operationName: record.operationName,
            fields: record.fields,
          };
        }
      }

      for (const operation of incoming.operations) {
        // Validated against the already-filtered map, so operations pointing
        // at invalid/missing map records are dropped too.
        const validationResult = validateOperation(operation, outgoing.map);

        if (validationResult.valid) {
          // Increase size
          outgoing.size += 1;

          // Add operation
          const ts = operation.timestamp ?? now;
          outgoing.operations.push({
            operationMapKey: operation.operationMapKey,
            timestamp: ts,
            // Expiry derived from the target's retention policy, if any.
            expiresAt: targetRetentionInDays
              ? ts + targetRetentionInDays * DAY_IN_MS
              : undefined,
            execution: {
              ok: operation.execution.ok,
              duration: operation.execution.duration,
              errorsTotal: operation.execution.errorsTotal,
            },
            metadata: {
              client: {
                name: operation.metadata?.client?.name,
                version: operation.metadata?.client?.version,
              },
            },
          });
        } else {
          logger.warn(
            `Detected invalid operation: %o`,
            validationResult.errors
          );
          invalidOperationSize += 1;
        }
      }

      invalidRawOperations.inc(invalidOperationSize);
      buffer.add(outgoing);
    },
    readiness() {
      return status === Status.Ready;
    },
    async start() {
      logger.info('Starting Kafka producer');
      await producer.connect();
      buffer.start();
      status = Status.Ready;
      logger.info('Kafka producer is ready');
    },
    // Stops accepting reports, flushes the buffer, then disconnects Kafka.
    async stop() {
      logger.info('Started Usage shutdown...');

      status = Status.Stopped;
      await buffer.stop();
      logger.info(`Buffering stopped`);
      await producer.disconnect();
      logger.info(`Producer disconnected`);

      logger.info('Usage stopped');
    },
+ }; +} + +function isLegacyReport( + report: IncomingReport | IncomingLegacyReport +): report is IncomingLegacyReport { + return Array.isArray(report); +} + +function ensureReportFormat( + report: IncomingLegacyReport | IncomingReport +): IncomingReport { + if (isLegacyReport(report)) { + totalLegacyReports.inc(); + return convertLegacyReport(report); + } + + return report; +} + +function convertLegacyReport(legacy: IncomingLegacyReport): IncomingReport { + const hashMap = new Map(); + const report: IncomingReport = { + map: {}, + operations: [], + }; + + for (const op of legacy) { + let operationMapKey = hashMap.get(op.operation); + + if (!operationMapKey) { + operationMapKey = createHash('sha256') + .update(op.operation) + .update(JSON.stringify(op.fields)) + .digest('hex'); + report.map[operationMapKey] = { + operation: op.operation, + operationName: op.operationName, + fields: op.fields, + }; + } + + report.operations.push({ + operationMapKey, + timestamp: op.timestamp, + execution: { + ok: op.execution.ok, + duration: op.execution.duration, + errorsTotal: op.execution.errorsTotal, + }, + metadata: { + client: { + name: op.metadata?.client?.name, + version: op.metadata?.client?.version, + }, + }, + }); + } + + return report; +} diff --git a/packages/services/usage/src/validation.ts b/packages/services/usage/src/validation.ts new file mode 100644 index 000000000..238dba992 --- /dev/null +++ b/packages/services/usage/src/validation.ts @@ -0,0 +1,97 @@ +import type { + IncomingOperation, + OperationMap, + OperationMapRecord, +} from './types'; +import Ajv from 'ajv'; +import type { JSONSchemaType } from 'ajv'; + +const ajv = new Ajv(); + +const operationMapRecordSchema: JSONSchemaType = { + type: 'object', + required: ['operation', 'fields'], + properties: { + operation: { type: 'string' }, + operationName: { type: 'string', nullable: true }, + fields: { type: 'array', minItems: 1, items: { type: 'string' } }, + }, +}; + +const operationSchema: JSONSchemaType = { 
+ type: 'object', + required: ['operationMapKey', 'execution'], + properties: { + timestamp: { type: 'number', nullable: true }, + operationMapKey: { type: 'string' }, + execution: { + type: 'object', + required: ['ok', 'duration', 'errorsTotal'], + properties: { + ok: { type: 'boolean' }, + duration: { type: 'number' }, + errorsTotal: { type: 'number' }, + }, + }, + metadata: { + type: 'object', + nullable: true, + required: [], + properties: { + client: { + type: 'object', + nullable: true, + required: [], + properties: { + name: { type: 'string', nullable: true }, + version: { type: 'string', nullable: true }, + }, + }, + }, + }, + }, +}; + +export function validateOperationMapRecord(record: OperationMapRecord) { + const validate = ajv.compile(operationMapRecordSchema); + + if (validate(record)) { + return { + valid: true, + }; + } else { + return { + valid: false, + errors: validate.errors, + }; + } +} + +export function validateOperation( + operation: IncomingOperation, + operationMap: OperationMap +) { + const validate = ajv.compile(operationSchema); + + if (!operationMap[operation.operationMapKey]) { + return { + valid: false, + errors: [ + { + message: `Operation map key "${operation.operationMapKey}" is not found`, + }, + ], + }; + } + + if (validate(operation)) { + return { + valid: true, + }; + } else { + return { + valid: false, + errors: validate.errors, + }; + } +} diff --git a/packages/services/usage/tsconfig.json b/packages/services/usage/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/usage/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." 
+ }, + "files": ["src/index.ts"] +} diff --git a/packages/services/webhooks/.env.template b/packages/services/webhooks/.env.template new file mode 100644 index 000000000..0d4defd38 --- /dev/null +++ b/packages/services/webhooks/.env.template @@ -0,0 +1,3 @@ +REDIS_HOST="localhost" +REDIS_PORT="6379" +REDIS_PASSWORD="" \ No newline at end of file diff --git a/packages/services/webhooks/.gitignore b/packages/services/webhooks/.gitignore new file mode 100644 index 000000000..4c9d7c35a --- /dev/null +++ b/packages/services/webhooks/.gitignore @@ -0,0 +1,4 @@ +*.log +.DS_Store +node_modules +dist diff --git a/packages/services/webhooks/CHANGELOG.md b/packages/services/webhooks/CHANGELOG.md new file mode 100644 index 000000000..ca84d7b56 --- /dev/null +++ b/packages/services/webhooks/CHANGELOG.md @@ -0,0 +1,33 @@ +# @hive/webhooks + +## 0.1.4 + +### Patch Changes + +- 1623aca5: Upgrade sentry + +## 0.1.3 + +### Patch Changes + +- Updated dependencies [3a435baa] + - @hive/service-common@0.1.3 + +## 0.1.2 + +### Patch Changes + +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 +- 8a38ced6: Pass error as second or first + +## 0.1.1 + +### Patch Changes + +- 4e27e93e: Scrape schema and webhooks services + +## 0.1.0 + +### Minor Changes + +- b12a7254: Introduce Webhooks service diff --git a/packages/services/webhooks/LICENSE b/packages/services/webhooks/LICENSE new file mode 100644 index 000000000..3fef50b02 --- /dev/null +++ b/packages/services/webhooks/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Guild + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + 
+The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/services/webhooks/package.json b/packages/services/webhooks/package.json new file mode 100644 index 000000000..bfd2c6681 --- /dev/null +++ b/packages/services/webhooks/package.json @@ -0,0 +1,35 @@ +{ + "name": "@hive/webhooks", + "type": "module", + "private": true, + "version": "0.1.4", + "license": "MIT", + "scripts": { + "dev": "tsup-node src/dev.ts --format esm --target node16 --watch --onSuccess 'node dist/dev.js' | pino-pretty --translateTime HH:MM:ss TT --ignore pid,hostname", + "build": "bob runify --single", + "postbuild": "copyfiles -f \"../../../node_modules/bullmq/dist/esm/commands/*.lua\" dist && copyfiles -f \"../../../node_modules/bullmq/dist/esm/commands/includes/*.lua\" dist/includes", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "bullmq": "1.81.4", + "dotenv": "10.0.0", + "got": "12.0.4", + "ioredis": "4.28.3", + "p-timeout": "5.0.2" + }, + "devDependencies": { + "ioredis": "4.28.3", + "copyfiles": "2.4.1", + "pino-pretty": "6.0.0" + }, + "buildOptions": { + "runify": true, + "tsup": true, + "tags": [ + "backend" + ], + "banner": "../../../scripts/banner.js" + } +} diff --git a/packages/services/webhooks/src/dev.ts b/packages/services/webhooks/src/dev.ts new file mode 100644 index 000000000..213339db1 --- /dev/null +++ 
b/packages/services/webhooks/src/dev.ts @@ -0,0 +1,7 @@ +import { config } from 'dotenv'; + +config({ + debug: true, +}); + +await import('./index'); diff --git a/packages/services/webhooks/src/index.ts b/packages/services/webhooks/src/index.ts new file mode 100644 index 000000000..b139c4428 --- /dev/null +++ b/packages/services/webhooks/src/index.ts @@ -0,0 +1,107 @@ +#!/usr/bin/env node +import { + createServer, + createErrorHandler, + ensureEnv, + startMetrics, + registerShutdown, +} from '@hive/service-common'; +import * as Sentry from '@sentry/node'; +import type { WebhookInput } from './types'; +import { createScheduler } from './scheduler'; + +async function main() { + Sentry.init({ + serverName: 'schema', + enabled: process.env.ENVIRONMENT === 'prod', + environment: process.env.ENVIRONMENT, + dsn: process.env.SENTRY_DSN, + release: process.env.RELEASE || 'local', + }); + + const server = createServer({ + name: 'webhooks', + tracing: false, + }); + + const errorHandler = createErrorHandler(server); + + try { + const port = process.env.PORT || 6250; + + const { schedule, readiness, start, stop } = createScheduler({ + logger: server.log, + redis: { + host: ensureEnv('REDIS_HOST'), + port: ensureEnv('REDIS_PORT', 'number'), + password: ensureEnv('REDIS_PASSWORD'), + }, + webhookQueueName: 'webhook', + maxAttempts: 10, + backoffDelay: 2000, + }); + + registerShutdown({ + logger: server.log, + async onShutdown() { + await stop(); + }, + }); + + server.route<{ + Body: WebhookInput; + }>({ + method: 'POST', + url: '/schedule', + async handler(req, res) { + try { + const job = await schedule(req.body); + res.status(200).send({ + job: job.id ?? 
'unknown', + }); + } catch (error) { + errorHandler('Failed to schedule a webhook', error as Error, req.log); + res.status(500).send(error); + } + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_health', + handler(req, res) { + res.status(200).send(); + }, + }); + + server.route({ + method: ['GET', 'HEAD'], + url: '/_readiness', + handler(_, res) { + res.status(readiness() ? 200 : 400).send(); + }, + }); + + await server.listen(port, '0.0.0.0'); + + if (process.env.METRICS_ENABLED === 'true') { + await startMetrics(); + } + + await start(); + } catch (error) { + server.log.fatal(error); + Sentry.captureException(error, { + level: Sentry.Severity.Fatal, + }); + process.exit(1); + } +} + +main().catch((err) => { + Sentry.captureException(err, { + level: Sentry.Severity.Fatal, + }); + console.error(err); + process.exit(1); +}); diff --git a/packages/services/webhooks/src/jobs.ts b/packages/services/webhooks/src/jobs.ts new file mode 100644 index 000000000..0931bc4b0 --- /dev/null +++ b/packages/services/webhooks/src/jobs.ts @@ -0,0 +1,56 @@ +import { createHash } from 'crypto'; +import { got } from 'got'; +import type { Queue, Job } from 'bullmq'; +import type { Config, WebhookInput } from './types'; + +export async function scheduleWebhook({ + queue, + webhook, + config, +}: { + webhook: WebhookInput; + config: Config; + queue: Queue; +}) { + const checksum = createHash('sha256') + .update(JSON.stringify(webhook)) + .digest('hex'); + const jobName = `${webhook.event.target.id}-${checksum}`; + config.logger.debug(`Schedule ${jobName}`); + + return queue + .add(jobName, webhook, { + attempts: config.maxAttempts, + backoff: { type: 'exponential', delay: config.backoffDelay }, + }) + .then((result) => { + config.logger.debug(`Scheduled ${jobName}`); + return Promise.resolve(result); + }); +} + +export function createWebhookJob({ config }: { config: Config }) { + return async function sendWebhook(job: Job) { + if (job.attemptsMade < config.maxAttempts) { 
+ config.logger.debug( + 'Calling webhook (job=%s, attempt=%d of %d)', + job.name, + job.attemptsMade + 1, + config.maxAttempts + ); + await got.post(job.data.endpoint, { + headers: { + Accept: 'application/json', + 'Accept-Encoding': 'gzip, deflate, br', + 'Content-Type': 'application/json', + }, + timeout: { + request: 10_000, + }, + json: job.data.event, + }); + } else { + config.logger.warn('Giving up on webhook (job=%s)', job.name); + } + }; +} diff --git a/packages/services/webhooks/src/scheduler.ts b/packages/services/webhooks/src/scheduler.ts new file mode 100644 index 000000000..121ba2cb4 --- /dev/null +++ b/packages/services/webhooks/src/scheduler.ts @@ -0,0 +1,191 @@ +import * as Sentry from '@sentry/node'; +import { Queue, QueueScheduler, Worker, Job } from 'bullmq'; +import Redis, { Redis as RedisInstance } from 'ioredis'; +import pTimeout from 'p-timeout'; +import type { WebhookInput, Config } from './types'; +import { scheduleWebhook, createWebhookJob } from './jobs'; + +export function createScheduler(config: Config) { + let redisConnection: RedisInstance | null; + let webhookQueue: Queue | null; + let webhookQueueScheduler: QueueScheduler | null; + let stopped = false; + const logger = config.logger; + + async function clearBull() { + logger.info('Clearing BullMQ...'); + + try { + webhookQueue?.removeAllListeners(); + webhookQueueScheduler?.removeAllListeners(), + await pTimeout( + Promise.all([webhookQueue?.close(), webhookQueueScheduler?.close()]), + 5000, + 'BullMQ close timeout' + ); + } catch (e) { + logger.error('Failed to stop queues', e); + } finally { + webhookQueue = null; + webhookQueueScheduler = null; + logger.info('BullMQ stopped'); + } + } + + async function initQueueAndWorkers() { + if (!redisConnection) { + return; + } + + const prefix = 'hive-webhooks'; + + webhookQueueScheduler = new QueueScheduler(config.webhookQueueName, { + prefix, + connection: redisConnection, + sharedConnection: true, + }); + + webhookQueue = new 
Queue(config.webhookQueueName, { + prefix, + connection: redisConnection, + sharedConnection: true, + }); + + // Wait for Queues and Scheduler to be ready + await Promise.all([ + webhookQueueScheduler.waitUntilReady(), + webhookQueue.waitUntilReady(), + ]); + + const webhookJob = createWebhookJob({ config }); + + const webhookWorker = new Worker( + config.webhookQueueName, + webhookJob, + { + prefix, + connection: redisConnection, + sharedConnection: true, + } + ); + + webhookWorker.on('error', onError('webhookWorker')); + webhookWorker.on('failed', onFailed); + + // Wait for Workers + await webhookWorker.waitUntilReady(); + } + + async function start() { + redisConnection = new Redis({ + host: config.redis.host, + port: config.redis.port, + password: config.redis.password, + retryStrategy(times) { + return Math.min(times * 500, 2000); + }, + reconnectOnError(error) { + onError('redis:reconnectOnError')(error); + return 1; + }, + db: 0, + maxRetriesPerRequest: null, + enableReadyCheck: false, + }); + + redisConnection.on('error', (err) => { + onError('redis:error')(err); + }); + + redisConnection.on('connect', () => { + logger.info('Redis connection established'); + }); + + redisConnection.on('ready', async () => { + logger.info('Redis connection ready... 
creating queues and workers...'); + await initQueueAndWorkers(); + }); + + redisConnection.on('close', () => { + logger.info('Redis connection closed'); + }); + + redisConnection.on('reconnecting', (timeToReconnect) => { + logger.info('Redis reconnecting in %s', timeToReconnect); + }); + + redisConnection.on('end', async () => { + logger.info('Redis ended - no more reconnections will be made'); + await stop(); + }); + } + + function onError(source: string) { + return (error: Error) => { + logger.error(`onError called from source ${source}`, error); + Sentry.captureException(error, { + extra: { + error, + source, + }, + level: Sentry.Severity.Critical, + }); + }; + } + + function onFailed(job: Job, error: Error) { + logger.debug( + `Job %s failed after %s attempts, reason: %s`, + job.name, + job.attemptsMade, + job.failedReason + ); + logger.error(error); + } + + async function stop() { + logger.info('Started Usage shutdown...'); + + stopped = true; + + await clearBull(); + + if (redisConnection) { + logger.info('Stopping Redis...'); + + try { + redisConnection.disconnect(false); + } catch (e) { + logger.error('Failed to stop Redis connection', e); + } finally { + redisConnection = null; + webhookQueue = null; + logger.info('Redis stopped'); + } + } + + logger.info('Existing'); + process.exit(0); + } + + async function schedule(webhook: WebhookInput) { + return scheduleWebhook({ queue: webhookQueue!, webhook, config }); + } + + return { + schedule, + start, + stop, + readiness() { + if (stopped) { + return false; + } + + return ( + webhookQueue !== null && + redisConnection !== null && + redisConnection?.status === 'ready' + ); + }, + }; +} diff --git a/packages/services/webhooks/src/types.ts b/packages/services/webhooks/src/types.ts new file mode 100644 index 000000000..cfded4e0c --- /dev/null +++ b/packages/services/webhooks/src/types.ts @@ -0,0 +1,41 @@ +import type { FastifyLoggerInstance } from '@hive/service-common'; + +export interface Config { + logger: 
FastifyLoggerInstance; + redis: { + host: string; + port: number; + password: string; + }; + webhookQueueName: string; + maxAttempts: number; + backoffDelay: number; +} + +export interface WebhookInput { + endpoint: string; + event: { + organization: { + id: string; + cleanId: string; + name: string; + }; + project: { + id: string; + cleanId: string; + name: string; + }; + target: { + id: string; + cleanId: string; + name: string; + }; + schema: { + id: string; + valid: boolean; + commit: string; + }; + changes: any[]; + errors: any[]; + }; +} diff --git a/packages/services/webhooks/tsconfig.json b/packages/services/webhooks/tsconfig.json new file mode 100644 index 000000000..1bee108c6 --- /dev/null +++ b/packages/services/webhooks/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "esnext", + "skipLibCheck": true, + "rootDir": "../.." + }, + "files": ["src/index.ts"] +} diff --git a/packages/web/app/.babelrc.js b/packages/web/app/.babelrc.js new file mode 100644 index 000000000..f5a02744e --- /dev/null +++ b/packages/web/app/.babelrc.js @@ -0,0 +1,20 @@ +const { babelPlugin } = require('@graphql-codegen/gql-tag-operations-preset'); + +module.exports = { + presets: [ + [ + 'next/babel', + { + 'preset-react': { + runtime: 'automatic', + importSource: '@emotion/react', + }, + }, + ], + ], + plugins: [ + '@emotion/babel-plugin', + 'babel-plugin-macros', + [babelPlugin, { artifactDirectory: './src/gql' }], + ], +}; diff --git a/packages/web/app/.env.template b/packages/web/app/.env.template new file mode 100644 index 000000000..79bc967c7 --- /dev/null +++ b/packages/web/app/.env.template @@ -0,0 +1,25 @@ +GRAPHQL_ENDPOINT="http://localhost:4000/graphql" +APP_BASE_URL="http://localhost:3000" + +# Auth0 + +AUTH0_CALLBACK="/api/callback" +AUTH0_SECRET="" +AUTH0_DOMAIN="" +AUTH0_CLIENT_ID="" +AUTH0_CLIENT_SECRET="" +AUTH0_SCOPE="openid profile offline_access" +AUTH0_AUDIENCE="" 
+AUTH0_BASE_URL="http://localhost:3000" +AUTH0_ISSUER_BASE_URL="" + +# Slack + +SLACK_CLIENT_ID="" +SLACK_CLIENT_SECRET="" + +# GitHub +GITHUB_APP_NAME="" + +# Stripe +NEXT_PUBLIC_STRIPE_PUBLIC_KEY="" \ No newline at end of file diff --git a/packages/web/app/.gitignore b/packages/web/app/.gitignore new file mode 100644 index 000000000..a6c3b6b10 --- /dev/null +++ b/packages/web/app/.gitignore @@ -0,0 +1,36 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env.development.local +.env.test.local +.env.production.local + +# vercel +.vercel + +# Graphql +lib/graphql-operations.ts \ No newline at end of file diff --git a/packages/web/app/CHANGELOG.md b/packages/web/app/CHANGELOG.md new file mode 100644 index 000000000..04789590e --- /dev/null +++ b/packages/web/app/CHANGELOG.md @@ -0,0 +1,920 @@ +# @hive/app + +## 0.16.2 + +### Patch Changes + +- fef049c0: Capture exceptions in Urql + +## 0.16.1 + +### Patch Changes + +- 1623aca5: Upgrade sentry + +## 0.16.0 + +### Minor Changes + +- ffb6feb6: Allow to check usage from multiple targets + +## 0.15.6 + +### Patch Changes + +- ba69e567: Use NextJS API to redirect to an organization page + +## 0.15.5 + +### Patch Changes + +- 3a435baa: Show one value of x-request-id + +## 0.15.4 + +### Patch Changes + +- 3936341a: Show data from last 24h in admin view + +## 0.15.3 + +### Patch Changes + +- 689610ac: fix(deps): update sentry-javascript monorepo to v6.16.1 + +## 0.15.2 + +### Patch Changes + +- d003d7b8: Fix decimals + +## 0.15.1 + +### Patch Changes + +- 4d511e3f: Fix + +## 0.15.0 + +### Minor Changes + +- 74f8187b: Add failure rate column to operations list + +## 0.14.3 + +### Patch Changes + +- 33fbb5e: Bump + +## 0.14.2 + +### Patch Changes + +- 
5798314: Single x-request-id header + +## 0.14.1 + +### Patch Changes + +- bca0950: Improve UI and UX of base schema editor + +## 0.14.0 + +### Minor Changes + +- c6a842c: Introduce base schema + +## 0.13.5 + +### Patch Changes + +- 02b00f0: Update undici, sentry, bullmq + +## 0.13.4 + +### Patch Changes + +- 0df6a60: Enable global handlers + +## 0.13.3 + +### Patch Changes + +- c31e08b: Enable Sentry for nextjs client-side + +## 0.13.2 + +### Patch Changes + +- aebf898: Fixes + +## 0.13.1 + +### Patch Changes + +- 7549a38: Fix startup + +## 0.13.0 + +### Minor Changes + +- 7eca7f0: Introduce access scopes + +## 0.12.4 + +### Patch Changes + +- 7cc3bc6: Allow access to schema for single-schema projects + +## 0.12.3 + +### Patch Changes + +- 58b0c95: Set padding-bottom in Settings + +## 0.12.2 + +### Patch Changes + +- 1a98713: Smaller color mode button + +## 0.12.1 + +### Patch Changes + +- 57f56c3: Remove redirect_uri + +## 0.12.0 + +### Minor Changes + +- 14494fd: Notify user when slack is added + +## 0.11.4 + +### Patch Changes + +- 45541e7: add gql-tag-operations preset + +## 0.11.3 + +### Patch Changes + +- 19d4cd5: Bump + +## 0.11.2 + +### Patch Changes + +- cc9aa01: Update dependencies + +## 0.11.1 + +### Patch Changes + +- 14446ed: Adjust dark theme + +## 0.11.0 + +### Minor Changes + +- 301fdf2: Dark mode is here + +## 0.10.30 + +### Patch Changes + +- 273f096: fix GraphiQL editor styles + +## 0.10.29 + +### Patch Changes + +- 91a6957: Allow Integrations in Personal org" + +## 0.10.28 + +### Patch Changes + +- 9752ccb: Add Connect to Stitching projects + +## 0.10.27 + +### Patch Changes + +- d567beb: Update pages/api/proxy.ts and remove no longer needed console.log + +## 0.10.26 + +### Patch Changes + +- 2d561b7: No Sentry cli release + reuse transaction if available + +## 0.10.25 + +### Patch Changes + +- 28d8aec: Fix sentry + +## 0.10.24 + +### Patch Changes + +- 3d828f4: Use latest Sentry and Sentry NextJS integration +- efd8648: Better error ui with 
basic information + +## 0.10.23 + +### Patch Changes + +- 1da24b0: Push-only to crisp + +## 0.10.22 + +### Patch Changes + +- b15020b: Fix Crisp exceptions + +## 0.10.21 + +### Patch Changes + +- 6ac2116: Identify user on Crisp + +## 0.10.20 + +### Patch Changes + +- ce155d4: Adjust is-beta-accepted to changes in Crisp API +- 91573fb: Better error handling for checking beta approved + +## 0.10.19 + +### Patch Changes + +- 740bfd1: Better ui and error handling for Lab + +## 0.10.18 + +### Patch Changes + +- f99c737: Bump version to release recent changes (spinner) + +## 0.10.17 + +### Patch Changes + +- 7cceb48: Replace bee spinner + +## 0.10.16 + +### Patch Changes + +- 62b1d90: Add service filter to schema view + +## 0.10.15 + +### Patch Changes + +- c1971c2: Added missing robots.txt file + +## 0.10.14 + +### Patch Changes + +- b264205: Show organization clean id in Admin Stats + +## 0.10.13 + +### Patch Changes + +- df6c501: Make Query.lab nullable + +## 0.10.12 + +### Patch Changes + +- 3e2fdd8: Limit version name to 32 characters + +## 0.10.11 + +### Patch Changes + +- aff0857: Pass x-request-id to responses + +## 0.10.10 + +### Patch Changes + +- 9494f7f: Fit labels and move clients to a separate row + +## 0.10.9 + +### Patch Changes + +- 7c5c710: Show stats for client versions + +## 0.10.8 + +### Patch Changes + +- 5eee602: Add with-schema-pushes filter + +## 0.10.7 + +### Patch Changes + +- 249e484: Add "schedule a meeting" link to user menu + +## 0.10.6 + +### Patch Changes + +- 056f51d: Fix time series and fill missing gaps + +## 0.10.5 + +### Patch Changes + +- 5f99c67: Batch getOrganizationOwner calls (homemade dataloader) + +## 0.10.4 + +### Patch Changes + +- e5240d8: Skip if the email is not found + +## 0.10.3 + +### Patch Changes + +- 5fdfc22: Expose /is-beta-accepted endpoint + +## 0.10.2 + +### Patch Changes + +- 88fe4b6: Show more data in admin stats + +## 0.10.1 + +### Patch Changes + +- 4ee9a3b: Fix operations count + +## 0.10.0 + +### Minor 
Changes + +- efd7b74: Admin panel + +## 0.9.1 + +### Patch Changes + +- 28ff6df: Change opacity when loading data in operations view + +## 0.9.0 + +### Minor Changes + +- 889368b: Bump + +## 0.8.0 + +### Minor Changes + +- 11e6800: Allow multiple auth providers and add displayName and fullName to profiles + +## 0.7.32 + +### Patch Changes + +- ea7b7f9: Create empty series + +## 0.7.31 + +### Patch Changes + +- 4647d25: Show last 24h by default +- 4647d25: Dynamically calculate windows for operations data based on resolution +- 0527e3c: Update Sentry +- 0527e3c: Add serverName tag to Sentry.init + +## 0.7.30 + +### Patch Changes + +- deee331: Fix inputs in Conditional Breaking Changes +- 4a90860: Use react-virtualized to improve perf of operations filter +- 4a90860: Add pagination to the operations table + +## 0.7.29 + +### Patch Changes + +- 168a1f2: Improve History view + +## 0.7.28 + +### Patch Changes + +- bde9548: Create a separate view for schema version + +## 0.7.27 + +### Patch Changes + +- 5e8b34f: Update dependencies +- 5e8b34f: Activities are missing when switching pages + +## 0.7.26 + +### Patch Changes + +- c22c0c0: Add link to Using the Registry with a Apollo Gateway chapter + +## 0.7.25 + +### Patch Changes + +- 23636de: Add link to documentation in top bar navigation +- 23636de: Use VSCode icons +- 23636de: Remove Identifier from the CDN +- 23636de: Improve the design of the lab view +- 23636de: Store supergraph in CDN +- 23636de: Minor UI improvements +- 23636de: Add CDN access generation (to supergraph) to the Schema view page + +## 0.7.24 + +### Patch Changes + +- 3d4852c: Update urql and codegen + +## 0.7.23 + +### Patch Changes + +- 2dbfaf3: Redesign token creation +- 202ac80: Improve feedback modal +- f6c868f: Fix operations filter droping state +- f6c868f: Fix checkboxes in operations filter + +## 0.7.22 + +### Patch Changes + +- 9295075: Bump + +## 0.7.21 + +### Patch Changes + +- 1ac74a4: Fix an issue with stale data + +## 0.7.20 + +### 
Patch Changes + +- aa4e661: Use Fetch in Next + +## 0.7.19 + +### Patch Changes + +- fb3efda: Use defer in general operations stats query + +## 0.7.18 + +### Patch Changes + +- 66369be: Track inflight requests without forking fetchExchange + +## 0.7.17 + +### Patch Changes + +- 8d1811b: Fix target validation period and percentage being a string + +## 0.7.16 + +### Patch Changes + +- e0a47fb: Use Undici instead of Got and Agentkeepalive + +## 0.7.15 + +### Patch Changes + +- d7348a3: Merge few stats related queries into one query (improves perf in ClickHouse) +- d7348a3: Increase window times + +## 0.7.14 + +### Patch Changes + +- b010137: Update Sentry to 6.10.0 + +## 0.7.13 + +### Patch Changes + +- abd3d3e: Use p75, p90, p95 and p99 only + +## 0.7.12 + +### Patch Changes + +- 11b3eb9: Added CDN using CF + +## 0.7.11 + +### Patch Changes + +- a154c1c: Upgrade Auth0 and Chakra UI + +## 0.7.10 + +### Patch Changes + +- f87350d: Floor dates (in date range) to 1 minute + +## 0.7.9 + +### Patch Changes + +- dae2b90: Add operations filter to operations stats page + +## 0.7.8 + +### Patch Changes + +- 553c093: UI Fixes +- 553c093: Alert icon with tooltip on anonymous operations + +## 0.7.7 + +### Patch Changes + +- 30b8847: Use NextJS 11 + +## 0.7.6 + +### Patch Changes + +- 4bc83be: Node 16 + +## 0.7.5 + +### Patch Changes + +- bed8e34: Reset feedback form and handle loading + +## 0.7.4 + +### Patch Changes + +- bf76381: Feedback dialog + +## 0.7.3 + +### Patch Changes + +- 93674cf: Update Sentry to 6.7.0 +- ec5fa1d: Do not fetch unavailable fields when deleting an entity + +## 0.7.2 + +### Patch Changes + +- 3365d22: Fix double slash in slack integration endpoints + +## 0.7.1 + +### Patch Changes + +- 5aa5e93: Bump + +## 0.7.0 + +### Minor Changes + +- 87e3d2e: Alerts, yay! 
 + +## 0.6.4 + +### Patch Changes + +- 1a16360: Send GraphQL Client name and version + +## 0.6.3 + +### Patch Changes + +- b1fc400: Add Release Notes + +## 0.6.2 + +### Patch Changes + +- 203c563: Use "experiment" as the default branch instead of "development" + +## 0.6.1 + +### Patch Changes + +- 4224cb9: Add info with a link to documentation on missing data +- c6ef3d2: Bob update + +## 0.6.0 + +### Minor Changes + +- 143fa32: Added Schema Laboratory + +## 0.5.8 + +### Patch Changes + +- e65b9cc: Do not capture Access Token related errors +- e65b9cc: Identify and update profile in Mixpanel once per page load + +## 0.5.7 + +### Patch Changes + +- d76ba3f: Fix no-auth redirect on API Proxy level + +## 0.5.6 + +### Patch Changes + +- aa12cdc: Small fixes + +## 0.5.5 + +### Patch Changes + +- 148b294: Fix issues with undici headers timeout + +## 0.5.4 + +### Patch Changes + +- 25fec29: Notify on modals only when open + +## 0.5.3 + +### Patch Changes + +- c346c4b: Fix mixpanel, NEXT_PUBLIC is compiled at build time, we don't have envs at that stage + +## 0.5.2 + +### Patch Changes + +- 85b85d4: Dependencies update, cleanup, ui fixes + +## 0.5.1 + +### Patch Changes + +- 9b14d18: Bump + +## 0.5.0 + +### Minor Changes + +- 36097a6: Add mixpanel + +## 0.4.3 + +### Patch Changes + +- 167f81e: ui fixes + +## 0.4.2 + +### Patch Changes + +- dbcfa69: Split operations stats query into smaller queries (looks WAY better) + +## 0.4.1 + +### Patch Changes + +- 824a403: Duration over time stats + +## 0.4.0 + +### Minor Changes + +- acab74b: Added support for persisted operations - Changes made in API, APP, CLI, Server, Storage + +### Patch Changes + +- b84a685: UI fixes + +## 0.3.3 + +### Patch Changes + +- bd4b28e: Better UI + +## 0.3.2 + +### Patch Changes + +- 0873fba: Use logarithmic scale in latency histogram + +## 0.3.1 + +### Patch Changes + +- 6bf8518: Adjustments + +## 0.3.0 + +### Minor Changes + +- c507159: Redesign, fixes, different structure of components and RPM over 
time + +## 0.2.14 + +### Patch Changes + +- c591b5b: Distribution of latency +- ba5f690: Show requests per minute + +## 0.2.13 + +### Patch Changes + +- 3c72c34: Percentiles per operation + +## 0.2.12 + +### Patch Changes + +- ec400f8: Show failures over time +- a471c88: Support percentiles of request duration + +## 0.2.11 + +### Patch Changes + +- a32277c: Use ?period=window + +## 0.2.10 + +### Patch Changes + +- 4a1de8c: Change windows and add min/max to xAxis + +## 0.2.9 + +### Patch Changes + +- f6d2ca6: bump + +## 0.2.8 + +### Patch Changes + +- 6e68e25: More stats + +## 0.2.7 + +### Patch Changes + +- 0c08558: Fix barWidth + +## 0.2.6 + +### Patch Changes + +- 23e19fe: Add Requests Over Time plot + +## 0.2.5 + +### Patch Changes + +- ed8b326: Show simple stats + +## 0.2.4 + +### Patch Changes + +- 65dfbe9: Logout on auth error +- f7347a1: Distributed Tracing +- 8f3e43c: Track usage of tokens +- 4822bf2: Add TargetSwitcher +- dd5f0b0: Add a date range filter to operations + +## 0.2.3 + +### Patch Changes + +- b33bf11: List of collected operations + +## 0.2.2 + +### Patch Changes + +- c083cb6: Use SENTRY_DSN + +## 0.2.1 + +### Patch Changes + +- b036111: Fix sentry + +## 0.2.0 + +### Minor Changes + +- 60cd35d: Consider Usage in Inspector +- 078e758: Token per Target + +### Patch Changes + +- 7113a0e: Sentry + +## 0.1.13 + +### Patch Changes + +- 64b6c15: X-Request-ID + +## 0.1.12 + +### Patch Changes + +- ab5c204: Collect more with Sentry + +## 0.1.11 + +### Patch Changes + +- 6a344d3: Fix invitations, expose only organization.name + +## 0.1.10 + +### Patch Changes + +- d433269: Fixes + +## 0.1.9 + +### Patch Changes + +- 93fbf26: Use Sentry Tracing + +## 0.1.8 + +### Patch Changes + +- 2269c61: No extra calls to Auth0 + +## 0.1.7 + +### Patch Changes + +- ede30d2: Added healthcheck endpoint + +## 0.1.6 + +### Patch Changes + +- fc3de1d: bump app to test partial deployment + +## 0.1.5 + +### Patch Changes + +- 519ee98: fix next start + +## 0.1.4 + +### Patch 
Changes + +- 20e886c: Fix missing public + +## 0.1.3 + +### Patch Changes + +- c1e705a: bump + +## 0.1.2 + +### Patch Changes + +- 7e88e71: bump + +## 0.1.1 + +### Patch Changes + +- b2d686e: bump diff --git a/packages/web/app/modules.d.ts b/packages/web/app/modules.d.ts new file mode 100644 index 000000000..248e4aba2 --- /dev/null +++ b/packages/web/app/modules.d.ts @@ -0,0 +1 @@ +declare module 'node-crisp-api'; diff --git a/packages/web/app/next-env.d.ts b/packages/web/app/next-env.d.ts new file mode 100644 index 000000000..4f11a03dc --- /dev/null +++ b/packages/web/app/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/basic-features/typescript for more information. diff --git a/packages/web/app/next.config.js b/packages/web/app/next.config.js new file mode 100644 index 000000000..a4b7ddc31 --- /dev/null +++ b/packages/web/app/next.config.js @@ -0,0 +1,23 @@ +const { withSentryConfig } = require('@sentry/nextjs'); +const { version } = require('./package.json'); + +/** + * @type {import('@sentry/webpack-plugin').SentryCliPluginOptions} + */ +const SentryWebpackPluginOptions = { + silent: true, + release: version, +}; + +module.exports = withSentryConfig( + { + eslint: { + ignoreDuringBuilds: true, + }, + sentry: { + disableServerWebpackPlugin: true, + disableClientWebpackPlugin: true, + }, + }, + SentryWebpackPluginOptions +); diff --git a/packages/web/app/package.json b/packages/web/app/package.json new file mode 100644 index 000000000..349300391 --- /dev/null +++ b/packages/web/app/package.json @@ -0,0 +1,79 @@ +{ + "name": "@hive/app", + "private": true, + "version": "0.16.2", + "scripts": { + "dev": "next dev", + "start": "next start", + "build": "bob runify --single" + }, + "dependencies": { + "@stripe/react-stripe-js": "1.7.2", + "@stripe/stripe-js": "1.29.0", + "hyperid": "2.3.1", + "@auth0/nextjs-auth0": "1.6.1", + "@chakra-ui/react": "1.6.10", + "@emotion/css": "11.5.0", + "@emotion/react": 
"11.5.0", + "@emotion/server": "11.4.0", + "@emotion/styled": "11.3.0", + "@graphql-tools/mock": "8.4.0", + "@monaco-editor/react": "4.3.1", + "@n1ru4l/react-time-ago": "1.1.0", + "@sentry/nextjs": "6.19.7", + "@sentry/react": "6.19.7", + "@sentry/node": "6.19.7", + "@sentry/tracing": "6.19.7", + "@theguild/editor": "0.4.1", + "@urql/core": "2.3.6", + "@urql/devtools": "2.0.3", + "@urql/exchange-graphcache": "4.3.6", + "date-fns": "2.25.0", + "echarts": "5.2.2", + "echarts-for-react": "3.0.2", + "formik": "2.2.9", + "framer-motion": "4.1.17", + "graphiql": "1.4.2", + "graphql": "16.5.0", + "immer": "9.0.12", + "mixpanel-browser": "2.45.0", + "monaco-editor": "0.27.0", + "monaco-themes": "0.3.3", + "node-crisp-api": "1.12.3", + "next": "12.1.0", + "react": "17.0.2", + "react-date-range": "1.4.0", + "react-dom": "17.0.2", + "react-icons": "4.3.1", + "react-string-replace": "0.4.4", + "react-table": "7.7.0", + "react-virtualized": "9.22.3", + "regenerator-runtime": "0.13.9", + "tailwindcss": "2.2.19", + "tslib": "2.3.1", + "twin.macro": "2.8.2", + "urql": "2.0.5", + "use-debounce": "6.0.1", + "valtio": "1.2.5", + "wonka": "4.0.15", + "yup": "0.32.11" + }, + "devDependencies": { + "@types/mixpanel-browser": "2.35.7", + "@types/react": "17.0.45", + "@types/react-date-range": "1.1.8", + "@types/react-dom": "17.0.17", + "@types/react-virtualized": "9.21.21" + }, + "babelMacros": { + "twin": { + "preset": "emotion" + } + }, + "buildOptions": { + "runify": true, + "tags": [ + "frontend" + ] + } +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history.tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history.tsx new file mode 100644 index 000000000..d163eeeae --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history.tsx @@ -0,0 +1,33 @@ +import * as React from 'react'; +import 'twin.macro'; +import { useRouteSelector } from '@/lib/hooks/use-route-selector'; +import { Page } from '@/components/common'; +import { 
TargetView } from '@/components/target/View'; +import { Versions } from '@/components/target/history/Versions'; + +const HistoryView: React.FC = () => { + const router = useRouteSelector(); + + return ( + +
+
+ +
+
+
+ ); +}; + +export default function TargetHistory() { + return {() => }; +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history/[versionId].tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history/[versionId].tsx new file mode 100644 index 000000000..b7b4844f7 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/history/[versionId].tsx @@ -0,0 +1,150 @@ +import * as React from 'react'; +import tw, { styled } from 'twin.macro'; +import formatDate from 'date-fns/format'; +import { useQuery } from 'urql'; +import { Tooltip } from '@chakra-ui/react'; +import { useRouteSelector } from '@/lib/hooks/use-route-selector'; +import { CompareDocument, OrganizationFieldsFragment } from '@/graphql'; +import { Page } from '@/components/common'; +import { TextToggle } from '@/components/common/Toogle'; +import { DataWrapper } from '@/components/common/DataWrapper'; +import { TargetView } from '@/components/target/View'; +import { Compare, View } from '@/components/target/history/Compare'; +import { MarkAsValid } from '@/components/target/history/MarkAsValid'; +import { useTargetAccess, TargetAccessScope } from '@/lib/access/target'; + +const Value = tw.div`text-base text-gray-900 dark:text-white`; +const ValueLabel = tw.div`text-xs text-gray-500 dark:text-gray-400`; +const Status = styled.span(({ valid }: { valid?: boolean }) => [ + tw`mx-auto my-2 block w-2 h-2 rounded-full`, + valid ? 
tw`bg-emerald-400` : tw`bg-red-400`, +]); + +const VersionView: React.FC<{ + organization: OrganizationFieldsFragment; +}> = ({ organization }) => { + const router = useRouteSelector(); + const [view, setView] = React.useState(View.Text); + const [query] = useQuery({ + query: CompareDocument, + variables: { + organization: router.organizationId, + project: router.projectId, + target: router.targetId, + version: router.versionId, + }, + }); + const canModifyState = useTargetAccess({ + scope: TargetAccessScope.RegistryWrite, + member: organization.me, + redirect: false, + }); + + const version = query.data?.schemaVersion; + + return ( + + {() => ( + + {canModifyState && } + + + } + > +
+
+
+ + {formatDate(new Date(version.date), 'yyyy-MM-dd HH:mm')} + + Published +
+ + +
+ {version.commit.commit} + Commit +
+
+ +
+ {version.commit.author} + Author +
+
+ + {version.commit.service && ( + +
+ {version.commit.service} + Service +
+
+ )} + {version.valid && ( + +
+ + + + Status +
+
+ )} +
+
+ +
+
+
+ )} +
+ ); +}; + +export default function TargetHistory() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/index.tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/index.tsx new file mode 100644 index 000000000..04327d2c5 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/index.tsx @@ -0,0 +1,540 @@ +import React, { useCallback } from 'react'; +import tw from 'twin.macro'; +import { + useDisclosure, + Modal, + Button, + ModalOverlay, + ModalContent, + ModalHeader, + ModalBody, + Code, + ModalFooter, + ModalCloseButton, + Alert, + AlertTitle, + AlertDescription, + Spinner, + Link, + InputGroup, + Input, + InputRightElement, + IconButton, + useColorModeValue, + Tooltip, + Editable, + EditablePreview, + EditableInput, +} from '@chakra-ui/react'; +import { VscPlug, VscClose, VscSync } from 'react-icons/vsc'; +import { useQuery, useMutation } from 'urql'; +import { useDebouncedCallback } from 'use-debounce'; +import { + SchemasDocument, + SchemasQuery, + ProjectFieldsFragment, + ProjectType, + TargetFieldsFragment, + OrganizationFieldsFragment, + CreateCdnTokenDocument, + SchemaSyncCdnDocument, + UpdateSchemaServiceNameDocument, +} from '@/graphql'; +import { Description, Page } from '@/components/common'; +import { DataWrapper } from '@/components/common/DataWrapper'; +import { GraphQLSDLBlock } from '@/components/common/GraphQLSDLBlock'; +import { TargetView } from '@/components/target/View'; +import { NoSchemasYet } from '@/components/target/NoSchemasYet'; +import { CopyValue } from '@/components/common/CopyValue'; +import { useTargetAccess, TargetAccessScope } from '@/lib/access/target'; + +const Block = tw.div`mb-8`; + +const SchemaServiceName: React.FC<{ + version: string; + schema: SchemasQuery['target']['latestSchemaVersion']['schemas']['nodes'][0]; + target: SchemasQuery['target']; + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ 
target, project, organization, schema, version }) => { + const [mutation, mutate] = useMutation(UpdateSchemaServiceNameDocument); + const hasAccess = useTargetAccess({ + scope: TargetAccessScope.RegistryWrite, + member: organization.me, + redirect: false, + }); + + const submit = useCallback( + (newName: string) => { + if (schema.service === newName) { + return; + } + + if (newName.trim().length === 0) { + return; + } + + mutate({ + input: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + version, + name: schema.service!, + newName, + }, + }); + }, + [mutate] + ); + + if ( + (project.type !== ProjectType.Federation && + project.type !== ProjectType.Stitching) || + !hasAccess + ) { + return <>{schema.service}; + } + + return ( + + + + + ); +}; + +const Schemas: React.FC<{ + organization: OrganizationFieldsFragment; + project: ProjectFieldsFragment; + target: SchemasQuery['target']; + filterService?: string; +}> = ({ organization, project, target, filterService }) => { + const schemas = target.latestSchemaVersion?.schemas.nodes ?? 
[]; + + if (!schemas.length) { + return ; + } + + if (project.type === ProjectType.Single) { + return ( + + ); + } + + return ( + <> + {schemas + .filter((schema) => { + if (filterService && schema.service) { + return schema.service + .toLowerCase() + .includes(filterService.toLowerCase()); + } + + return true; + }) + .map((schema) => ( + + + } + /> + + ))} + + ); +}; + +const SchemaView: React.FC<{ + organization: OrganizationFieldsFragment; + project: ProjectFieldsFragment; + target: TargetFieldsFragment; + filterService?: string; +}> = ({ organization, project, target, filterService }) => { + const [query] = useQuery({ + query: SchemasDocument, + variables: { + selector: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + }, + requestPolicy: 'cache-and-network', + }); + + return ( + + {() => ( + + )} + + ); +}; + +const ConnectSchemaModal: React.FC<{ + target: TargetFieldsFragment; + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; + onClose(): void; + onOpen(): void; + isOpen: boolean; +}> = ({ target, project, organization, onClose, isOpen }) => { + const [generating, setGenerating] = React.useState(true); + const [mutation, mutate] = useMutation(CreateCdnTokenDocument); + + React.useEffect(() => { + if (!isOpen) { + setGenerating(true); + return; + } + + mutate({ + selector: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + }).then(() => { + setTimeout(() => { + setGenerating(false); + }, 2000); + }); + }, [isOpen, setGenerating, mutate]); + + const description = `With high-availability and multi-zone CDN service based on + Cloudflare, Hive allows you to access ${ + project.type === ProjectType.Federation + ? 'the supergraph' + : project.type === ProjectType.Stitching + ? 
'the list of services' + : 'the schema' + } of your API, + through a secured external service, that's always up regardless of + Hive.`; + + const generatingDescription = `Hive is now generating an authentication token and an URL you can use to fetch your ${ + project.type === ProjectType.Federation + ? 'supergraph schema' + : project.type === ProjectType.Stitching + ? 'services' + : 'schema' + }.`; + + return ( + + + + Connect to Hive + + + {description} +
+ {generating && ( + + + + Generating access... + + + {generatingDescription} + + + )} +
+ {!generating && mutation.data && ( + <> + + You can use the following endpoint: + + + + To authenticate, use the following HTTP headers: + + + X-Hive-CDN-Key: {mutation.data.createCdnToken.token} + + {project.type === ProjectType.Federation && ( + + Read the{' '} + + "Using the Registry with a Apollo Gateway" + {' '} + chapter in our documentation. + + )} + {project.type === ProjectType.Stitching && ( + + Read the{' '} + + "Using the Registry when Stitching" + {' '} + chapter in our documentation. + + )} + {project.type === ProjectType.Single && ( + + Read the{' '} + + "Using the Registry with any tool" + {' '} + chapter in our documentation. + + )} + + )} +
+ + + +
+
+ ); +}; + +const ConnectSchemaButton: React.FC<{ + target: TargetFieldsFragment; + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ target, project, organization }) => { + const { onClose, onOpen, isOpen } = useDisclosure(); + const color = useColorModeValue('#fff', '#000'); + + return ( + <> + + + + ); +}; + +const SyncSchemaButton: React.FC<{ + target: TargetFieldsFragment; + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ target, project, organization }) => { + const color = useColorModeValue('#fff', '#000'); + const [status, setStatus] = React.useState<'idle' | 'error' | 'success'>( + 'idle' + ); + const [mutation, mutate] = useMutation(SchemaSyncCdnDocument); + const hasAccess = useTargetAccess({ + scope: TargetAccessScope.RegistryWrite, + member: organization.me, + redirect: false, + }); + + const sync = useCallback(() => { + mutate({ + input: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + }).then((result) => { + if (result.error) { + setStatus('error'); + } else { + setStatus( + result.data?.schemaSyncCDN.__typename === 'SchemaSyncCDNError' + ? 
'error' + : 'success' + ); + } + setTimeout(() => { + setStatus('idle'); + }, 5000); + }); + }, [mutate, setStatus]); + + if (!hasAccess || !target.hasSchema) { + return null; + } + + return ( + + + + ); +}; + +function TargetSchemaInner({ + organization, + project, + target, +}: { + organization: OrganizationFieldsFragment; + project: ProjectFieldsFragment; + target: TargetFieldsFragment; +}) { + const [filterService, setFilterService] = React.useState(null); + const [term, setTerm] = React.useState(null); + const debouncedFilter = useDebouncedCallback((value: string) => { + setFilterService(value); + }, 500); + const handleChange = React.useCallback( + (event) => { + debouncedFilter(event.target.value); + setTerm(event.target.value); + }, + [debouncedFilter, setTerm] + ); + const reset = React.useCallback(() => { + setFilterService(''); + setTerm(''); + }, [setFilterService]); + + const isDistributed = + project.type === ProjectType.Federation || + project.type === ProjectType.Stitching; + + return ( + + {isDistributed && ( +
{ + event.preventDefault(); + }} + > + + + + } + /> + + +
+ )} + + + + } + > + +
+ ); +} + +export default function TargetSchema() { + return ( + + {({ organization, project, target }) => ( + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/lab.tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/lab.tsx new file mode 100644 index 000000000..a88f956ed --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/lab.tsx @@ -0,0 +1,113 @@ +/* eslint-disable import/no-extraneous-dependencies */ + +import 'graphiql/graphiql.css'; +import 'twin.macro'; + +import { Page } from '@/components/common'; +import { TargetView } from '@/components/target/View'; +import { Button, useDisclosure, useColorModeValue } from '@chakra-ui/react'; +import { VscPlug, VscSettings } from 'react-icons/vsc'; +import { ConnectLabModal } from '@/components/lab/ConnectLabScreen'; +import { CustomizeLabModal } from '@/components/lab/CustomizeLabScreen'; +import { createGraphiQLFetcher } from '@graphiql/toolkit'; +import React from 'react'; +import type { GraphiQL as GraphiQLType } from 'graphiql'; +import { Logo } from '@/components/common/Logo'; +import { NoSchemasYet } from '@/components/target/NoSchemasYet'; + +const GraphiQL: typeof GraphiQLType = process.browser + ? // eslint-disable-next-line @typescript-eslint/no-var-requires + require('graphiql').default + : null; + +export const ConnectLabTrigger: React.FC<{ endpoint: string }> = ({ + endpoint, +}) => { + const { isOpen, onClose, onOpen: open } = useDisclosure(); + const color = useColorModeValue('#fff', '#000'); + + return ( + <> + + + + ); +}; + +// TODO: unused +export const CustomizeLabTrigger = () => { + const { isOpen, onClose, onOpen: open } = useDisclosure(); + + return ( + <> + + + + ); +}; + +const SchemaLabContent: React.FC<{ endpoint: string }> = ({ endpoint }) => { + return ( + <> +
+ + + + + +
+ + ); +}; + +export default function SchemaLabPage() { + return ( + + {({ organization, project, target }) => { + const endpoint = `${window.location.origin}/api/lab/${organization.cleanId}/${project.cleanId}/${target.cleanId}`; + const noSchemas = + target.latestSchemaVersion?.schemas.nodes?.length === 0; + + return ( + + ) : null + } + > + {GraphiQL ? ( + noSchemas ? ( + + ) : ( + + ) + ) : null} + + ); + }} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/operations.tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/operations.tsx new file mode 100644 index 000000000..d41f0ae3d --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/operations.tsx @@ -0,0 +1,160 @@ +import React from 'react'; +import 'twin.macro'; +import { useQuery } from 'urql'; + +import { Select, Stack } from '@chakra-ui/react'; +import { VscChevronDown } from 'react-icons/vsc'; +import { + HasCollectedOperationsDocument, + ProjectFieldsFragment, + TargetFieldsFragment, + OrganizationFieldsFragment, +} from '@/graphql'; +import { Page } from '@/components/common'; +import { DataWrapper } from '@/components/common/DataWrapper'; +import { TargetView } from '@/components/target/View'; +import { OperationsList } from '@/components/target/operations/List'; +import { OperationsStats } from '@/components/target/operations/Stats'; +import { EmptyList } from '@/components/common/EmptyList'; +import { OperationsFilterTrigger } from '@/components/target/operations/Filters'; +import { useRouteSelector } from '@/lib/hooks/use-route-selector'; +import { + calculatePeriod, + DATE_RANGE_OPTIONS, + PeriodKey, +} from '@/components/common/TimeFilter'; + +const OperationsView: React.FC<{ + organization: OrganizationFieldsFragment; + project: ProjectFieldsFragment; + target: TargetFieldsFragment; +}> = ({ organization, project, target }) => { + const router = useRouteSelector(); + const selectedPeriod: PeriodKey = (router.query.period as PeriodKey) ?? 
'1d'; + const [selectedOperations, setSelectedOperations] = React.useState( + [] + ); + + const period = React.useMemo( + () => calculatePeriod(selectedPeriod), + [selectedPeriod] + ); + const updatePeriod = React.useCallback( + (ev: any) => { + router.update({ period: ev.target.value }); + }, + [router.update] + ); + + return ( + <> +
+ +
+ +
+
+ +
+
+
+
+ + +
+ + ); +}; + +const OperationsViewGate: React.FC<{ + organization: OrganizationFieldsFragment; + project: ProjectFieldsFragment; + target: TargetFieldsFragment; +}> = ({ organization, project, target }) => { + const [query] = useQuery({ + query: HasCollectedOperationsDocument, + variables: { + selector: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + }, + }); + + return ( + + {(result) => { + if (!result.data.hasCollectedOperations) { + return ( + + ); + } + + return ( + + ); + }} + + ); +}; + +export default function TargetOperations() { + return ( + + {({ organization, project, target }) => ( + + + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/[targetId]/settings.tsx b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/settings.tsx new file mode 100644 index 000000000..8a3d352e8 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/[targetId]/settings.tsx @@ -0,0 +1,83 @@ +import React from 'react'; +import 'twin.macro'; +import { useQuery } from 'urql'; +import { Settings } from '@/components/common/Settings'; +import { DataWrapper } from '@/components/common/DataWrapper'; +import { TargetView } from '@/components/target/View'; +import { NameSettings } from '@/components/target/settings/Name'; +import { DeleteSettings } from '@/components/target/settings/Delete'; +import { TokensSettings } from '@/components/target/settings/Tokens'; +import { ValidationSettings } from '@/components/target/settings/Validation'; +import { BaseSchemaSettings } from '@/components/target/settings/BaseSchema'; +import { + OrganizationFieldsFragment, + ProjectFieldsFragment, + TargetFieldsFragment, + TargetSettingsDocument, +} from '@/graphql'; +import { useTargetAccess, TargetAccessScope } from '@/lib/access/target'; + +const Inner: React.FC<{ + target: TargetFieldsFragment; + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ target, project, organization }) => 
{ + const canAccess = useTargetAccess({ + scope: TargetAccessScope.Settings, + member: organization.me, + redirect: true, + }); + const canAccessTokens = useTargetAccess({ + scope: TargetAccessScope.TokensRead, + member: organization.me, + redirect: false, + }); + const [settings] = useQuery({ + query: TargetSettingsDocument, + variables: { + selector: { + organization: organization.cleanId, + project: project.cleanId, + target: target.cleanId, + }, + targetsSelector: { + organization: organization.cleanId, + project: project.cleanId, + }, + }, + }); + + if (!canAccess) { + return null; + } + + return ( + + {() => ( + + + {canAccessTokens && ( + + )} + + + + + )} + + ); +}; + +export default function TargetSettingsPage() { + return ( + + {({ target, project, organization }) => ( + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/alerts.tsx b/packages/web/app/pages/[orgId]/[projectId]/alerts.tsx new file mode 100644 index 000000000..6738b6b36 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/alerts.tsx @@ -0,0 +1,39 @@ +import React from 'react'; +import 'twin.macro'; +import { OrganizationFieldsFragment } from '@/graphql'; +import { Page } from '@/components/common'; +import { ProjectView } from '@/components/project/View'; +import { useProjectAccess, ProjectAccessScope } from '@/lib/access/project'; +import { Alerts } from '@/components/project/alerts/Alerts'; +import { Channels } from '@/components/project/alerts/Channels'; + +const Gate: React.FC<{ + organization: OrganizationFieldsFragment; +}> = ({ organization }) => { + const canAccess = useProjectAccess({ + scope: ProjectAccessScope.Alerts, + member: organization.me, + redirect: true, + }); + + if (!canAccess) { + return null; + } + + return ( + +
+ + +
+
+ ); +}; + +export default function ProjectSettingsPage() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/index.tsx b/packages/web/app/pages/[orgId]/[projectId]/index.tsx new file mode 100644 index 000000000..2ac012431 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/index.tsx @@ -0,0 +1,46 @@ +import * as React from 'react'; +import 'twin.macro'; +import { OrganizationFieldsFragment, ProjectFieldsFragment } from '@/graphql'; +import { Page } from '@/components/common'; +import { ProjectView } from '@/components/project/View'; +import { ProjectTargets } from '@/components/project/Targets'; +import { ProjectActivities } from '@/components/project/Activities'; +import { TargetCreatorTrigger } from '@/components/target/Creator'; +import { ProjectAccessScope, useProjectAccess } from '@/lib/access/project'; + +const Inner: React.FC<{ + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ project, organization }) => { + const canCreate = useProjectAccess({ + scope: ProjectAccessScope.Read, + member: organization.me, + }); + + return ( + } + > +
+
+ +
+
+ +
+
+
+ ); +}; + +export default function ProjectPage() { + return ( + + {({ project, organization }) => ( + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/operations-store.tsx b/packages/web/app/pages/[orgId]/[projectId]/operations-store.tsx new file mode 100644 index 000000000..fb0f44c81 --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/operations-store.tsx @@ -0,0 +1,42 @@ +import React from 'react'; +import 'twin.macro'; +import { Page } from '@/components/common'; +import { Dashboard } from '@/components/project/persisted-operations/Dashboard'; +import { ProjectView } from '@/components/project/View'; +import { ProjectFieldsFragment, OrganizationFieldsFragment } from '@/graphql'; +import { ProjectAccessScope, useProjectAccess } from '@/lib/access/project'; + +const Inner: React.FC<{ + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ project, organization }) => { + const canAccess = useProjectAccess({ + scope: ProjectAccessScope.OperationsStoreRead, + member: organization.me, + redirect: true, + }); + + if (!canAccess) { + return null; + } + + return ( + + + + ); +}; + +export default function PersistedOperationsPage() { + return ( + + {({ project, organization }) => ( + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/[projectId]/settings.tsx b/packages/web/app/pages/[orgId]/[projectId]/settings.tsx new file mode 100644 index 000000000..99107a94a --- /dev/null +++ b/packages/web/app/pages/[orgId]/[projectId]/settings.tsx @@ -0,0 +1,45 @@ +import React from 'react'; +import 'twin.macro'; +import { Settings } from '@/components/common/Settings'; +import { ProjectView } from '@/components/project/View'; +import { NameSettings } from '@/components/project/settings/Name'; +import { DeleteSettings } from '@/components/project/settings/Delete'; +import { GitRepositorySettings } from '@/components/project/settings/GitRepository'; +import { ProjectFieldsFragment, OrganizationFieldsFragment } 
from '@/graphql'; +import { useProjectAccess, ProjectAccessScope } from '@/lib/access/project'; + +const Inner: React.FC<{ + project: ProjectFieldsFragment; + organization: OrganizationFieldsFragment; +}> = ({ project, organization }) => { + const canAccess = useProjectAccess({ + scope: ProjectAccessScope.Settings, + member: organization.me, + redirect: true, + }); + + if (!canAccess) { + return null; + } + + return ( + + + + + + ); +}; + +export default function ProjectSettingsPage() { + return ( + + {({ project, organization }) => ( + + )} + + ); +} diff --git a/packages/web/app/pages/[orgId]/index.tsx b/packages/web/app/pages/[orgId]/index.tsx new file mode 100644 index 000000000..df99e4e69 --- /dev/null +++ b/packages/web/app/pages/[orgId]/index.tsx @@ -0,0 +1,46 @@ +import * as React from 'react'; +import 'twin.macro'; +import { OrganizationFieldsFragment } from '@/graphql'; +import { Page } from '@/components/common'; +import { OrganizationView } from '@/components/organization/View'; +import { OrganizationProjects } from '@/components/organization/Projects'; +import { OrganizationActivities } from '@/components/organization/Activities'; +import { ProjectCreatorTrigger } from '@/components/project/Creator'; +import { + OrganizationAccessScope, + useOrganizationAccess, +} from '@/lib/access/organization'; + +const Inner: React.FC<{ organization: OrganizationFieldsFragment }> = ({ + organization, +}) => { + const canCreate = useOrganizationAccess({ + scope: OrganizationAccessScope.Read, + member: organization?.me, + }); + + return ( + } + > +
+
+ +
+
+ +
+
+
+ ); +}; + +export default function OrganizationPage() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/[orgId]/members.tsx b/packages/web/app/pages/[orgId]/members.tsx new file mode 100644 index 000000000..b10f1b876 --- /dev/null +++ b/packages/web/app/pages/[orgId]/members.tsx @@ -0,0 +1,235 @@ +import React from 'react'; +import 'twin.macro'; +import { track } from '@/lib/mixpanel'; +import { useQuery, useMutation } from 'urql'; +import { + Button, + Checkbox, + Table, + Thead, + Tbody, + Tr, + Th, + Td, + useDisclosure, +} from '@chakra-ui/react'; +import { FaGoogle, FaGithub, FaKey } from 'react-icons/fa'; +import { Page } from '@/components/common'; +import { CopyValue } from '@/components/common/CopyValue'; +import { OrganizationView } from '@/components/organization/View'; +import { MemberPermisssonsModal } from '@/components/organization/members/PermissionsModal'; +import { + OrganizationFieldsFragment, + OrganizationMembersDocument, + ResetInviteCodeDocument, + OrganizationMembersQuery, + DeleteOrganizationMembersDocument, + MemberFieldsFragment, + AuthProvider, +} from '@/graphql'; +import { + OrganizationAccessScope, + useOrganizationAccess, +} from '@/lib/access/organization'; +import { useNotifications } from '@/lib/hooks/use-notifications'; +import { useRouteSelector } from '@/lib/hooks/use-route-selector'; +import { DataWrapper } from '@/components/common/DataWrapper'; + +const Invitation: React.FC<{ + organization: OrganizationMembersQuery['organization']['organization']; +}> = ({ organization }) => { + const inviteUrl = `${window.location.origin}/join/${organization.inviteCode}`; + const router = useRouteSelector(); + const notify = useNotifications(); + const [mutation, mutate] = useMutation(ResetInviteCodeDocument); + + const generate = React.useCallback(() => { + track('GENERATE_NEW_INVITATION_LINK_ATTEMPT', { + organization: router.organizationId, + }); + mutate({ + selector: { + organization: 
router.organizationId, + }, + }).finally(() => { + notify('Generated new invitation link', 'info'); + }); + }, [mutate, notify]); + + return ( +
+ + +
+ ); +}; + +const MemberRow: React.FC<{ + member: MemberFieldsFragment; + owner: MemberFieldsFragment; + organization: OrganizationFieldsFragment; + checked: string[]; + onCheck(id: string): void; +}> = ({ member, owner, checked, onCheck, organization }) => { + const isOwner = member.id === owner.id; + const isMe = member.id === organization.me.id; + const { isOpen, onOpen, onClose } = useDisclosure(); + + const canManage = !isOwner && !isMe; + + const provider = member.user.provider; + const providerIcon = + provider === AuthProvider.Google ? ( + + ) : provider === AuthProvider.Github ? ( + + ) : ( + + ); + + return ( + <> + + + + onCheck(member.id)} + /> + + {providerIcon} + {member.user.displayName} + {member.user.email} + + {canManage && ( + + )} + + + + ); +}; + +const MembersManager: React.FC<{ + organization: OrganizationFieldsFragment; +}> = ({ organization }) => { + const [query] = useQuery({ + query: OrganizationMembersDocument, + variables: { + selector: { + organization: organization.cleanId, + }, + }, + }); + const [checked, setChecked] = React.useState([]); + const onCheck = React.useCallback( + (id: string) => { + if (checked.includes(id)) { + setChecked(checked.filter((i) => i !== id)); + } else { + setChecked(checked.concat(id)); + } + }, + [checked, setChecked] + ); + const [mutation, mutate] = useMutation(DeleteOrganizationMembersDocument); + const deleteMembers = React.useCallback(() => { + mutate({ + selector: { + organization: organization.cleanId, + users: checked, + }, + }).finally(() => { + setChecked([]); + }); + }, [mutate, checked, setChecked]); + + return ( + + {() => ( +
+
+ +
+ +
+
+ + + + + + + + + + + + {query.data.organization.organization?.members.nodes.map( + (member) => ( + + ) + )} + +
NameEmailPermissions
+
+ )} +
+ ); +}; + +const Inner: React.FC<{ organization: OrganizationFieldsFragment }> = ({ + organization, +}) => { + const canAccess = useOrganizationAccess({ + scope: OrganizationAccessScope.Members, + member: organization?.me, + redirect: true, + }); + + if (!canAccess) { + return null; + } + + return ( + + + + ); +}; + +export default function OrganizationSettingsPage() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/[orgId]/settings.tsx b/packages/web/app/pages/[orgId]/settings.tsx new file mode 100644 index 000000000..e95c4668b --- /dev/null +++ b/packages/web/app/pages/[orgId]/settings.tsx @@ -0,0 +1,54 @@ +import React from 'react'; +import 'twin.macro'; +import { + OrganizationAccessScope, + useOrganizationAccess, +} from '@/lib/access/organization'; +import { Settings } from '@/components/common/Settings'; +import { OrganizationView } from '@/components/organization/View'; +import { DeleteSettings } from '@/components/organization/settings/Delete'; +import { NameSettings } from '@/components/organization/settings/Name'; +import { IntegrationsSettings } from '@/components/organization/settings/Integrations'; +import { OrganizationFieldsFragment, OrganizationType } from '@/graphql'; + +const Inner: React.FC<{ organization: OrganizationFieldsFragment }> = ({ + organization, +}) => { + const canAccess = useOrganizationAccess({ + scope: OrganizationAccessScope.Settings, + member: organization?.me, + redirect: true, + }); + const canAccessIntegrations = useOrganizationAccess({ + scope: OrganizationAccessScope.Integrations, + member: organization?.me, + redirect: false, + }); + + if (!canAccess) { + return null; + } + + const isRegular = organization.type === OrganizationType.Regular; + + return ( + + {isRegular && } + {canAccessIntegrations && ( + + )} + {isRegular && } + + ); +}; + +export default function OrganizationSettingsPage() { + return ( + + {({ organization }) => } + + ); +} diff --git 
a/packages/web/app/pages/[orgId]/subscription/index.tsx b/packages/web/app/pages/[orgId]/subscription/index.tsx new file mode 100644 index 000000000..ceac03071 --- /dev/null +++ b/packages/web/app/pages/[orgId]/subscription/index.tsx @@ -0,0 +1,120 @@ +import React from 'react'; +import 'twin.macro'; +import { + OrganizationAccessScope, + useOrganizationAccess, +} from '@/lib/access/organization'; +import { OrganizationView } from '@/components/organization/View'; +import { OrganizationUsageEstimationView } from '@/components/organization/Usage'; +import { + OrganizationFieldsFragment, + OrgBillingInfoFieldsFragment, + OrgRateLimitFieldsFragment, +} from '@/graphql'; +import { Card, Page } from '@/components/common'; +import { BillingView } from '@/components/organization/billing/Billing'; +import { + Button, + Stat, + StatHelpText, + StatLabel, + StatNumber, +} from '@chakra-ui/react'; +import { useRouteSelector } from '@/lib/hooks/use-route-selector'; +import { InvoicesList } from '@/components/organization/billing/InvoicesList'; +import { CurrencyFormatter } from '@/components/organization/billing/helpers'; +import { RateLimitWarn } from '@/components/organization/billing/RateLimitWarn'; + +const Inner: React.FC<{ + organization: OrganizationFieldsFragment & + OrgBillingInfoFieldsFragment & + OrgRateLimitFieldsFragment; +}> = ({ organization }) => { + const router = useRouteSelector(); + const canAccess = useOrganizationAccess({ + scope: OrganizationAccessScope.Settings, + member: organization?.me, + redirect: true, + }); + + if (!canAccess) { + return null; + } + + return ( + + Manage Subscription + + } + > + +
+
+
+ + Plan and Reserved Volume + + + {organization.billingConfiguration?.upcomingInvoice ? ( + + Next Invoice + + {CurrencyFormatter.format( + organization.billingConfiguration.upcomingInvoice + .amount + )} + + + {organization.billingConfiguration.upcomingInvoice.date} + + + ) : null} + + + +
+
+
+ + Monthly Usage Overview + + + + +
+
+ {organization.billingConfiguration?.invoices?.length > 0 ? ( + + Invoices + + + + + ) : null} +
+ ); +}; + +export default function SubscriptionPage() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/[orgId]/subscription/manage.tsx b/packages/web/app/pages/[orgId]/subscription/manage.tsx new file mode 100644 index 000000000..179b57d1b --- /dev/null +++ b/packages/web/app/pages/[orgId]/subscription/manage.tsx @@ -0,0 +1,302 @@ +import React, { useState } from 'react'; +import 'twin.macro'; +import { + OrganizationAccessScope, + useOrganizationAccess, +} from '@/lib/access/organization'; +import { OrganizationView } from '@/components/organization/View'; +import { + BillingPlansDocument, + DowngradeToHobbyDocument, + OrganizationFieldsFragment, + OrgBillingInfoFieldsFragment, + UpdateOrgRateLimitDocument, + UpgradeToProDocument, +} from '@/graphql'; +import { Card, Page, Section } from '@/components/common'; +import { useMutation, useQuery } from 'urql'; +import { DataWrapper, QueryError } from '@/components/common/DataWrapper'; +import { BillingPlanType } from '@/gql/graphql'; +import { + Button, + Input, + Stat, + StatHelpText, + StatLabel, + StatNumber, +} from '@chakra-ui/react'; +import { BillingPlanPicker } from '@/components/organization/billing/BillingPlanPicker'; +import { PlanSummary } from '@/components/organization/billing/PlanSummary'; +import { BillingPaymentMethod } from '@/components/organization/billing/BillingPaymentMethod'; +import { CardElement, useElements, useStripe } from '@stripe/react-stripe-js'; + +const Inner: React.FC<{ + organization: OrganizationFieldsFragment & OrgBillingInfoFieldsFragment; +}> = ({ organization }) => { + const stripe = useStripe(); + const elements = useElements(); + const canAccess = useOrganizationAccess({ + scope: OrganizationAccessScope.Settings, + member: organization?.me, + redirect: true, + }); + + const [query] = useQuery({ + query: BillingPlansDocument, + }); + + const [paymentDetailsValid, setPaymentDetailsValid] = useState( + 
!!organization.billingConfiguration?.paymentMethod + ); + const upgradeToProMutation = useMutation(UpgradeToProDocument); + const downgradeToHobbyMutation = useMutation(DowngradeToHobbyDocument); + const updateOrgRateLimitMutation = useMutation(UpdateOrgRateLimitDocument); + + const [plan, setPlan] = React.useState( + (organization?.plan || 'HOBBY') as BillingPlanType + ); + const [couponCode, setCouponCode] = React.useState(null); + const [operationsRateLimit, setOperationsRateLimit] = React.useState( + Math.floor(organization.rateLimit.operations / 1_000_000) + ); + const [schemaPushesRateLimit, setSchemaPushesLimit] = React.useState( + organization.rateLimit.schemaPushes + ); + + React.useEffect(() => { + if (query.data?.billingPlans?.length > 0) { + if (organization.plan !== plan) { + const actualPlan = query.data.billingPlans.find( + (v) => v.planType === plan + ); + + setOperationsRateLimit( + Math.floor(actualPlan.includedOperationsLimit / 1_000_000) + ); + setSchemaPushesLimit(actualPlan.includedSchemaPushLimit); + } else { + setOperationsRateLimit( + Math.floor(organization.rateLimit.operations / 1_000_000) + ); + setSchemaPushesLimit(organization.rateLimit.schemaPushes); + } + } + }, [plan, query.data?.billingPlans?.length]); + + if (!canAccess) { + return null; + } + + const openChatSupport = () => { + if (typeof window !== 'undefined' && (window as any).$crisp) { + (window as any).$crisp.push(['do', 'chat:open']); + } + }; + + const upgrade = async () => { + let paymentMethodId: string | null = null; + + if (organization.billingConfiguration.paymentMethod === null) { + const { paymentMethod } = await stripe.createPaymentMethod({ + type: 'card', + card: elements.getElement(CardElement), + }); + paymentMethodId = paymentMethod.id; + } + + upgradeToProMutation[1]({ + organization: organization.cleanId, + monthlyLimits: { + operations: operationsRateLimit * 1_000_000, + schemaPushes: schemaPushesRateLimit, + }, + paymentMethodId: paymentMethodId, + 
couponCode, + }); + }; + + const downgrade = () => { + downgradeToHobbyMutation[1]({ + organization: organization.cleanId, + }); + }; + + const updateLimits = () => { + updateOrgRateLimitMutation[1]({ + organization: organization.cleanId, + monthlyLimits: { + operations: operationsRateLimit * 1_000_000, + schemaPushes: schemaPushesRateLimit, + }, + }); + }; + + const renderPaymentDetails = () => { + if (plan === BillingPlanType.Pro && plan !== organization.plan) { + return ( +
+ setCouponCode(e.target.value)} + placeholder="Discount Code" + /> +
+ ); + } + + return null; + }; + + const renderActions = () => { + if (plan === organization.plan) { + if ( + organization.rateLimit.operations !== operationsRateLimit * 1_000_000 || + organization.rateLimit.schemaPushes !== schemaPushesRateLimit + ) { + return ( + <> + + + Updating your organization limitations might take a few minutes to + update. + + + ); + } + + return null; + } + + if (plan === 'ENTERPRISE') { + return ( + + ); + } else if (plan === 'PRO') { + return ( + + ); + } else if (plan === 'HOBBY') { + return ( + + ); + } + + return null; + }; + + const error = + upgradeToProMutation[0].error || + downgradeToHobbyMutation[0].error || + updateOrgRateLimitMutation[0].error; + + return ( + + + {(result) => { + const selectedPlan = result.data.billingPlans.find( + (v) => v.planType === plan + ); + + return ( +
+
+
+ + Choose Your Plan + + + + +
+
+
+ + Plan Summary + + {error ? ( + + ) : null} + + {selectedPlan.planType === BillingPlanType.Pro ? ( + + Free Trial + 14 + days + + ) : null} + + setPaymentDetailsValid(v)} + /> + {renderPaymentDetails()} + {renderActions()} + + +
+
+ ); + }} +
+
+ ); +}; + +export default function ManageSubscriptionPage() { + return ( + + {({ organization }) => } + + ); +} diff --git a/packages/web/app/pages/_app.tsx b/packages/web/app/pages/_app.tsx new file mode 100644 index 000000000..0fff3950b --- /dev/null +++ b/packages/web/app/pages/_app.tsx @@ -0,0 +1,105 @@ +import React from 'react'; +import type { AppProps } from 'next/app'; +import Script from 'next/script'; +import { useRouter } from 'next/router'; +import { initMixpanel } from '@/lib/mixpanel'; +import { Provider as UrqlProvider } from 'urql'; +import { ChakraProvider, extendTheme } from '@chakra-ui/react'; +import { urqlClient } from '@/lib/urql'; +import GlobalStylesComponent from '@/components/common/GlobalStyles'; +import { AuthProvider } from '@/components/auth/AuthProvider'; +import { NavigationProvider } from '@/components/common/Navigation'; +import { Page } from '@/components/common/Page'; +import * as gtag from '@/lib/gtag'; +import { colors } from '@/lib/theme'; +import { LoadingAPIIndicator } from '@/components/common/LoadingAPI'; +import '@/lib/graphiql.css'; +import { HiveStripeWrapper } from '@/lib/billing/stripe'; + +const theme = extendTheme({ colors }); + +const CRISP_WEBSITE_ID = process.env.NEXT_PUBLIC_CRISP_WEBSITE_ID; + +initMixpanel(); + +function App({ Component, pageProps }: AppProps) { + const router = useRouter(); + React.useEffect(() => { + const handleRouteChange = (url) => { + gtag.pageview(url); + }; + router.events.on('routeChangeComplete', handleRouteChange); + router.events.on('hashChangeComplete', handleRouteChange); + return () => { + router.events.off('routeChangeComplete', handleRouteChange); + router.events.off('hashChangeComplete', handleRouteChange); + }; + }, [router.events]); + + return ( + <> + + {gtag.GA_TRACKING_ID && ( + +