$ prettier <all> (#46)

Co-authored-by: Dimitri POSTOLOV <dmytropostolov@gmail.com>
This commit is contained in:
Kamil Kisiela 2022-05-24 15:31:53 +02:00 committed by GitHub
parent 243f1b79fd
commit 20e3129caa
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
420 changed files with 2643 additions and 7440 deletions

View file

@@ -19,10 +19,7 @@ module.exports = {
plugins: ['@typescript-eslint', 'import'],
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'],
rules: {
'@typescript-eslint/no-unused-vars': [
'error',
{ argsIgnorePattern: '^_', ignoreRestSiblings: true },
],
'@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_', ignoreRestSiblings: true }],
'no-empty': ['error', { allowEmptyCatch: true }],
'import/no-absolute-path': 'error',
@@ -48,7 +45,7 @@ module.exports = {
overrides: [
{
// TODO: replace with packages/web/**
files: ['packages/web/app/src/components/v2/**','packages/web/app/pages/\\[orgId\\]/**'],
files: ['packages/web/app/src/components/v2/**', 'packages/web/app/pages/\\[orgId\\]/**'],
extends: [
'plugin:react/recommended',
'plugin:react-hooks/recommended',
@@ -88,18 +85,9 @@ module.exports = {
'react/prop-types': 'off',
'react/no-unescaped-entities': 'off',
'react/jsx-curly-brace-presence': 'error',
'jsx-a11y/anchor-is-valid': [
'off',
{ components: ['Link', 'NextLink'] },
],
'jsx-a11y/alt-text': [
'warn',
{ elements: ['img'], img: ['Image', 'NextImage'] },
],
'@typescript-eslint/consistent-type-imports': [
'error',
{ prefer: 'no-type-imports' },
],
'jsx-a11y/anchor-is-valid': ['off', { components: ['Link', 'NextLink'] }],
'jsx-a11y/alt-text': ['warn', { elements: ['img'], img: ['Image', 'NextImage'] }],
'@typescript-eslint/consistent-type-imports': ['error', { prefer: 'no-type-imports' }],
'simple-import-sort/exports': 'error',
'simple-import-sort/imports': [
'error',
@@ -108,7 +96,7 @@ module.exports = {
[
// Node.js builtins
`^(node:)?(${builtinModules
.filter((mod) => !mod.startsWith('_') && !mod.includes('/'))
.filter(mod => !mod.startsWith('_') && !mod.includes('/'))
.join('|')})(/.*|$)`,
'^react(-dom)?$',
'^next(/.*|$)',

View file

@@ -3,7 +3,7 @@ on: [pull_request]
jobs:
setup:
name: 'Setup'
name: Setup
runs-on: ubuntu-latest
services:
@@ -65,7 +65,7 @@ jobs:
key: ${{ github.sha }}
integration-tests:
name: 'Integration Tests'
name: Integration Tests
runs-on: ubuntu-latest
needs: setup
@@ -121,7 +121,7 @@ jobs:
run: cat integration-tests/*.log
schema-check:
name: 'Schema Check'
name: Schema Check
runs-on: ubuntu-latest
needs: setup
@@ -160,7 +160,7 @@ jobs:
run: ./packages/libraries/cli/bin/dev schema:check "packages/services/api/src/modules/*/module.graphql.ts" ${{ steps.pr-label-check.outputs.SAFE_FLAG }} --github
test:
name: 'Tests'
name: Tests
runs-on: ubuntu-latest
needs: setup
@@ -193,7 +193,7 @@ jobs:
run: yarn test
build:
name: 'Build'
name: Build
runs-on: ubuntu-latest
needs: setup
@@ -229,7 +229,7 @@ jobs:
run: yarn build
type-check:
name: 'Check types'
name: Check types
runs-on: ubuntu-latest
needs: setup
@@ -263,3 +263,39 @@ jobs:
- name: Type Check
run: yarn typecheck
prettier-check:
name: Prettier Check
runs-on: ubuntu-latest
needs: setup
env:
TURBO_TOKEN: ${{secrets.TURBO_TOKEN}}
TURBO_TEAM: ${{secrets.TURBO_TEAM}}
TURBO_API_URL: ${{secrets.TURBO_API_URL}}
TURBO_REMOTE_ONLY: 'true'
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 5
- uses: actions/setup-node@v2
with:
node-version: 16
- name: Pull node_modules
uses: actions/cache@v2
with:
path: '**/node_modules'
key: ${{github.sha}}
- name: Setup Turbo
run: node ./scripts/turborepo-setup.js
- name: Generate Types
run: yarn graphql:generate
- name: Prettier Check
run: 'yarn prettier:check'

15
.gitignore vendored
View file

@@ -70,11 +70,11 @@ typings/
.cache
# Next.js build output
.next
.next/
# Nuxt.js build / generate output
.nuxt
dist
dist/
# Gatsby files
.cache/
@@ -101,13 +101,14 @@ temp
.DS_STORE
__generated__
__generated__/
integration-tests/testkit/gql
.turbo
.turbo/config.json
integration-tests/testkit/gql/
.turbo/
# IntelliJ's project specific settings files
.idea/
*.pem
*.pem
/.husky/_/

1
.husky/.gitignore vendored
View file

@@ -1 +0,0 @@
_

View file

@@ -1,16 +1,14 @@
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log
coverage
*.lcov
.env
.env.test
.next
out
dist
temp
__generated__
packages/services/storage/src/db/types.ts
.env.template
dist/
.next/
__generated__/
/.husky/_/
/.turbo/
/integration-tests/testkit/gql/
/packages/services/storage/src/db/types.ts
/packages/libraries/cli/src/sdk.ts
/packages/web/app/src/gql/gql.d.ts
/packages/web/app/src/gql/graphql.ts
/packages/web/app/src/graphql/index.ts

View file

@@ -1,7 +1,3 @@
{
"recommendations": [
"fabiospampinato.vscode-terminals",
"fabiospampinato.vscode-commands",
"esbenp.prettier-vscode"
]
"recommendations": ["fabiospampinato.vscode-terminals", "fabiospampinato.vscode-commands", "esbenp.prettier-vscode"]
}

View file

@@ -1,4 +1,4 @@
/bin/
/node_modules/
Pulumi.*.yaml
Pulumi.yaml
Pulumi.yaml

View file

@@ -89,9 +89,7 @@ export function deployApp({
{ name: 'NEXT_PUBLIC_SENTRY_DSN', value: commonEnv.SENTRY_DSN },
{
name: 'GRAPHQL_ENDPOINT',
value: serviceLocalEndpoint(graphql.service).apply(
(s) => `${s}/graphql`
),
value: serviceLocalEndpoint(graphql.service).apply(s => `${s}/graphql`),
},
{
name: 'APP_BASE_URL',

View file

@@ -40,9 +40,7 @@ export function deployStripeBilling({
RELEASE: packageHelper.currentReleaseId(),
USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service),
STRIPE_SECRET_KEY: billingConfig.requireSecret('stripePrivateKey'),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
},
exposesMetrics: true,
packageInfo: packageHelper.npmPack('@hive/stripe-billing'),

View file

@@ -8,11 +8,7 @@ export function deployBotKube({ envName }: { envName: string }) {
return;
}
if (
botkubeConfig &&
botkubeConfig.get('slackChannel') &&
botkubeConfig.getSecret('slackToken')
) {
if (botkubeConfig && botkubeConfig.get('slackChannel') && botkubeConfig.getSecret('slackToken')) {
new BotKube().deploy({
clusterName: envName,
enableKubectl: true,

View file

@@ -6,13 +6,7 @@ const cfConfig = new pulumi.Config('cloudflareCustom');
export type Cloudflare = ReturnType<typeof deployCloudflare>;
export function deployCloudflare({
rootDns,
envName,
}: {
rootDns: string;
envName: string;
}) {
export function deployCloudflare({ rootDns, envName }: { rootDns: string; envName: string }) {
const cdnAuthPrivateKey = commonConfig.requireSecret('cdnAuthPrivateKey');
const cdn = new CloudflareCDN(
envName,

View file

@@ -26,9 +26,7 @@ export function deployDbMigrations({
'db-migrations',
{
env: {
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
MIGRATOR: 'up',
CLICKHOUSE_MIGRATOR: 'up',
CLICKHOUSE_HOST: clickhouse.config.host,

View file

@@ -78,9 +78,7 @@ export function deployGraphQL({
REDIS_PORT: String(redis.config.port),
REDIS_PASSWORD: redis.config.password,
RELEASE: packageHelper.currentReleaseId(),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
AUTH0_DOMAIN: commonConfig.require('auth0Domain'),
AUTH0_CLIENT_ID: commonConfig.require('auth0ClientId'),
AUTH0_CLIENT_SECRET: commonConfig.requireSecret('auth0ClientSecret'),

View file

@@ -3,13 +3,7 @@ import { HivePolice } from '../utils/police';
const cfCustomConfig = new pulumi.Config('cloudflareCustom');
export function deployCloudflarePolice({
envName,
rootDns,
}: {
envName: string;
rootDns: string;
}) {
export function deployCloudflarePolice({ envName, rootDns }: { envName: string; rootDns: string }) {
const police = new HivePolice(
envName,
cfCustomConfig.require('zoneId'),

View file

@@ -37,13 +37,10 @@ export function deployRateLimit({
env: {
...deploymentEnv,
...commonEnv,
LIMIT_CACHE_UPDATE_INTERVAL_MS:
rateLimitConfig.require('updateIntervalMs'),
LIMIT_CACHE_UPDATE_INTERVAL_MS: rateLimitConfig.require('updateIntervalMs'),
RELEASE: packageHelper.currentReleaseId(),
USAGE_ESTIMATOR_ENDPOINT: serviceLocalEndpoint(usageEstimator.service),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
},
exposesMetrics: true,
packageInfo: packageHelper.npmPack('@hive/rate-limit'),

View file

@@ -8,11 +8,7 @@ const redisConfig = new pulumi.Config('redis');
export type Redis = ReturnType<typeof deployRedis>;
export function deployRedis({
deploymentEnv,
}: {
deploymentEnv: DeploymentEnvironment;
}) {
export function deployRedis({ deploymentEnv }: { deploymentEnv: DeploymentEnvironment }) {
const redisPassword = redisConfig.require('password');
const redisApi = new RedisStore({
password: redisPassword,

View file

@@ -29,9 +29,7 @@ export function deployTokens({
env: {
...deploymentEnv,
...commonEnv,
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
RELEASE: packageHelper.currentReleaseId(),
},
readinessProbe: '/_readiness',

View file

@@ -41,9 +41,7 @@ export function deployUsageEstimation({
CLICKHOUSE_USERNAME: clickhouse.config.username,
CLICKHOUSE_PASSWORD: clickhouse.config.password,
RELEASE: packageHelper.currentReleaseId(),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret(
'postgresConnectionString'
),
POSTGRES_CONNECTION_STRING: apiConfig.requireSecret('postgresConnectionString'),
},
exposesMetrics: true,
packageInfo: packageHelper.npmPack('@hive/usage-estimator'),

View file

@@ -33,9 +33,7 @@ export function deployUsageIngestor({
const cpuLimit = isProduction(deploymentEnv) ? '600m' : '300m';
const maxReplicas = isProduction(deploymentEnv) ? 4 : 2;
const partitionsConsumedConcurrently = Math.floor(
numberOfPartitions / replicas
);
const partitionsConsumedConcurrently = Math.floor(numberOfPartitions / replicas);
return new RemoteArtifactAsServiceDeployment(
'usage-ingestor-service',

View file

@@ -68,12 +68,6 @@ export function deployUsage({
maxReplicas: maxReplicas,
},
},
[
dbMigrations,
tokens.deployment,
tokens.service,
rateLimit.deployment,
rateLimit.service,
]
[dbMigrations, tokens.deployment, tokens.service, rateLimit.deployment, rateLimit.service]
).deploy();
}

View file

@@ -3,13 +3,7 @@ import * as resources from '@pulumi/azure-native/resources';
import * as storage from '@pulumi/azure-native/storage';
import * as web from '@pulumi/azure-native/web';
import { tmpdir } from 'os';
import {
mkdtempSync,
copyFileSync,
writeFileSync,
mkdirSync,
readFileSync,
} from 'fs';
import { mkdtempSync, copyFileSync, writeFileSync, mkdirSync, readFileSync } from 'fs';
import { join } from 'path';
import { createHash } from 'crypto';
@@ -22,9 +16,7 @@ function createFunctionFolder({
functionDefinition: Record<string, any>;
functionFile: string;
}) {
const hostDir = mkdtempSync(
join(tmpdir(), Math.random().toString(16).slice(2))
);
const hostDir = mkdtempSync(join(tmpdir(), Math.random().toString(16).slice(2)));
const fnDir = join(hostDir, name);
mkdirSync(fnDir);
@@ -40,10 +32,7 @@ function createFunctionFolder({
);
copyFileSync(functionFile, join(fnDir, 'index.js'));
writeFileSync(
join(fnDir, 'function.json'),
JSON.stringify(functionDefinition, null, 2)
);
writeFileSync(join(fnDir, 'function.json'), JSON.stringify(functionDefinition, null, 2));
return {
checksum: createHash('sha256')
@@ -66,19 +55,14 @@ export class AzureFunction {
) {}
deployAsJob() {
const resourceGroup = new resources.ResourceGroup(
`hive-${this.config.envName}-fn-rg`
);
const storageAccount = new storage.StorageAccount(
`hive${this.config.envName}fn`,
{
resourceGroupName: resourceGroup.name,
sku: {
name: storage.SkuName.Standard_LRS,
},
kind: storage.Kind.StorageV2,
}
);
const resourceGroup = new resources.ResourceGroup(`hive-${this.config.envName}-fn-rg`);
const storageAccount = new storage.StorageAccount(`hive${this.config.envName}fn`, {
resourceGroupName: resourceGroup.name,
sku: {
name: storage.SkuName.Standard_LRS,
},
kind: storage.Kind.StorageV2,
});
const codeContainer = new storage.BlobContainer('functions', {
resourceGroupName: resourceGroup.name,
@@ -106,16 +90,8 @@ export class AzureFunction {
},
});
const storageConnectionString = getConnectionString(
resourceGroup.name,
storageAccount.name
);
const codeBlobUrl = signedBlobReadUrl(
codeBlob,
codeContainer,
storageAccount,
resourceGroup
);
const storageConnectionString = getConnectionString(resourceGroup.name, storageAccount.name);
const codeBlobUrl = signedBlobReadUrl(codeBlob, codeContainer, storageAccount, resourceGroup);
const app = new web.WebApp(
`${this.config.name}-${this.config.envName}-fn`,

View file

@@ -57,14 +57,7 @@ export class BotKube {
restrictAccess: 'true',
enabled: String(config.enableKubectl),
commands: {
verbs: [
'cluster-info',
'describe',
'get',
'logs',
'top',
'restart',
],
verbs: ['cluster-info', 'describe', 'get', 'logs', 'top', 'restart'],
resources: [
'deployments',
'pods',

View file

@@ -17,10 +17,7 @@ export class CloudflareCDN {
});
const script = new cf.WorkerScript('hive-ha-worker', {
content: readFileSync(
resolve(__dirname, '../../packages/services/cdn-worker/dist/worker.js'),
'utf-8'
),
content: readFileSync(resolve(__dirname, '../../packages/services/cdn-worker/dist/worker.js'), 'utf-8'),
name: `hive-storage-cdn-${this.envName}`,
kvNamespaceBindings: [
{

View file

@@ -1,27 +1,15 @@
import { DeploymentEnvironment } from '../types';
export function isProduction(
deploymentEnv: DeploymentEnvironment | string
): boolean {
export function isProduction(deploymentEnv: DeploymentEnvironment | string): boolean {
return !isStaging(deploymentEnv);
}
export function isStaging(
deploymentEnv: DeploymentEnvironment | string
): boolean {
return isDeploymentEnvironment(deploymentEnv)
? deploymentEnv.ENVIRONMENT === 'staging'
: deploymentEnv === 'staging';
export function isStaging(deploymentEnv: DeploymentEnvironment | string): boolean {
return isDeploymentEnvironment(deploymentEnv) ? deploymentEnv.ENVIRONMENT === 'staging' : deploymentEnv === 'staging';
}
export function isDeploymentEnvironment(
value: any
): value is DeploymentEnvironment {
return (
value &&
typeof value === 'object' &&
typeof value['ENVIRONMENT'] === 'string'
);
export function isDeploymentEnvironment(value: any): value is DeploymentEnvironment {
return value && typeof value === 'object' && typeof value['ENVIRONMENT'] === 'string';
}
export function isDefined<T>(value: T | null | undefined): value is T {

View file

@@ -2,32 +2,22 @@ import * as k8s from '@pulumi/kubernetes';
import * as pulumi from '@pulumi/pulumi';
export function serviceLocalEndpoint(service: k8s.types.input.core.v1.Service) {
return pulumi
.all([service.metadata, service.spec])
.apply(([metadata, spec]) => {
const defaultPort = (spec.ports || [])[0];
const portText = defaultPort ? `:${defaultPort.port}` : '';
return pulumi.all([service.metadata, service.spec]).apply(([metadata, spec]) => {
const defaultPort = (spec.ports || [])[0];
const portText = defaultPort ? `:${defaultPort.port}` : '';
return `http://${metadata.name}.${
metadata.namespace || 'default'
}.svc.cluster.local${portText}`;
});
return `http://${metadata.name}.${metadata.namespace || 'default'}.svc.cluster.local${portText}`;
});
}
export function serviceLocalHost(service: k8s.types.input.core.v1.Service) {
return pulumi.all([service.metadata]).apply(([metadata]) => {
return `${metadata.name}.${
metadata.namespace || 'default'
}.svc.cluster.local`;
return `${metadata.name}.${metadata.namespace || 'default'}.svc.cluster.local`;
});
}
export function serviceLocalMetricsEndpoint(
service: k8s.types.input.core.v1.Service
) {
export function serviceLocalMetricsEndpoint(service: k8s.types.input.core.v1.Service) {
return pulumi.all([service.metadata]).apply(([metadata]) => {
return `${metadata.name}.${
metadata.namespace || 'default'
}.svc.cluster.local:10254/metrics`;
return `${metadata.name}.${metadata.namespace || 'default'}.svc.cluster.local:10254/metrics`;
});
}

View file

@@ -71,12 +71,7 @@ export class Observability {
},
{
apiGroups: ['apps'],
resources: [
'daemonsets',
'deployments',
'replicasets',
'statefulsets',
],
resources: ['daemonsets', 'deployments', 'replicasets', 'statefulsets'],
verbs: ['get', 'list', 'watch'],
},
{
@@ -193,40 +188,29 @@ export class Observability {
metrics_path: '/metrics',
relabel_configs: [
{
source_labels: [
'__meta_kubernetes_pod_container_port_name',
],
source_labels: ['__meta_kubernetes_pod_container_port_name'],
action: 'keep',
regex: 'metrics',
},
{
source_labels: [
'__meta_kubernetes_pod_annotation_prometheus_io_scrape',
],
source_labels: ['__meta_kubernetes_pod_annotation_prometheus_io_scrape'],
action: 'keep',
regex: true,
},
{
source_labels: [
'__meta_kubernetes_pod_annotation_prometheus_io_scheme',
],
source_labels: ['__meta_kubernetes_pod_annotation_prometheus_io_scheme'],
action: 'replace',
target_label: '__scheme__',
regex: '(https?)',
},
{
source_labels: [
'__meta_kubernetes_pod_annotation_prometheus_io_path',
],
source_labels: ['__meta_kubernetes_pod_annotation_prometheus_io_path'],
action: 'replace',
target_label: '__metrics_path__',
regex: '(.+)',
},
{
source_labels: [
'__address__',
'__meta_kubernetes_pod_annotation_prometheus_io_port',
],
source_labels: ['__address__', '__meta_kubernetes_pod_annotation_prometheus_io_port'],
action: 'replace',
regex: '([^:]+)(?::d+)?;(d+)',
replacement: '$1:$2',

View file

@@ -12,7 +12,7 @@ export function createPackageHelper(dir = resolve(process.cwd(), '../')) {
return {
currentReleaseId: () => revision,
npmPack(name: string): PackageInfo {
const dir = packages.find((p) => p.packageJson.name === name)?.dir;
const dir = packages.find(p => p.packageJson.name === name)?.dir;
if (!dir) {
throw new Error(`Failed to find package "${name}" in workspace!`);

View file

@@ -5,7 +5,7 @@ import * as pulumi from '@pulumi/pulumi';
export function normalizeEnv(env: kx.types.Container['env']): any[] {
return Array.isArray(env)
? env
: Object.keys(env as kx.types.EnvMap).map((name) => ({
: Object.keys(env as kx.types.EnvMap).map(name => ({
name,
value: (env as kx.types.EnvMap)[name],
}));

View file

@@ -17,13 +17,7 @@ export class HivePolice {
});
const script = new cf.WorkerScript('hive-police-worker', {
content: readFileSync(
resolve(
__dirname,
'../../packages/services/police-worker/dist/worker.js'
),
'utf-8'
),
content: readFileSync(resolve(__dirname, '../../packages/services/police-worker/dist/worker.js'), 'utf-8'),
name: `hive-police-${this.envName}`,
kvNamespaceBindings: [
{

View file

@@ -13,11 +13,7 @@ export class Redis {
}
) {}
deploy({
limits,
}: {
limits: k8s.types.input.core.v1.ResourceRequirements['limits'];
}) {
deploy({ limits }: { limits: k8s.types.input.core.v1.ResourceRequirements['limits'] }) {
const name = 'redis-store';
const image = DEFAULT_IMAGE;

View file

@@ -116,9 +116,9 @@ export class RemoteArtifactAsServiceDeployment {
volumeMounts,
command:
this.options.packageInfo.runtime === 'node'
? ['/bin/sh', '-c', artifactUrl.apply((v) => `yarn add ${v}`)]
? ['/bin/sh', '-c', artifactUrl.apply(v => `yarn add ${v}`)]
: this.options.packageInfo.runtime === 'rust'
? ['/bin/sh', '-c', artifactUrl.apply((v) => `wget ${v}`)]
? ['/bin/sh', '-c', artifactUrl.apply(v => `wget ${v}`)]
: ['echo missing script!'],
},
],
@@ -242,17 +242,13 @@ export class RemoteArtifactAsServiceDeployment {
name: 'cpu',
target: {
type: 'Utilization',
averageUtilization:
this.options.autoScaling.cpu.cpuAverageToScale,
averageUtilization: this.options.autoScaling.cpu.cpuAverageToScale,
},
},
},
],
maxReplicas: this.options.autoScaling.maxReplicas,
minReplicas:
this.options.autoScaling.minReplicas ||
this.options.replicas ||
1,
minReplicas: this.options.autoScaling.minReplicas || this.options.replicas || 1,
},
},
{

View file

@@ -4,10 +4,7 @@ import { Output } from '@pulumi/pulumi';
export class Proxy {
private lbService: Output<k8s.core.v1.Service> | null = null;
constructor(
private tlsSecretName: string,
private staticIp?: { address?: string }
) {}
constructor(private tlsSecretName: string, private staticIp?: { address?: string }) {}
registerService(
dns: { record: string; apex?: boolean },
@@ -56,16 +53,13 @@ export class Proxy {
secretName: dns.record,
},
corsPolicy: {
allowOrigin: [
'https://app.graphql-hive.com',
'https://graphql-hive.com',
],
allowOrigin: ['https://app.graphql-hive.com', 'https://graphql-hive.com'],
allowMethods: ['GET', 'POST', 'OPTIONS'],
allowHeaders: ['*'],
exposeHeaders: ['*'],
},
},
routes: routes.map((route) => ({
routes: routes.map(route => ({
conditions: [
{
prefix: route.path,
@@ -176,10 +170,7 @@ export class Proxy {
},
});
this.lbService = proxyController.getResource(
'v1/Service',
'contour/contour-proxy-envoy'
);
this.lbService = proxyController.getResource('v1/Service', 'contour/contour-proxy-envoy');
new k8s.apiextensions.CustomResource(
'secret-delegation',

View file

@@ -1,4 +1,4 @@
docker-compose.dockest-generated.yml
db-clickhouse
tarballs
volumes
volumes

View file

@@ -73,18 +73,7 @@ services:
soft: 20000
hard: 40000
healthcheck:
test:
[
'CMD',
'cub',
'kafka-ready',
'1',
'5',
'-b',
'127.0.0.1:9092',
'-c',
'/etc/kafka/kafka.properties',
]
test: ['CMD', 'cub', 'kafka-ready', '1', '5', '-b', '127.0.0.1:9092', '-c', '/etc/kafka/kafka.properties']
interval: 15s
timeout: 10s
retries: 6

View file

@@ -27,7 +27,7 @@ async function main() {
return dockest.run(createServices());
}
await main().catch((err) => {
await main().catch(err => {
console.error(err);
process.exit(1);
});

View file

@@ -25,30 +25,21 @@ async function main() {
fsExtra.mkdirSync(tarballDir, { recursive: true });
function isBackendPackage(manifestPath) {
return JSON.parse(
fs.readFileSync(manifestPath, 'utf-8')
).buildOptions?.tags.includes('backend');
return JSON.parse(fs.readFileSync(manifestPath, 'utf-8')).buildOptions?.tags.includes('backend');
}
function listBackendPackages() {
const manifestPathCollection = glob.sync(
'packages/services/*/package.json',
{
cwd,
absolute: true,
ignore: ['**/node_modules/**', '**/dist/**'],
}
);
const manifestPathCollection = glob.sync('packages/services/*/package.json', {
cwd,
absolute: true,
ignore: ['**/node_modules/**', '**/dist/**'],
});
return manifestPathCollection
.filter(isBackendPackage)
.map((filepath) => path.relative(cwd, path.dirname(filepath)));
return manifestPathCollection.filter(isBackendPackage).map(filepath => path.relative(cwd, path.dirname(filepath)));
}
async function pack(location) {
const { version, name } = JSON.parse(
await fsExtra.readFile(path.join(cwd, location, 'package.json'), 'utf-8')
);
const { version, name } = JSON.parse(await fsExtra.readFile(path.join(cwd, location, 'package.json'), 'utf-8'));
const stdout = await new Promise((resolve, reject) => {
exec(
`npm pack ${path.join(cwd, location, 'dist')}`,
@@ -69,10 +60,7 @@ async function main() {
const lines = stdout.split('\n');
const org_filename = path.resolve(cwd, lines[lines.length - 2]);
let filename = org_filename
.replace(cwd, tarballDir)
.replace('hive-', '')
.replace(`-${version}`, '');
let filename = org_filename.replace(cwd, tarballDir).replace('hive-', '').replace(`-${version}`, '');
if (/-\d+\.\d+\.\d+\.tgz$/.test(filename)) {
throw new Error(`Build ${name} package first!`);
@@ -86,7 +74,7 @@ async function main() {
const locations = listBackendPackages();
await Promise.all(
locations.map(async (loc) => {
locations.map(async loc => {
try {
const filename = await pack(loc);

View file

@@ -4,19 +4,9 @@ import { run } from '../../packages/libraries/cli/src/index';
const registryAddress = utils.getServiceAddress('server', 3001);
export async function schemaPublish(args: string[]) {
return run([
'schema:publish',
`--registry`,
`http://${registryAddress}/graphql`,
...args,
]);
return run(['schema:publish', `--registry`, `http://${registryAddress}/graphql`, ...args]);
}
export async function schemaCheck(args: string[]) {
return run([
'schema:check',
`--registry`,
`http://${registryAddress}/graphql`,
...args,
]);
return run(['schema:check', `--registry`, `http://${registryAddress}/graphql`, ...args]);
}

View file

@@ -11,7 +11,7 @@ export async function resetClickHouse() {
`operations_new`,
`schema_coordinates_daily`,
`client_names_daily`,
].map((table) => `TRUNCATE TABLE default.${table}`);
].map(table => `TRUNCATE TABLE default.${table}`);
for await (const query of queries) {
await axios.post(endpoint, query, {

View file

@@ -9,15 +9,13 @@ export const resetDb = async (conn: DatabasePoolConnectionType) => {
WHERE "schemaname" = 'public';
`);
const tablenames = result
.map(({ tablename }) => tablename)
.filter((tablename) => !migrationTables.includes(tablename));
const tablenames = result.map(({ tablename }) => tablename).filter(tablename => !migrationTables.includes(tablename));
if (tablenames.length) {
await conn.query(sql`
TRUNCATE TABLE
${sql.join(
tablenames.map((name) => sql.identifier([name])),
tablenames.map(name => sql.identifier([name])),
sql`,`
)}
RESTART IDENTITY

View file

@@ -1,8 +1,5 @@
import { DockestService, execa } from 'dockest';
import {
containerIsHealthyReadinessCheck,
zeroExitCodeReadinessCheck,
} from 'dockest/dist/readiness-check/index.js';
import { containerIsHealthyReadinessCheck, zeroExitCodeReadinessCheck } from 'dockest/dist/readiness-check/index.js';
import { DepGraph } from 'dependency-graph';
import { readFileSync } from 'fs';
import { join } from 'path';
@@ -28,9 +25,7 @@ export function createServices() {
graph.addNode(serviceName, {
serviceName,
dependsOn: [],
readinessCheck: service.healthcheck
? containerIsHealthyReadinessCheck
: zeroExitCodeReadinessCheck,
readinessCheck: service.healthcheck ? containerIsHealthyReadinessCheck : zeroExitCodeReadinessCheck,
});
}
@@ -58,20 +53,16 @@ export function createServices() {
registry[serviceName] = {
...service,
dependsOn: graph
.directDependenciesOf(serviceName)
.map((dep) => graph.getNodeData(dep)),
dependsOn: graph.directDependenciesOf(serviceName).map(dep => graph.getNodeData(dep)),
};
}
// And return a list of services
return allServices.map((serviceName) => graph.getNodeData(serviceName));
return allServices.map(serviceName => graph.getNodeData(serviceName));
}
export function cleanDockerContainers() {
const output = execa(
`docker ps --all --filter "name=integration-tests" --format={{.ID}}:{{.Status}}`
);
const output = execa(`docker ps --all --filter "name=integration-tests" --format={{.ID}}:{{.Status}}`);
if (output.stdout.length) {
const runningContainers = output.stdout.split('\n');

View file

@@ -18,8 +18,7 @@ export function invariant(
// When not in production we allow the message to pass through
// *This block will be removed in production builds*
const provided: string | undefined =
typeof message === 'function' ? message() : message;
const provided: string | undefined = typeof message === 'function' ? message() : message;
// Options:
// 1. message provided: `${prefix}: ${provided}`

View file

@@ -22,13 +22,10 @@ import type {
import { execute } from './graphql';
export function waitFor(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
return new Promise(resolve => setTimeout(resolve, ms));
}
export function createOrganization(
input: CreateOrganizationInput,
authToken: string
) {
export function createOrganization(input: CreateOrganizationInput, authToken: string) {
return execute({
document: gql(/* GraphQL */ `
mutation createOrganization($input: CreateOrganizationInput!) {
@@ -159,15 +156,10 @@ export function createToken(input: CreateTokenInput, authToken: string) {
});
}
export function updateMemberAccess(
input: OrganizationMemberAccessInput,
authToken: string
) {
export function updateMemberAccess(input: OrganizationMemberAccessInput, authToken: string) {
return execute({
document: gql(/* GraphQL */ `
mutation updateOrganizationMemberAccess(
$input: OrganizationMemberAccessInput!
) {
mutation updateOrganizationMemberAccess($input: OrganizationMemberAccessInput!) {
updateOrganizationMemberAccess(input: $input) {
organization {
cleanId
@@ -319,9 +311,7 @@ export function updateTargetValidationSettings(
) {
return execute({
document: gql(/* GraphQL */ `
mutation updateTargetValidationSettings(
$input: UpdateTargetValidationSettingsInput!
) {
mutation updateTargetValidationSettings($input: UpdateTargetValidationSettingsInput!) {
updateTargetValidationSettings(input: $input) {
ok {
updatedTargetValidationSettings {
@@ -360,10 +350,7 @@ export function updateBaseSchema(input: UpdateBaseSchemaInput, token: string) {
});
}
export function readOperationsStats(
input: OperationsStatsSelectorInput,
token: string
) {
export function readOperationsStats(input: OperationsStatsSelectorInput, token: string) {
return execute({
document: gql(/* GraphQL */ `
query readOperationsStats($input: OperationsStatsSelectorInput!) {
@@ -437,11 +424,7 @@ export function fetchLatestValidSchema(token: string) {
});
}
export function fetchVersions(
selector: SchemaVersionsInput,
limit: number,
token: string
) {
export function fetchVersions(selector: SchemaVersionsInput, limit: number, token: string) {
return execute({
document: gql(/* GraphQL */ `
query schemaVersions($limit: Int!, $selector: SchemaVersionsInput!) {
@@ -473,15 +456,10 @@ export function fetchVersions(
});
}
export function publishPersistedOperations(
input: PublishPersistedOperationInput[],
token: string
) {
export function publishPersistedOperations(input: PublishPersistedOperationInput[], token: string) {
return execute({
document: gql(/* GraphQL */ `
mutation publishPersistedOperations(
$input: [PublishPersistedOperationInput!]!
) {
mutation publishPersistedOperations($input: [PublishPersistedOperationInput!]!) {
publishPersistedOperations(input: $input) {
summary {
total
@@ -504,10 +482,7 @@ export function publishPersistedOperations(
});
}
export function updateSchemaVersionStatus(
input: SchemaVersionUpdateInput,
token: string
) {
export function updateSchemaVersionStatus(input: SchemaVersionUpdateInput, token: string) {
return execute({
document: gql(/* GraphQL */ `
mutation updateSchemaVersionStatus($input: SchemaVersionUpdateInput!) {
@@ -568,10 +543,7 @@ export function createCdnAccess(selector: TargetSelectorInput, token: string) {
});
}
export async function fetchSchemaFromCDN(
selector: TargetSelectorInput,
token: string
) {
export async function fetchSchemaFromCDN(selector: TargetSelectorInput, token: string) {
const cdnAccessResult = await createCdnAccess(selector, token);
if (cdnAccessResult.body.errors) {
@@ -594,10 +566,7 @@ export async function fetchSchemaFromCDN(
};
}
export async function fetchMetadataFromCDN(
selector: TargetSelectorInput,
token: string
) {
export async function fetchMetadataFromCDN(selector: TargetSelectorInput, token: string) {
const cdnAccessResult = await createCdnAccess(selector, token);
if (cdnAccessResult.body.errors) {

View file

@@ -1,11 +1,7 @@
/* eslint-disable import/no-extraneous-dependencies */
import Redis from 'ioredis';
export const resetRedis = async (conn: {
host: string;
port: number;
password: string;
}) => {
export const resetRedis = async (conn: { host: string; port: number; password: string }) => {
const redis = new Redis({
host: conn.host,
port: conn.port,

View file

@@ -21,10 +21,7 @@ export interface CollectedOperation {
};
}
export async function collect(params: {
operations: CollectedOperation[];
token: string;
}) {
export async function collect(params: { operations: CollectedOperation[]; token: string }) {
const res = await axios.post(`http://${usageAddress}`, params.operations, {
headers: {
'Content-Type': 'application/json',

View file

@@ -1,13 +1,5 @@
import {
OrganizationAccessScope,
ProjectAccessScope,
TargetAccessScope,
} from '@app/gql/graphql';
import {
createOrganization,
joinOrganization,
updateMemberAccess,
} from '../../../testkit/flow';
import { OrganizationAccessScope, ProjectAccessScope, TargetAccessScope } from '@app/gql/graphql';
import { createOrganization, joinOrganization, updateMemberAccess } from '../../../testkit/flow';
import { authenticate } from '../../../testkit/auth';
test('owner of an organization should have all scopes', async () => {
@ -21,19 +13,17 @@ test('owner of an organization should have all scopes', async () => {
expect(result.body.errors).not.toBeDefined();
const owner =
result.body.data!.createOrganization.ok.createdOrganizationPayload
.organization.owner;
const owner = result.body.data!.createOrganization.ok.createdOrganizationPayload.organization.owner;
Object.values(OrganizationAccessScope).forEach((scope) => {
Object.values(OrganizationAccessScope).forEach(scope => {
expect(owner.organizationAccessScopes).toContain(scope);
});
Object.values(ProjectAccessScope).forEach((scope) => {
Object.values(ProjectAccessScope).forEach(scope => {
expect(owner.projectAccessScopes).toContain(scope);
});
Object.values(TargetAccessScope).forEach((scope) => {
Object.values(TargetAccessScope).forEach(scope => {
expect(owner.targetAccessScopes).toContain(scope);
});
});
@ -49,44 +39,32 @@ test('regular member of an organization should have basic scopes', async () => {
// Join
const { access_token: member_access_token } = await authenticate('extra');
const code =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization.inviteCode;
const code = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization.inviteCode;
const joinResult = await joinOrganization(code, member_access_token);
expect(joinResult.body.errors).not.toBeDefined();
expect(joinResult.body.data?.joinOrganization.__typename).toBe(
'OrganizationPayload'
);
expect(joinResult.body.data?.joinOrganization.__typename).toBe('OrganizationPayload');
if (
joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload'
) {
if (joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload') {
throw new Error('Join failed');
}
const member = joinResult.body.data!.joinOrganization.organization.me;
// Should have only organization:read access
expect(member.organizationAccessScopes).toContainEqual(
OrganizationAccessScope.Read
);
expect(member.organizationAccessScopes).toContainEqual(OrganizationAccessScope.Read);
// Nothing more
expect(member.organizationAccessScopes).toHaveLength(1);
// Should have only project:read and project:operations-store:read access
expect(member.projectAccessScopes).toContainEqual(ProjectAccessScope.Read);
expect(member.projectAccessScopes).toContainEqual(
ProjectAccessScope.OperationsStoreRead
);
expect(member.projectAccessScopes).toContainEqual(ProjectAccessScope.OperationsStoreRead);
// Nothing more
expect(member.projectAccessScopes).toHaveLength(2);
// Should have only target:read and target:registry:read access
expect(member.targetAccessScopes).toContainEqual(TargetAccessScope.Read);
expect(member.targetAccessScopes).toContainEqual(
TargetAccessScope.RegistryRead
);
expect(member.targetAccessScopes).toContainEqual(TargetAccessScope.RegistryRead);
// Nothing more
expect(member.targetAccessScopes).toHaveLength(2);
});
@ -102,18 +80,12 @@ test('cannot grant an access scope to another user if user has no access to that
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
const joinResult = await joinOrganization(code, member_access_token);
if (
joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload'
) {
throw new Error(
`Join failed: ${joinResult.body.data!.joinOrganization.message}`
);
if (joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload') {
throw new Error(`Join failed: ${joinResult.body.data!.joinOrganization.message}`);
}
const member = joinResult.body.data!.joinOrganization.organization.me;

View file

@ -1,10 +1,5 @@
import { ProjectType, ProjectAccessScope } from '@app/gql/graphql';
import {
createOrganization,
publishPersistedOperations,
createProject,
createToken,
} from '../../../testkit/flow';
import { createOrganization, publishPersistedOperations, createProject, createToken } from '../../../testkit/flow';
import { authenticate } from '../../../testkit/auth';
test('can publish persisted operations only with project:operations-store:write', async () => {
@ -15,9 +10,7 @@ test('can publish persisted operations only with project:operations-store:write'
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -69,10 +62,7 @@ test('can publish persisted operations only with project:operations-store:write'
project: project.cleanId,
target: target.cleanId,
organizationScopes: [],
projectScopes: [
ProjectAccessScope.OperationsStoreRead,
ProjectAccessScope.OperationsStoreWrite,
],
projectScopes: [ProjectAccessScope.OperationsStoreRead, ProjectAccessScope.OperationsStoreWrite],
targetScopes: [],
},
owner_access_token
@ -96,16 +86,12 @@ test('can publish persisted operations only with project:operations-store:write'
// Cannot persist operations with no read and write rights
let result = await publishPersistedOperations(operations, noAccessToken);
expect(result.body.errors).toHaveLength(1);
expect(result.body.errors![0].message).toMatch(
'project:operations-store:write'
);
expect(result.body.errors![0].message).toMatch('project:operations-store:write');
// Cannot persist operations with read rights
result = await publishPersistedOperations(operations, readToken);
expect(result.body.errors).toHaveLength(1);
expect(result.body.errors![0].message).toMatch(
'project:operations-store:write'
);
expect(result.body.errors![0].message).toMatch('project:operations-store:write');
// Persist operations with write rights
result = await publishPersistedOperations(operations, writeToken);
@ -117,9 +103,7 @@ test('can publish persisted operations only with project:operations-store:write'
expect(persisted.summary.total).toEqual(2);
expect(persisted.summary.unchanged).toEqual(0);
expect(persisted.operations).toHaveLength(2);
expect(persisted.operations[0].operationHash).toEqual(
operations[0].operationHash
);
expect(persisted.operations[0].operationHash).toEqual(operations[0].operationHash);
expect(persisted.operations[1].operationHash).toBeDefined();
});
@ -131,9 +115,7 @@ test('should skip on already persisted operations', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -155,10 +137,7 @@ test('should skip on already persisted operations', async () => {
project: project.cleanId,
target: target.cleanId,
organizationScopes: [],
projectScopes: [
ProjectAccessScope.OperationsStoreRead,
ProjectAccessScope.OperationsStoreWrite,
],
projectScopes: [ProjectAccessScope.OperationsStoreRead, ProjectAccessScope.OperationsStoreWrite],
targetScopes: [],
},
owner_access_token
@ -187,9 +166,7 @@ test('should skip on already persisted operations', async () => {
expect(persisted.summary.total).toEqual(2);
expect(persisted.summary.unchanged).toEqual(0);
expect(persisted.operations).toHaveLength(2);
expect(persisted.operations[0].operationHash).toEqual(
operations[0].operationHash
);
expect(persisted.operations[0].operationHash).toEqual(operations[0].operationHash);
expect(persisted.operations[1].operationHash).toBeDefined();
// Persist operations with read rights
@ -204,12 +181,8 @@ test('should skip on already persisted operations', async () => {
expect(persisted.summary.unchanged).toEqual(1);
expect(persisted.operations).toHaveLength(2);
const meOperation = persisted.operations.find(
(op) => op.operationHash === operations[0].operationHash
);
const userOperation = persisted.operations.find(
(op) => op.operationHash === operations[1].operationHash
);
const meOperation = persisted.operations.find(op => op.operationHash === operations[0].operationHash);
const userOperation = persisted.operations.find(op => op.operationHash === operations[1].operationHash);
expect(meOperation?.operationHash).toEqual(operations[0].operationHash);
expect(userOperation?.operationHash).toEqual(operations[1].operationHash);

View file

@ -17,9 +17,7 @@ test('can check a schema with target:registry:read access', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
// Join
@ -47,10 +45,7 @@ test('can check a schema with target:registry:read access', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -69,9 +64,7 @@ test('can check a schema with target:registry:read access', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// Create a token with no rights
const noAccessTokenResult = await createToken(
@ -124,9 +117,7 @@ test('can check a schema with target:registry:read access', async () => {
readToken
);
expect(checkResult.body.errors).not.toBeDefined();
expect(checkResult.body.data!.schemaCheck.__typename).toBe(
'SchemaCheckSuccess'
);
expect(checkResult.body.data!.schemaCheck.__typename).toBe('SchemaCheckSuccess');
});
test('should match indentation of previous description', async () => {
@ -137,9 +128,7 @@ test('should match indentation of previous description', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
// Join
@ -167,10 +156,7 @@ test('should match indentation of previous description', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -196,9 +182,7 @@ test('should match indentation of previous description', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// Create a token with read rights
const readTokenResult = await createToken(

View file

@ -27,9 +27,7 @@ test('cannot publish a schema without target:registry:write access', async () =>
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
await joinOrganization(code, member_access_token);
@ -84,9 +82,7 @@ test('can publish a schema with target:registry:write access', async () => {
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
await joinOrganization(code, member_access_token);
@ -110,10 +106,7 @@ test('can publish a schema with target:registry:write access', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -132,9 +125,7 @@ test('can publish a schema with target:registry:write access', async () => {
);
expect(result.body.errors).not.toBeDefined();
expect(result.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(result.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
result = await publishSchema(
{
@ -146,9 +137,7 @@ test('can publish a schema with target:registry:write access', async () => {
);
expect(result.body.errors).not.toBeDefined();
expect(result.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(result.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const versionsResult = await fetchVersions(
{
@ -172,9 +161,7 @@ test('base schema should not affect the output schema persisted in db', async ()
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -197,10 +184,7 @@ test('base schema should not affect the output schema persisted in db', async ()
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -219,9 +203,7 @@ test('base schema should not affect the output schema persisted in db', async ()
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const updateBaseResult = await updateBaseSchema(
{
@ -246,9 +228,7 @@ test('base schema should not affect the output schema persisted in db', async ()
writeToken
);
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const versionsResult = await fetchVersions(
{
@ -266,18 +246,12 @@ test('base schema should not affect the output schema persisted in db', async ()
const latestResult = await fetchLatestSchema(writeToken);
expect(latestResult.body.errors).not.toBeDefined();
expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe(
'abc234'
);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe('abc234');
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch(
'type Query { ping: String @auth pong: String }'
);
expect(
latestResult.body.data!.latestVersion.schemas.nodes[0].source
).not.toMatch('directive');
expect(latestResult.body.data!.latestVersion.baseSchema).toMatch(
'directive @auth on OBJECT | FIELD_DEFINITION'
);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).not.toMatch('directive');
expect(latestResult.body.data!.latestVersion.baseSchema).toMatch('directive @auth on OBJECT | FIELD_DEFINITION');
});
test('directives should not be removed (federation)', async () => {
@ -288,9 +262,7 @@ test('directives should not be removed (federation)', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -313,10 +285,7 @@ test('directives should not be removed (federation)', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -335,9 +304,7 @@ test('directives should not be removed (federation)', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const versionsResult = await fetchVersions(
{
@ -355,9 +322,7 @@ test('directives should not be removed (federation)', async () => {
const latestResult = await fetchLatestSchema(writeToken);
expect(latestResult.body.errors).not.toBeDefined();
expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe(
'abc123'
);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe('abc123');
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch(
`type Query { me: User } type User @key(fields: "id") { id: ID! name: String }`
);
@ -371,9 +336,7 @@ test('directives should not be removed (stitching)', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -396,10 +359,7 @@ test('directives should not be removed (stitching)', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -418,9 +378,7 @@ test('directives should not be removed (stitching)', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const versionsResult = await fetchVersions(
{
@ -438,9 +396,7 @@ test('directives should not be removed (stitching)', async () => {
const latestResult = await fetchLatestSchema(writeToken);
expect(latestResult.body.errors).not.toBeDefined();
expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe(
'abc123'
);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe('abc123');
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch(
`type Query { me: User } type User @key(selectionSet: "{ id }") { id: ID! name: String }`
);
@ -454,9 +410,7 @@ test('directives should not be removed (single)', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -479,10 +433,7 @@ test('directives should not be removed (single)', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -501,9 +452,7 @@ test('directives should not be removed (single)', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const versionsResult = await fetchVersions(
{
@ -521,9 +470,7 @@ test('directives should not be removed (single)', async () => {
const latestResult = await fetchLatestSchema(writeToken);
expect(latestResult.body.errors).not.toBeDefined();
expect(latestResult.body.data!.latestVersion.schemas.total).toBe(1);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe(
'abc123'
);
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].commit).toBe('abc123');
expect(latestResult.body.data!.latestVersion.schemas.nodes[0].source).toMatch(
`directive @auth on FIELD_DEFINITION type Query { me: User @auth } type User { id: ID! name: String }`
);
@ -537,9 +484,7 @@ test('share publication of schema using redis', async () => {
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -562,10 +507,7 @@ test('share publication of schema using redis', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -584,9 +526,7 @@ test('share publication of schema using redis', async () => {
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const [publishResult1, publishResult2] = await Promise.all([
publishSchema(
@ -608,12 +548,8 @@ test('share publication of schema using redis', async () => {
]);
expect(publishResult1.body.errors).not.toBeDefined();
expect(publishResult2.body.errors).not.toBeDefined();
expect(publishResult1.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult2.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult1.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
expect(publishResult2.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
});
test("Two targets with the same commit id shouldn't return an error", async () => {
@ -624,9 +560,7 @@ test("Two targets with the same commit id shouldn't return an error", async () =
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
organization: org.cleanId,
@ -645,10 +579,7 @@ test("Two targets with the same commit id shouldn't return an error", async () =
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -679,10 +610,7 @@ test("Two targets with the same commit id shouldn't return an error", async () =
target: target2.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -697,13 +625,9 @@ test("Two targets with the same commit id shouldn't return an error", async () =
);
// Schema publish should be successful
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
expect(publishResult2.body.errors).not.toBeDefined();
expect(publishResult2.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult2.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
});
test('marking versions as valid', async () => {
@ -717,9 +641,7 @@ test('marking versions as valid', async () => {
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
await joinOrganization(code, member_access_token);
@ -743,10 +665,7 @@ test('marking versions as valid', async () => {
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -766,9 +685,7 @@ test('marking versions as valid', async () => {
);
expect(result.body.errors).not.toBeDefined();
expect(result.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(result.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// Second version with a forced breaking change
result = await publishSchema(
@ -814,18 +731,11 @@ test('marking versions as valid', async () => {
// the initial version should be the latest valid version
let latestValidSchemaResult = await fetchLatestValidSchema(token);
expect(latestValidSchemaResult.body.errors).not.toBeDefined();
expect(
latestValidSchemaResult.body.data!.latestValidVersion.schemas.total
).toEqual(1);
expect(
latestValidSchemaResult.body.data!.latestValidVersion.schemas.nodes[0]
.commit
).toEqual('c0');
expect(latestValidSchemaResult.body.data!.latestValidVersion.schemas.total).toEqual(1);
expect(latestValidSchemaResult.body.data!.latestValidVersion.schemas.nodes[0].commit).toEqual('c0');
const versionId = (commit: string) =>
versionsResult.body.data!.schemaVersions.nodes.find(
(node) => node.commit.commit === commit
)!.id;
versionsResult.body.data!.schemaVersions.nodes.find(node => node.commit.commit === commit)!.id;
// marking the third version as valid should promote it to be the latest valid version
let versionStatusUpdateResult = await updateSchemaVersionStatus(
@ -840,15 +750,11 @@ test('marking versions as valid', async () => {
);
expect(versionStatusUpdateResult.body.errors).not.toBeDefined();
expect(
versionStatusUpdateResult.body.data!.updateSchemaVersionStatus.id
).toEqual(versionId('c2'));
expect(versionStatusUpdateResult.body.data!.updateSchemaVersionStatus.id).toEqual(versionId('c2'));
latestValidSchemaResult = await fetchLatestValidSchema(token);
expect(latestValidSchemaResult.body.errors).not.toBeDefined();
expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual(
versionId('c2')
);
expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual(versionId('c2'));
// marking the second (not the most recent) version as valid should NOT promote it to be the latest valid version
versionStatusUpdateResult = await updateSchemaVersionStatus(
@ -865,9 +771,7 @@ test('marking versions as valid', async () => {
latestValidSchemaResult = await fetchLatestValidSchema(token);
expect(latestValidSchemaResult.body.errors).not.toBeDefined();
expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual(
versionId('c2')
);
expect(latestValidSchemaResult.body.data!.latestValidVersion.id).toEqual(versionId('c2'));
});
test('marking only the most recent version as valid result in an update of CDN', async () => {
@ -881,9 +785,7 @@ test('marking only the most recent version as valid result in an update of CDN',
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
await joinOrganization(code, member_access_token);
@ -907,10 +809,7 @@ test('marking only the most recent version as valid result in an update of CDN',
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -931,9 +830,7 @@ test('marking only the most recent version as valid result in an update of CDN',
);
expect(result.body.errors).not.toBeDefined();
expect(result.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(result.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// Second version with a forced breaking change
result = await publishSchema(
@ -980,9 +877,7 @@ test('marking only the most recent version as valid result in an update of CDN',
const versionsResult = await fetchVersions(targetSelector, 3, token);
const versionId = (commit: string) =>
versionsResult.body.data!.schemaVersions.nodes.find(
(node) => node.commit.commit === commit
)!.id;
versionsResult.body.data!.schemaVersions.nodes.find(node => node.commit.commit === commit)!.id;
// marking the third version as valid should promote it to be the latest valid version and publish it to CDN
await updateSchemaVersionStatus(

View file

@ -21,9 +21,7 @@ test('marking only the most recent version as valid result in an update of CDN',
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
await joinOrganization(code, member_access_token);
@ -47,10 +45,7 @@ test('marking only the most recent version as valid result in an update of CDN',
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -70,9 +65,7 @@ test('marking only the most recent version as valid result in an update of CDN',
);
expect(publishResult.body.errors).not.toBeDefined();
expect(publishResult.body.data!.schemaPublish.__typename).toBe(
'SchemaPublishSuccess'
);
expect(publishResult.body.data!.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const targetSelector = {
organization: org.cleanId,
@ -95,9 +88,7 @@ test('marking only the most recent version as valid result in an update of CDN',
);
expect(syncResult.body.errors).not.toBeDefined();
expect(syncResult.body.data!.schemaSyncCDN.__typename).toBe(
'SchemaSyncCDNSuccess'
);
expect(syncResult.body.data!.schemaSyncCDN.__typename).toBe('SchemaSyncCDNSuccess');
// the initial version should available on CDN
cdnResult = await fetchSchemaFromCDN(targetSelector, token);

View file

@ -19,9 +19,7 @@ test('cannot set a scope on a token if user has no access to that scope', async
// Join
const { access_token: member_access_token } = await authenticate('extra');
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
const joinResult = await joinOrganization(code, member_access_token);
@ -34,12 +32,8 @@ test('cannot set a scope on a token if user has no access to that scope', async
owner_access_token
);
if (
joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload'
) {
throw new Error(
`Join failed: ${joinResult.body.data!.joinOrganization.message}`
);
if (joinResult.body.data!.joinOrganization.__typename !== 'OrganizationPayload') {
throw new Error(`Join failed: ${joinResult.body.data!.joinOrganization.message}`);
}
const member = joinResult.body.data!.joinOrganization.organization.me;

View file

@ -1,9 +1,4 @@
import {
TargetAccessScope,
ProjectType,
ProjectAccessScope,
OrganizationAccessScope,
} from '@app/gql/graphql';
import { TargetAccessScope, ProjectType, ProjectAccessScope, OrganizationAccessScope } from '@app/gql/graphql';
import formatISO from 'date-fns/formatISO';
import subHours from 'date-fns/subHours';
import {
@ -26,11 +21,7 @@ import { normalizeOperation } from '@graphql-hive/core';
// eslint-disable-next-line import/no-extraneous-dependencies
import { parse, print } from 'graphql';
function sendBatch(
amount: number,
operation: CollectedOperation,
token: string
) {
function sendBatch(amount: number, operation: CollectedOperation, token: string) {
return Promise.all(
new Array(amount).fill(null).map(() =>
collect({
@ -50,9 +41,7 @@ test('collect operation', async () => {
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -87,11 +76,7 @@ test('collect operation', async () => {
target: target.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -112,9 +97,7 @@ test('collect operation', async () => {
);
expect(schemaPublishResult.body.errors).not.toBeDefined();
expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual(
true
);
expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual(true);
const targetValidationResult = await setTargetValidation(
{
@ -129,15 +112,9 @@ test('collect operation', async () => {
);
expect(targetValidationResult.body.errors).not.toBeDefined();
expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual(
true
);
expect(
targetValidationResult.body.data!.setTargetValidation.percentage
).toEqual(0);
expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual(
30
);
expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual(true);
expect(targetValidationResult.body.data!.setTargetValidation.percentage).toEqual(0);
expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual(30);
// should not be breaking because the field is unused
const unusedCheckResult = await checkSchema(
@ -147,9 +124,7 @@ test('collect operation', async () => {
token
);
expect(unusedCheckResult.body.errors).not.toBeDefined();
expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual(
'SchemaCheckSuccess'
);
expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual('SchemaCheckSuccess');
const collectResult = await collect({
operations: [
@ -179,14 +154,8 @@ test('collect operation', async () => {
token
);
if (
usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckError'
) {
throw new Error(
`Expected SchemaCheckError, got ${
usedCheckResult.body.data!.schemaCheck.__typename
}`
);
if (usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckError') {
throw new Error(`Expected SchemaCheckError, got ${usedCheckResult.body.data!.schemaCheck.__typename}`);
}
expect(usedCheckResult.body.data!.schemaCheck.valid).toEqual(false);
@ -235,9 +204,7 @@ test('normalize and collect operation without breaking its syntax', async () =>
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -272,11 +239,7 @@ test('normalize and collect operation without breaking its syntax', async () =>
target: target.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -418,9 +381,7 @@ test('number of produced and collected operations should match', async () => {
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -442,11 +403,7 @@ test('number of produced and collected operations should match', async () => {
target: target.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -522,9 +479,7 @@ test('check usage from two selected targets', async () => {
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -547,8 +502,7 @@ test('check usage from two selected targets', async () => {
owner_access_token
);
const production =
productionTargetResult.body.data!.createTarget.ok.createdTarget;
const production = productionTargetResult.body.data!.createTarget.ok.createdTarget;
const stagingTokenResult = await createToken(
{
@ -558,11 +512,7 @@ test('check usage from two selected targets', async () => {
target: staging.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -575,11 +525,7 @@ test('check usage from two selected targets', async () => {
target: production.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -588,8 +534,7 @@ test('check usage from two selected targets', async () => {
expect(productionTokenResult.body.errors).not.toBeDefined();
const tokenForStaging = stagingTokenResult.body.data!.createToken.ok.secret;
const tokenForProduction =
productionTokenResult.body.data!.createToken.ok.secret;
const tokenForProduction = productionTokenResult.body.data!.createToken.ok.secret;
const schemaPublishResult = await publishSchema(
{
@ -601,9 +546,7 @@ test('check usage from two selected targets', async () => {
);
expect(schemaPublishResult.body.errors).not.toBeDefined();
expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual(
true
);
expect((schemaPublishResult.body.data!.schemaPublish as any).valid).toEqual(true);
const targetValidationResult = await setTargetValidation(
{
@ -618,15 +561,9 @@ test('check usage from two selected targets', async () => {
);
expect(targetValidationResult.body.errors).not.toBeDefined();
expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual(
true
);
expect(
targetValidationResult.body.data!.setTargetValidation.percentage
).toEqual(0);
expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual(
30
);
expect(targetValidationResult.body.data!.setTargetValidation.enabled).toEqual(true);
expect(targetValidationResult.body.data!.setTargetValidation.percentage).toEqual(0);
expect(targetValidationResult.body.data!.setTargetValidation.period).toEqual(30);
const collectResult = await collect({
operations: [
@ -680,9 +617,7 @@ test('check usage from two selected targets', async () => {
tokenForStaging
);
expect(unusedCheckResult.body.errors).not.toBeDefined();
expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual(
'SchemaCheckSuccess'
);
expect(unusedCheckResult.body.data!.schemaCheck.__typename).toEqual('SchemaCheckSuccess');
// Now switch to using checking both staging and production
@ -702,16 +637,13 @@ test('check usage from two selected targets', async () => {
expect(updateValidationResult.body.errors).not.toBeDefined();
expect(
updateValidationResult.body.data!.updateTargetValidationSettings.ok
.updatedTargetValidationSettings.percentage
updateValidationResult.body.data!.updateTargetValidationSettings.ok.updatedTargetValidationSettings.percentage
).toEqual(50);
expect(
updateValidationResult.body.data!.updateTargetValidationSettings.ok
.updatedTargetValidationSettings.period
updateValidationResult.body.data!.updateTargetValidationSettings.ok.updatedTargetValidationSettings.period
).toEqual(30);
expect(
updateValidationResult.body.data!.updateTargetValidationSettings.ok
.updatedTargetValidationSettings.targets
updateValidationResult.body.data!.updateTargetValidationSettings.ok.updatedTargetValidationSettings.targets
).toHaveLength(2);
// should be non-breaking because the field is used in production and we are checking staging and production now
@ -723,14 +655,8 @@ test('check usage from two selected targets', async () => {
tokenForStaging
);
if (
usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckSuccess'
) {
throw new Error(
`Expected SchemaCheckSuccess, got ${
usedCheckResult.body.data!.schemaCheck.__typename
}`
);
if (usedCheckResult.body.data!.schemaCheck.__typename !== 'SchemaCheckSuccess') {
throw new Error(`Expected SchemaCheckSuccess, got ${usedCheckResult.body.data!.schemaCheck.__typename}`);
}
expect(usedCheckResult.body.data!.schemaCheck.valid).toEqual(true);
@ -746,9 +672,7 @@ test('number of produced and collected operations should match', async () => {
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const projectResult = await createProject(
{
@ -770,11 +694,7 @@ test('number of produced and collected operations should match', async () => {
target: target.cleanId,
organizationScopes: [OrganizationAccessScope.Read],
projectScopes: [ProjectAccessScope.Read],
targetScopes: [
TargetAccessScope.Read,
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.Read, TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -785,9 +705,7 @@ test('number of produced and collected operations should match', async () => {
const batchSize = 10;
const totalAmount = 10_000;
for await (const i of new Array(totalAmount / batchSize)
.fill(null)
.map((_, i) => i)) {
for await (const i of new Array(totalAmount / batchSize).fill(null).map((_, i) => i)) {
await sendBatch(
batchSize,
i % 2 === 0

View file

@ -1,12 +1,7 @@
import { TargetAccessScope, ProjectType } from '@app/gql/graphql';
import { schemaPublish, schemaCheck } from '../../testkit/cli';
import { authenticate } from '../../testkit/auth';
import {
createOrganization,
joinOrganization,
createProject,
createToken,
} from '../../testkit/flow';
import { createOrganization, joinOrganization, createProject, createToken } from '../../testkit/flow';
test('can publish and check a schema with target:registry:read access', async () => {
const { access_token: owner_access_token } = await authenticate('main');
@ -16,9 +11,7 @@ test('can publish and check a schema with target:registry:read access', async ()
},
owner_access_token
);
const org =
orgResult.body.data!.createOrganization.ok.createdOrganizationPayload
.organization;
const org = orgResult.body.data!.createOrganization.ok.createdOrganizationPayload.organization;
const code = org.inviteCode;
// Join
@ -46,10 +39,7 @@ test('can publish and check a schema with target:registry:read access', async ()
target: target.cleanId,
organizationScopes: [],
projectScopes: [],
targetScopes: [
TargetAccessScope.RegistryRead,
TargetAccessScope.RegistryWrite,
],
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
},
owner_access_token
);
@ -66,13 +56,9 @@ test('can publish and check a schema with target:registry:read access', async ()
'fixtures/init-schema.graphql',
]);
await schemaCheck([
'--token',
writeToken,
'fixtures/nonbreaking-schema.graphql',
]);
await schemaCheck(['--token', writeToken, 'fixtures/nonbreaking-schema.graphql']);
await expect(
schemaCheck(['--token', writeToken, 'fixtures/breaking-schema.graphql'])
).rejects.toThrowError('EXIT: 1');
await expect(schemaCheck(['--token', writeToken, 'fixtures/breaking-schema.graphql'])).rejects.toThrowError(
'EXIT: 1'
);
});

View file

@ -20,7 +20,8 @@
"release": "changeset publish",
"test": "jest",
"lint": "eslint --ignore-path .gitignore \"packages/**/*.{ts,tsx}\"",
"format": "prettier --write .",
"prettier": "prettier --write --list-different .",
"prettier:check": "prettier --check .",
"setup": "yarn workspace @hive/storage run setup",
"generate": "yarn workspace @hive/storage run db:generate && yarn graphql:generate",
"graphql:generate": "graphql-codegen",

View file

@ -7,4 +7,4 @@
/tmp
node_modules
src/sdk.ts
schema.graphql
schema.graphql

View file

@ -40,12 +40,8 @@ export default abstract class extends Command {
const findDoubleQuotes = /"([^"]+)"/gim;
return msg
.replace(findSingleQuotes, (_: string, value: string) =>
colors.bold(value)
)
.replace(findDoubleQuotes, (_: string, value: string) =>
colors.bold(value)
);
.replace(findSingleQuotes, (_: string, value: string) => colors.bold(value))
.replace(findDoubleQuotes, (_: string, value: string) => colors.bold(value));
}
/**
@ -144,11 +140,7 @@ export default abstract class extends Command {
}
>(flags: TFlags) {
if (flags.require && flags.require.length > 0) {
await Promise.all(
flags.require.map(
(mod) => import(require.resolve(mod, { paths: [process.cwd()] }))
)
);
await Promise.all(flags.require.map(mod => import(require.resolve(mod, { paths: [process.cwd()] }))));
}
}
}

View file

@ -13,8 +13,6 @@ export default class DeleteConfig extends Command {
async run() {
const { args } = await this.parse(DeleteConfig);
this._userConfig.set(args.key, args.value);
this.success(
this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`)
);
this.success(this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`));
}
}

View file

@ -18,8 +18,6 @@ export default class SetConfig extends Command {
async run() {
const { args } = await this.parse(SetConfig);
this._userConfig.set(args.key, args.value);
this.success(
this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`)
);
this.success(this.bolderize(`Config flag "${args.key}" was set to "${args.value}"!`));
}
}

View file

@ -14,8 +14,7 @@ export default class OperationsCheck extends Command {
description: 'api token',
}),
require: Flags.string({
description:
'Loads specific require.extensions before running the command',
description: 'Loads specific require.extensions before running the command',
default: [],
multiple: true,
}),
@ -59,10 +58,7 @@ export default class OperationsCheck extends Command {
return;
}
const result = await this.registryApi(
registry,
token
).fetchLatestVersion();
const result = await this.registryApi(registry, token).fetchLatestVersion();
const sdl = result.latestVersion.sdl;
@ -77,7 +73,7 @@ export default class OperationsCheck extends Command {
const invalidOperations = validate(
schema,
operations.map((s) => new Source(s.content, s.location))
operations.map(s => new Source(s.content, s.location))
);
if (invalidOperations.length === 0) {
@ -88,29 +84,19 @@ export default class OperationsCheck extends Command {
this.fail('Some operations are invalid');
this.log(
[
'',
`Total: ${operations.length}`,
`Invalid: ${invalidOperations.length}`,
'',
].join('\n')
);
this.log(['', `Total: ${operations.length}`, `Invalid: ${invalidOperations.length}`, ''].join('\n'));
this.printInvalidDocuments(invalidOperations, 'errors');
} catch (error) {
if (error instanceof Errors.ExitError) {
throw error;
} else {
const parsedError: Error & { response?: any } =
error instanceof Error ? error : new Error(error as string);
const parsedError: Error & { response?: any } = error instanceof Error ? error : new Error(error as string);
this.fail('Failed to validate operations');
if ('response' in parsedError) {
this.error(parsedError.response.errors[0].message, {
ref: this.cleanRequestId(
parsedError.response?.headers?.get('x-request-id')
),
ref: this.cleanRequestId(parsedError.response?.headers?.get('x-request-id')),
});
} else {
this.error(parsedError);
@ -119,13 +105,10 @@ export default class OperationsCheck extends Command {
}
}
private printInvalidDocuments(
invalidDocuments: InvalidDocument[],
listKey: 'errors' | 'deprecated'
): void {
invalidDocuments.forEach((doc) => {
private printInvalidDocuments(invalidDocuments: InvalidDocument[], listKey: 'errors' | 'deprecated'): void {
invalidDocuments.forEach(doc => {
if (doc.errors.length) {
this.renderErrors(doc.source.name, doc[listKey]).forEach((line) => {
this.renderErrors(doc.source.name, doc[listKey]).forEach(line => {
this.log(line);
});
}
@ -133,9 +116,7 @@ export default class OperationsCheck extends Command {
}
private renderErrors(sourceName: string, errors: GraphQLError[]): string[] {
const errorsAsString = errors
.map((e) => ` - ${this.bolderize(e.message)}`)
.join('\n');
const errorsAsString = errors.map(e => ` - ${this.bolderize(e.message)}`).join('\n');
return [`ERROR in ${sourceName}:\n`, errorsAsString, '\n\n'];
}

View file

@ -1,6 +1,4 @@
mutation publishPersistedOperations(
$input: [PublishPersistedOperationInput!]!
) {
mutation publishPersistedOperations($input: [PublishPersistedOperationInput!]!) {
publishPersistedOperations(input: $input) {
summary {
total

View file

@ -12,8 +12,7 @@ export default class OperationsPublish extends Command {
description: 'api token',
}),
require: Flags.string({
description:
'Loads specific require.extensions before running the codegen and reading the configuration',
description: 'Loads specific require.extensions before running the codegen and reading the configuration',
default: [],
multiple: true,
}),
@ -51,41 +50,29 @@ export default class OperationsPublish extends Command {
normalize: true,
});
const collectedOperationsTotal = operations.length;
const noMissingHashes = operations.some((op) => !!op.operationHash);
const noMissingHashes = operations.some(op => !!op.operationHash);
if (noMissingHashes) {
const comparisonResult = await this.registryApi(
registry,
token
).comparePersistedOperations({
hashes: operations.map((op) => op.operationHash!),
const comparisonResult = await this.registryApi(registry, token).comparePersistedOperations({
hashes: operations.map(op => op.operationHash!),
});
const operationsToPublish = comparisonResult.comparePersistedOperations;
operations = operations.filter((op) =>
operationsToPublish.includes(op.operationHash!)
);
operations = operations.filter(op => operationsToPublish.includes(op.operationHash!));
}
const unchangedTotal = collectedOperationsTotal - operations.length;
if (!operations.length) {
return this.success(
[
`Nothing to publish`,
'',
` Total: ${collectedOperationsTotal}`,
` Unchanged: ${unchangedTotal}`,
'',
].join('\n')
[`Nothing to publish`, '', ` Total: ${collectedOperationsTotal}`, ` Unchanged: ${unchangedTotal}`, ''].join(
'\n'
)
);
}
const result = await this.registryApi(
registry,
token
).publishPersistedOperations({
const result = await this.registryApi(registry, token).publishPersistedOperations({
input: operations,
});
@ -107,15 +94,12 @@ export default class OperationsPublish extends Command {
if (error instanceof Errors.ExitError) {
throw error;
} else {
const parsedError: Error & { response?: any } =
error instanceof Error ? error : new Error(error as string);
const parsedError: Error & { response?: any } = error instanceof Error ? error : new Error(error as string);
this.fail('Failed to publish operations');
if ('response' in parsedError) {
this.error(parsedError.response.errors[0].message, {
ref: this.cleanRequestId(
parsedError.response?.headers?.get('x-request-id')
),
ref: this.cleanRequestId(parsedError.response?.headers?.get('x-request-id')),
});
} else {
this.error(parsedError);

View file

@ -1,10 +1,5 @@
import { Flags, Errors } from '@oclif/core';
import {
loadSchema,
renderChanges,
renderErrors,
minifySchema,
} from '../../helpers/schema';
import { loadSchema, renderChanges, renderErrors, minifySchema } from '../../helpers/schema';
import { invariant } from '../../helpers/validation';
import { gitInfo } from '../../helpers/git';
import Command from '../../base-command';
@ -29,8 +24,7 @@ export default class SchemaCheck extends Command {
default: false,
}),
require: Flags.string({
description:
'Loads specific require.extensions before running the codegen and reading the configuration',
description: 'Loads specific require.extensions before running the codegen and reading the configuration',
default: [],
multiple: true,
}),
@ -72,16 +66,10 @@ export default class SchemaCheck extends Command {
});
const commit = git.commit;
invariant(
typeof sdl === 'string' && sdl.length > 0,
'Schema seems empty'
);
invariant(typeof sdl === 'string' && sdl.length > 0, 'Schema seems empty');
if (usesGitHubApp) {
invariant(
typeof commit === 'string',
`Couldn't resolve commit sha required for GitHub Application`
);
invariant(typeof commit === 'string', `Couldn't resolve commit sha required for GitHub Application`);
}
const result = await this.registryApi(registry, token).schemaCheck({
@ -130,15 +118,12 @@ export default class SchemaCheck extends Command {
if (error instanceof Errors.ExitError) {
throw error;
} else {
const parsedError: Error & { response?: any } =
error instanceof Error ? error : new Error(error as string);
const parsedError: Error & { response?: any } = error instanceof Error ? error : new Error(error as string);
this.fail('Failed to check schema');
if ('response' in parsedError) {
this.error(parsedError.response.errors[0].message, {
ref: this.cleanRequestId(
parsedError.response?.headers?.get('x-request-id')
),
ref: this.cleanRequestId(parsedError.response?.headers?.get('x-request-id')),
});
} else {
this.error(parsedError);

View file

@ -4,12 +4,7 @@ import { print } from 'graphql';
import Command from '../../base-command';
import { gitInfo } from '../../helpers/git';
import { invariant } from '../../helpers/validation';
import {
loadSchema,
minifySchema,
renderChanges,
renderErrors,
} from '../../helpers/schema';
import { loadSchema, minifySchema, renderChanges, renderErrors } from '../../helpers/schema';
import { existsSync, readFileSync } from 'fs';
export default class SchemaPublish extends Command {
@ -46,8 +41,7 @@ export default class SchemaPublish extends Command {
default: false,
}),
require: Flags.string({
description:
'Loads specific require.extensions before running the codegen and reading the configuration',
description: 'Loads specific require.extensions before running the codegen and reading the configuration',
default: [],
multiple: true,
}),
@ -124,9 +118,7 @@ export default class SchemaPublish extends Command {
if (!commit || !author) {
const git = await gitInfo(() => {
this.warn(
`No git information found. Couldn't resolve author and commit.`
);
this.warn(`No git information found. Couldn't resolve author and commit.`);
});
if (!commit) {
@ -148,10 +140,7 @@ export default class SchemaPublish extends Command {
const sdl = await loadSchema(file);
invariant(
typeof sdl === 'string' && sdl.length > 0,
'Schema seems empty'
);
invariant(typeof sdl === 'string' && sdl.length > 0, 'Schema seems empty');
const transformedSDL = print(transformCommentsToDescriptions(sdl));
const minifiedSDL = minifySchema(transformedSDL);
@ -183,12 +172,8 @@ export default class SchemaPublish extends Command {
renderChanges.call(this, changes);
this.success('Schema published');
}
} else if (
result.schemaPublish.__typename === 'SchemaPublishMissingServiceError'
) {
this.fail(
`${result.schemaPublish.missingServiceError} Please use the '--service <name>' parameter.`
);
} else if (result.schemaPublish.__typename === 'SchemaPublishMissingServiceError') {
this.fail(`${result.schemaPublish.missingServiceError} Please use the '--service <name>' parameter.`);
this.exit(1);
} else if (result.schemaPublish.__typename === 'SchemaPublishError') {
const changes = result.schemaPublish.changes;
@ -207,9 +192,7 @@ export default class SchemaPublish extends Command {
} else {
this.success('Schema published (forced)');
}
} else if (
result.schemaPublish.__typename === 'GitHubSchemaPublishSuccess'
) {
} else if (result.schemaPublish.__typename === 'GitHubSchemaPublishSuccess') {
this.success(result.schemaPublish.message);
} else {
this.error(result.schemaPublish.message);
@ -218,15 +201,12 @@ export default class SchemaPublish extends Command {
if (error instanceof Errors.ExitError) {
throw error;
} else {
const parsedError: Error & { response?: any } =
error instanceof Error ? error : new Error(error as string);
const parsedError: Error & { response?: any } = error instanceof Error ? error : new Error(error as string);
this.fail('Failed to publish schema');
if ('response' in parsedError) {
this.error(parsedError.response.errors[0].message, {
ref: this.cleanRequestId(
parsedError.response?.headers?.get('x-request-id')
),
ref: this.cleanRequestId(parsedError.response?.headers?.get('x-request-id')),
});
} else {
this.error(parsedError);

View file

@ -33,9 +33,7 @@ export default class WhoAmI extends Command {
.catch((error: Error & { response?: any }) => {
if ('response' in error) {
this.error(error.response.errors[0].message, {
ref: this.cleanRequestId(
error.response?.headers?.get('x-request-id')
),
ref: this.cleanRequestId(error.response?.headers?.get('x-request-id')),
});
} else {
this.error(error);
@ -58,43 +56,30 @@ export default class WhoAmI extends Command {
const print = createPrinter({
'Token name:': [colors.bold(tokenInfo.token.name)],
' ': [''],
'Organization:': [
colors.bold(organization.name),
colors.dim(organizationUrl),
],
'Organization:': [colors.bold(organization.name), colors.dim(organizationUrl)],
'Project:': [colors.bold(project.name), colors.dim(projectUrl)],
'Target:': [colors.bold(target.name), colors.dim(targetUrl)],
' ': [''],
'Access to schema:publish': [
tokenInfo.canPublishSchema ? access.yes : access.not,
],
'Access to schema:check': [
tokenInfo.canCheckSchema ? access.yes : access.not,
],
'Access to operation:publish': [
tokenInfo.canPublishOperations ? access.yes : access.not,
],
'Access to schema:publish': [tokenInfo.canPublishSchema ? access.yes : access.not],
'Access to schema:check': [tokenInfo.canCheckSchema ? access.yes : access.not],
'Access to operation:publish': [tokenInfo.canPublishOperations ? access.yes : access.not],
});
this.log(print());
} else if (result.tokenInfo.__typename === 'TokenNotFoundError') {
this.error(`Token not found. Reason: ${result.tokenInfo.message}`, {
exit: 0,
suggestions: [
`How to create a token? https://docs.graphql-hive.com/features/tokens`,
],
suggestions: [`How to create a token? https://docs.graphql-hive.com/features/tokens`],
});
}
}
}
function createPrinter(records: {
[label: string]: [value: string, extra?: string];
}) {
function createPrinter(records: { [label: string]: [value: string, extra?: string] }) {
const labels = Object.keys(records);
const values = Object.values(records).map((v) => v[0]);
const maxLabelsLen = Math.max(...labels.map((v) => v.length)) + 4;
const maxValuesLen = Math.max(...values.map((v) => v.length)) + 4;
const values = Object.values(records).map(v => v[0]);
const maxLabelsLen = Math.max(...labels.map(v => v.length)) + 4;
const maxValuesLen = Math.max(...values.map(v => v.length)) + 4;
return () => {
const lines: string[] = [];
@ -102,11 +87,7 @@ function createPrinter(records: {
for (const label in records) {
const [value, extra] = records[label];
lines.push(
label.padEnd(maxLabelsLen, ' ') +
value.padEnd(maxValuesLen, ' ') +
(extra || '')
);
lines.push(label.padEnd(maxLabelsLen, ' ') + value.padEnd(maxValuesLen, ' ') + (extra || ''));
}
return lines.join('\n');

View file

@ -66,9 +66,7 @@ export class Config<TValue = any> {
private read() {
try {
if (!this.cache) {
this.cache = this.readSpace(
JSON.parse(fs.readFileSync(this.filepath, 'utf-8'))
);
this.cache = this.readSpace(JSON.parse(fs.readFileSync(this.filepath, 'utf-8')));
}
} catch (error) {
this.cache = {};

View file

@ -40,8 +40,7 @@ function useGitHubAction(): CIRunner {
},
env() {
const isPr =
process.env.GITHUB_EVENT_NAME === 'pull_request' ||
process.env.GITHUB_EVENT_NAME === 'pull_request_target';
process.env.GITHUB_EVENT_NAME === 'pull_request' || process.env.GITHUB_EVENT_NAME === 'pull_request_target';
if (isPr) {
try {
@ -82,10 +81,7 @@ export async function gitInfo(noGit: () => void) {
if (!commit || !author) {
const rootFromEnv = 'root' in env ? env.root : null;
const git =
rootFromEnv ??
findParentDir(__dirname, '.git') ??
findParentDir(process.cwd(), '.git');
const git = rootFromEnv ?? findParentDir(__dirname, '.git') ?? findParentDir(process.cwd(), '.git');
if (git) {
const commits = await gitToJs(git);

View file

@ -58,7 +58,7 @@ export async function loadOperations(
loaders: [new CodeFileLoader(), new GraphQLFileLoader()],
});
return sources.map((source) => ({
return sources.map(source => ({
content: normalizeOperation({
document: source.document!,
hideLiterals: false,

View file

@ -5,11 +5,7 @@ import { CodeFileLoader } from '@graphql-tools/code-file-loader';
import { GraphQLFileLoader } from '@graphql-tools/graphql-file-loader';
import { JsonFileLoader } from '@graphql-tools/json-file-loader';
import { UrlLoader } from '@graphql-tools/url-loader';
import {
CriticalityLevel,
SchemaChangeConnection,
SchemaErrorConnection,
} from '../sdk';
import { CriticalityLevel, SchemaChangeConnection, SchemaErrorConnection } from '../sdk';
import baseCommand from '../base-command';
const indent = ' ';
@ -24,39 +20,27 @@ export function renderErrors(this: baseCommand, errors: SchemaErrorConnection) {
this.fail(`Detected ${errors.total} error${errors.total > 1 ? 's' : ''}`);
this.log('');
errors.nodes.forEach((error) => {
errors.nodes.forEach(error => {
this.log(`${indent}`, colors.red('-'), this.bolderize(error.message));
});
}
export function renderChanges(
this: baseCommand,
changes: SchemaChangeConnection
) {
export function renderChanges(this: baseCommand, changes: SchemaChangeConnection) {
this.info(`Detected ${changes.total} change${changes.total > 1 ? 's' : ''}`);
this.log('');
changes.nodes.forEach((change) => {
this.log(
indent,
criticalityMap[change.criticality],
this.bolderize(change.message)
);
changes.nodes.forEach(change => {
this.log(indent, criticalityMap[change.criticality], this.bolderize(change.message));
});
}
export async function loadSchema(file: string) {
const sources = await loadTypedefs(file, {
cwd: process.cwd(),
loaders: [
new CodeFileLoader(),
new GraphQLFileLoader(),
new JsonFileLoader(),
new UrlLoader(),
],
loaders: [new CodeFileLoader(), new GraphQLFileLoader(), new JsonFileLoader(), new UrlLoader()],
});
return print(concatAST(sources.map((s) => s.document!)));
return print(concatAST(sources.map(s => s.document!)));
}
export function minifySchema(schema: string): string {

View file

@ -22,8 +22,8 @@ If you're not familiar with Envelop - in "short" it's a lightweight JavaScript l
Here's [more](https://github.com/dotansimha/envelop#envelop) on that topic.
```ts
import { envelop } from '@envelop/core';
import { useHive } from '@graphql-hive/client';
import { envelop } from '@envelop/core'
import { useHive } from '@graphql-hive/client'
const envelopProxy = envelop({
plugins: [
@ -35,12 +35,12 @@ const envelopProxy = envelop({
reporting: {
// feel free to set dummy values here
author: 'Author of the schema version',
commit: 'git sha or any identifier',
commit: 'git sha or any identifier'
},
usage: true, // Collects schema usage based on operations
}),
],
});
usage: true // Collects schema usage based on operations
})
]
})
```
#### With Apollo Server
@ -48,8 +48,8 @@ const envelopProxy = envelop({
Thanks to the plugin system it's a matter of adding hiveApollo plugin to ApolloServer instance:
```ts
import { ApolloServer } from 'apollo-server';
import { hiveApollo } from '@graphql-hive/client';
import { ApolloServer } from 'apollo-server'
import { hiveApollo } from '@graphql-hive/client'
const server = new ApolloServer({
typeDefs,
@ -62,12 +62,12 @@ const server = new ApolloServer({
reporting: {
// feel free to set dummy values here
author: 'Author of the latest change',
commit: 'git sha or any identifier',
commit: 'git sha or any identifier'
},
usage: true, // Collects schema usage based on operations
}),
],
});
usage: true // Collects schema usage based on operations
})
]
})
```
#### With Other Servers
@ -125,29 +125,29 @@ Prerequisites:
The `createServicesFetcher` factory function returns another function that is responsible for fetching a list of services from Hive's high-availability endpoint.
```ts
import { createServicesFetcher } from '@graphql-hive/client';
import { createServicesFetcher } from '@graphql-hive/client'
const fetchServices = createServicesFetcher({
endpoint: process.env.HIVE_CDN_ENDPOINT,
key: process.env.HIVE_CDN_KEY,
});
key: process.env.HIVE_CDN_KEY
})
// This is your GraphQL gateway with built-in polling mechanism, in which the `stitchServices` method is called every 10 seconds.
startMyGraphQLGateway({
// a function that resolves a list of services to stitch them together
async stitchServices() {
const services = await fetchServices();
const services = await fetchServices()
return services.map((service) => {
return services.map(service => {
return {
sdl: service.sdl,
url: service.url,
checksum: service.id, // to check if service's schema was modified
};
});
checksum: service.id // to check if service's schema was modified
}
})
},
pollingInSec: 10, // every 10s
});
pollingInSec: 10 // every 10s
})
```
#### Using the registry with Apollo Gateway
@ -160,23 +160,23 @@ The `experimental_pollInterval` value is up to you. Apollo Gateway uses 10s (10_
- `HIVE_CDN_KEY` - the access
```ts
import { createSupergraphSDLFetcher } from '@graphql-hive/client';
import { ApolloGateway } from '@apollo/gateway';
import { ApolloServer } from 'apollo-server';
import { createSupergraphSDLFetcher } from '@graphql-hive/client'
import { ApolloGateway } from '@apollo/gateway'
import { ApolloServer } from 'apollo-server'
const gateway = new ApolloGateway({
experimental_pollInterval: 10_000, // define the poll interval (in ms)
experimental_updateSupergraphSdl: createSupergraphFetcher({
endpoint: HIVE_CDN_ENDPOINT,
key: HIVE_CDN_KEY,
}),
});
key: HIVE_CDN_KEY
})
})
const server = new ApolloServer({
gateway,
});
gateway
})
server.listen().then(({ url }) => {
console.log(`🚀 Server ready at ${url}`);
});
console.log(`🚀 Server ready at ${url}`)
})
```

View file

@ -3,9 +3,7 @@ import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
const pkg = JSON.parse(
fs.readFileSync(join(__dirname, '../package.json'), 'utf-8')
);
const pkg = JSON.parse(fs.readFileSync(join(__dirname, '../package.json'), 'utf-8'));
const code = `export const version = '${pkg.version}';\n`;
fs.writeFileSync(join(__dirname, '../src/version.ts'), code);

View file

@ -1,19 +1,12 @@
import type { ApolloServerPlugin } from 'apollo-server-plugin-base';
import type { DocumentNode } from 'graphql';
import type {
HiveClient,
HivePluginOptions,
SupergraphSDLFetcherOptions,
} from './internal/types';
import type { HiveClient, HivePluginOptions, SupergraphSDLFetcherOptions } from './internal/types';
import { createHash } from 'crypto';
import axios from 'axios';
import { createHive } from './client';
import { isHiveClient } from './internal/utils';
export function createSupergraphSDLFetcher({
endpoint,
key,
}: SupergraphSDLFetcherOptions) {
export function createSupergraphSDLFetcher({ endpoint, key }: SupergraphSDLFetcherOptions) {
return function supergraphSDLFetcher() {
return axios
.get(endpoint + '/supergraph', {
@ -21,25 +14,21 @@ export function createSupergraphSDLFetcher({
'X-Hive-CDN-Key': key,
},
})
.then((response) => {
.then(response => {
if (response.status >= 200 && response.status < 300) {
return response.data;
}
return Promise.reject(
new Error(`Failed to fetch supergraph [${response.status}]`)
);
return Promise.reject(new Error(`Failed to fetch supergraph [${response.status}]`));
})
.then((supergraphSdl) => ({
.then(supergraphSdl => ({
id: createHash('sha256').update(supergraphSdl).digest('base64'),
supergraphSdl,
}));
};
}
export function hiveApollo(
clientOrOptions: HiveClient | HivePluginOptions
): ApolloServerPlugin {
export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): ApolloServerPlugin {
const hive = isHiveClient(clientOrOptions)
? clientOrOptions
: createHive({

View file

@ -96,20 +96,8 @@ export function createHive(options: HivePluginOptions): HiveClient {
if (result.data?.tokenInfo.__typename === 'TokenInfo') {
const { tokenInfo } = result.data;
const {
organization,
project,
target,
canReportSchema,
canCollectUsage,
canReadOperations,
} = tokenInfo;
const print = createPrinter([
tokenInfo.token.name,
organization.name,
project.name,
target.name,
]);
const { organization, project, target, canReportSchema, canCollectUsage, canReadOperations } = tokenInfo;
const print = createPrinter([tokenInfo.token.name, organization.name, project.name, target.name]);
const organizationUrl = `https://app.graphql-hive.com/${organization.cleanId}`;
const projectUrl = `${organizationUrl}/${project.cleanId}`;
@ -120,38 +108,25 @@ export function createHive(options: HivePluginOptions): HiveClient {
'[hive][info] Token details',
'',
`Token name: ${print(tokenInfo.token.name)}`,
`Organization: ${print(
organization.name,
organizationUrl
)}`,
`Organization: ${print(organization.name, organizationUrl)}`,
`Project: ${print(project.name, projectUrl)}`,
`Target: ${print(target.name, targetUrl)}`,
'',
`Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`,
`Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`,
`Can read operations? ${print(
canReadOperations ? 'Yes' : 'No'
)}`,
`Can read operations? ${print(canReadOperations ? 'Yes' : 'No')}`,
'',
].join('\n')
);
} else if (result.data?.tokenInfo.message) {
logger.error(
`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`
);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`
);
logger.error(`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`);
logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
} else {
logger.error(`[hive][info] ${result.errors![0].message}`);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`
);
logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
}
} else {
logger.error(
`[hive][info] Error ${response.status}: ${response.statusText}`
);
logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
}
} catch (error: any) {
logger.error(`[hive][info] Error ${error.message}`);
@ -168,7 +143,7 @@ export function createHive(options: HivePluginOptions): HiveClient {
}
function createPrinter(values: string[]) {
const maxLen = Math.max(...values.map((v) => v.length)) + 4;
const maxLen = Math.max(...values.map(v => v.length)) + 4;
return (base: string, extra?: string) => {
return base.padEnd(maxLen, ' ') + (extra || '');

View file

@ -5,9 +5,7 @@ import { isHiveClient } from './internal/utils';
export function useHive(clientOrOptions: HiveClient): Plugin;
export function useHive(clientOrOptions: HivePluginOptions): Plugin;
export function useHive(
clientOrOptions: HiveClient | HivePluginOptions
): Plugin {
export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin {
const hive = isHiveClient(clientOrOptions)
? clientOrOptions
: createHive({

View file

@ -1,9 +1,6 @@
import axios from 'axios';
import { createHash } from 'crypto';
import type {
SchemaFetcherOptions,
ServicesFetcherOptions,
} from './internal/types';
import type { SchemaFetcherOptions, ServicesFetcherOptions } from './internal/types';
interface Schema {
sdl: string;
@ -11,10 +8,7 @@ interface Schema {
name: string;
}
function createFetcher<T>({
endpoint,
key,
}: SchemaFetcherOptions & ServicesFetcherOptions) {
function createFetcher<T>({ endpoint, key }: SchemaFetcherOptions & ServicesFetcherOptions) {
return function fetcher(): Promise<T> {
return axios
.get(endpoint + '/schema', {
@ -24,14 +18,12 @@ function createFetcher<T>({
},
responseType: 'json',
})
.then((response) => {
.then(response => {
if (response.status >= 200 && response.status < 300) {
return response.data;
}
return Promise.reject(
new Error(`Failed to fetch [${response.status}]`)
);
return Promise.reject(new Error(`Failed to fetch [${response.status}]`));
});
};
}
@ -40,31 +32,20 @@ export function createSchemaFetcher({ endpoint, key }: SchemaFetcherOptions) {
const fetcher = createFetcher<Schema>({ endpoint, key });
return function schemaFetcher() {
return fetcher().then((schema) => ({
id: createHash('sha256')
.update(schema.sdl)
.update(schema.url)
.update(schema.name)
.digest('base64'),
return fetcher().then(schema => ({
id: createHash('sha256').update(schema.sdl).update(schema.url).update(schema.name).digest('base64'),
...schema,
}));
};
}
export function createServicesFetcher({
endpoint,
key,
}: ServicesFetcherOptions) {
export function createServicesFetcher({ endpoint, key }: ServicesFetcherOptions) {
const fetcher = createFetcher<readonly Schema[]>({ endpoint, key });
return function schemaFetcher() {
return fetcher().then((services) =>
services.map((service) => ({
id: createHash('sha256')
.update(service.sdl)
.update(service.url)
.update(service.name)
.digest('base64'),
return fetcher().then(services =>
services.map(service => ({
id: createHash('sha256').update(service.sdl).update(service.url).update(service.name).digest('base64'),
...service,
}))
);

View file

@ -100,9 +100,7 @@ export function createAgent<T>(
function debugLog(msg: string) {
if (options.debug) {
options.logger.info(
`[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}`
);
options.logger.info(`[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}`);
}
}
@ -155,7 +153,7 @@ export function createAgent<T>(
responseType: 'json',
timeout: options.timeout,
})
.catch((error) => {
.catch(error => {
debugLog(`Attempt ${attempt} failed: ${error.message}`);
return Promise.reject(error);
});
@ -175,16 +173,12 @@ export function createAgent<T>(
});
if (response.statusCode < 200 || response.statusCode >= 300) {
throw new Error(
`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}`
);
throw new Error(`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}`);
}
debugLog(`Sent!`);
} catch (error: any) {
options.logger.error(
`[hive][${prefix}] Failed to send data: ${error.message}`
);
options.logger.error(`[hive][${prefix}] Failed to send data: ${error.message}`);
}
if (!runOnce) {

View file

@ -10,9 +10,7 @@ export interface OperationsStore {
reload(): Promise<void>;
}
export function createOperationsStore(
pluginOptions: HivePluginOptions
): OperationsStore {
export function createOperationsStore(pluginOptions: HivePluginOptions): OperationsStore {
const operationsStoreOptions = pluginOptions.operationsStore;
const token = pluginOptions.token;
@ -31,18 +29,17 @@ export function createOperationsStore(
const store = new Map<string, DocumentNode>();
const canHandle: OperationsStore['canHandle'] = (key) => {
const canHandle: OperationsStore['canHandle'] = key => {
return typeof key === 'string' && !key.includes('{');
};
const get: OperationsStore['get'] = (key) => {
const get: OperationsStore['get'] = key => {
return store.get(key)!;
};
const load: OperationsStore['load'] = async () => {
const response = await axios.post(
operationsStoreOptions.endpoint ??
'https://app.graphql-hive.com/registry',
operationsStoreOptions.endpoint ?? 'https://app.graphql-hive.com/registry',
{
query,
operationName: 'loadStoredOperations',

View file

@ -9,9 +9,7 @@ export interface SchemaReporter {
dispose(): Promise<void>;
}
export function createReporting(
pluginOptions: HivePluginOptions
): SchemaReporter {
export function createReporting(pluginOptions: HivePluginOptions): SchemaReporter {
if (!pluginOptions.reporting) {
return {
report() {},
@ -28,8 +26,7 @@ export function createReporting(
{
logger,
...(pluginOptions.agent ?? {}),
endpoint:
reportingOptions.endpoint ?? 'https://app.graphql-hive.com/registry',
endpoint: reportingOptions.endpoint ?? 'https://app.graphql-hive.com/registry',
token: token,
enabled: pluginOptions.enabled,
debug: pluginOptions.debug,
@ -117,14 +114,10 @@ function isFederatedSchema(schema: GraphQLSchema): boolean {
* Extracts the SDL of a federated service from a GraphQLSchema object
* We do it to not send federated schema to the registry but only the original schema provided by user
*/
async function extractFederationServiceSDL(
schema: GraphQLSchema
): Promise<string> {
async function extractFederationServiceSDL(schema: GraphQLSchema): Promise<string> {
const queryType = schema.getQueryType()!;
const serviceField = queryType.getFields()._service;
const resolved = await (
serviceField.resolve as () => Promise<{ sdl: string }>
)();
const resolved = await (serviceField.resolve as () => Promise<{ sdl: string }>)();
return resolved.sdl;
}
@ -155,9 +148,7 @@ function printSchemaWithDirectives(schema: GraphQLSchema) {
// We do it to avoid sending schema definition to the registry, which may be unwanted by federated services or something
return print({
kind: Kind.DOCUMENT,
definitions: doc.definitions.filter(
(def) => def.kind !== Kind.SCHEMA_DEFINITION
),
definitions: doc.definitions.filter(def => def.kind !== Kind.SCHEMA_DEFINITION),
});
}
@ -166,8 +157,6 @@ function printSchemaWithDirectives(schema: GraphQLSchema) {
async function printToSDL(schema: GraphQLSchema) {
return stripIgnoredCharacters(
isFederatedSchema(schema)
? await extractFederationServiceSDL(schema)
: printSchemaWithDirectives(schema)
isFederatedSchema(schema) ? await extractFederationServiceSDL(schema) : printSchemaWithDirectives(schema)
);
}

View file

@ -1,8 +1,6 @@
export function randomSampling(sampleRate: number) {
if (sampleRate > 1 || sampleRate < 0) {
throw new Error(
`Expected usage.sampleRate to be 0 <= x <= 1, received ${sampleRate}`
);
throw new Error(`Expected usage.sampleRate to be 0 <= x <= 1, received ${sampleRate}`);
}
return function shouldInclude(): boolean {

View file

@ -13,9 +13,7 @@ export interface HiveClient {
export type AsyncIterableIteratorOrValue<T> = AsyncIterableIterator<T> | T;
export type CollectUsageCallback = (
result: AsyncIterableIteratorOrValue<GraphQLErrorsResult>
) => void;
export type CollectUsageCallback = (result: AsyncIterableIteratorOrValue<GraphQLErrorsResult>) => void;
export interface ClientInfo {
name: string;
version: string;

View file

@ -27,19 +27,8 @@ import { normalizeOperation } from '@graphql-hive/core';
import { createAgent } from './agent';
import { randomSampling } from './sampling';
import { version } from '../version';
import {
cache,
cacheDocumentKey,
measureDuration,
memo,
isAsyncIterableIterator,
} from './utils';
import type {
HivePluginOptions,
HiveUsagePluginOptions,
CollectUsageCallback,
ClientInfo,
} from './types';
import { cache, cacheDocumentKey, measureDuration, memo, isAsyncIterableIterator } from './utils';
import type { HivePluginOptions, HiveUsagePluginOptions, CollectUsageCallback, ClientInfo } from './types';
interface UsageCollector {
collect(args: ExecutionArgs): CollectUsageCallback;
@ -61,12 +50,9 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
map: {},
operations: [],
};
const options =
typeof pluginOptions.usage === 'boolean'
? ({} as HiveUsagePluginOptions)
: pluginOptions.usage;
const options = typeof pluginOptions.usage === 'boolean' ? ({} as HiveUsagePluginOptions) : pluginOptions.usage;
const logger = pluginOptions.agent?.logger ?? console;
const collector = memo(createCollector, (arg) => arg.schema);
const collector = memo(createCollector, arg => arg.schema);
const excludeSet = new Set(options.exclude ?? []);
const agent = createAgent<CollectedOperation>(
{
@ -147,16 +133,15 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
}
const rootOperation = args.document.definitions.find(
(o) => o.kind === Kind.OPERATION_DEFINITION
o => o.kind === Kind.OPERATION_DEFINITION
) as OperationDefinitionNode;
const document = args.document;
const operationName =
args.operationName || rootOperation.name?.value || 'anonymous';
const operationName = args.operationName || rootOperation.name?.value || 'anonymous';
const duration = finish();
if (!excludeSet.has(operationName) && shouldInclude()) {
const errors =
result.errors?.map((error) => ({
result.errors?.map(error => ({
message: error.message,
path: error.path?.join('.'),
})) ?? [];
@ -181,8 +166,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
},
// TODO: operationHash is ready to accept hashes of persisted operations
client:
typeof args.contextValue !== 'undefined' &&
typeof options.clientInfo !== 'undefined'
typeof args.contextValue !== 'undefined' && typeof options.clientInfo !== 'undefined'
? options.clientInfo(args.contextValue)
: null,
});
@ -200,15 +184,7 @@ interface CacheResult {
fields: string[];
}
export function createCollector({
schema,
max,
ttl,
}: {
schema: GraphQLSchema;
max?: number;
ttl?: number;
}) {
export function createCollector({ schema, max, ttl }: { schema: GraphQLSchema; max?: number; ttl?: number }) {
const typeInfo = new TypeInfo(schema);
function collect(doc: DocumentNode): CacheResult {
@ -254,10 +230,7 @@ export function createCollector({
if (node.value.kind === Kind.ENUM) {
// Collect only a specific enum value
collectInputType(inputTypeName, node.value.value);
} else if (
node.value.kind !== Kind.OBJECT &&
node.value.kind !== Kind.LIST
) {
} else if (node.value.kind !== Kind.OBJECT && node.value.kind !== Kind.LIST) {
collectInputType(inputTypeName);
}
}
@ -271,7 +244,7 @@ export function createCollector({
}
if (isEnumType(namedType)) {
namedType.getValues().forEach((value) => {
namedType.getValues().forEach(value => {
markAsUsed(makeId(namedType.name, value.name));
});
return;
@ -312,7 +285,7 @@ export function createCollector({
const inputType = typeInfo.getInputType()!;
const inputTypeName = resolveTypeName(inputType);
node.values.forEach((value) => {
node.values.forEach(value => {
if (value.kind !== Kind.OBJECT) {
// if a value is not an object we need to collect all fields
collectInputType(inputTypeName);
@ -335,7 +308,7 @@ export function createCollector({
if (all) {
markEntireTypeAsUsed(schema.getType(inputTypeName) as any);
} else {
fields.forEach((field) => {
fields.forEach(field => {
markAsUsed(makeId(inputTypeName, field));
});
}
@ -369,10 +342,7 @@ function unwrapType(type: GraphQLType): GraphQLNamedType {
return type;
}
type GraphQLNamedInputType = Exclude<
GraphQLNamedType,
GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType
>;
type GraphQLNamedInputType = Exclude<GraphQLNamedType, GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType>;
type GraphQLNamedOutputType = Exclude<GraphQLNamedType, GraphQLInputObjectType>;
export interface Report {

View file

@ -1,20 +1,11 @@
import { createHash } from 'crypto';
import type {
HiveClient,
HivePluginOptions,
AsyncIterableIteratorOrValue,
} from './types';
import type { HiveClient, HivePluginOptions, AsyncIterableIteratorOrValue } from './types';
export function isAsyncIterableIterator<T>(
value: AsyncIterableIteratorOrValue<T>
): value is AsyncIterableIterator<T> {
export function isAsyncIterableIterator<T>(value: AsyncIterableIteratorOrValue<T>): value is AsyncIterableIterator<T> {
return typeof (value as any)?.[Symbol.asyncIterator] === 'function';
}
export function memo<R, A, K>(
fn: (arg: A) => R,
cacheKeyFn: (arg: A) => K
): (arg: A) => R {
export function memo<R, A, K>(fn: (arg: A) => R, cacheKeyFn: (arg: A) => K): (arg: A) => R {
let memoizedResult: R | null = null;
let memoizedKey: K | null = null;
@ -88,11 +79,7 @@ export function measureDuration() {
};
}
export function addProperty<T, K extends string>(
key: K,
value: undefined | null,
obj: T
): T;
export function addProperty<T, K extends string>(key: K, value: undefined | null, obj: T): T;
export function addProperty<T, K extends string, V>(
key: K,
value: V,
@ -100,11 +87,7 @@ export function addProperty<T, K extends string, V>(
): T & {
[k in K]: V;
};
export function addProperty<T, K extends string, V>(
key: K,
value: V | undefined | null,
obj: T
): any {
export function addProperty<T, K extends string, V>(key: K, value: V | undefined | null, obj: T): any {
if (value === null || typeof value === 'undefined') {
return obj;
}
@ -115,8 +98,6 @@ export function addProperty<T, K extends string, V>(
};
}
export function isHiveClient(
clientOrOptions: HiveClient | HivePluginOptions
): clientOrOptions is HiveClient {
export function isHiveClient(clientOrOptions: HiveClient | HivePluginOptions): clientOrOptions is HiveClient {
return 'operationsStore' in clientOrOptions;
}

View file

@ -25,8 +25,6 @@ test('should not leak the exception', async () => {
.then(() => 'OK')
.catch(() => 'ERROR');
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining(`[hive][info] Error`)
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][info] Error`));
expect(result).toBe('OK');
});

View file

@ -85,22 +85,16 @@ test('GraphQL Yoga - should not interrupt the process', async () => {
}
`,
})
.catch(async (error) => {
.catch(async error => {
await stop();
return Promise.reject(error);
});
await waitFor(5_000);
await stop();
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][info]')
);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][usage]')
);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][reporting]')
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info]'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][usage]'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting]'));
clean();
}, 10_000);
@ -147,13 +141,7 @@ test('Apollo Server - should not interrupt the process', async () => {
await waitFor(5_000);
await apollo.stop();
clean();
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][info]')
);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][usage]')
);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[hive][reporting]')
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info]'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][usage]'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting]'));
}, 10_000);

View file

@ -50,19 +50,11 @@ test('should not leak the exception', async () => {
await waitFor(2000);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith(
'[hive][reporting] Sending (queue 1) (attempt 1)'
);
expect(logger.info).toHaveBeenCalledWith(
expect.stringContaining('[hive][reporting] Attempt 1 failed:')
);
expect(logger.info).toHaveBeenCalledWith(
'[hive][reporting] Sending (queue 1) (attempt 2)'
);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting] Attempt 1 failed:'));
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 2)');
expect(logger.error).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining(`[hive][reporting] Failed to send data`)
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][reporting] Failed to send data`));
});
test('should send data to Hive', async () => {
@ -121,9 +113,7 @@ test('should send data to Hive', async () => {
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
'[hive][reporting] Sending (queue 1) (attempt 1)'
);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(body.variables.input.sdl).toBe(`type Query{foo:String}`);
@ -187,14 +177,10 @@ test.only('should send data to Hive immediately', async () => {
});
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
'[hive][reporting] Sending immediately'
);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending immediately');
expect(logger.info).toHaveBeenCalledTimes(1);
await waitFor(50);
expect(logger.info).toHaveBeenCalledWith(
'[hive][reporting] Sending (queue 1) (attempt 1)'
);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(logger.info).toHaveBeenCalledTimes(3);

View file

@ -1,5 +1,5 @@
export function waitFor(ms: number) {
return new Promise<void>((resolve) => {
return new Promise<void>(resolve => {
setTimeout(resolve, ms);
});
}

View file

@ -146,9 +146,7 @@ test('should send data to Hive', async () => {
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 1)`
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
// Map
@ -221,19 +219,11 @@ test('should not leak the exception', async () => {
await waitFor(1000);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 1)`
);
expect(logger.info).toHaveBeenCalledWith(
expect.stringContaining(`[hive][usage] Attempt 1 failed:`)
);
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 2)`
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Attempt 1 failed:`));
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 2)`);
expect(logger.error).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining(`[hive][usage] Failed to send data`)
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Failed to send data`));
});
test('sendImmediately should not stop the schedule', async () => {
@ -284,13 +274,9 @@ test('sendImmediately should not stop the schedule', async () => {
// Because maxSize is 2 and sendInterval is 100ms
// the scheduled send task should be done by now
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 1)`
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.info).not.toHaveBeenCalledWith(
`[hive][usage] Sending immediately`
);
expect(logger.info).not.toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
expect(logger.info).toHaveBeenCalledTimes(2);
// Now we will check the maxSize
@ -298,9 +284,7 @@ test('sendImmediately should not stop the schedule', async () => {
collect({});
collect({});
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 1)`
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
await waitFor(1); // we run setImmediate under the hood
// It should be sent already
@ -314,9 +298,7 @@ test('sendImmediately should not stop the schedule', async () => {
collect({});
await waitFor(200);
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(
`[hive][usage] Sending (queue 1) (attempt 1)`
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.info).toHaveBeenCalledTimes(7);

View file

@ -27,92 +27,77 @@ export function normalizeOperation({
}): string {
return stripIgnoredCharacters(
print(
visit(
dropUnusedDefinitions(
document,
operationName ??
document.definitions.find(isOperationDef)?.name?.value
),
{
// hide literals
IntValue(node) {
return hideLiterals ? { ...node, value: '0' } : node;
},
FloatValue(node) {
return hideLiterals ? { ...node, value: '0' } : node;
},
StringValue(node) {
return hideLiterals ? { ...node, value: '', block: false } : node;
},
Field(node) {
return {
...node,
// remove aliases
alias: removeAliases ? undefined : node.alias,
// sort arguments
arguments: sortNodes(node.arguments),
};
},
Document(node) {
return {
...node,
definitions: sortNodes(node.definitions),
};
},
OperationDefinition(node) {
return {
...node,
variableDefinitions: sortNodes(node.variableDefinitions),
};
},
SelectionSet(node) {
return {
...node,
selections: sortNodes(node.selections),
};
},
FragmentSpread(node) {
return {
...node,
directives: sortNodes(node.directives),
};
},
InlineFragment(node) {
return {
...node,
directives: sortNodes(node.directives),
};
},
FragmentDefinition(node) {
return {
...node,
directives: sortNodes(node.directives),
variableDefinitions: sortNodes(node.variableDefinitions),
};
},
Directive(node) {
return { ...node, arguments: sortNodes(node.arguments) };
},
}
)
visit(dropUnusedDefinitions(document, operationName ?? document.definitions.find(isOperationDef)?.name?.value), {
// hide literals
IntValue(node) {
return hideLiterals ? { ...node, value: '0' } : node;
},
FloatValue(node) {
return hideLiterals ? { ...node, value: '0' } : node;
},
StringValue(node) {
return hideLiterals ? { ...node, value: '', block: false } : node;
},
Field(node) {
return {
...node,
// remove aliases
alias: removeAliases ? undefined : node.alias,
// sort arguments
arguments: sortNodes(node.arguments),
};
},
Document(node) {
return {
...node,
definitions: sortNodes(node.definitions),
};
},
OperationDefinition(node) {
return {
...node,
variableDefinitions: sortNodes(node.variableDefinitions),
};
},
SelectionSet(node) {
return {
...node,
selections: sortNodes(node.selections),
};
},
FragmentSpread(node) {
return {
...node,
directives: sortNodes(node.directives),
};
},
InlineFragment(node) {
return {
...node,
directives: sortNodes(node.directives),
};
},
FragmentDefinition(node) {
return {
...node,
directives: sortNodes(node.directives),
variableDefinitions: sortNodes(node.variableDefinitions),
};
},
Directive(node) {
return { ...node, arguments: sortNodes(node.arguments) };
},
})
)
);
}
function sortNodes(nodes: readonly DefinitionNode[]): readonly DefinitionNode[];
function sortNodes(nodes: readonly SelectionNode[]): readonly SelectionNode[];
function sortNodes(
nodes: readonly ArgumentNode[] | undefined
): readonly ArgumentNode[] | undefined;
function sortNodes(
nodes: readonly VariableDefinitionNode[] | undefined
): readonly VariableDefinitionNode[] | undefined;
function sortNodes(
nodes: readonly DirectiveNode[] | undefined
): readonly DirectiveNode[] | undefined;
function sortNodes(
nodes: readonly any[] | undefined
): readonly any[] | undefined {
function sortNodes(nodes: readonly ArgumentNode[] | undefined): readonly ArgumentNode[] | undefined;
function sortNodes(nodes: readonly VariableDefinitionNode[] | undefined): readonly VariableDefinitionNode[] | undefined;
function sortNodes(nodes: readonly DirectiveNode[] | undefined): readonly DirectiveNode[] | undefined;
function sortNodes(nodes: readonly any[] | undefined): readonly any[] | undefined {
if (nodes) {
if (nodes.length === 0) {
return [];
@ -130,13 +115,7 @@ function sortNodes(
return sortBy(nodes, 'name.value');
}
if (
isOfKindList<SelectionNode>(nodes, [
Kind.FIELD,
Kind.FRAGMENT_SPREAD,
Kind.INLINE_FRAGMENT,
])
) {
if (isOfKindList<SelectionNode>(nodes, [Kind.FIELD, Kind.FRAGMENT_SPREAD, Kind.INLINE_FRAGMENT])) {
return sortBy(nodes, 'kind', 'name.value');
}
@ -146,13 +125,8 @@ function sortNodes(
return;
}
function isOfKindList<T>(
nodes: readonly any[],
kind: string | string[]
): nodes is T[] {
return typeof kind === 'string'
? nodes[0].kind === kind
: kind.includes(nodes[0].kind);
function isOfKindList<T>(nodes: readonly any[], kind: string | string[]): nodes is T[] {
return typeof kind === 'string' ? nodes[0].kind === kind : kind.includes(nodes[0].kind);
}
function isOperationDef(def: DefinitionNode): def is OperationDefinitionNode {

View file

@ -3,10 +3,7 @@ import { activityModule } from './modules/activity';
import { authModule } from './modules/auth';
import { labModule } from './modules/lab';
import { operationsModule } from './modules/operations';
import {
ClickHouseConfig,
CLICKHOUSE_CONFIG,
} from './modules/operations/providers/tokens';
import { ClickHouseConfig, CLICKHOUSE_CONFIG } from './modules/operations/providers/tokens';
import { organizationModule } from './modules/organization';
import { persistedOperationModule } from './modules/persisted-operations';
import { projectModule } from './modules/project';
@ -17,15 +14,8 @@ import { IdTranslator } from './modules/shared/providers/id-translator';
import { IdempotentRunner } from './modules/shared/providers/idempotent-runner';
import { Logger } from './modules/shared/providers/logger';
import { MessageBus } from './modules/shared/providers/message-bus';
import {
CryptoProvider,
encryptionSecretProvider,
} from './modules/shared/providers/crypto';
import {
RedisConfig,
REDIS_CONFIG,
RedisProvider,
} from './modules/shared/providers/redis';
import { CryptoProvider, encryptionSecretProvider } from './modules/shared/providers/crypto';
import { RedisConfig, REDIS_CONFIG, RedisProvider } from './modules/shared/providers/redis';
import { Storage } from './modules/shared/providers/storage';
import { Tracking } from './modules/shared/providers/tracking';
import { targetModule } from './modules/target';
@ -38,35 +28,20 @@ import { alertsModule } from './modules/alerts';
import { tokenModule } from './modules/token';
import { feedbackModule } from './modules/feedback';
import { TokensConfig, TOKENS_CONFIG } from './modules/token/providers/tokens';
import {
WebhooksConfig,
WEBHOOKS_CONFIG,
} from './modules/alerts/providers/tokens';
import {
SchemaServiceConfig,
SCHEMA_SERVICE_CONFIG,
} from './modules/schema/providers/orchestrators/tokens';
import { WebhooksConfig, WEBHOOKS_CONFIG } from './modules/alerts/providers/tokens';
import { SchemaServiceConfig, SCHEMA_SERVICE_CONFIG } from './modules/schema/providers/orchestrators/tokens';
import { CDN_CONFIG, CDNConfig } from './modules/cdn/providers/tokens';
import { cdnModule } from './modules/cdn';
import { adminModule } from './modules/admin';
import {
FEEDBACK_SLACK_CHANNEL,
FEEDBACK_SLACK_TOKEN,
} from './modules/feedback/providers/tokens';
import { FEEDBACK_SLACK_CHANNEL, FEEDBACK_SLACK_TOKEN } from './modules/feedback/providers/tokens';
import { usageEstimationModule } from './modules/usage-estimation';
import {
UsageEstimationServiceConfig,
USAGE_ESTIMATION_SERVICE_CONFIG,
} from './modules/usage-estimation/providers/tokens';
import { rateLimitModule } from './modules/rate-limit';
import {
RateLimitServiceConfig,
RATE_LIMIT_SERVICE_CONFIG,
} from './modules/rate-limit/providers/tokens';
import {
BillingConfig,
BILLING_CONFIG,
} from './modules/billing/providers/tokens';
import { RateLimitServiceConfig, RATE_LIMIT_SERVICE_CONFIG } from './modules/rate-limit/providers/tokens';
import { BillingConfig, BILLING_CONFIG } from './modules/billing/providers/tokens';
import { billingModule } from './modules/billing';
const modules = [

View file

@ -8,9 +8,7 @@ export default gql`
}
extend type Query {
organizationActivities(
selector: OrganizationActivitiesSelector!
): ActivityConnection!
organizationActivities(selector: OrganizationActivitiesSelector!): ActivityConnection!
projectActivities(selector: ProjectActivitiesSelector!): ActivityConnection!
targetActivities(selector: TargetActivitiesSelector!): ActivityConnection!
}

View file

@ -4,12 +4,7 @@ import { AuthManager } from '../../auth/providers/auth-manager';
import { OrganizationAccessScope } from '../../auth/providers/organization-access';
import { ProjectAccessScope } from '../../auth/providers/project-access';
import { Logger } from '../../shared/providers/logger';
import {
Storage,
OrganizationSelector,
ProjectSelector,
TargetSelector,
} from '../../shared/providers/storage';
import { Storage, OrganizationSelector, ProjectSelector, TargetSelector } from '../../shared/providers/storage';
import { Tracking } from '../../shared/providers/tracking';
import { Activity } from './activities';
@ -24,12 +19,7 @@ interface PaginationSelector {
export class ActivityManager {
private logger: Logger;
constructor(
logger: Logger,
private authManager: AuthManager,
private storage: Storage,
private tracking: Tracking
) {
constructor(logger: Logger, private authManager: AuthManager, private storage: Storage, private tracking: Tracking) {
this.logger = logger.child({
source: 'ActivityManager',
});
@ -39,18 +29,12 @@ export class ActivityManager {
try {
this.logger.debug('Creating an activity');
const user = activity.user
? activity.user.id
: (await this.authManager.getCurrentUser()).id;
const user = activity.user ? activity.user.id : (await this.authManager.getCurrentUser()).id;
await this.storage.createActivity({
organization: activity.selector.organization,
project:
'project' in activity.selector
? activity.selector.project
: undefined,
target:
'target' in activity.selector ? activity.selector.target : undefined,
project: 'project' in activity.selector ? activity.selector.project : undefined,
target: 'target' in activity.selector ? activity.selector.target : undefined,
user,
type: activity.type,
meta: 'meta' in activity ? activity.meta : {},
@ -81,9 +65,7 @@ export class ActivityManager {
return this.storage.getActivities(selector);
}
public async getByProject(
selector: ProjectSelector & PaginationSelector
): Promise<readonly ActivityObject[]> {
public async getByProject(selector: ProjectSelector & PaginationSelector): Promise<readonly ActivityObject[]> {
await this.authManager.ensureProjectAccess({
...selector,
scope: ProjectAccessScope.READ,
@ -91,9 +73,7 @@ export class ActivityManager {
return this.storage.getActivities(selector);
}
public async getByTarget(
selector: TargetSelector & PaginationSelector
): Promise<readonly ActivityObject[]> {
public async getByTarget(selector: TargetSelector & PaginationSelector): Promise<readonly ActivityObject[]> {
await this.authManager.ensureProjectAccess({
...selector,
scope: ProjectAccessScope.READ,

View file

@ -7,9 +7,7 @@ import { createConnection } from '../../shared/schema';
export const resolvers: ActivityModule.Resolvers = {
Query: {
async organizationActivities(_, { selector }, { injector }) {
const organization = await injector
.get(IdTranslator)
.translateOrganizationId(selector);
const organization = await injector.get(IdTranslator).translateOrganizationId(selector);
return injector.get(ActivityManager).getByOrganization({
organization,

View file

@ -36,10 +36,7 @@ export class AdminManager {
}
async getOperationsOverTime({ daysLimit }: { daysLimit: number }) {
this.logger.debug(
'Fetching collected operations over time (admin, daysLimit=%s)',
daysLimit
);
this.logger.debug('Fetching collected operations over time (admin, daysLimit=%s)', daysLimit);
const user = await this.authManager.getCurrentUser();
if (!user.isAdmin) {
@ -50,7 +47,7 @@ export class AdminManager {
daysLimit,
});
return points.map((point) => ({
return points.map(point => ({
date: point.date,
count: point.total,
}));
@ -58,23 +55,17 @@ export class AdminManager {
@atomic((arg: { daysLimit: number }) => arg.daysLimit + '')
async countOperationsPerOrganization({ daysLimit }: { daysLimit: number }) {
this.logger.info(
'Counting collected operations per organization (admin, daysLimit=%s)',
daysLimit
);
this.logger.info('Counting collected operations per organization (admin, daysLimit=%s)', daysLimit);
const user = await this.authManager.getCurrentUser();
if (user.isAdmin) {
const pairs = await this.storage.adminGetOrganizationsTargetPairs();
const operations =
await this.operationsReader.adminCountOperationsPerTarget({
daysLimit,
});
const operations = await this.operationsReader.adminCountOperationsPerTarget({
daysLimit,
});
const organizationCountMap = new Map<string, number>();
const targetOrganizationMap = new Map<string, string>(
pairs.map((p) => [p.target, p.organization])
);
const targetOrganizationMap = new Map<string, string>(pairs.map(p => [p.target, p.organization]));
for (const op of operations) {
const organizationId = targetOrganizationMap.get(op.target);
@ -85,7 +76,7 @@ export class AdminManager {
}
}
return Array.from(organizationCountMap.entries()).map((entry) => ({
return Array.from(organizationCountMap.entries()).map(entry => ({
organization: entry[0],
total: entry[1],
}));

View file

@ -32,17 +32,12 @@ export const resolvers: AdminModule.Resolvers = {
},
AdminOrganizationStats: {
async operations(stats, _, { injector }) {
const results = await injector
.get(AdminManager)
.countOperationsPerOrganization({
// Max days limit is 30 (that's the default TTL in ClickHouse table)
daysLimit: stats.daysLimit ?? 30,
});
const results = await injector.get(AdminManager).countOperationsPerOrganization({
// Max days limit is 30 (that's the default TTL in ClickHouse table)
daysLimit: stats.daysLimit ?? 30,
});
return (
results.find((r) => r.organization === stats.organization.id)?.total ??
0
);
return results.find(r => r.organization === stats.organization.id)?.total ?? 0;
},
},
};

View file

@ -10,9 +10,5 @@ export const alertsModule = createModule({
dirname: __dirname,
typeDefs,
resolvers,
providers: [
AlertsManager,
SlackCommunicationAdapter,
WebhookCommunicationAdapter,
],
providers: [AlertsManager, SlackCommunicationAdapter, WebhookCommunicationAdapter],
});

View file

@ -1,12 +1,5 @@
import type * as Types from '../../../../__generated__/types';
import {
Alert,
AlertChannel,
Organization,
Project,
Target,
SchemaVersion,
} from '../../../../shared/entities';
import { Alert, AlertChannel, Organization, Project, Target, SchemaVersion } from '../../../../shared/entities';
export interface SchemaChangeNotificationInput {
event: {
@ -42,9 +35,7 @@ export interface ChannelConfirmationInput {
}
export interface CommunicationAdapter {
sendSchemaChangeNotification(
input: SchemaChangeNotificationInput
): Promise<void>;
sendSchemaChangeNotification(input: SchemaChangeNotificationInput): Promise<void>;
sendChannelConfirmation(input: ChannelConfirmationInput): Promise<void>;
}
@ -60,9 +51,7 @@ export function quotesTransformer(msg: string, symbols = '**') {
return `${symbols}${value}${symbols}`;
}
return msg
.replace(findSingleQuotes, transformm)
.replace(findDoubleQuotes, transformm);
return msg.replace(findSingleQuotes, transformm).replace(findDoubleQuotes, transformm);
}
export function filterChangesByLevel(level: Types.CriticalityLevel) {

View file

@ -89,9 +89,7 @@ export class SlackCommunicationAdapter implements CommunicationAdapter {
}
const actionMessage =
input.event.kind === 'created'
? `I will send here notifications`
: `I will no longer send here notifications`;
input.event.kind === 'created' ? `I will send here notifications` : `I will no longer send here notifications`;
try {
const projectLink = this.createLink({
@ -102,10 +100,9 @@ export class SlackCommunicationAdapter implements CommunicationAdapter {
const client = new WebClient(token);
await client.chat.postMessage({
channel: input.channel.slackChannel!,
text: [
`:wave: Hi! I'm the notification :bee:.`,
`${actionMessage} about your ${projectLink} project.`,
].join('\n'),
text: [`:wave: Hi! I'm the notification :bee:.`, `${actionMessage} about your ${projectLink} project.`].join(
'\n'
),
});
} catch (error) {
this.logger.error(`Failed to send Slack notification`, error);
@ -166,9 +163,7 @@ function renderAttachments({
title: string;
changes: readonly Types.SchemaChange[];
}): MessageAttachment {
const text = changes
.map((change) => slackCoderize(change.message))
.join('\n');
const text = changes.map(change => slackCoderize(change.message)).join('\n');
return {
mrkdwn_in: ['text'],

View file

@ -1,8 +1,5 @@
import { Injectable, Inject } from 'graphql-modules';
import type {
CommunicationAdapter,
SchemaChangeNotificationInput,
} from './common';
import type { CommunicationAdapter, SchemaChangeNotificationInput } from './common';
import { Logger } from '../../../shared/providers/logger';
import { HttpClient } from '../../../shared/providers/http-client';
import { WEBHOOKS_CONFIG } from '../tokens';
@ -12,11 +9,7 @@ import type { WebhooksConfig } from '../tokens';
export class WebhookCommunicationAdapter implements CommunicationAdapter {
private logger: Logger;
constructor(
logger: Logger,
private http: HttpClient,
@Inject(WEBHOOKS_CONFIG) private config: WebhooksConfig
) {
constructor(logger: Logger, private http: HttpClient, @Inject(WEBHOOKS_CONFIG) private config: WebhooksConfig) {
this.logger = logger.child({ service: 'WebhookCommunicationAdapter' });
}

View file

@ -40,9 +40,7 @@ export class AlertsManager {
});
}
async addChannel(
input: AlertsModule.AddAlertChannelInput
): Promise<AlertChannel> {
async addChannel(input: AlertsModule.AddAlertChannelInput): Promise<AlertChannel> {
this.logger.debug(
'Adding Alert Channel (organization=%s, project=%s, type=%s)',
input.organization,
@ -100,7 +98,7 @@ export class AlertsManager {
const channels = await this.storage.deleteAlertChannels(input);
await Promise.all(
channels.map((channel) =>
channels.map(channel =>
this.triggerChannelConfirmation({
kind: 'deleted',
channel,
@ -113,17 +111,9 @@ export class AlertsManager {
return channels;
}
@cache<ProjectSelector>(
(selector) => selector.project + selector.organization
)
async getChannels(
selector: ProjectSelector
): Promise<readonly AlertChannel[]> {
this.logger.debug(
'Fetching Alert Channels (organization=%s, project=%s)',
selector.organization,
selector.project
);
@cache<ProjectSelector>(selector => selector.project + selector.organization)
async getChannels(selector: ProjectSelector): Promise<readonly AlertChannel[]> {
this.logger.debug('Fetching Alert Channels (organization=%s, project=%s)', selector.organization, selector.project);
await this.authManager.ensureProjectAccess({
...selector,
scope: ProjectAccessScope.READ,
@ -181,11 +171,7 @@ export class AlertsManager {
}
async getAlerts(selector: ProjectSelector): Promise<readonly Alert[]> {
this.logger.debug(
'Fetching Alerts (organization=%s, project=%s)',
selector.organization,
selector.project
);
this.logger.debug('Fetching Alerts (organization=%s, project=%s)', selector.organization, selector.project);
await this.authManager.ensureProjectAccess({
...selector,
scope: ProjectAccessScope.READ,
@ -193,9 +179,7 @@ export class AlertsManager {
return this.storage.getAlerts(selector);
}
async triggerSchemaChangeNotifications(
event: SchemaChangeNotificationInput['event']
) {
async triggerSchemaChangeNotifications(event: SchemaChangeNotificationInput['event']) {
const organization = event.organization.id;
const project = event.project.id;
const target = event.target.id;
@ -226,14 +210,12 @@ export class AlertsManager {
]);
const matchingAlerts = alerts.filter(
(alert) =>
alert.type === 'SCHEMA_CHANGE_NOTIFICATIONS' &&
alert.targetId === target
alert => alert.type === 'SCHEMA_CHANGE_NOTIFICATIONS' && alert.targetId === target
);
const pairs = matchingAlerts.map((alert) => {
const pairs = matchingAlerts.map(alert => {
return {
alert,
channel: channels.find((channel) => channel.id === alert.channelId)!,
channel: channels.find(channel => channel.id === alert.channelId)!,
};
});

View file

@ -4,6 +4,4 @@ export interface WebhooksConfig {
endpoint: string;
}
export const WEBHOOKS_CONFIG = new InjectionToken<WebhooksConfig>(
'webhooks-endpoint'
);
export const WEBHOOKS_CONFIG = new InjectionToken<WebhooksConfig>('webhooks-endpoint');

View file

@ -6,8 +6,7 @@ import { z } from 'zod';
const AlertChannelNameModel = z.string().min(1).max(100);
const SlackChannelNameModel = z.string().min(1).max(80);
const MaybeModel = <T extends z.ZodType>(value: T) =>
z.union([z.null(), z.undefined(), value]);
const MaybeModel = <T extends z.ZodType>(value: T) => z.union([z.null(), z.undefined(), value]);
export const resolvers: AlertsModule.Resolvers = {
Mutation: {
@ -128,7 +127,7 @@ export const resolvers: AlertsModule.Resolvers = {
project: alert.projectId,
});
return channels.find((c) => c.id === alert.channelId)!;
return channels.find(c => c.id === alert.channelId)!;
},
target(alert, _, { injector }) {
return injector.get(TargetManager).getTarget({

View file

@ -13,12 +13,5 @@ export const authModule = createModule({
dirname: __dirname,
typeDefs,
resolvers,
providers: [
AuthManager,
UserManager,
ApiTokenProvider,
OrganizationAccess,
ProjectAccess,
TargetAccess,
],
providers: [AuthManager, UserManager, ApiTokenProvider, OrganizationAccess, ProjectAccess, TargetAccess],
});

Some files were not shown because too many files have changed in this diff Show more