feat: full HTTP request logging (#5234)

This commit is contained in:
Laurin Quast 2024-08-01 17:06:10 +02:00 committed by GitHub
parent 5953562444
commit e6dc5c9df3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
37 changed files with 814 additions and 503 deletions

View file

@ -0,0 +1,27 @@
---
'@graphql-hive/apollo': minor
---
Better HTTP info, error and debug logging.
For the supergraph manager, pass a `console` instance as the `logger` property.
```ts
import { createSupergraphManager } from '@graphql-hive/apollo';
const manager = createSupergraphManager({
...otherOptions,
logger: console,
})
```
For the supergraph SDL fetcher, pass a `console` instance as the `logger` property.
```ts
import { createSupergraphSDLFetcher } from '@graphql-hive/apollo';
const manager = createSupergraphSDLFetcher({
...otherOptions,
logger: console,
})
```

View file

@ -0,0 +1,7 @@
---
'@graphql-hive/core': minor
'@graphql-hive/yoga': minor
'@graphql-hive/apollo': minor
---
Improved logging output of HTTP requests and retries.

View file

@ -0,0 +1,5 @@
---
'@graphql-hive/cli': minor
---
Provide debug logging for HTTP requests when providing the `--debug` flag.

View file

@ -14,7 +14,6 @@ const OPERATIONS_PATHS = [
const rulesToExtends = Object.fromEntries(
Object.entries(guildConfig.rules).filter(([key]) =>
[
'no-implicit-coercion',
'import/first',
'no-restricted-globals',
'@typescript-eslint/no-unused-vars',
@ -189,6 +188,7 @@ module.exports = {
'jsx-a11y/no-static-element-interactions': 'off',
'@next/next/no-html-link-for-pages': 'off',
'unicorn/no-negated-condition': 'off',
'no-implicit-coercion': 'off',
},
},
{

View file

@ -9,6 +9,7 @@ import {
http,
isHiveClient,
joinUrl,
Logger,
} from '@graphql-hive/core';
import { version } from './version.js';
@ -17,6 +18,7 @@ export { atLeastOnceSampler, createSchemaFetcher, createServicesFetcher } from '
export interface SupergraphSDLFetcherOptions {
endpoint: string;
key: string;
logger?: Logger;
}
export function createSupergraphSDLFetcher(options: SupergraphSDLFetcherOptions) {
@ -44,13 +46,13 @@ export function createSupergraphSDLFetcher(options: SupergraphSDLFetcherOptions)
return http
.get(endpoint, {
headers,
isRequestOk: response => response.status === 304 || response.ok,
retry: {
retryWhen: response => response.status >= 500,
okWhen: response => response.status === 304,
retries: 10,
maxTimeout: 200,
minTimeout: 1,
},
logger: options.logger,
})
.then(async response => {
if (response.ok) {
@ -87,6 +89,7 @@ export function createSupergraphManager(
const fetchSupergraph = createSupergraphSDLFetcher({
endpoint: options.endpoint,
key: options.key,
logger: options.logger,
});
let timer: ReturnType<typeof setTimeout> | null = null;

View file

@ -82,12 +82,12 @@ test('should not interrupt the process', async () => {
logger,
},
reporting: {
endpoint: 'http://404.localhost/registry',
endpoint: 'http://404.localhost.noop/registry',
author: 'jest',
commit: 'js',
},
usage: {
endpoint: 'http://404.localhost/usage',
endpoint: 'http://404.localhost.noop/usage',
},
}),
],
@ -100,7 +100,7 @@ test('should not interrupt the process', async () => {
}
`,
});
await waitFor(50);
await waitFor(200);
await apollo.stop();
clean();
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info]'));
@ -299,7 +299,7 @@ describe('supergraph SDL fetcher', async () => {
await fetcher();
} catch (err) {
expect(err).toMatchInlineSnapshot(
`[Error: Failed to fetch http://localhost/supergraph, received: 500 Internal Server Error]`,
`[Error: GET http://localhost/supergraph failed with status 500.]`,
);
}
});

View file

@ -3,22 +3,53 @@ import { print, type GraphQLError } from 'graphql';
import type { ExecutionResult } from 'graphql';
import { http } from '@graphql-hive/core';
import type { TypedDocumentNode } from '@graphql-typed-document-node/core';
import { Command, Errors, Config as OclifConfig } from '@oclif/core';
import { Command, Errors, Flags, Interfaces } from '@oclif/core';
import { Config, GetConfigurationValueType, ValidConfigurationKeys } from './helpers/config';
export type Flags<T extends typeof Command> = Interfaces.InferredFlags<
(typeof BaseCommand)['baseFlags'] & T['flags']
>;
export type Args<T extends typeof Command> = Interfaces.InferredArgs<T['args']>;
type OmitNever<T> = { [K in keyof T as T[K] extends never ? never : K]: T[K] };
export default abstract class extends Command {
protected _userConfig: Config;
export default abstract class BaseCommand<T extends typeof Command> extends Command {
protected _userConfig: Config | undefined;
protected constructor(argv: string[], config: OclifConfig) {
super(argv, config);
static baseFlags = {
debug: Flags.boolean({
default: false,
summary: 'Whether debug output for HTTP calls and similar should be enabled.',
}),
};
protected flags!: Flags<T>;
protected args!: Args<T>;
protected get userConfig(): Config {
if (!this._userConfig) {
throw new Error('User config is not initialized');
}
return this._userConfig!;
}
public async init(): Promise<void> {
await super.init();
this._userConfig = new Config({
// eslint-disable-next-line no-process-env
filepath: process.env.HIVE_CONFIG,
rootDir: process.cwd(),
});
const { args, flags } = await this.parse({
flags: this.ctor.flags,
baseFlags: (super.ctor as typeof BaseCommand).baseFlags,
args: this.ctor.args,
strict: this.ctor.strict,
});
this.flags = flags as Flags<T>;
this.args = args as Args<T>;
}
success(...args: any[]) {
@ -122,7 +153,7 @@ export default abstract class extends Command {
return process.env[env] as TArgs[keyof TArgs] as NonNullable<GetConfigurationValueType<TKey>>;
}
const userConfigValue = this._userConfig.get(key);
const userConfigValue = this._userConfig!.get(key);
if (userConfigValue != null) {
return userConfigValue;
@ -161,6 +192,8 @@ export default abstract class extends Command {
...additionalHeaders,
};
const isDebug = this.flags.debug;
return {
async request<TResult, TVariables>(
operation: TypedDocumentNode<TResult, TVariables>,
@ -173,6 +206,16 @@ export default abstract class extends Command {
variables,
}),
{
logger: {
info: (...args) => {
if (isDebug) {
console.info(...args);
}
},
error: (...args) => {
console.error(...args);
},
},
headers: requestHeaders,
},
);

View file

@ -2,7 +2,7 @@ import { http, URL } from '@graphql-hive/core';
import { Flags } from '@oclif/core';
import Command from '../../base-command';
export default class ArtifactsFetch extends Command {
export default class ArtifactsFetch extends Command<typeof ArtifactsFetch> {
static description = 'fetch artifacts from the CDN';
static flags = {
'cdn.endpoint': Flags.string({
@ -47,9 +47,6 @@ export default class ArtifactsFetch extends Command {
},
retry: {
retries: 3,
retryWhen(response) {
return response.status >= 500;
},
},
});

View file

@ -1,7 +1,7 @@
import { Args } from '@oclif/core';
import Command from '../../base-command';
export default class DeleteConfig extends Command {
export default class DeleteConfig extends Command<typeof DeleteConfig> {
static description = 'deletes specific cli configuration';
static args = {
key: Args.string({
@ -13,7 +13,7 @@ export default class DeleteConfig extends Command {
async run() {
const { args } = await this.parse(DeleteConfig);
this._userConfig.delete(args.key);
this._userConfig!.delete(args.key);
this.success(this.bolderize(`Config flag "${args.key}" was deleted`));
}
}

View file

@ -2,7 +2,7 @@ import { Args } from '@oclif/core';
import Command from '../../base-command';
import { allowedKeys, ValidConfigurationKeys } from '../../helpers/config';
export default class GetConfig extends Command {
export default class GetConfig extends Command<typeof GetConfig> {
static description = 'prints specific cli configuration';
static args = {
key: Args.string({
@ -15,6 +15,6 @@ export default class GetConfig extends Command {
async run() {
const { args } = await this.parse(GetConfig);
console.dir(this._userConfig.get(args.key as ValidConfigurationKeys));
console.dir(this.userConfig.get(args.key as ValidConfigurationKeys));
}
}

View file

@ -1,10 +1,10 @@
import Command from '../../base-command';
export default class ResetConfig extends Command {
export default class ResetConfig extends Command<typeof ResetConfig> {
static description = 'resets local cli configuration';
async run() {
this._userConfig.clear();
this.userConfig.clear();
this.success('Config cleared.');
}
}

View file

@ -2,7 +2,7 @@ import { Args } from '@oclif/core';
import Command from '../../base-command';
import { allowedKeys, ValidConfigurationKeys } from '../../helpers/config';
export default class SetConfig extends Command {
export default class SetConfig extends Command<typeof SetConfig> {
static description = 'updates specific cli configuration';
static args = {
key: Args.string({
@ -20,7 +20,7 @@ export default class SetConfig extends Command {
async run() {
const { args } = await this.parse(SetConfig);
this._userConfig.set(args.key as ValidConfigurationKeys, args.value);
this.userConfig.set(args.key as ValidConfigurationKeys, args.value);
this.success(this.bolderize(`Config flag "${args.key}" was set to "${args.value}"`));
}
}

View file

@ -83,7 +83,7 @@ type ServiceWithSource = {
};
};
export default class Dev extends Command {
export default class Dev extends Command<typeof Dev> {
static description = [
'Develop and compose Supergraph with your local services.',
'Only available for Federation projects.',

View file

@ -5,7 +5,7 @@ import { Args, Flags } from '@oclif/core';
import Command from '../base-command';
import { loadSchema } from '../helpers/schema';
export default class Introspect extends Command {
export default class Introspect extends Command<typeof Introspect> {
static description = 'introspects a GraphQL Schema';
static flags = {
write: Flags.string({

View file

@ -14,7 +14,7 @@ const fetchLatestVersionQuery = graphql(/* GraphQL */ `
}
`);
export default class OperationsCheck extends Command {
export default class OperationsCheck extends Command<typeof OperationsCheck> {
static description = 'checks operations against a published schema';
static flags = {
'registry.endpoint': Flags.string({

View file

@ -86,7 +86,7 @@ const schemaCheckMutation = graphql(/* GraphQL */ `
}
`);
export default class SchemaCheck extends Command {
export default class SchemaCheck extends Command<typeof SchemaCheck> {
static description = 'checks schema';
static flags = {
service: Flags.string({

View file

@ -37,7 +37,7 @@ const schemaDeleteMutation = graphql(/* GraphQL */ `
}
`);
export default class SchemaDelete extends Command {
export default class SchemaDelete extends Command<typeof SchemaDelete> {
static description = 'deletes a schema';
static flags = {
'registry.endpoint': Flags.string({

View file

@ -20,7 +20,7 @@ const SchemaVersionForActionIdQuery = graphql(/* GraphQL */ `
}
`);
export default class SchemaFetch extends Command {
export default class SchemaFetch extends Command<typeof SchemaFetch> {
static description = 'fetch schema or supergraph from the Hive API';
static flags = {
/** @deprecated */

View file

@ -63,7 +63,7 @@ const schemaPublishMutation = graphql(/* GraphQL */ `
}
`);
export default class SchemaPublish extends Command {
export default class SchemaPublish extends Command<typeof SchemaPublish> {
static description = 'publishes schema';
static flags = {
service: Flags.string({

View file

@ -35,7 +35,7 @@ const myTokenInfoQuery = graphql(/* GraphQL */ `
}
`);
export default class WhoAmI extends Command {
export default class WhoAmI extends Command<typeof WhoAmI> {
static description = 'shows information about the current token';
static flags = {
'registry.endpoint': Flags.string({

View file

@ -5,7 +5,7 @@ import { GraphQLFileLoader } from '@graphql-tools/graphql-file-loader';
import { JsonFileLoader } from '@graphql-tools/json-file-loader';
import { loadTypedefs } from '@graphql-tools/load';
import { UrlLoader } from '@graphql-tools/url-loader';
import baseCommand from '../base-command';
import BaseCommand from '../base-command';
import { FragmentType, graphql, useFragment as unmaskFragment } from '../gql';
import { CriticalityLevel, SchemaErrorConnection, SchemaWarningConnection } from '../gql/graphql';
@ -17,7 +17,7 @@ const criticalityMap: Record<CriticalityLevel, string> = {
[CriticalityLevel.Dangerous]: colors.green('-'),
};
export function renderErrors(this: baseCommand, errors: SchemaErrorConnection) {
export function renderErrors(this: BaseCommand<any>, errors: SchemaErrorConnection) {
this.fail(`Detected ${errors.total} error${errors.total > 1 ? 's' : ''}`);
this.log('');
@ -43,7 +43,7 @@ const RenderChanges_SchemaChanges = graphql(`
`);
export function renderChanges(
this: baseCommand,
this: BaseCommand<any>,
maskedChanges: FragmentType<typeof RenderChanges_SchemaChanges>,
) {
const changes = unmaskFragment(RenderChanges_SchemaChanges, maskedChanges);
@ -91,7 +91,7 @@ export function renderChanges(
}
}
export function renderWarnings(this: baseCommand, warnings: SchemaWarningConnection) {
export function renderWarnings(this: BaseCommand<any>, warnings: SchemaWarningConnection) {
this.log('');
this.infoWarning(`Detected ${warnings.total} warning${warnings.total > 1 ? 's' : ''}`);
this.log('');

View file

@ -1,9 +1,8 @@
import retry from 'async-retry';
import { version } from '../version.js';
import { http } from './http-client.js';
import type { Logger } from './types.js';
type ReadOnlyResponse = Pick<Response, 'status' | 'text' | 'json'>;
type ReadOnlyResponse = Pick<Response, 'status' | 'text' | 'json' | 'statusText'>;
export interface AgentOptions {
enabled?: boolean;
@ -55,12 +54,10 @@ export interface AgentOptions {
export function createAgent<TEvent>(
pluginOptions: AgentOptions,
{
prefix,
data,
body,
headers = () => ({}),
}: {
prefix: string;
data: {
clear(): void;
set(data: TEvent): void;
@ -97,10 +94,14 @@ export function createAgent<TEvent>(
function debugLog(msg: string) {
if (options.debug) {
options.logger.info(`[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}`);
options.logger.info(msg);
}
}
function errorLog(msg: string) {
options.logger.error(msg);
}
let scheduled = false;
let inProgressCaptures: Promise<void>[] = [];
@ -132,115 +133,59 @@ export function createAgent<TEvent>(
if (data.size() >= options.maxSize) {
debugLog('Sending immediately');
setImmediate(() => send({ runOnce: true, throwOnError: false }));
setImmediate(() => send({ throwOnError: false }));
}
}
function sendImmediately(event: TEvent): Promise<ReadOnlyResponse | null> {
data.set(event);
debugLog('Sending immediately');
return send({ runOnce: true, throwOnError: true });
return send({ throwOnError: true });
}
async function send(sendOptions: {
runOnce?: boolean;
throwOnError: true;
}): Promise<ReadOnlyResponse | null>;
async function send(sendOptions: {
runOnce?: boolean;
throwOnError: false;
}): Promise<ReadOnlyResponse | null>;
async function send(sendOptions?: {
runOnce?: boolean;
throwOnError: boolean;
}): Promise<ReadOnlyResponse | null> {
const runOnce = sendOptions?.runOnce ?? false;
if (!data.size()) {
if (!runOnce) {
schedule();
}
async function send(sendOptions?: { throwOnError?: boolean }): Promise<ReadOnlyResponse | null> {
if (!data.size() || !enabled) {
return null;
}
try {
const buffer = await body();
const dataToSend = data.size();
const buffer = await body();
const dataToSend = data.size();
data.clear();
data.clear();
const sendReport: retry.RetryFunction<{
status: number;
text(): Promise<string>;
json(): Promise<unknown>;
}> = async (_bail, attempt) => {
debugLog(`Sending (queue ${dataToSend}) (attempt ${attempt})`);
debugLog(`Sending report (queue ${dataToSend})`);
const response = await http
.post(options.endpoint, buffer, {
headers: {
accept: 'application/json',
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'User-Agent': `${options.name}/${version}`,
...headers(),
},
timeout: options.timeout,
retry: {
retries: options.maxRetries,
factor: 2,
},
logger: options.logger,
fetchImplementation: pluginOptions.__testing?.fetch,
})
.then(res => {
debugLog(`Report sent!`);
return res;
})
.catch(error => {
errorLog(`Failed to send report.`);
if (!enabled) {
return {
status: 200,
text: async () => 'OK',
json: async () => ({}),
};
if (sendOptions?.throwOnError) {
throw error;
}
const response = await http
.post(options.endpoint, buffer, {
headers: {
accept: 'application/json',
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'User-Agent': `${options.name}/${version}`,
...headers(),
},
timeout: options.timeout,
fetchImplementation: pluginOptions.__testing?.fetch,
})
.catch(error => {
debugLog(`Attempt ${attempt} failed: ${error.message}`);
return Promise.reject(error);
});
if (response.status >= 200 && response.status < 300) {
return response;
}
debugLog(`Attempt ${attempt} failed: ${response.status}`);
throw new Error(`${response.status}: ${response.statusText} ${await response.text()}`);
};
const response = await retry(sendReport, {
retries: options.maxRetries,
minTimeout: options.minTimeout,
factor: 2,
return null;
});
if (response.status < 200 || response.status >= 300) {
throw new Error(
`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${await response.text()}`,
);
}
debugLog(`Sent!`);
if (!runOnce) {
schedule();
}
return response;
} catch (error: any) {
if (!runOnce) {
schedule();
}
if (sendOptions?.throwOnError) {
throw error;
}
options.logger.error(`[hive][${prefix}] Failed to send data: ${error.message}`);
return null;
}
return response;
}
async function dispose() {
@ -254,7 +199,6 @@ export function createAgent<TEvent>(
}
await send({
runOnce: true,
throwOnError: false,
});
}

View file

@ -10,19 +10,19 @@ import { createPersistedDocuments } from './persisted-documents.js';
import { createReporting } from './reporting.js';
import type { HiveClient, HivePluginOptions } from './types.js';
import { createUsage } from './usage.js';
import { logIf } from './utils.js';
import { createHiveLogger, logIf } from './utils.js';
export function createHive(options: HivePluginOptions): HiveClient {
const logger = options?.agent?.logger ?? console;
const logger = createHiveLogger(options?.agent?.logger ?? console, '[hive]');
let enabled = options.enabled ?? true;
if (enabled === false) {
logIf(options.debug === true, '[hive] is not enabled.', logger.info);
logIf(options.debug === true, 'plugin is not enabled.', logger.info);
}
if (!options.token && enabled) {
enabled = false;
logger.info('[hive] Missing token, disabling.');
logger.info('Missing token, disabling.');
}
const mergedOptions: HivePluginOptions = { ...options, enabled } as HivePluginOptions;
@ -50,6 +50,7 @@ export function createHive(options: HivePluginOptions): HiveClient {
const printTokenInfo = enabled
? options.printTokenInfo === true || (!!options.debug && options.printTokenInfo !== false)
: false;
const infoLogger = createHiveLogger(logger, '[info]');
const info = printTokenInfo
? async () => {
@ -97,6 +98,8 @@ export function createHive(options: HivePluginOptions): HiveClient {
}
`;
infoLogger.info('Fetching token details...');
const response = await http.post(
endpoint,
JSON.stringify({
@ -113,6 +116,7 @@ export function createHive(options: HivePluginOptions): HiveClient {
},
timeout: 30_000,
fetchImplementation: options?.agent?.__testing?.fetch,
logger: infoLogger,
},
);
@ -144,9 +148,9 @@ export function createHive(options: HivePluginOptions): HiveClient {
const projectUrl = `${organizationUrl}/${project.cleanId}`;
const targetUrl = `${projectUrl}/${target.cleanId}`;
logger.info(
infoLogger.info(
[
'[hive][info] Token details',
'Token details',
'',
`Token name: ${print(tokenInfo.token.name)}`,
`Organization: ${print(organization.name, organizationUrl)}`,
@ -160,23 +164,21 @@ export function createHive(options: HivePluginOptions): HiveClient {
].join('\n'),
);
} else if (result.data?.tokenInfo.message) {
logger.error(
`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`,
);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
infoLogger.error(`Token not found. Reason: ${result.data?.tokenInfo.message}`);
infoLogger.info(
`How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
} else {
logger.error(`[hive][info] ${result.errors![0].message}`);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
infoLogger.error(`${result.errors![0].message}`);
infoLogger.info(
`How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
}
} else {
logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
infoLogger.error(`Error ${response.status}: ${response.statusText}`);
}
} catch (error) {
logger.error(`[hive][info] Error ${(error as Error)?.message ?? error}`);
infoLogger.error(`Error ${(error as Error)?.message ?? error}`);
}
}
: () => {};

View file

@ -10,6 +10,7 @@ interface Schema {
}
function createFetcher(options: SchemaFetcherOptions & ServicesFetcherOptions) {
const logger = options.logger ?? console;
let cacheETag: string | null = null;
let cached: {
id: string;
@ -37,12 +38,12 @@ function createFetcher(options: SchemaFetcherOptions & ServicesFetcherOptions) {
.get(endpoint, {
headers,
retry: {
retryWhen: response => response.status >= 500,
okWhen: response => response.status === 304,
retries: 10,
maxTimeout: 200,
minTimeout: 1,
},
isRequestOk: response => response.ok || response.status === 304,
logger,
})
.then(async response => {
if (response.ok) {
@ -61,9 +62,7 @@ function createFetcher(options: SchemaFetcherOptions & ServicesFetcherOptions) {
return cached;
}
throw new Error(
`Failed to GET ${endpoint}, received: ${response.status} ${response.statusText ?? 'Internal Server Error'}`,
);
throw new Error(`Unexpected error.`);
});
};
}

View file

@ -1,46 +1,52 @@
import asyncRetry from 'async-retry';
import { fetch, URL } from '@whatwg-node/fetch';
import type { Logger } from './types.js';
type RetryOptions = Parameters<typeof asyncRetry>[1] & {
retryWhen(response: Response): boolean;
okWhen?(response: Response): boolean;
};
interface SharedConfig {
headers: Record<string, string>;
/**
* timeout in milliseconds (for each single fetch call)
* @default 20_000
*/
timeout?: number;
/** Retry configuration. Set to `false` for having no retries. */
retry?: RetryOptions | false;
/** custom fetch implementation. */
fetchImplementation?: typeof fetch;
/** Logger for HTTP info and request errors. Uses `console` by default. */
logger?: Logger;
/**
* Function for determining whether the request response is okay.
* You can override it if you want to accept other status codes as well.
* @default {response => response.ok}
**/
isRequestOk?: ResponseAssertFunction;
}
function get(
endpoint: string,
config: {
headers: Record<string, string>;
timeout?: number;
fetchImplementation?: typeof fetch;
retry?: RetryOptions;
},
) {
/**
* Return a string that contains the reason on why the request should be retried.
*/
type ResponseAssertFunction = (response: Response) => boolean;
type RetryOptions = Parameters<typeof asyncRetry>[1];
function get(endpoint: string, config: SharedConfig) {
return makeFetchCall(endpoint, {
method: 'GET',
headers: config.headers,
timeout: config.timeout,
retry: config.retry,
fetchImplementation: config.fetchImplementation,
logger: config.logger ?? console,
isRequestOk: config.isRequestOk,
});
}
function post(
endpoint: string,
data: string | Buffer,
config: {
headers: Record<string, string>;
timeout?: number;
retry?: RetryOptions;
fetchImplementation?: typeof fetch;
},
) {
function post(endpoint: string, data: string | Buffer, config: SharedConfig) {
return makeFetchCall(endpoint, {
body: data,
method: 'POST',
headers: config.headers,
timeout: config.timeout,
retry: config.retry,
fetchImplementation: config.fetchImplementation,
...config,
});
}
@ -49,87 +55,160 @@ export const http = {
post,
};
async function makeFetchCall(
endpoint: string,
export async function makeFetchCall(
endpoint: URL | string,
config: {
body?: string | Buffer;
method: 'GET' | 'POST';
headers: Record<string, string>;
/**
* timeout in milliseconds (for each single fetch call)
* @default 20_000
*/
timeout?: number;
retry?: RetryOptions;
/** Retry configuration. Set to `false` for having no retries. */
retry?: RetryOptions | false;
/** custom fetch implementation. */
fetchImplementation?: typeof fetch;
/** Logger for HTTP info and request errors. Uses `console` by default. */
logger?: Logger;
/**
* Function for determining whether the request response is okay.
* You can override it if you want to accept other status codes as well.
* @default {response => response.ok}
**/
isRequestOk?: ResponseAssertFunction;
},
) {
const controller = new AbortController();
let timeoutId: ReturnType<typeof setTimeout> | undefined = undefined;
): Promise<Response> {
const logger = config.logger ?? console;
const isRequestOk: ResponseAssertFunction = config.isRequestOk ?? (response => response.ok);
let retries = 0;
let minTimeout = 200;
let maxTimeout = 2000;
let factor = 1.2;
if (config.timeout) {
timeoutId = setTimeout(() => controller.abort(), config.timeout);
if (config.retry !== false) {
retries = config.retry?.retries ?? 5;
minTimeout = config.retry?.minTimeout ?? 200;
maxTimeout = config.retry?.maxTimeout ?? 2000;
factor = config.retry?.factor ?? 1.2;
}
try {
const retryOptions = config.retry;
if (!retryOptions) {
return await (config.fetchImplementation ?? fetch)(endpoint, {
return await asyncRetry(
async (bail, attempt) => {
logger.info(
`${config.method} ${endpoint}` +
(retries > 0 ? ' ' + getAttemptMessagePart(attempt, retries + 1) : ''),
);
const getDuration = measureTime();
const signal = AbortSignal.timeout(config.timeout ?? 20_000);
const response = await (config.fetchImplementation ?? fetch)(endpoint, {
method: config.method,
body: config.body,
headers: config.headers,
signal: controller.signal,
});
}
const result = await asyncRetry(
async bail => {
const res = await (config.fetchImplementation ?? fetch)(endpoint, {
method: config.method,
body: config.body,
headers: config.headers,
signal: controller.signal,
});
if (res.ok || retryOptions.okWhen?.(res)) {
return res;
}
if (!retryOptions.retryWhen(res)) {
bail(
new Error(
`Failed to fetch ${endpoint}, received: ${res.status} ${res.statusText ?? 'Internal Server Error'}`,
),
signal,
}).catch((error: unknown) => {
const logErrorMessage = () =>
logger.error(
`${config.method} ${endpoint} failed ${getDuration()}. ` + getErrorMessage(error),
);
return;
if (isAggregateError(error)) {
for (const err of error.errors) {
logger.error(err);
}
logErrorMessage();
throw new Error('Unexpected HTTP error.', { cause: error });
}
throw new Error(
`Failed to fetch ${endpoint}, received: ${res.status} ${res.statusText ?? 'Internal Server Error'}`,
);
},
{
...retryOptions,
retries: retryOptions?.retries ?? 5,
minTimeout: retryOptions?.minTimeout ?? 200,
maxTimeout: retryOptions?.maxTimeout ?? 2000,
factor: retryOptions?.factor ?? 1.2,
},
);
if (result === undefined) {
throw new Error('Failed to bail out of retry.');
}
return result;
} catch (error) {
if (isAggregateError(error)) {
throw new Error(error.errors.map(e => e.message).join(', '), {
cause: error,
logger.error(error);
logErrorMessage();
throw new Error('Unexpected HTTP error.', { cause: error });
});
}
throw error;
} finally {
if (timeoutId !== undefined) {
clearTimeout(timeoutId);
}
if (isRequestOk(response)) {
logger.info(
`${config.method} ${endpoint} succeeded with status ${response.status} ${getDuration()}.`,
);
return response;
}
logger.error(
`${config.method} ${endpoint} failed with status ${response.status} ${getDuration()}: ${(await response.text()) || '<empty response body>'}`,
);
if (retries > 0 && attempt > retries) {
logger.error(
`${config.method} ${endpoint} retry limit exceeded after ${attempt} attempts.`,
);
}
const error = new Error(
`${config.method} ${endpoint} failed with status ${response.status}.`,
);
if (response.status >= 400 && response.status < 500) {
if (retries > 0) {
logger.error(`Abort retry because of status code ${response.status}.`);
}
bail(error);
}
throw error;
},
{
retries,
minTimeout,
maxTimeout,
factor,
},
);
}
/**
 * Extracts a human-readable message from an unknown error value.
 * Returns a placeholder string when the value carries no `message` property.
 */
function getErrorMessage(error: unknown): string {
  const hasMessage = error !== null && error !== undefined && typeof error === 'object' && 'message' in error;
  return hasMessage ? String((error as { message: unknown }).message) : '<no error message>';
}
/** Builds the "Attempt (current/total)" suffix appended to retry log lines. */
function getAttemptMessagePart(attempt: number, retry: number): string {
  return ['Attempt (', attempt, '/', retry, ')'].join('');
}
/**
 * Starts a wall-clock timer. The returned function yields the elapsed time
 * rendered as e.g. "(123ms)", suitable for embedding in log messages.
 */
function measureTime() {
  const startedAt = Date.now();
  return function elapsed(): string {
    const delta = Date.now() - startedAt;
    return `(${formatTimestamp(delta)})`;
  };
}
/**
 * Renders a millisecond duration as colon-separated units, e.g. "1h:2m:3s:45ms".
 * A unit is shown once it (or any larger unit) is non-zero; milliseconds are
 * always included, so small durations render as e.g. "45ms".
 */
function formatTimestamp(timestamp: number): string {
  const hours = Math.floor(timestamp / 3_600_000);
  const minutes = Math.floor(timestamp / 60_000) % 60;
  const seconds = Math.floor(timestamp / 1000) % 60;
  const milliseconds = timestamp % 1000;

  const segments: string[] = [];
  if (hours > 0) {
    segments.push(`${hours}h`);
  }
  // Keep minutes visible (even "0m") whenever hours are displayed.
  if (hours > 0 || minutes > 0) {
    segments.push(`${minutes}m`);
  }
  if (hours > 0 || minutes > 0 || seconds > 0) {
    segments.push(`${seconds}s`);
  }
  segments.push(`${milliseconds}ms`);
  return segments.join(':');
}
interface AggregateError extends Error {

View file

@ -10,9 +10,9 @@ import {
import { getDocumentNodeFromSchema } from '@graphql-tools/utils';
import { version } from '../version.js';
import type { SchemaPublishMutation } from './__generated__/types.js';
import { createAgent } from './agent.js';
import { http } from './http-client.js';
import type { HivePluginOptions } from './types.js';
import { logIf } from './utils.js';
import { createHiveLogger, logIf } from './utils.js';
export interface SchemaReporter {
report(args: { schema: GraphQLSchema }): void;
@ -30,7 +30,7 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
const token = pluginOptions.token;
const selfHostingOptions = pluginOptions.selfHosting;
const reportingOptions = pluginOptions.reporting;
const logger = pluginOptions.agent?.logger ?? console;
const logger = createHiveLogger(pluginOptions.agent?.logger ?? console, '[hive][reporting]');
logIf(
typeof reportingOptions.author !== 'string' || reportingOptions.author.length === 0,
@ -48,62 +48,41 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
logger.error,
);
let currentSchema: GraphQLSchema | null = null;
const agent = createAgent<GraphQLSchema>(
{
logger,
...pluginOptions.agent,
endpoint:
selfHostingOptions?.graphqlEndpoint ??
reportingOptions.endpoint ??
'https://app.graphql-hive.com/graphql',
token,
enabled: pluginOptions.enabled,
debug: pluginOptions.debug,
__testing: pluginOptions.agent?.__testing,
},
{
prefix: 'reporting',
data: {
set(incomingSchema) {
currentSchema = incomingSchema;
},
size() {
return currentSchema ? 1 : 0;
},
clear() {
currentSchema = null;
},
},
headers() {
return {
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
};
},
async body() {
return JSON.stringify({
query,
operationName: 'schemaPublish',
variables: {
input: {
sdl: await printToSDL(currentSchema!),
author: reportingOptions.author,
commit: reportingOptions.commit,
service: reportingOptions.serviceName ?? null,
url: reportingOptions.serviceUrl ?? null,
force: true,
},
},
});
},
},
);
const endpoint =
selfHostingOptions?.graphqlEndpoint ??
reportingOptions.endpoint ??
'https://app.graphql-hive.com/graphql';
return {
async report({ schema }) {
logger.info(`Publish schema`);
try {
const response = await agent.sendImmediately(schema);
const response = await http.post(
endpoint,
JSON.stringify({
query,
operationName: 'schemaPublish',
variables: {
input: {
sdl: await printToSDL(schema),
author: reportingOptions.author,
commit: reportingOptions.commit,
service: reportingOptions.serviceName ?? null,
url: reportingOptions.serviceUrl ?? null,
force: true,
},
},
}),
{
headers: {
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
authorization: `Bearer ${token}`,
'content-type': 'application/json',
},
logger,
},
);
if (response === null) {
throw new Error('Empty response');
@ -119,7 +98,7 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
switch (data.__typename) {
case 'SchemaPublishSuccess': {
logger.info(`[hive][reporting] ${data.successMessage ?? 'Published schema'}`);
logger.info(`${data.successMessage ?? 'Published schema'}`);
return;
}
case 'SchemaPublishMissingServiceError': {
@ -129,9 +108,7 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
throw new Error('Service url is not defined');
}
case 'SchemaPublishError': {
logger.info(
`[hive][reporting] Published schema (forced with ${data.errors.total} errors)`,
);
logger.info(`Published schema (forced with ${data.errors.total} errors)`);
data.errors.nodes.slice(0, 5).forEach(error => {
logger.info(` - ${error.message}`);
});
@ -140,13 +117,15 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
}
} catch (error) {
logger.error(
`[hive][reporting] Failed to report schema: ${
`Failed to report schema: ${
error instanceof Error && 'message' in error ? error.message : error
}`,
);
}
},
dispose: agent.dispose,
dispose() {
return Promise.resolve();
},
};
}

View file

@ -259,6 +259,7 @@ export interface GraphQLErrorsResult {
export interface SchemaFetcherOptions {
endpoint: string;
key: string;
logger?: Logger;
}
export interface ServicesFetcherOptions {

View file

@ -20,7 +20,14 @@ import type {
HivePluginOptions,
HiveUsagePluginOptions,
} from './types.js';
import { cache, cacheDocumentKey, logIf, measureDuration, memo } from './utils.js';
import {
cache,
cacheDocumentKey,
createHiveLogger,
logIf,
measureDuration,
memo,
} from './utils.js';
interface UsageCollector {
collect(): CollectUsageCallback;
@ -64,15 +71,16 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
const options =
typeof pluginOptions.usage === 'boolean' ? ({} as HiveUsagePluginOptions) : pluginOptions.usage;
const selfHostingOptions = pluginOptions.selfHosting;
const logger = pluginOptions.agent?.logger ?? console;
const logger = createHiveLogger(pluginOptions.agent?.logger ?? console, '[hive][usage]');
const collector = memo(createCollector, arg => arg.schema);
const excludeSet = new Set(options.exclude ?? []);
const agent = createAgent<AgentAction>(
{
logger,
...(pluginOptions.agent ?? {
maxSize: 1500,
}),
logger,
endpoint:
selfHostingOptions?.usageEndpoint ??
options.endpoint ??
@ -83,7 +91,6 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
__testing: pluginOptions.agent?.__testing,
},
{
prefix: 'usage',
data: {
set(action) {
if (action.type === 'request') {
@ -156,7 +163,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
logIf(
typeof pluginOptions.token !== 'string' || pluginOptions.token.length === 0,
'[hive][usage] token is missing',
'token is missing',
logger.error,
);

View file

@ -1,6 +1,6 @@
import { crypto, TextEncoder } from '@whatwg-node/fetch';
import { hiveClientSymbol } from './client.js';
import type { HiveClient, HivePluginOptions } from './types.js';
import type { HiveClient, HivePluginOptions, Logger } from './types.js';
export const isCloudflareWorker =
typeof caches !== 'undefined' && 'default' in caches && !!caches.default;
@ -178,3 +178,42 @@ export function joinUrl(url: string, subdirectory: string) {
return normalizedUrl + '/' + normalizedSubdirectory;
}
const hiveSymbol = Symbol('hive-logger');
type HiveLogger = {
info(message: string): void;
error(error: any, ...data: any[]): void;
[hiveSymbol]: {
path: string;
logger: Logger;
};
};
export function createHiveLogger(baseLogger: Logger, prefix: string): HiveLogger {
const context: HiveLogger[typeof hiveSymbol] = {
path: '',
logger: baseLogger,
// @ts-expect-error internal stuff
...baseLogger?.[hiveSymbol],
};
context.path = context.path + prefix;
const { logger, path } = context;
return {
[hiveSymbol]: context,
info: (message: string) => {
logger.info(`${path} ${message}`);
},
error: (error: any, ...data: any[]) => {
if (error.stack) {
for (const stack of error.stack.split('\n')) {
logger.error(`${path} ${stack}`);
}
} else {
logger.error(`${path} ${String(error)}`, ...data);
}
},
};
}

View file

@ -5,3 +5,4 @@ export { createHive, autoDisposeSymbol } from './client/client.js';
export { atLeastOnceSampler } from './client/samplers.js';
export { isHiveClient, isAsyncIterable, createHash, joinUrl } from './client/utils.js';
export { http, URL } from './client/http-client.js';
export type { Logger } from './client/types.js';

View file

@ -19,7 +19,9 @@ test("should log that it's not enabled", async () => {
.then(() => 'OK')
.catch(() => 'ERROR');
expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive] is not enabled.`));
expect(logger.info).toHaveBeenCalledWith(
expect.stringContaining(`[hive] plugin is not enabled.`),
);
expect(result).toBe('OK');
});

View file

@ -290,7 +290,7 @@ test('fail in case of unexpected CDN status code (nRetryCount=11)', async () =>
await fetcher();
} catch (e) {
expect(e).toMatchInlineSnapshot(
`[Error: Failed to fetch http://localhost/services, received: 500 Internal Server Error]`,
`[Error: GET http://localhost/services failed with status 500.]`,
);
}
});

View file

@ -0,0 +1,146 @@
import { makeFetchCall } from '../src/client/http-client';
import { createHiveTestingLogger } from './test-utils';
test('HTTP call without retries and system level error', async () => {
const logger = createHiveTestingLogger();
const response = await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: false,
headers: {},
logger,
}).catch(_ => {});
if (response) {
throw new Error('Should have rejected.');
}
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop
[ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop
[ERR] GET https://ap.localhost.noop failed (666ms). getaddrinfo ENOTFOUND ap.localhost.noop
`);
});
test('HTTP with retries and system', async () => {
const logger = createHiveTestingLogger();
await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: {
retries: 1,
},
headers: {},
logger,
}).catch(_ => {});
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop Attempt (1/2)
[ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop
[ERR] GET https://ap.localhost.noop failed (666ms). getaddrinfo ENOTFOUND ap.localhost.noop
[INF] GET https://ap.localhost.noop Attempt (2/2)
[ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop
[ERR] GET https://ap.localhost.noop failed (666ms). getaddrinfo ENOTFOUND ap.localhost.noop
`);
});
test('HTTP with 4xx status code will not be retried', async () => {
const logger = createHiveTestingLogger();
await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: {
retries: 1,
},
headers: {},
logger,
fetchImplementation: async () => {
return new Response('Bubatzbieber', {
status: 404,
statusText: 'Not Found',
});
},
}).catch(_ => {});
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop Attempt (1/2)
[ERR] GET https://ap.localhost.noop failed with status 404 (666ms): Bubatzbieber
[ERR] Abort retry because of status code 404.
`);
});
test('HTTP with 5xx status code will be retried', async () => {
const logger = createHiveTestingLogger();
await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: {
retries: 1,
},
headers: {},
logger,
fetchImplementation: async () => {
return new Response('Bubatzbieber', {
status: 500,
statusText: 'Internal Server Error',
});
},
}).catch(_ => {});
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop Attempt (1/2)
[ERR] GET https://ap.localhost.noop failed with status 500 (666ms): Bubatzbieber
[INF] GET https://ap.localhost.noop Attempt (2/2)
[ERR] GET https://ap.localhost.noop failed with status 500 (666ms): Bubatzbieber
[ERR] GET https://ap.localhost.noop retry limit exceeded after 2 attempts.
`);
});
test('HTTP with status 3xx will be retried', async () => {
const logger = createHiveTestingLogger();
await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: {
retries: 1,
},
headers: {},
logger,
fetchImplementation: async () => {
return new Response('Bubatzbieber', {
status: 302,
statusText: 'Found',
});
},
}).catch(_ => {});
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop Attempt (1/2)
[ERR] GET https://ap.localhost.noop failed with status 302 (666ms): Bubatzbieber
[INF] GET https://ap.localhost.noop Attempt (2/2)
[ERR] GET https://ap.localhost.noop failed with status 302 (666ms): Bubatzbieber
[ERR] GET https://ap.localhost.noop retry limit exceeded after 2 attempts.
`);
});
test('HTTP with status 3xx will not be retried with custom "isRequestOk" implementation', async () => {
const logger = createHiveTestingLogger();
await makeFetchCall('https://ap.localhost.noop', {
method: 'GET',
retry: {
retries: 1,
},
headers: {},
logger,
fetchImplementation: async () => {
return new Response('Bubatzbieber', {
status: 302,
statusText: 'Found',
});
},
isRequestOk: response => response.status === 302,
}).catch(_ => {});
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] GET https://ap.localhost.noop Attempt (1/2)
[INF] GET https://ap.localhost.noop succeeded with status 302 (666ms).
`);
});

View file

@ -4,7 +4,7 @@ import { buildSubgraphSchema as buildSubgraphSchemaV1 } from '@apollo/federation
import { buildSubgraphSchema as buildSubgraphSchemaV2 } from '@apollo/subgraph';
import { createHive } from '../src/client/client';
import { version } from '../src/version';
import { waitFor } from './test-utils';
import { createHiveTestingLogger, waitFor } from './test-utils';
afterEach(() => {
nock.cleanAll();
@ -17,10 +17,7 @@ const headers = {
};
test('should not leak the exception', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const hive = createHive({
enabled: true,
@ -51,24 +48,17 @@ test('should not leak the exception', async () => {
await waitFor(50);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(
expect.stringContaining('[hive][reporting] Attempt 1 failed:'),
);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 2)');
expect(logger.error).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining(
`[hive][reporting] Failed to report schema: connect ECONNREFUSED 127.0.0.1:55404`,
),
);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://127.0.0.1:55404 Attempt (1/6)
[ERR] [hive][reporting] Error: connect ECONNREFUSED 127.0.0.1:55404
[ERR] [hive][reporting] at TCPConnectWrap.afterConnect [as oncomplete] (node:net:666:666)
[ERR] [hive][reporting] POST http://127.0.0.1:55404 failed (666ms). connect ECONNREFUSED 127.0.0.1:55404
`);
});
test('should send data to Hive', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -76,7 +66,6 @@ test('should send data to Hive', async () => {
const serviceUrl = 'https://api.com';
const serviceName = 'my-api';
let body: any = {};
const http = nock('http://localhost')
.post('/200')
.matchHeader('Authorization', `Bearer ${token}`)
@ -85,7 +74,6 @@ test('should send data to Hive', async () => {
.matchHeader('graphql-client-version', headers['graphql-client-version'])
.once()
.reply((_, _body) => {
body = _body;
return [
200,
{
@ -135,23 +123,16 @@ test('should send data to Hive', async () => {
await hive.dispose();
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(body.variables.input.sdl).toBe(`type Query{foo:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Published schema
`);
});
test('should send data to Hive (deprecated endpoint)', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -215,9 +196,12 @@ test('should send data to Hive (deprecated endpoint)', async () => {
await hive.dispose();
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Published schema
`);
expect(body.variables.input.sdl).toBe(`type Query{foo:String}`);
expect(body.variables.input.author).toBe(author);
@ -228,10 +212,7 @@ test('should send data to Hive (deprecated endpoint)', async () => {
});
test('should send data to app.graphql-hive.com/graphql by default', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -290,9 +271,12 @@ test('should send data to app.graphql-hive.com/graphql by default', async () =>
await hive.dispose();
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST https://app.graphql-hive.com/graphql Attempt (1/6)
[INF] [hive][reporting] POST https://app.graphql-hive.com/graphql succeeded with status 200 (666ms).
[INF] [hive][reporting] Published schema
`);
expect(body.variables.input.sdl).toBe(`type Query{foo:String}`);
expect(body.variables.input.author).toBe(author);
@ -301,10 +285,7 @@ test('should send data to app.graphql-hive.com/graphql by default', async () =>
});
test('should send data to Hive immediately', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -364,16 +345,14 @@ test('should send data to Hive immediately', async () => {
`),
});
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending immediately');
expect(logger.info).toHaveBeenCalledTimes(1);
expect(logger.getLogs()).toMatchInlineSnapshot(`[INF] [hive][reporting] Publish schema`);
logger.clear();
await waitFor(50);
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Sent!`);
expect(logger.info).toHaveBeenCalledWith(`[hive][reporting] Successfully published schema`);
expect(logger.info).toHaveBeenCalledTimes(4);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Successfully published schema
`);
expect(body.variables.input.sdl).toBe(`type Query{foo:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
@ -382,17 +361,18 @@ test('should send data to Hive immediately', async () => {
expect(body.variables.input.force).toBe(true);
await waitFor(100);
expect(logger.info).toHaveBeenCalledTimes(4);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Successfully published schema
`);
await hive.dispose();
http.done();
});
test('should send original schema of a federated (v1) service', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -434,7 +414,7 @@ test('should send original schema of a federated (v1) service', async () => {
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
return [200];
return [200, '{"data":{"schemaPublish":{"__typename":"SchemaPublishSuccess"}}}'];
});
hive.reportSchema({
@ -447,15 +427,21 @@ test('should send original schema of a federated (v1) service', async () => {
),
});
await waitFor(50);
await hive.dispose();
const logs = logger.getLogs();
expect(logs).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Published schema
`);
http.done();
});
test('should send original schema of a federated (v2) service', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const author = 'Test';
const commit = 'Commit';
@ -497,7 +483,7 @@ test('should send original schema of a federated (v2) service', async () => {
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
return [200];
return [200, '{"data":{"schemaPublish":{"__typename":"SchemaPublishSuccess"}}}'];
});
hive.reportSchema({
@ -510,16 +496,21 @@ test('should send original schema of a federated (v2) service', async () => {
),
});
await waitFor(50);
await hive.dispose();
const logs = logger.getLogs();
expect(logs).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][reporting] Published schema
`);
http.done();
});
test('should display SchemaPublishMissingServiceError', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
const http = nock('http://localhost')
.post('/200')
@ -570,17 +561,16 @@ test('should display SchemaPublishMissingServiceError', async () => {
await hive.dispose();
http.done();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.error).toHaveBeenCalledWith(
`[hive][reporting] Failed to report schema: Service name is not defined`,
);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[ERR] [hive][reporting] Failed to report schema: Service name is not defined
`);
});
test('should display SchemaPublishMissingUrlError', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
const http = nock('http://localhost')
@ -632,18 +622,19 @@ test('should display SchemaPublishMissingUrlError', async () => {
await hive.dispose();
http.done();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logger.error).toHaveBeenCalledWith(
`[hive][reporting] Failed to report schema: Service url is not defined`,
);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/200 Attempt (1/6)
[INF] [hive][reporting] POST http://localhost/200 succeeded with status 200 (666ms).
[ERR] [hive][reporting] Failed to report schema: Service url is not defined
`);
expect(logger.getLogs()).toContain('POST http://localhost/200 Attempt (1/6)');
expect(logger.getLogs()).toContain('Service url is not defined');
});
test('retry on non-200', async () => {
const logSpy = vi.fn();
const logger = {
error: logSpy,
info: logSpy,
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -684,13 +675,15 @@ test('retry on non-200', async () => {
await waitFor(50);
await hive.dispose();
expect(logSpy).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logSpy).toHaveBeenCalledWith(
expect.stringContaining(`[hive][reporting] Attempt 1 failed`),
);
expect(logSpy).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 2)');
expect(logSpy).toHaveBeenCalledWith(
expect.stringContaining(`[hive][reporting] Attempt 2 failed`),
);
expect(logSpy).not.toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 3)');
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][reporting] Publish schema
[INF] [hive][reporting] POST http://localhost/registry Attempt (1/6)
[ERR] [hive][reporting] Error: connect ECONNREFUSED ::1:80
[ERR] [hive][reporting] at createConnectionError (node:net:666:666)
[ERR] [hive][reporting] at afterConnectMultiple (node:net:666:666)
[ERR] [hive][reporting] Error: connect ECONNREFUSED 127.0.0.1:80
[ERR] [hive][reporting] at createConnectionError (node:net:666:666)
[ERR] [hive][reporting] at afterConnectMultiple (node:net:666:666)
[ERR] [hive][reporting] POST http://localhost/registry failed (666ms).
`);
});

View file

@ -3,3 +3,36 @@ export function waitFor(ms: number) {
setTimeout(resolve, ms);
});
}
/** helper function to get log lines and replace milliseconds with static value. */
function getLogLines(calls: Array<Array<unknown>>) {
return calls.map(log => {
let msg: string;
if (typeof log[1] === 'string') {
msg = log[1]
// Replace milliseconds with static value
.replace(/\(\d{1,3}ms\)/, '(666ms)')
// Replace stack trace line numbers with static value
.replace(/\(node:net:\d+:\d+\)/, '(node:net:666:666)')
.replace(/\(node:dns:\d+:\d+\)/, '(node:dns:666:666)');
} else {
msg = String(log[1]);
}
return '[' + log[0] + ']' + ' ' + msg;
});
}
export function createHiveTestingLogger() {
let fn = vi.fn();
return {
error: (message: unknown) => fn('ERR', message),
info: (message: unknown) => fn('INF', message),
getLogs() {
return getLogLines(fn.mock.calls).join('\n');
},
clear() {
fn = vi.fn();
},
};
}

View file

@ -4,7 +4,7 @@ import { createHive } from '../src/client/client';
import { atLeastOnceSampler } from '../src/client/samplers';
import type { Report } from '../src/client/usage';
import { version } from '../src/version';
import { waitFor } from './test-utils';
import { createHiveTestingLogger, waitFor } from './test-utils';
const headers = {
'Content-Type': 'application/json',
@ -110,10 +110,7 @@ afterEach(() => {
});
test('should send data to Hive', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -167,9 +164,13 @@ test('should send data to Hive', async () => {
await waitFor(30);
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Disposing
[INF] [hive][usage] Sending report (queue 1)
[INF] [hive][usage] POST http://localhost/200
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
// Map
expect(report.size).toEqual(1);
@ -219,11 +220,7 @@ test('should send data to Hive', async () => {
});
test('should send data to Hive (deprecated endpoint)', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
let report: Report = {
@ -273,9 +270,13 @@ test('should send data to Hive (deprecated endpoint)', async () => {
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Disposing
[INF] [hive][usage] Sending report (queue 1)
[INF] [hive][usage] POST http://localhost/200
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
// Map
expect(report.size).toEqual(1);
@ -325,10 +326,7 @@ test('should send data to Hive (deprecated endpoint)', async () => {
});
test('should not leak the exception', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const hive = createHive({
enabled: true,
@ -342,7 +340,7 @@ test('should not leak the exception', async () => {
},
token: 'Token',
usage: {
endpoint: 'http://404.localhost',
endpoint: 'http://404.localhost.noop',
},
});
@ -358,22 +356,18 @@ test('should not leak the exception', async () => {
await waitFor(50);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(
expect.stringContaining(`[hive][usage] Attempt 1 failed:`),
);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 2)`);
expect(logger.error).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining(`[hive][usage] Failed to send data`),
);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Sending report (queue 1)
[INF] [hive][usage] POST http://404.localhost.noop Attempt (1/2)
[ERR] [hive][usage] Error: getaddrinfo ENOTFOUND 404.localhost.noop
[ERR] [hive][usage] at GetAddrInfoReqWrap.onlookupall [as oncomplete] (node:dns:666:666)
[ERR] [hive][usage] POST http://404.localhost.noop failed (666ms). getaddrinfo ENOTFOUND 404.localhost.noop
[INF] [hive][usage] Disposing
`);
});
test('sendImmediately should not stop the schedule', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -414,8 +408,6 @@ test('sendImmediately should not stop the schedule', async () => {
const collect = hive.collectUsage();
expect(logger.info).toHaveBeenCalledTimes(0);
await collect(
{
schema,
@ -427,44 +419,43 @@ test('sendImmediately should not stop the schedule', async () => {
await waitFor(120);
// Because maxSize is 2 and sendInterval is 50ms (+120ms buffer)
// the scheduled send task should be done by now
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
// since we sent only 1 element, the buffer was not full,
// so we should not see the following log:
expect(logger.info).not.toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
expect(logger.info).toHaveBeenCalledTimes(2);
// so we should not see "Sending immediately"
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Sending report (queue 1)
[INF] [hive][usage] POST http://localhost/200
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
logger.clear();
// Now we will hit the maxSize
// We run collect two times
await Promise.all([collect(args, {}), collect(args, {})]);
await waitFor(1);
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledTimes(4);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 2) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
// we run setImmediate under the hood
// It should be sent already
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.info).toHaveBeenCalledTimes(4);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Sending immediately
[INF] [hive][usage] Sending report (queue 2)
[INF] [hive][usage] POST http://localhost/200
`);
logger.clear();
await waitFor(100);
expect(logger.info).toHaveBeenCalledTimes(5);
// Let's check if the scheduled send task is still running
await collect(args, {});
await waitFor(30);
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
await hive.dispose();
http.done();
});
test('should send data to Hive at least once when using atLeastOnceSampler', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -548,9 +539,13 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 2) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Disposing
[INF] [hive][usage] Sending report (queue 2)
[INF] [hive][usage] POST http://localhost/200
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
// Map
expect(report.size).toEqual(2);
@ -571,10 +566,7 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy
});
test('should not send excluded operation name data to Hive', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -651,9 +643,13 @@ test('should not send excluded operation name data to Hive', async () => {
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 2) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Disposing
[INF] [hive][usage] Sending report (queue 2)
[INF] [hive][usage] POST http://localhost/200
[INF] [hive][usage] POST http://localhost/200 succeeded with status 200 (666ms).
[INF] [hive][usage] Report sent!
`);
// Map
expect(report.size).toEqual(2);
@ -703,11 +699,7 @@ test('should not send excluded operation name data to Hive', async () => {
});
test('retry on non-200', async () => {
const logSpy = vi.fn();
const logger = {
error: logSpy,
info: logSpy,
};
const logger = createHiveTestingLogger();
const token = 'Token';
@ -750,9 +742,10 @@ test('retry on non-200', async () => {
await waitFor(50);
await hive.dispose();
expect(logSpy).toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 1)');
expect(logSpy).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Attempt 1 failed`));
expect(logSpy).toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 2)');
expect(logSpy).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Attempt 2 failed`));
expect(logSpy).not.toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 3)');
expect(logger.getLogs()).toMatchInlineSnapshot(`
[INF] [hive][usage] Sending report (queue 1)
[INF] [hive][usage] POST http://localhost/200 Attempt (1/2)
[ERR] [hive][usage] POST http://localhost/200 failed with status 500 (666ms): No no no
[INF] [hive][usage] Disposing
`);
});

View file

@ -11,6 +11,7 @@ import { useDisableIntrospection } from '@graphql-yoga/plugin-disable-introspect
import { useGraphQLSSE } from '@graphql-yoga/plugin-graphql-sse';
import { useResponseCache } from '@graphql-yoga/plugin-response-cache';
import { Response } from '@whatwg-node/fetch';
import { createHiveTestingLogger } from '../../core/tests/test-utils';
import { createHive, useHive } from '../src/index.js';
beforeAll(() => {
@ -52,10 +53,7 @@ function handleProcess() {
}
test('should not interrupt the process', async () => {
const logger = {
error: vi.fn(),
info: vi.fn(),
};
const logger = createHiveTestingLogger();
const clean = handleProcess();
const hive = createHive({
enabled: true,
@ -68,12 +66,12 @@ test('should not interrupt the process', async () => {
logger,
},
reporting: {
endpoint: 'http://404.localhost/registry',
endpoint: 'http://404.localhost.noop/registry',
author: 'jest',
commit: 'js',
},
usage: {
endpoint: 'http://404.localhost/usage',
endpoint: 'http://404.localhost.noop/usage',
},
});
@ -101,12 +99,25 @@ test('should not interrupt the process', async () => {
},
}),
);
await waitFor(50);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info] Error'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting] Failed'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][usage] Failed'));
const reportingLogs = logger
.getLogs()
.split(`\n`)
.filter(item => item.includes(`[hive][reporting]`))
.join(`\n`);
expect(reportingLogs).includes('Publish schema');
expect(reportingLogs).includes('POST http://404.localhost.noop/registry');
const usageLogs = logger
.getLogs()
.split(`\n`)
.filter(item => item.includes(`[hive][usage]`))
.join(`\n`);
expect(usageLogs).includes('POST http://404.localhost.noop/usage');
await hive.dispose();
clean();
}, 1_000);