Node agnostic client (#4494)

This commit is contained in:
Kamil Kisiela 2024-04-23 13:49:51 +02:00 committed by GitHub
parent cdaadb1fb9
commit c5eeac5cce
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 1010 additions and 775 deletions

View file

@ -0,0 +1,5 @@
---
"@graphql-hive/core": minor
---
Node agnostic

View file

@ -0,0 +1,5 @@
---
"@graphql-hive/client": minor
---
Node agnostic

View file

@ -0,0 +1,5 @@
---
"@graphql-hive/client": minor
---
🚨 BREAKING CHANGE 🚨 Now requires Node v16+

View file

@ -48,6 +48,35 @@ const server = createYoga({
server.start()
```
### GraphQL Yoga and Cloudflare Workers
If you're using Cloudflare Workers, you can use the following code:
```typescript
import { createYogaHive, useYogaHive } from '@graphql-hive/client'
import { createYoga } from 'graphql-yoga'
export default {
async fetch(request, env, ctx) {
const hive = createYogaHive({
enabled: true, // Enable/Disable Hive Client
token: 'YOUR-TOKEN',
usage: true // Collects schema usage based on operations
});
const yoga = createYoga({
plugins: [
useYogaHive(hive)
]
});
const response = await yoga.fetch(request, env, ctx);
ctx.waitUntil(hive.dispose());
return response;
}
}
```
### With Envelop
If you're not familiar with Envelop — in short, it's a lightweight JavaScript library for wrapping
@ -150,6 +179,11 @@ app.post(
}
})
)
// When server is shutting down
async function onShutdown() {
await hive.close()
}
```
### Using the registry when Stitching

View file

@ -48,8 +48,8 @@
"dependencies": {
"@graphql-hive/core": "^0.2.4",
"@graphql-tools/utils": "^10.0.0",
"@whatwg-node/fetch": "0.9.17",
"async-retry": "1.3.3",
"axios": "^1.6.0",
"tiny-lru": "8.0.2"
},
"optionalDependencies": {
@ -72,7 +72,7 @@
"graphql": "16.8.1",
"graphql-ws": "5.16.0",
"graphql-yoga": "5.2.0",
"nock": "13.5.4",
"nock": "14.0.0-beta.5",
"vitest": "1.5.0",
"ws": "8.16.0"
},

View file

@ -1,14 +1,13 @@
import { createHash } from 'node:crypto';
import axios from 'axios';
import type { DocumentNode } from 'graphql';
import type { ApolloServerPlugin, HTTPGraphQLRequest } from '@apollo/server';
import { autoDisposeSymbol, createHive } from './client.js';
import { get } from './internal/http-client.js';
import type {
HiveClient,
HivePluginOptions,
SupergraphSDLFetcherOptions,
} from './internal/types.js';
import { isHiveClient, joinUrl } from './internal/utils.js';
import { createHash, isHiveClient, joinUrl } from './internal/utils.js';
import { version } from './version.js';
export function createSupergraphSDLFetcher(options: SupergraphSDLFetcherOptions) {
@ -46,42 +45,31 @@ export function createSupergraphSDLFetcher(options: SupergraphSDLFetcherOptions)
};
const fetchWithRetry = (): Promise<{ id: string; supergraphSdl: string }> => {
return axios
.get(endpoint, {
headers,
})
.then(response => {
if (response.status >= 200 && response.status < 300) {
const supergraphSdl = response.data;
const result = {
id: createHash('sha256').update(supergraphSdl).digest('base64'),
supergraphSdl,
};
return get(endpoint, {
headers,
}).then(async response => {
if (response.ok) {
const supergraphSdl = await response.text();
const result = {
id: await createHash('SHA-256').update(supergraphSdl).digest('base64'),
supergraphSdl,
};
const etag = response.headers['etag'];
if (etag) {
cached = result;
cacheETag = etag;
}
return result;
const etag = response.headers.get('etag');
if (etag) {
cached = result;
cacheETag = etag;
}
return retry(response.status);
})
.catch(async error => {
if (axios.isAxiosError(error)) {
if (error.response?.status === 304 && cached !== null) {
return cached;
}
return result;
}
if (error.response?.status) {
return retry(error.response.status);
}
}
if (response.status === 304 && cached !== null) {
return cached;
}
throw error;
});
return retry(response.status);
});
};
return fetchWithRetry();
@ -188,7 +176,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
},
willSendResponse(ctx: any) {
if (!didResolveSource) {
complete(args, {
void complete(args, {
action: 'abort',
reason: 'Did not resolve source',
logging: false,
@ -196,7 +184,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
return;
}
doc = ctx.document;
complete(args, ctx.response);
void complete(args, ctx.response);
},
} as any;
}
@ -208,7 +196,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
},
async willSendResponse(ctx) {
if (!didResolveSource) {
complete(args, {
void complete(args, {
action: 'abort',
reason: 'Did not resolve source',
logging: false,
@ -218,7 +206,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
if (!ctx.document) {
const details = ctx.operationName ? `operationName: ${ctx.operationName}` : '';
complete(args, {
void complete(args, {
action: 'abort',
reason: 'Document is not available' + (details ? ` (${details})` : ''),
logging: true,
@ -227,7 +215,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
}
doc = ctx.document!;
complete(args, ctx.response as any);
void complete(args, ctx.response as any);
},
});
}
@ -239,7 +227,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
},
async willSendResponse(ctx) {
if (!didResolveSource) {
complete(args, {
void complete(args, {
action: 'abort',
reason: 'Did not resolve source',
logging: false,
@ -249,7 +237,7 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
if (!ctx.document) {
const details = ctx.operationName ? `operationName: ${ctx.operationName}` : '';
complete(args, {
void complete(args, {
action: 'abort',
reason: 'Document is not available' + (details ? ` (${details})` : ''),
logging: true,
@ -259,13 +247,13 @@ export function hiveApollo(clientOrOptions: HiveClient | HivePluginOptions): Apo
doc = ctx.document;
if (ctx.response.body.kind === 'incremental') {
complete(args, {
void complete(args, {
action: 'abort',
reason: '@defer and @stream is not supported by Hive',
logging: true,
});
} else {
complete(args, ctx.response.body.singleResult);
void complete(args, ctx.response.body.singleResult);
}
},
});

View file

@ -1,10 +1,10 @@
import axios from 'axios';
import {
type execute as ExecuteImplementation,
type ExecutionResult,
type GraphQLSchema,
type subscribe as SubscribeImplementation,
} from 'graphql';
import { post } from './internal/http-client.js';
import { createReporting } from './internal/reporting.js';
import type { HiveClient, HivePluginOptions } from './internal/types.js';
import { createUsage } from './internal/usage.js';
@ -41,136 +41,140 @@ export function createHive(options: HivePluginOptions): HiveClient {
await Promise.all([schemaReporter.dispose(), usage.dispose()]);
}
async function info(): Promise<void> {
if (enabled === false || !options.debug) {
return;
}
// enabled only when `printTokenInfo` is `true`, or `debug` is `true` and `printTokenInfo` is not `false`
const printTokenInfo = enabled
? options.printTokenInfo === true || (!!options.debug && options.printTokenInfo !== false)
: false;
try {
let endpoint = 'https://app.graphql-hive.com/graphql';
const info = printTokenInfo
? async () => {
try {
let endpoint = 'https://app.graphql-hive.com/graphql';
// Look for the reporting.endpoint for the legacy reason.
if (options.reporting && options.reporting.endpoint) {
endpoint = options.reporting.endpoint;
}
if (options.selfHosting?.graphqlEndpoint) {
endpoint = options.selfHosting.graphqlEndpoint;
}
const query = /* GraphQL */ `
query myTokenInfo {
tokenInfo {
__typename
... on TokenInfo {
token {
name
}
organization {
name
cleanId
}
project {
name
type
cleanId
}
target {
name
cleanId
}
canReportSchema: hasTargetScope(scope: REGISTRY_WRITE)
canCollectUsage: hasTargetScope(scope: REGISTRY_WRITE)
canReadOperations: hasProjectScope(scope: OPERATIONS_STORE_READ)
}
... on TokenNotFoundError {
message
}
// Look for the reporting.endpoint for the legacy reason.
if (options.reporting && options.reporting.endpoint) {
endpoint = options.reporting.endpoint;
}
if (options.selfHosting?.graphqlEndpoint) {
endpoint = options.selfHosting.graphqlEndpoint;
}
const query = /* GraphQL */ `
query myTokenInfo {
tokenInfo {
__typename
... on TokenInfo {
token {
name
}
organization {
name
cleanId
}
project {
name
type
cleanId
}
target {
name
cleanId
}
canReportSchema: hasTargetScope(scope: REGISTRY_WRITE)
canCollectUsage: hasTargetScope(scope: REGISTRY_WRITE)
canReadOperations: hasProjectScope(scope: OPERATIONS_STORE_READ)
}
... on TokenNotFoundError {
message
}
}
}
`;
const response = await post(
endpoint,
JSON.stringify({
query,
operationName: 'myTokenInfo',
}),
{
headers: {
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'user-agent': `hive-client/${version}`,
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
},
timeout: 30_000,
fetchImplementation: options?.agent?.__testing?.fetch,
},
);
if (response.ok) {
const result: ExecutionResult<any> = await response.json();
if (result.data?.tokenInfo.__typename === 'TokenInfo') {
const { tokenInfo } = result.data;
const {
organization,
project,
target,
canReportSchema,
canCollectUsage,
canReadOperations,
} = tokenInfo;
const print = createPrinter([
tokenInfo.token.name,
organization.name,
project.name,
target.name,
]);
const appUrl =
options.selfHosting?.applicationUrl?.replace(/\/$/, '') ??
'https://app.graphql-hive.com';
const organizationUrl = `${appUrl}/${organization.cleanId}`;
const projectUrl = `${organizationUrl}/${project.cleanId}`;
const targetUrl = `${projectUrl}/${target.cleanId}`;
logger.info(
[
'[hive][info] Token details',
'',
`Token name: ${print(tokenInfo.token.name)}`,
`Organization: ${print(organization.name, organizationUrl)}`,
`Project: ${print(project.name, projectUrl)}`,
`Target: ${print(target.name, targetUrl)}`,
'',
`Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`,
`Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`,
`Can read operations? ${print(canReadOperations ? 'Yes' : 'No')}`,
'',
].join('\n'),
);
} else if (result.data?.tokenInfo.message) {
logger.error(
`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`,
);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
} else {
logger.error(`[hive][info] ${result.errors![0].message}`);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
}
} else {
logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
}
} catch (error) {
logger.error(`[hive][info] Error ${(error as Error)?.message ?? error}`);
}
`;
const response = await axios.post(
endpoint,
JSON.stringify({
query,
operationName: 'myTokenInfo',
}),
{
headers: {
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'user-agent': `hive-client/${version}`,
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
},
timeout: 30_000,
decompress: true,
responseType: 'json',
},
);
if (response.status >= 200 && response.status < 300) {
const result: ExecutionResult<any> = await response.data;
if (result.data?.tokenInfo.__typename === 'TokenInfo') {
const { tokenInfo } = result.data;
const {
organization,
project,
target,
canReportSchema,
canCollectUsage,
canReadOperations,
} = tokenInfo;
const print = createPrinter([
tokenInfo.token.name,
organization.name,
project.name,
target.name,
]);
const appUrl =
options.selfHosting?.applicationUrl?.replace(/\/$/, '') ??
'https://app.graphql-hive.com';
const organizationUrl = `${appUrl}/${organization.cleanId}`;
const projectUrl = `${organizationUrl}/${project.cleanId}`;
const targetUrl = `${projectUrl}/${target.cleanId}`;
logger.info(
[
'[hive][info] Token details',
'',
`Token name: ${print(tokenInfo.token.name)}`,
`Organization: ${print(organization.name, organizationUrl)}`,
`Project: ${print(project.name, projectUrl)}`,
`Target: ${print(target.name, targetUrl)}`,
'',
`Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`,
`Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`,
`Can read operations? ${print(canReadOperations ? 'Yes' : 'No')}`,
'',
].join('\n'),
);
} else if (result.data?.tokenInfo.message) {
logger.error(`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
} else {
logger.error(`[hive][info] ${result.errors![0].message}`);
logger.info(
`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`,
);
}
} else {
logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
}
} catch (error) {
logger.error(`[hive][info] Error ${(error as Error)?.message ?? error}`);
}
}
: () => {};
function createInstrumentedExecute(
executeImpl: typeof ExecuteImplementation,
@ -181,7 +185,7 @@ export function createHive(options: HivePluginOptions): HiveClient {
if ('then' in result) {
void result.then(result => collect(args, result));
} else {
collect(args, result);
void collect(args, result);
}
return result;

View file

@ -1,9 +1,8 @@
import { GraphQLError } from 'graphql';
import type { Plugin } from '@envelop/types';
import { isAsyncIterable } from '@graphql-tools/utils';
import { autoDisposeSymbol, createHive } from './client.js';
import type { HiveClient, HivePluginOptions } from './internal/types.js';
import { isHiveClient } from './internal/utils.js';
import { isAsyncIterable, isHiveClient } from './internal/utils.js';
export function useHive(clientOrOptions: HiveClient): Plugin;
export function useHive(clientOrOptions: HivePluginOptions): Plugin;
@ -46,7 +45,7 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin
return {
onExecuteDone({ result }) {
if (!isAsyncIterable(result)) {
complete(args, result);
void complete(args, result);
return;
}
@ -58,7 +57,7 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin
}
},
onEnd() {
complete(args, errors.length ? { errors } : {});
void complete(args, errors.length ? { errors } : {});
},
};
},

View file

@ -1,7 +1,6 @@
import { createHash } from 'node:crypto';
import axios from 'axios';
import { get } from './internal/http-client.js';
import type { SchemaFetcherOptions, ServicesFetcherOptions } from './internal/types.js';
import { joinUrl } from './internal/utils.js';
import { createHash, joinUrl } from './internal/utils.js';
import { version } from './version.js';
interface Schema {
@ -47,39 +46,27 @@ function createFetcher(options: SchemaFetcherOptions & ServicesFetcherOptions) {
};
const fetchWithRetry = (): Promise<readonly Schema[] | Schema> => {
return axios
.get(endpoint, {
headers,
responseType: 'json',
})
.then(response => {
if (response.status >= 200 && response.status < 300) {
const result = response.data;
return get(endpoint, {
headers,
}).then(async response => {
if (response.ok) {
const result = await response.json();
const etag = response.headers['etag'];
if (etag) {
cached = result;
cacheETag = etag;
}
return result;
const etag = response.headers.get('etag');
if (etag) {
cached = result;
cacheETag = etag;
}
return retry(response.status);
})
.catch(async error => {
if (axios.isAxiosError(error)) {
if (error.response?.status === 304 && cached !== null) {
return cached;
}
return result;
}
if (error.response?.status) {
return retry(error.response.status);
}
}
if (response.status === 304 && cached !== null) {
return cached;
}
throw error;
});
return retry(response.status);
});
};
return fetchWithRetry();
@ -117,12 +104,11 @@ export function createServicesFetcher(options: ServicesFetcherOptions) {
const fetcher = createFetcher(options);
return function schemaFetcher() {
return fetcher().then(services => {
return fetcher().then(async services => {
if (services instanceof Array) {
return services.map(service => ({
id: createSchemaId(service),
...service,
}));
return Promise.all(
services.map(service => createSchemaId(service).then(id => ({ id, ...service }))),
);
}
throw new Error(
'Encountered a single service instead of a multiple services. Please use createSchemaFetcher instead.',
@ -132,7 +118,7 @@ export function createServicesFetcher(options: ServicesFetcherOptions) {
}
const createSchemaId = (service: Schema) =>
createHash('sha256')
createHash('SHA-256')
.update(service.sdl)
.update(service.url || '')
.update(service.name)

View file

@ -1,6 +1,6 @@
export type { HivePluginOptions, HiveClient } from './internal/types.js';
export { useHive } from './envelop.js';
export { useHive as useYogaHive } from './yoga.js';
export { useHive as useYogaHive, createHive as createYogaHive } from './yoga.js';
export { hiveApollo, createSupergraphSDLFetcher, createSupergraphManager } from './apollo.js';
export { createSchemaFetcher, createServicesFetcher } from './gateways.js';
export { createHive } from './client.js';

View file

@ -1,8 +1,10 @@
import retry from 'async-retry';
import axios from 'axios';
import { version } from '../version.js';
import { post } from './http-client.js';
import type { Logger } from './types.js';
type ReadOnlyResponse = Pick<Response, 'status' | 'text' | 'json'>;
export interface AgentOptions {
enabled?: boolean;
name?: string;
@ -43,16 +45,14 @@ export interface AgentOptions {
*/
logger?: Logger;
/**
* Define a custom http agent to be used when performing http requests
* Testing purposes only
*/
httpAgent?: any;
/**
* Define a custom https agent to be used when performing https requests
*/
httpsAgent?: any;
__testing?: {
fetch?: typeof fetch;
};
}
export function createAgent<TEvent, TResult = void>(
export function createAgent<TEvent>(
pluginOptions: AgentOptions,
{
prefix,
@ -70,7 +70,7 @@ export function createAgent<TEvent, TResult = void>(
headers?(): Record<string, string>;
},
) {
const options: Required<AgentOptions> = {
const options: Required<Omit<AgentOptions, '__testing'>> = {
timeout: 30_000,
debug: false,
enabled: true,
@ -80,8 +80,6 @@ export function createAgent<TEvent, TResult = void>(
maxSize: 25,
logger: console,
name: 'hive-client',
httpAgent: undefined,
httpsAgent: undefined,
...pluginOptions,
};
@ -104,8 +102,26 @@ export function createAgent<TEvent, TResult = void>(
}
let scheduled = false;
let inProgressCaptures: Promise<void>[] = [];
function capture(event: TEvent) {
function capture(event: TEvent | Promise<TEvent>) {
if (event instanceof Promise) {
const promise = captureAsync(event);
inProgressCaptures.push(promise);
void promise.finally(() => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
inProgressCaptures = inProgressCaptures.filter(p => p !== promise);
});
} else {
captureSync(event);
}
}
async function captureAsync(event: Promise<TEvent>) {
captureSync(await event);
}
function captureSync(event: TEvent) {
// Calling capture starts the schedule
if (!scheduled) {
scheduled = true;
@ -120,25 +136,25 @@ export function createAgent<TEvent, TResult = void>(
}
}
function sendImmediately(event: TEvent): Promise<TResult | null> {
function sendImmediately(event: TEvent): Promise<ReadOnlyResponse | null> {
data.set(event);
debugLog('Sending immediately');
return send({ runOnce: true, throwOnError: true });
}
async function send<T>(sendOptions: {
async function send(sendOptions: {
runOnce?: boolean;
throwOnError: true;
}): Promise<T | null | never>;
async function send<T>(sendOptions: {
}): Promise<ReadOnlyResponse | null>;
async function send(sendOptions: {
runOnce?: boolean;
throwOnError: false;
}): Promise<T | null>;
async function send<T>(sendOptions?: {
}): Promise<ReadOnlyResponse | null>;
async function send(sendOptions?: {
runOnce?: boolean;
throwOnError: boolean;
}): Promise<T | null | never> {
}): Promise<ReadOnlyResponse | null> {
const runOnce = sendOptions?.runOnce ?? false;
if (!data.size()) {
@ -156,35 +172,33 @@ export function createAgent<TEvent, TResult = void>(
const sendReport: retry.RetryFunction<{
status: number;
data: T | null;
text(): Promise<string>;
json(): Promise<unknown>;
}> = async (_bail, attempt) => {
debugLog(`Sending (queue ${dataToSend}) (attempt ${attempt})`);
if (!enabled) {
return {
status: 200,
data: null,
text: async () => 'OK',
json: async () => ({}),
};
}
const response = await axios
.post(options.endpoint, buffer, {
headers: {
accept: 'application/json',
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'User-Agent': `${options.name}/${version}`,
...headers(),
},
responseType: 'json',
timeout: options.timeout,
httpAgent: options.httpAgent,
httpsAgent: options.httpsAgent,
})
.catch(error => {
debugLog(`Attempt ${attempt} failed: ${error.message}`);
return Promise.reject(error);
});
const response = await post(options.endpoint, buffer, {
headers: {
accept: 'application/json',
'content-type': 'application/json',
Authorization: `Bearer ${options.token}`,
'User-Agent': `${options.name}/${version}`,
...headers(),
},
timeout: options.timeout,
fetchImplementation: pluginOptions.__testing?.fetch,
}).catch(error => {
debugLog(`Attempt ${attempt} failed: ${error.message}`);
return Promise.reject(error);
});
if (response.status >= 200 && response.status < 300) {
return response;
@ -202,7 +216,7 @@ export function createAgent<TEvent, TResult = void>(
if (response.status < 200 || response.status >= 300) {
throw new Error(
`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}`,
`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${await response.text()}`,
);
}
@ -211,8 +225,7 @@ export function createAgent<TEvent, TResult = void>(
if (!runOnce) {
schedule();
}
return response.data;
return response;
} catch (error: any) {
if (!runOnce) {
schedule();
@ -234,6 +247,10 @@ export function createAgent<TEvent, TResult = void>(
clearTimeout(timeoutID);
}
if (inProgressCaptures.length) {
await Promise.all(inProgressCaptures);
}
await send({
runOnce: true,
throwOnError: false,

View file

@ -0,0 +1,82 @@
import { fetch } from '@whatwg-node/fetch';
/**
 * Issues a GET request against `endpoint` via the shared fetch wrapper.
 * Headers are forwarded verbatim; `timeout` (ms) aborts the request when set.
 */
export function get(
  endpoint: string,
  config: {
    headers: Record<string, string>;
    timeout?: number;
    fetchImplementation?: typeof fetch;
  },
) {
  const { headers, timeout, fetchImplementation } = config;
  // GET requests carry no body; everything else is forwarded as-is.
  return makeFetchCall(endpoint, { method: 'GET', headers, timeout, fetchImplementation });
}
/**
 * Issues a POST request against `endpoint` via the shared fetch wrapper.
 * `data` is sent as the request body unchanged; `timeout` (ms) aborts when set.
 */
export async function post(
  endpoint: string,
  data: string | Buffer,
  config: {
    headers: Record<string, string>;
    timeout?: number;
    fetchImplementation?: typeof fetch;
  },
) {
  const { headers, timeout, fetchImplementation } = config;
  return makeFetchCall(endpoint, {
    method: 'POST',
    body: data,
    headers,
    timeout,
    fetchImplementation,
  });
}
/**
 * Performs the actual fetch call shared by `get` and `post`.
 *
 * - When `config.timeout` is set (truthy), the request is aborted via an
 *   AbortController once the timer fires; the timer is always cleared.
 * - AggregateError-shaped failures (e.g. multiple connection attempts) are
 *   flattened into a single Error whose message joins the sub-messages,
 *   preserving the original as `cause`.
 * - `fetchImplementation` exists for testing; defaults to the imported fetch.
 */
async function makeFetchCall(
  endpoint: string,
  config: {
    body?: string | Buffer;
    method: 'GET' | 'POST';
    headers: Record<string, string>;
    timeout?: number;
    fetchImplementation?: typeof fetch;
  },
) {
  const controller = new AbortController();
  const doFetch = config.fetchImplementation ?? fetch;

  const responsePromise = doFetch(endpoint, {
    method: config.method,
    body: config.body,
    headers: config.headers,
    signal: controller.signal,
  });

  // Start the abort timer only after the request is in flight (matches the
  // original ordering); a falsy timeout means "no timeout".
  const timeoutId: ReturnType<typeof setTimeout> | undefined = config.timeout
    ? setTimeout(() => controller.abort(), config.timeout)
    : undefined;

  try {
    return await responsePromise;
  } catch (error) {
    if (isAggregateError(error)) {
      throw new Error(error.errors.map(e => e.message).join(', '), {
        cause: error,
      });
    }
    throw error;
  } finally {
    if (timeoutId !== undefined) {
      clearTimeout(timeoutId);
    }
  }
}
// Minimal structural view of an AggregateError-like value: an Error that
// carries a list of underlying errors.
interface AggregateError extends Error {
  errors: Error[];
}

/**
 * Type guard: true when `error` is a non-null object whose `errors`
 * property is an array (the AggregateError shape thrown by undici et al.).
 */
function isAggregateError(error: unknown): error is AggregateError {
  if (!error || typeof error !== 'object') {
    return false;
  }
  return 'errors' in error && Array.isArray((error as { errors?: unknown }).errors);
}

View file

@ -49,7 +49,7 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
);
let currentSchema: GraphQLSchema | null = null;
const agent = createAgent<GraphQLSchema, ExecutionResult<SchemaPublishMutation>>(
const agent = createAgent<GraphQLSchema>(
{
logger,
...pluginOptions.agent,
@ -60,6 +60,7 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
token,
enabled: pluginOptions.enabled,
debug: pluginOptions.debug,
__testing: pluginOptions.agent?.__testing,
},
{
prefix: 'reporting',
@ -76,7 +77,6 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
},
headers() {
return {
'Content-Type': 'application/json',
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
};
@ -103,12 +103,14 @@ export function createReporting(pluginOptions: HivePluginOptions): SchemaReporte
return {
async report({ schema }) {
try {
const result = await agent.sendImmediately(schema);
const response = await agent.sendImmediately(schema);
if (result === null) {
if (response === null) {
throw new Error('Empty response');
}
const result: ExecutionResult<SchemaPublishMutation> = await response.json();
if (Array.isArray(result.errors)) {
throw new Error(result.errors.map(error => error.message).join('\n'));
}

View file

@ -6,7 +6,7 @@ import type { SchemaReporter } from './reporting.js';
export interface HiveClient {
[hiveClientSymbol]: true;
[autoDisposeSymbol]: boolean | NodeJS.Signals[];
info(): Promise<void>;
info(): void | Promise<void>;
reportSchema: SchemaReporter['report'];
/** Collect usage for Query and Mutation operations */
collectUsage(): CollectUsageCallback;
@ -28,7 +28,7 @@ export type AbortAction = {
export type CollectUsageCallback = (
args: ExecutionArgs,
result: GraphQLErrorsResult | AbortAction,
) => void;
) => Promise<void>;
export interface ClientInfo {
name: string;
version: string;
@ -192,6 +192,11 @@ export type HivePluginOptions = OptionalWhenFalse<
* Disabled by default
*/
reporting?: HiveReportingPluginOptions | false;
/**
* Print info about the token.
* Disabled by default (enabled by default only in debug mode)
*/
printTokenInfo?: boolean;
/**
* Automatically dispose the client when the process is terminated
*

View file

@ -50,7 +50,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
if (!pluginOptions.usage || pluginOptions.enabled === false) {
return {
collect() {
return () => {};
return async () => {};
},
async dispose() {},
collectSubscription() {},
@ -81,6 +81,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
token: pluginOptions.token,
enabled: pluginOptions.enabled,
debug: pluginOptions.debug,
__testing: pluginOptions.agent?.__testing,
},
{
prefix: 'usage',
@ -133,7 +134,6 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
},
headers() {
return {
'Content-Type': 'application/json',
'graphql-client-name': 'Hive Client',
'graphql-client-version': version,
'x-usage-api-version': '2',
@ -169,7 +169,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
collect() {
const finish = measureDuration();
return function complete(args, result) {
return async function complete(args, result) {
const duration = finish();
let providedOperationName: string | undefined = undefined;
try {
@ -212,29 +212,33 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
ttl: options.ttl,
processVariables: options.processVariables ?? false,
});
const { key, value: info } = collect(document, args.variableValues ?? null);
agent.capture({
type: 'request',
data: {
key,
timestamp: Date.now(),
operationName,
operation: info.document,
fields: info.fields,
execution: {
ok: errors.length === 0,
duration,
errorsTotal: errors.length,
errors,
},
// TODO: operationHash is ready to accept hashes of persisted operations
client:
typeof args.contextValue !== 'undefined' &&
typeof options.clientInfo !== 'undefined'
? options.clientInfo(args.contextValue)
: createDefaultClientInfo()(args.contextValue),
},
});
agent.capture(
collect(document, args.variableValues ?? null).then(({ key, value: info }) => {
return {
type: 'request',
data: {
key,
timestamp: Date.now(),
operationName,
operation: info.document,
fields: info.fields,
execution: {
ok: errors.length === 0,
duration,
errorsTotal: errors.length,
errors,
},
// TODO: operationHash is ready to accept hashes of persisted operations
client:
typeof args.contextValue !== 'undefined' &&
typeof options.clientInfo !== 'undefined'
? options.clientInfo(args.contextValue)
: createDefaultClientInfo()(args.contextValue),
},
};
}),
);
}
} catch (error) {
const details = providedOperationName ? ` (name: "${providedOperationName}")` : '';
@ -242,7 +246,7 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
}
};
},
collectSubscription({ args }) {
async collectSubscription({ args }) {
const document = args.document;
const rootOperation = document.definitions.find(
o => o.kind === Kind.OPERATION_DEFINITION,
@ -270,23 +274,25 @@ export function createUsage(pluginOptions: HivePluginOptions): UsageCollector {
ttl: options.ttl,
processVariables: options.processVariables ?? false,
});
const { key, value: info } = collect(document, args.variableValues ?? null);
agent.capture({
type: 'subscription',
data: {
key,
timestamp: Date.now(),
operationName,
operation: info.document,
fields: info.fields,
// TODO: operationHash is ready to accept hashes of persisted operations
client:
typeof args.contextValue !== 'undefined' && typeof options.clientInfo !== 'undefined'
? options.clientInfo(args.contextValue)
: createDefaultClientInfo()(args.contextValue),
},
});
agent.capture(
collect(document, args.variableValues ?? null).then(({ key, value: info }) => ({
type: 'subscription',
data: {
key,
timestamp: Date.now(),
operationName,
operation: info.document,
fields: info.fields,
// TODO: operationHash is ready to accept hashes of persisted operations
client:
typeof args.contextValue !== 'undefined' &&
typeof options.clientInfo !== 'undefined'
? options.clientInfo(args.contextValue)
: createDefaultClientInfo()(args.contextValue),
},
})),
);
}
},
};

View file

@ -1,7 +1,43 @@
import { createHash } from 'node:crypto';
import { crypto, TextEncoder } from '@whatwg-node/fetch';
import { hiveClientSymbol } from '../client.js';
import type { HiveClient, HivePluginOptions } from './types.js';
// Detect the Cloudflare Workers runtime: Workers expose a global `caches`
// object with a non-standard `default` cache that browsers/Node lack.
export const isCloudflareWorker =
  typeof caches !== 'undefined' && 'default' in caches && !!caches.default;
/**
 * Hashes `data` with WebCrypto (`crypto.subtle.digest`) and encodes the
 * result as lowercase hex or base64. Async because SubtleCrypto is async.
 */
async function digest(algo: 'SHA-256' | 'SHA-1', output: 'hex' | 'base64', data: string) {
  const bytes = new TextEncoder().encode(data);
  const buffer = await crypto.subtle.digest(algo, bytes);
  return output === 'hex' ? arrayBufferToHEX(buffer) : arrayBufferToBase64(buffer);
}

// Lowercase hex encoding, two characters per byte.
function arrayBufferToHEX(buffer: ArrayBuffer) {
  let hex = '';
  for (const byte of new Uint8Array(buffer)) {
    hex += byte.toString(16).padStart(2, '0');
  }
  return hex;
}

// Base64 encoding via btoa over a binary string (digest outputs are tiny,
// so building the intermediate string is fine).
function arrayBufferToBase64(buffer: ArrayBuffer) {
  let binary = '';
  for (const byte of new Uint8Array(buffer)) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}

/**
 * Minimal, runtime-agnostic stand-in for Node's `crypto.createHash`:
 * chunks passed to `update` are concatenated and hashed on `digest`.
 * Unlike Node, `digest` is asynchronous (WebCrypto).
 */
export function createHash(algo: 'SHA-256' | 'SHA-1') {
  const chunks: string[] = [];
  return {
    update(data: string) {
      chunks.push(data);
      return this;
    },
    digest(output: 'hex' | 'base64') {
      return digest(algo, output, chunks.join(''));
    },
  };
}
export function memo<R, A, K>(fn: (arg: A) => R, cacheKeyFn: (arg: A) => K): (arg: A) => R {
let memoizedResult: R | null = null;
let memoizedKey: K | null = null;
@ -19,18 +55,22 @@ export function memo<R, A, K>(fn: (arg: A) => R, cacheKeyFn: (arg: A) => K): (ar
};
}
export function isAsyncIterable<T>(value: any): value is AsyncIterable<T> {
return value?.[Symbol.asyncIterator] != null;
}
export function cache<R, A, K, V>(
fn: (arg: A, arg2: V) => R,
cacheKeyFn: (arg: A, arg2: V) => K,
cacheKeyFn: (arg: A, arg2: V) => Promise<K>,
cacheMap: {
has(key: K): boolean;
set(key: K, value: R): void;
get(key: K): R | undefined;
},
) {
return (arg: A, arg2: V) => {
const key = cacheKeyFn(arg, arg2);
const cachedValue = cacheMap.get(key);
return async (arg: A, arg2: V) => {
const key = await cacheKeyFn(arg, arg2);
const cachedValue = await cacheMap.get(key);
if (cachedValue !== null && typeof cachedValue !== 'undefined') {
return {
@ -51,8 +91,8 @@ export function cache<R, A, K, V>(
};
}
export function cacheDocumentKey<T, V>(doc: T, variables: V | null) {
const hasher = createHash('md5').update(JSON.stringify(doc));
export async function cacheDocumentKey<T, V>(doc: T, variables: V | null) {
const hasher = createHash('SHA-1').update(JSON.stringify(doc));
if (variables) {
hasher.update(
@ -75,9 +115,9 @@ export function cacheDocumentKey<T, V>(doc: T, variables: V | null) {
const HR_TO_NS = 1e9;
const NS_TO_MS = 1e6;
function deltaFrom(hrtime: [number, number]): { ms: number; ns: number } {
const delta = process.hrtime(hrtime);
const ns = delta[0] * HR_TO_NS + delta[1];
function deltaFrom(startedAt: number): { ms: number; ns: number } {
const endedAt = performance.now();
const ns = Math.round(((endedAt - startedAt) * HR_TO_NS) / 1000);
return {
ns,
@ -88,7 +128,7 @@ function deltaFrom(hrtime: [number, number]): { ms: number; ns: number } {
}
export function measureDuration() {
const startAt = process.hrtime();
const startAt = performance.now();
return function end() {
return deltaFrom(startAt).ns;

View file

@ -8,10 +8,9 @@ import {
} from 'graphql';
import type { GraphQLParams, Plugin } from 'graphql-yoga';
import LRU from 'tiny-lru';
import { isAsyncIterable } from '@graphql-tools/utils';
import { autoDisposeSymbol, createHive } from './client.js';
import { autoDisposeSymbol, createHive as createHiveClient } from './client.js';
import type { CollectUsageCallback, HiveClient, HivePluginOptions } from './internal/types.js';
import { isHiveClient } from './internal/utils.js';
import { isAsyncIterable, isHiveClient } from './internal/utils.js';
type CacheRecord = {
callback: CollectUsageCallback;
@ -20,18 +19,20 @@ type CacheRecord = {
parsedDocument?: DocumentNode;
};
export function createHive(clientOrOptions: HivePluginOptions) {
return createHiveClient({
...clientOrOptions,
agent: {
name: 'hive-client-yoga',
...clientOrOptions.agent,
},
});
}
export function useHive(clientOrOptions: HiveClient): Plugin;
export function useHive(clientOrOptions: HivePluginOptions): Plugin;
export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin {
const hive = isHiveClient(clientOrOptions)
? clientOrOptions
: createHive({
...clientOrOptions,
agent: {
name: 'hive-client-yoga',
...clientOrOptions.agent,
},
});
const hive = isHiveClient(clientOrOptions) ? clientOrOptions : createHive(clientOrOptions);
void hive.info();
@ -104,7 +105,7 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin
errors.push(...ctx.result.errors);
},
onEnd() {
record.callback(args, errors.length ? { errors } : {});
void record.callback(args, errors.length ? { errors } : {});
},
};
},
@ -126,7 +127,7 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin
// Report if execution happened (aka executionArgs have been set within onExecute)
if (record.executionArgs) {
record.callback(
void record.callback(
{
...record.executionArgs,
document: record.parsedDocument ?? record.executionArgs.document,
@ -148,7 +149,7 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin
document = parse(record.paramsArgs.query);
parsedDocumentCache.set(record.paramsArgs.query, document);
}
record.callback(
void record.callback(
{
document,
schema: latestSchema,

View file

@ -156,7 +156,7 @@ describe('built-in HTTP usage reporting', async () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
046386c6981ae292daf3adc123d3b6b0: {
e15a9b2c408491a7de1e557f240fd9b97db3972f: {
fields: [
Query.hi,
],
@ -240,7 +240,7 @@ describe('built-in HTTP usage reporting', async () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
5164dcbb81769931d535efca9e3e8fb5: {
7ed6f1c2474785a05302bb92320b793f661d22d8: {
fields: [
Mutation.hi,
],
@ -329,7 +329,7 @@ describe('built-in HTTP usage reporting', async () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
046386c6981ae292daf3adc123d3b6b0: {
e15a9b2c408491a7de1e557f240fd9b97db3972f: {
fields: [
Query.hi,
],
@ -350,7 +350,7 @@ describe('built-in HTTP usage reporting', async () => {
version: '1',
},
},
operationMapKey: '046386c6981ae292daf3adc123d3b6b0',
operationMapKey: 'e15a9b2c408491a7de1e557f240fd9b97db3972f',
});
return true;
@ -439,7 +439,7 @@ describe('built-in HTTP usage reporting', async () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
046386c6981ae292daf3adc123d3b6b0: {
e15a9b2c408491a7de1e557f240fd9b97db3972f: {
fields: [
Query.hi,
],
@ -455,7 +455,7 @@ describe('built-in HTTP usage reporting', async () => {
version: '4.2.0',
},
},
operationMapKey: '046386c6981ae292daf3adc123d3b6b0',
operationMapKey: 'e15a9b2c408491a7de1e557f240fd9b97db3972f',
});
return true;
@ -546,7 +546,7 @@ describe('graphql-ws usage reporting setup', async () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
0063ba7bf2695b896c464057aef29cdc: {
f25063b60ab942d0c0d14cdd9cd3172de2e7ebc4: {
fields: [
Query.hi,
],
@ -562,7 +562,7 @@ describe('graphql-ws usage reporting setup', async () => {
version: '1.0.0',
},
},
operationMapKey: '0063ba7bf2695b896c464057aef29cdc',
operationMapKey: 'f25063b60ab942d0c0d14cdd9cd3172de2e7ebc4',
});
return true;

View file

@ -15,8 +15,7 @@ test("should log that it's not enabled", async () => {
token: '',
});
const result = await hive
.info()
const result = await Promise.resolve(hive.info())
.then(() => 'OK')
.catch(() => 'ERROR');
@ -36,13 +35,8 @@ test("should not log that it's not enabled", async () => {
agent: { logger },
});
const result = await hive
.info()
.then(() => 'OK')
.catch(() => 'ERROR');
expect(logger.info).not.toBeCalled();
expect(result).toBe('OK');
expect(hive.info()).toBeUndefined();
});
test('should not throw exception about missing token when disabled', async () => {
@ -57,11 +51,6 @@ test('should not throw exception about missing token when disabled', async () =>
agent: { logger },
});
const result = await hive
.info()
.then(() => 'OK')
.catch(() => 'ERROR');
expect(logger.info).not.toBeCalled();
expect(result).toBe('OK');
expect(hive.info()).toBeUndefined();
});

View file

@ -20,8 +20,7 @@ test('should not leak the exception', async () => {
},
});
const result = await hive
.info()
const result = await Promise.resolve(hive.info())
.then(() => 'OK')
.catch(() => 'ERROR');
@ -78,8 +77,7 @@ test('should use selfHosting.graphqlEndpoint if provided', async () => {
},
});
const result = await hive
.info()
const result = await Promise.resolve(hive.info())
.then(() => 'OK')
.catch(() => 'ERROR');

View file

@ -1,14 +1,10 @@
import { createServer } from 'node:http';
import { AddressInfo } from 'node:net';
import axios from 'axios';
/* eslint-disable-next-line import/no-extraneous-dependencies */
import { createSchema, createYoga } from 'graphql-yoga';
/* eslint-disable-next-line import/no-extraneous-dependencies */
import nock from 'nock';
// eslint-disable-next-line import/no-extraneous-dependencies
import { ApolloServer } from '@apollo/server';
/* eslint-disable-next-line import/no-extraneous-dependencies */
import { startStandaloneServer } from '@apollo/server/standalone';
import { Response } from '@whatwg-node/fetch';
import { createHive, hiveApollo, useHive } from '../src';
import { waitFor } from './test-utils';
@ -53,7 +49,7 @@ describe('GraphQL Yoga', () => {
token: 'my-token',
agent: {
maxRetries: 0,
sendInterval: 100,
sendInterval: 10,
timeout: 50,
logger,
},
@ -76,60 +72,35 @@ describe('GraphQL Yoga', () => {
logging: false,
});
const server = createServer(yoga);
await yoga.fetch(
new Request('http://localhost/graphql', {
method: 'POST',
body: JSON.stringify({
query: /* GraphQL */ `
{
hello
}
`,
}),
headers: {
'content-type': 'application/json',
},
}),
);
async function stop() {
await new Promise(resolve => server.close(resolve));
await hive.dispose();
}
await waitFor(50);
await new Promise<void>(resolve => server.listen(0, resolve));
const port = (server.address() as AddressInfo).port;
await axios
.post(`http://localhost:${port}/graphql`, {
query: /* GraphQL */ `
{
hello
}
`,
})
.catch(async error => {
await stop();
return Promise.reject(error);
});
await waitFor(300);
await stop();
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info] Error'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting] Failed'));
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][usage] Failed'));
await hive.dispose();
clean();
}, 1_000);
test('should capture client name and version headers', async () => {
const usageMock = nock('http://yoga.localhost')
.post(
'/usage',
(body: {
operations: [
{
metadata: {
client?: {
name: string;
version: string;
};
};
},
];
}) => {
return (
body.operations[0].metadata.client?.name === 'vitest' &&
body.operations[0].metadata.client?.version === '1.0.0'
);
},
)
.reply(200);
const fetchSpy = vi.fn<[RequestInfo | URL, options: RequestInit | undefined]>(async () =>
Response.json({}, { status: 200 }),
);
const clean = handleProcess();
const hive = createHive({
enabled: true,
@ -137,12 +108,15 @@ describe('GraphQL Yoga', () => {
token: 'my-token',
agent: {
maxRetries: 0,
sendInterval: 100,
sendInterval: 10,
timeout: 50,
__testing: {
fetch: fetchSpy,
},
},
reporting: false,
usage: {
endpoint: 'http://yoga.localhost/usage',
endpoint: 'http://yoga.localhost:4200/usage',
},
});
@ -155,42 +129,31 @@ describe('GraphQL Yoga', () => {
logging: false,
});
const server = createServer(yoga);
await yoga.fetch(`http://localhost/graphql`, {
method: 'POST',
body: JSON.stringify({
query: /* GraphQL */ `
{
hello
}
`,
}),
headers: {
'content-type': 'application/json',
'x-graphql-client-name': 'vitest',
'x-graphql-client-version': '1.0.0',
},
});
async function stop() {
await new Promise(resolve => server.close(resolve));
await hive.dispose();
}
await new Promise<void>(resolve => server.listen(0, resolve));
const port = (server.address() as AddressInfo).port;
await axios
.post(
`http://localhost:${port}/graphql`,
{
query: /* GraphQL */ `
{
hello
}
`,
},
{
headers: {
'x-graphql-client-name': 'vitest',
'x-graphql-client-version': '1.0.0',
},
},
)
.catch(async error => {
await stop();
return Promise.reject(error);
});
await waitFor(300);
await stop();
await waitFor(50);
await hive.dispose();
clean();
usageMock.done();
expect(fetchSpy).toHaveBeenCalledWith(
'http://yoga.localhost:4200/usage',
expect.objectContaining({
body: expect.stringContaining('"client":{"name":"vitest","version":"1.0.0"}'),
}),
);
}, 1_000);
});
@ -211,7 +174,7 @@ describe('Apollo Server', () => {
token: 'my-token',
agent: {
maxRetries: 0,
sendInterval: 100,
sendInterval: 10,
timeout: 50,
logger,
},
@ -234,7 +197,7 @@ describe('Apollo Server', () => {
}
`,
});
await waitFor(300);
await waitFor(50);
await apollo.stop();
clean();
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info]'));
@ -244,28 +207,10 @@ describe('Apollo Server', () => {
test('should capture client name and version headers', async () => {
const clean = handleProcess();
const usageMock = nock('http://apollo.localhost')
.post(
'/usage',
(body: {
operations: [
{
metadata: {
client?: {
name: string;
version: string;
};
};
},
];
}) => {
return (
body.operations[0].metadata.client?.name === 'vitest' &&
body.operations[0].metadata.client?.version === '1.0.0'
);
},
)
.reply(200);
const fetchSpy = vi.fn<[RequestInfo | URL, options: RequestInit | undefined]>(async () =>
Response.json({}, { status: 200 }),
);
const apollo = new ApolloServer({
typeDefs,
resolvers,
@ -276,12 +221,15 @@ describe('Apollo Server', () => {
token: 'my-token',
agent: {
maxRetries: 0,
sendInterval: 100,
sendInterval: 10,
timeout: 50,
__testing: {
fetch: fetchSpy,
},
},
reporting: false,
usage: {
endpoint: 'http://apollo.localhost/usage',
endpoint: 'http://apollo.localhost:4200/usage',
},
}),
],
@ -293,26 +241,30 @@ describe('Apollo Server', () => {
},
});
await axios.post(
'http://localhost:4000/graphql',
{
await fetch('http://localhost:4000/graphql', {
method: 'POST',
body: JSON.stringify({
query: /* GraphQL */ `
{
hello
}
`,
}),
headers: {
'content-type': 'application/json',
'x-graphql-client-name': 'vitest',
'x-graphql-client-version': '1.0.0',
},
{
headers: {
'x-graphql-client-name': 'vitest',
'x-graphql-client-version': '1.0.0',
},
},
);
});
await waitFor(300);
await waitFor(50);
await apollo.stop();
expect(fetchSpy).toHaveBeenCalledWith(
'http://apollo.localhost:4200/usage',
expect.objectContaining({
body: expect.stringContaining('"client":{"name":"vitest","version":"1.0.0"}'),
}),
);
clean();
usageMock.done();
}, 1_000);
});

View file

@ -29,8 +29,10 @@ test('should not leak the exception', async () => {
enabled: true,
debug: true,
agent: {
timeout: 500,
timeout: 50,
maxRetries: 1,
sendInterval: 10,
minTimeout: 10,
logger,
},
token: 'Token',
@ -49,7 +51,7 @@ test('should not leak the exception', async () => {
`),
});
await waitFor(2000);
await waitFor(50);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
@ -106,7 +108,9 @@ test('should send data to Hive', async () => {
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
minTimeout: 10,
sendInterval: 10,
maxRetries: 0,
logger,
},
token,
@ -130,7 +134,7 @@ test('should send data to Hive', async () => {
`),
});
await waitFor(2000);
await waitFor(50);
await hive.dispose();
http.done();
@ -187,7 +191,9 @@ test('should send data to Hive (deprecated endpoint)', async () => {
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
minTimeout: 10,
sendInterval: 10,
maxRetries: 0,
logger,
},
token,
@ -208,7 +214,7 @@ test('should send data to Hive (deprecated endpoint)', async () => {
`),
});
await waitFor(2000);
await waitFor(50);
await hive.dispose();
http.done();
@ -263,7 +269,9 @@ test('should send data to app.graphql-hive.com/graphql by default', async () =>
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
minTimeout: 10,
sendInterval: 10,
maxRetries: 0,
logger,
},
token,
@ -281,7 +289,7 @@ test('should send data to app.graphql-hive.com/graphql by default', async () =>
`),
});
await waitFor(2000);
await waitFor(50);
await hive.dispose();
http.done();
@ -337,9 +345,9 @@ test('should send data to Hive immediately', async () => {
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
maxRetries: 0,
logger,
sendInterval: 200,
sendInterval: 100,
},
token,
reporting: {
@ -376,7 +384,7 @@ test('should send data to Hive immediately', async () => {
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
await waitFor(400);
await waitFor(100);
expect(logger.info).toHaveBeenCalledTimes(4);
await hive.dispose();
@ -400,7 +408,9 @@ test('should send original schema of a federated (v1) service', async () => {
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
maxRetries: 0,
minTimeout: 10,
sendInterval: 10,
logger,
},
token,
@ -413,7 +423,6 @@ test('should send original schema of a federated (v1) service', async () => {
},
});
let body: any = {};
const http = nock('http://localhost')
.post('/200')
.matchHeader('Authorization', `Bearer ${token}`)
@ -421,8 +430,13 @@ test('should send original schema of a federated (v1) service', async () => {
.matchHeader('graphql-client-name', headers['graphql-client-name'])
.matchHeader('graphql-client-version', headers['graphql-client-version'])
.once()
.reply((_, _body) => {
body = _body;
.reply((_, body: any) => {
expect(body.variables.input.sdl).toBe(`type Query{bar:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
return [200];
});
@ -438,13 +452,6 @@ test('should send original schema of a federated (v1) service', async () => {
await hive.dispose();
http.done();
expect(body.variables.input.sdl).toBe(`type Query{bar:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
});
test('should send original schema of a federated (v2) service', async () => {
@ -464,7 +471,9 @@ test('should send original schema of a federated (v2) service', async () => {
debug: true,
agent: {
timeout: 500,
maxRetries: 1,
sendInterval: 10,
minTimeout: 10,
maxRetries: 0,
logger,
},
token,
@ -477,7 +486,6 @@ test('should send original schema of a federated (v2) service', async () => {
},
});
let body: any = {};
const http = nock('http://localhost')
.post('/200')
.matchHeader('Authorization', `Bearer ${token}`)
@ -485,8 +493,13 @@ test('should send original schema of a federated (v2) service', async () => {
.matchHeader('graphql-client-name', headers['graphql-client-name'])
.matchHeader('graphql-client-version', headers['graphql-client-version'])
.once()
.reply((_, _body) => {
body = _body;
.reply((_, body: any) => {
expect(body.variables.input.sdl).toBe(`type Query{bar:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
return [200];
});
@ -502,13 +515,6 @@ test('should send original schema of a federated (v2) service', async () => {
await hive.dispose();
http.done();
expect(body.variables.input.sdl).toBe(`type Query{bar:String}`);
expect(body.variables.input.author).toBe(author);
expect(body.variables.input.commit).toBe(commit);
expect(body.variables.input.service).toBe(serviceName);
expect(body.variables.input.url).toBe(serviceUrl);
expect(body.variables.input.force).toBe(true);
});
test('should display SchemaPublishMissingServiceError', async () => {
@ -634,3 +640,60 @@ test('should display SchemaPublishMissingUrlError', async () => {
`[hive][reporting] Failed to report schema: Service url is not defined`,
);
});
test('retry on non-200', async () => {
const logSpy = vi.fn();
const logger = {
error: logSpy,
info: logSpy,
};
const token = 'Token';
const fetchSpy = vi.fn(async (_url: RequestInfo | URL, _init?: RequestInit) => {
return new Response('No no no', { status: 500, statusText: 'Internal server error' });
});
const hive = createHive({
enabled: true,
debug: true,
printTokenInfo: false,
agent: {
logger,
timeout: 10,
minTimeout: 10,
sendInterval: 10,
maxRetries: 1,
__testing: {
fetch: fetchSpy,
},
},
token,
reporting: {
author: 'Test',
commit: 'Commit',
endpoint: 'http://localhost/registry',
},
});
hive.reportSchema({
schema: buildSchema(/* GraphQL */ `
type Query {
foo: String
}
`),
});
await waitFor(50);
await hive.dispose();
expect(logSpy).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 1)');
expect(logSpy).toHaveBeenCalledWith(
expect.stringContaining(`[hive][reporting] Attempt 1 failed`),
);
expect(logSpy).toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 2)');
expect(logSpy).toHaveBeenCalledWith(
expect.stringContaining(`[hive][reporting] Attempt 2 failed`),
);
expect(logSpy).not.toHaveBeenCalledWith('[hive][reporting] Sending (queue 1) (attempt 3)');
});

View file

@ -91,7 +91,8 @@ test('collect fields', async () => {
schema,
max: 1,
});
const info = collect(op, {}).value;
const info$ = await collect(op, {});
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -117,7 +118,8 @@ test('collect input object types', async () => {
schema,
max: 1,
});
const info = collect(op, {}).value;
const info$ = await collect(op, {});
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -143,7 +145,7 @@ test('collect enums and scalars as inputs', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($limit: Int!, $type: ProjectType!) {
projects(filter: { pagination: { limit: $limit }, type: $type }) {
@ -152,7 +154,8 @@ test('collect enums and scalars as inputs', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -176,7 +179,7 @@ test('collect scalars as hard-coded inputs', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
{
projects(filter: { pagination: { limit: 20 } }) {
@ -185,7 +188,8 @@ test('collect scalars as hard-coded inputs', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -204,7 +208,7 @@ test('collect enum values from object fields', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($limit: Int!) {
projects(filter: { pagination: { limit: $limit }, type: FEDERATION }) {
@ -213,7 +217,8 @@ test('collect enum values from object fields', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -234,7 +239,7 @@ test('collect enum values from arguments', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects {
projectsByType(type: FEDERATION) {
@ -243,7 +248,8 @@ test('collect enum values from arguments', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -260,7 +266,7 @@ test('collect arguments', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($limit: Int!, $type: ProjectType!) {
projects(filter: { pagination: { limit: $limit }, type: $type }) {
@ -269,7 +275,8 @@ test('collect arguments', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -293,7 +300,7 @@ test('skips argument directives', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($limit: Int!, $type: ProjectType!, $includeName: Boolean!) {
projects(filter: { pagination: { limit: $limit }, type: $type }) {
@ -311,7 +318,8 @@ test('skips argument directives', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -337,7 +345,7 @@ test('collect used-only input fields', async () => {
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($limit: Int!, $type: ProjectType!) {
projects(filter: { pagination: { limit: $limit }, type: $type }) {
@ -346,7 +354,8 @@ test('collect used-only input fields', async () => {
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -370,7 +379,7 @@ test('collect all input fields when `processVariables` has not been passed and i
schema,
max: 1,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($pagination: PaginationInput!, $type: ProjectType!) {
projects(filter: { pagination: $pagination, type: $type }) {
@ -379,7 +388,8 @@ test('collect all input fields when `processVariables` has not been passed and i
}
`),
{},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -411,14 +421,14 @@ test('should get a cache hit when document is the same but variables are differe
}
}
`);
const first = collect(doc, {
const first = await collect(doc, {
pagination: {
limit: 1,
},
type: 'STITCHING',
});
const second = collect(doc, {
const second = await collect(doc, {
pagination: {
offset: 2,
},
@ -442,21 +452,21 @@ test('(processVariables: true) should get a cache miss when document is the same
}
}
`);
const first = collect(doc, {
const first = await collect(doc, {
pagination: {
limit: 1,
},
type: 'STITCHING',
});
const second = collect(doc, {
const second = await collect(doc, {
pagination: {
offset: 2,
},
type: 'STITCHING',
});
const third = collect(doc, {
const third = await collect(doc, {
pagination: {
offset: 2,
},
@ -474,7 +484,7 @@ test('(processVariables: true) collect used-only input fields', async () => {
max: 1,
processVariables: true,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($pagination: PaginationInput!, $type: ProjectType!) {
projects(filter: { pagination: $pagination, type: $type }) {
@ -488,7 +498,8 @@ test('(processVariables: true) collect used-only input fields', async () => {
},
type: 'STITCHING',
},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -513,7 +524,7 @@ test('(processVariables: true) should collect input object without fields when c
max: 1,
processVariables: true,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($pagination: PaginationInput, $type: ProjectType!) {
projects(filter: { pagination: $pagination, type: $type }) {
@ -524,7 +535,8 @@ test('(processVariables: true) should collect input object without fields when c
{
type: 'STITCHING',
},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[
@ -548,7 +560,7 @@ test('(processVariables: true) collect used-only input type fields from an array
max: 1,
processVariables: true,
});
const info = collect(
const info$ = await collect(
parse(/* GraphQL */ `
query getProjects($filter: FilterInput) {
projects(filter: $filter) {
@ -572,7 +584,8 @@ test('(processVariables: true) collect used-only input type fields from an array
},
},
},
).value;
);
const info = await info$.value;
expect(info.fields).toMatchInlineSnapshot(`
[

View file

@ -141,6 +141,7 @@ test('should send data to Hive', async () => {
agent: {
timeout: 500,
maxRetries: 0,
sendInterval: 10,
logger,
},
token,
@ -154,8 +155,8 @@ test('should send data to Hive', async () => {
const collect = hive.collectUsage();
await waitFor(2000);
collect(
await waitFor(20);
await collect(
{
schema,
document: op,
@ -164,7 +165,7 @@ test('should send data to Hive', async () => {
{},
);
await hive.dispose();
await waitFor(1000);
await waitFor(30);
http.done();
expect(logger.error).not.toHaveBeenCalled();
@ -212,8 +213,8 @@ test('should send data to Hive', async () => {
expect(operation.operationMapKey).toEqual(key);
expect(operation.timestamp).toEqual(expect.any(Number));
// execution
expect(operation.execution.duration).toBeGreaterThanOrEqual(1500 * 1_000_000); // >=1500ms in microseconds
expect(operation.execution.duration).toBeLessThan(3000 * 1_000_000); // <3000ms
expect(operation.execution.duration).toBeGreaterThanOrEqual(18 * 1_000_000); // >=18ms in microseconds
expect(operation.execution.duration).toBeLessThan(25 * 1_000_000); // <25ms
expect(operation.execution.errorsTotal).toBe(0);
expect(operation.execution.ok).toBe(true);
});
@ -249,6 +250,7 @@ test('should send data to Hive (deprecated endpoint)', async () => {
agent: {
timeout: 500,
maxRetries: 0,
sendInterval: 10,
logger,
},
token,
@ -259,8 +261,8 @@ test('should send data to Hive (deprecated endpoint)', async () => {
const collect = hive.collectUsage();
await waitFor(2000);
collect(
await waitFor(20);
await collect(
{
schema,
document: op,
@ -269,7 +271,7 @@ test('should send data to Hive (deprecated endpoint)', async () => {
{},
);
await hive.dispose();
await waitFor(1000);
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
@ -317,8 +319,8 @@ test('should send data to Hive (deprecated endpoint)', async () => {
expect(operation.operationMapKey).toEqual(key);
expect(operation.timestamp).toEqual(expect.any(Number));
// execution
expect(operation.execution.duration).toBeGreaterThanOrEqual(1500 * 1_000_000); // >=1500ms in microseconds
expect(operation.execution.duration).toBeLessThan(3000 * 1_000_000); // <3000ms
expect(operation.execution.duration).toBeGreaterThanOrEqual(18 * 1_000_000); // >=18ms in microseconds
expect(operation.execution.duration).toBeLessThan(25 * 1_000_000); // <25ms
expect(operation.execution.errorsTotal).toBe(0);
expect(operation.execution.ok).toBe(true);
});
@ -335,6 +337,8 @@ test('should not leak the exception', async () => {
agent: {
timeout: 500,
maxRetries: 1,
sendInterval: 10,
minTimeout: 10,
logger,
},
token: 'Token',
@ -343,7 +347,7 @@ test('should not leak the exception', async () => {
},
});
hive.collectUsage()(
await hive.collectUsage()(
{
schema,
document: op,
@ -352,7 +356,7 @@ test('should not leak the exception', async () => {
{},
);
await waitFor(1000);
await waitFor(50);
await hive.dispose();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
@ -388,12 +392,14 @@ test('sendImmediately should not stop the schedule', async () => {
const hive = createHive({
enabled: true,
debug: true,
printTokenInfo: false,
agent: {
timeout: 500,
maxRetries: 0,
maxSize: 2,
minTimeout: 10,
logger,
sendInterval: 100,
sendInterval: 50,
},
token,
usage: {
@ -411,7 +417,7 @@ test('sendImmediately should not stop the schedule', async () => {
expect(logger.info).toHaveBeenCalledTimes(0);
collect(
await collect(
{
schema,
document: op,
@ -419,40 +425,39 @@ test('sendImmediately should not stop the schedule', async () => {
},
{},
);
await waitFor(200);
// Because maxSize is 2 and sendInterval is 100ms
await waitFor(120);
// Because maxSize is 2 and sendInterval is 50ms (+120ms buffer)
// the scheduled send task should be done by now
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
// since we sent only 1 element, the buffer was not full,
// so we should not see the following log:
expect(logger.info).not.toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
expect(logger.info).toHaveBeenCalledTimes(2);
// Now we will check the maxSize
// We run collect three times
collect(args, {});
collect(args, {});
// Now we will hit the maxSize
// We run collect two times
await Promise.all([collect(args, {}), collect(args, {})]);
await waitFor(1);
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledTimes(4);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 2) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending immediately`);
await waitFor(1); // we run setImmediate under the hood
// we run setImmediate under the hood
// It should be sent already
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.info).toHaveBeenCalledTimes(4);
await waitFor(50);
await waitFor(100);
expect(logger.info).toHaveBeenCalledTimes(5);
// Let's check if the scheduled send task is still running
collect(args, {});
await waitFor(200);
await collect(args, {});
await waitFor(30);
expect(logger.error).not.toHaveBeenCalled();
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sending (queue 1) (attempt 1)`);
expect(logger.info).toHaveBeenCalledWith(`[hive][usage] Sent!`);
expect(logger.info).toHaveBeenCalledTimes(7);
await hive.dispose();
await waitFor(1000);
http.done();
});
@ -484,8 +489,10 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy
const hive = createHive({
enabled: true,
debug: true,
printTokenInfo: false,
agent: {
timeout: 500,
sendInterval: 10,
maxRetries: 0,
logger,
},
@ -510,35 +517,36 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy
const collect = hive.collectUsage();
await waitFor(2000);
collect(
{
schema,
document: op,
operationName: 'deleteProject',
},
{},
);
// different query
collect(
{
schema,
document: op2,
operationName: 'getProject',
},
{},
);
// duplicated call
collect(
{
schema,
document: op,
operationName: 'deleteProject',
},
{},
);
await Promise.all([
collect(
{
schema,
document: op,
operationName: 'deleteProject',
},
{},
),
// different query
collect(
{
schema,
document: op2,
operationName: 'getProject',
},
{},
),
// duplicated call
collect(
{
schema,
document: op,
operationName: 'deleteProject',
},
{},
),
]);
await hive.dispose();
await waitFor(1000);
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
@ -590,6 +598,7 @@ test('should not send excluded operation name data to Hive', async () => {
agent: {
timeout: 500,
maxRetries: 0,
sendInterval: 10,
logger,
},
token,
@ -604,42 +613,43 @@ test('should not send excluded operation name data to Hive', async () => {
});
const collect = hive.collectUsage();
await waitFor(2000);
collect(
{
schema,
document: op,
operationName: 'deleteProjectExcludeThis',
},
{},
);
collect(
{
schema,
document: op,
operationName: 'deleteProjectShouldntBeIncluded',
},
{},
);
collect(
{
schema,
document: op,
operationName: 'deleteProject',
},
{},
);
collect(
{
schema,
document: op2,
operationName: 'getProject',
},
{},
);
await waitFor(20);
// Run all four usage collections in parallel and wait for each of them.
// NOTE: each collect(...) call must be its own array element. The previous
// form wrapped them in one parenthesized comma expression, which evaluates
// to only the LAST promise — the first three were silently not awaited.
await Promise.all([
  collect(
    {
      schema,
      document: op,
      operationName: 'deleteProjectExcludeThis',
    },
    {},
  ),
  collect(
    {
      schema,
      document: op,
      operationName: 'deleteProjectShouldntBeIncluded',
    },
    {},
  ),
  collect(
    {
      schema,
      document: op,
      operationName: 'deleteProject',
    },
    {},
  ),
  collect(
    {
      schema,
      document: op2,
      operationName: 'getProject',
    },
    {},
  ),
]);
await hive.dispose();
await waitFor(1000);
await waitFor(50);
http.done();
expect(logger.error).not.toHaveBeenCalled();
@ -687,8 +697,63 @@ test('should not send excluded operation name data to Hive', async () => {
expect(operation.operationMapKey).toEqual(key);
expect(operation.timestamp).toEqual(expect.any(Number));
// execution
expect(operation.execution.duration).toBeGreaterThanOrEqual(1500 * 1_000_000); // >=1500ms in microseconds
expect(operation.execution.duration).toBeLessThan(3000 * 1_000_000); // <3000ms
expect(operation.execution.duration).toBeGreaterThanOrEqual(18 * 1_000_000); // >=18ms in microseconds
expect(operation.execution.duration).toBeLessThan(25 * 1_000_000); // <25ms
expect(operation.execution.errorsTotal).toBe(0);
expect(operation.execution.ok).toBe(true);
});
// Verifies the usage agent's retry behavior: with maxRetries = 1, a non-200
// response must trigger exactly one retry (two attempts total) and no more.
test('retry on non-200', async () => {
  const log = vi.fn();
  const logger = {
    error: log,
    info: log,
  };

  const token = 'Token';

  // Every request fails with a 500, so each send attempt is retried.
  const mockFetch = vi.fn(async (_url: RequestInfo | URL, _init?: RequestInit) => {
    return new Response('No no no', { status: 500, statusText: 'Internal server error' });
  });

  const hive = createHive({
    enabled: true,
    debug: true,
    printTokenInfo: false,
    agent: {
      logger,
      timeout: 10,
      minTimeout: 10,
      sendInterval: 10,
      maxRetries: 1,
      __testing: {
        fetch: mockFetch,
      },
    },
    token,
    usage: {
      endpoint: 'http://localhost/200',
    },
    reporting: false,
  });

  // Collect a single operation so the agent has something to send.
  const collect = hive.collectUsage();
  await collect(
    {
      schema,
      document: op,
      operationName: 'asd',
    },
    {},
  );

  await waitFor(50);
  await hive.dispose();

  // Attempt 1 fails, is retried as attempt 2, which also fails…
  expect(log).toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 1)');
  expect(log).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Attempt 1 failed`));
  expect(log).toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 2)');
  expect(log).toHaveBeenCalledWith(expect.stringContaining(`[hive][usage] Attempt 2 failed`));
  // …and no third attempt is ever made (maxRetries exhausted).
  expect(log).not.toHaveBeenCalledWith('[hive][usage] Sending (queue 1) (attempt 3)');
});

View file

@ -1,43 +1,43 @@
import { cacheDocumentKey } from '../src/internal/utils';
test('produce identical hash for the same document and the same keys but different values in variables', () => {
const left = cacheDocumentKey('doc', { a: true });
const right = cacheDocumentKey('doc', { a: false });
test('produce identical hash for the same document and the same keys but different values in variables', async () => {
const left = await cacheDocumentKey('doc', { a: true });
const right = await cacheDocumentKey('doc', { a: false });
expect(left).toEqual(right);
});
test('produce identical hash for the same document but with an empty array', () => {
const left = cacheDocumentKey('doc', { a: [] });
const right = cacheDocumentKey('doc', { a: [] });
test('produce identical hash for the same document but with an empty array', async () => {
const left = await cacheDocumentKey('doc', { a: [] });
const right = await cacheDocumentKey('doc', { a: [] });
expect(left).toEqual(right);
});
test('produce identical hash for the same document but with and without an empty array', () => {
const left = cacheDocumentKey('doc', { a: [] });
const right = cacheDocumentKey('doc', { a: null });
test('produce identical hash for the same document but with and without an empty array', async () => {
const left = await cacheDocumentKey('doc', { a: [] });
const right = await cacheDocumentKey('doc', { a: null });
expect(left).toEqual(right);
});
test('produce identical hash for the same document but with an array of primitive values', () => {
const left = cacheDocumentKey('doc', { a: [1, 2, 3] });
const right = cacheDocumentKey('doc', { a: [4, 5, 6] });
test('produce identical hash for the same document but with an array of primitive values', async () => {
const left = await cacheDocumentKey('doc', { a: [1, 2, 3] });
const right = await cacheDocumentKey('doc', { a: [4, 5, 6] });
expect(left).toEqual(right);
});
test('produce different hash for the same document but with different keys in variables', () => {
const left = cacheDocumentKey('doc', { a: true });
const right = cacheDocumentKey('doc', { b: true });
test('produce different hash for the same document but with different keys in variables', async () => {
const left = await cacheDocumentKey('doc', { a: true });
const right = await cacheDocumentKey('doc', { b: true });
expect(left).not.toEqual(right);
});
test('produce different hash for the same document but with and without variables', () => {
const left = cacheDocumentKey('doc', { a: true });
const right = cacheDocumentKey('doc', null);
test('produce different hash for the same document but with and without variables', async () => {
const left = await cacheDocumentKey('doc', { a: true });
const right = await cacheDocumentKey('doc', null);
expect(left).not.toEqual(right);
});
test('produce different hash for the same document but with and without variables (empty object)', () => {
const left = cacheDocumentKey('doc', { a: true });
const right = cacheDocumentKey('doc', {});
test('produce different hash for the same document but with and without variables (empty object)', async () => {
const left = await cacheDocumentKey('doc', { a: true });
const right = await cacheDocumentKey('doc', {});
expect(left).not.toEqual(right);
});

View file

@ -1,6 +1,5 @@
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */
import { createServer } from 'node:http';
import axios from 'axios';
import { GraphQLError } from 'graphql';
import { createClient } from 'graphql-ws';
import { useServer as useWSServer } from 'graphql-ws/lib/use/ws';
@ -22,7 +21,7 @@ it('reports usage', async ({ expect }) => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
0063ba7bf2695b896c464057aef29cdc: {
f25063b60ab942d0c0d14cdd9cd3172de2e7ebc4: {
fields: [
Query.hi,
],
@ -102,17 +101,14 @@ it('reports usage', async ({ expect }) => {
graphqlScope.done();
});
it('reports usage with response cache', async ({ expect }) => {
axios.interceptors.request.use(config => {
return config;
});
test('reports usage with response cache', async ({ expect }) => {
let usageCount = 0;
const graphqlScope = nock('http://localhost')
.post('/usage', body => {
usageCount++;
expect(body.map).toMatchInlineSnapshot(`
{
0063ba7bf2695b896c464057aef29cdc: {
f25063b60ab942d0c0d14cdd9cd3172de2e7ebc4: {
fields: [
Query.hi,
],
@ -236,37 +232,26 @@ it('does not report usage for operation that does not pass validation', async ({
],
});
await new Promise<void>((resolve, reject) => {
setTimeout(() => {
resolve();
}, 1000);
nock.emitter.once('no match', (req: any) => {
reject(new Error(`Unexpected request was sent to ${req.path}`));
});
(async () => {
const res = await yoga.fetch('http://localhost/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: /* GraphQL */ `
{
__schema {
types {
name
}
}
const res = await yoga.fetch('http://localhost/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: /* GraphQL */ `
{
__schema {
types {
name
}
`,
}),
});
expect(res.status).toBe(200);
expect(await res.text()).toContain('GraphQL introspection has been disabled');
})().catch(reject);
}
}
`,
}),
});
expect(res.status).toBe(200);
expect(await res.text()).toContain('GraphQL introspection has been disabled');
expect(callback).not.toHaveBeenCalled();
});
@ -313,36 +298,21 @@ it('does not report usage if context creating raises an error', async ({ expect
],
});
// eslint-disable-next-line no-async-promise-executor
await new Promise<void>((resolve, reject) => {
nock.emitter.once('no match', (req: any) => {
reject(new Error(`Unexpected request was sent to ${req.path}`));
});
setTimeout(() => {
resolve();
}, 1000);
(async () => {
const res = await yoga.fetch('http://localhost/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: /* GraphQL */ `
{
hi
}
`,
}),
});
expect(res.status).toBe(200);
expect(await res.text()).toMatchInlineSnapshot(
`{"errors":[{"message":"Not authenticated."}]}`,
);
})().catch(reject);
const res = await yoga.fetch('http://localhost/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: /* GraphQL */ `
{
hi
}
`,
}),
});
expect(res.status).toBe(200);
expect(await res.text()).toMatchInlineSnapshot(`{"errors":[{"message":"Not authenticated."}]}`);
expect(callback).not.toHaveBeenCalled();
});
@ -353,7 +323,7 @@ describe('subscription usage reporting', () => {
const graphqlScope = nock('http://localhost')
.post('/usage', body => {
expect(body.map).toEqual({
c6cc5505189a301dcadc408034c21a2d: {
'74cf03b67c3846231d04927b02e1fca45e727223': {
fields: ['Subscription.hi'],
operation: 'subscription{hi}',
operationName: 'anonymous',
@ -363,7 +333,7 @@ describe('subscription usage reporting', () => {
expect(body.operations).toBeUndefined();
expect(body.subscriptionOperations).toMatchObject([
{
operationMapKey: 'c6cc5505189a301dcadc408034c21a2d',
operationMapKey: '74cf03b67c3846231d04927b02e1fca45e727223',
metadata: {
client: {
name: 'brrr',
@ -468,7 +438,7 @@ describe('subscription usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
c6cc5505189a301dcadc408034c21a2d: {
74cf03b67c3846231d04927b02e1fca45e727223: {
fields: [
Subscription.hi,
],
@ -481,7 +451,7 @@ describe('subscription usage reporting', () => {
expect(body).toMatchObject({
subscriptionOperations: [
{
operationMapKey: 'c6cc5505189a301dcadc408034c21a2d',
operationMapKey: '74cf03b67c3846231d04927b02e1fca45e727223',
metadata: {
client: {
name: 'brrr',
@ -591,7 +561,7 @@ describe('subscription usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
c6cc5505189a301dcadc408034c21a2d: {
74cf03b67c3846231d04927b02e1fca45e727223: {
fields: [
Subscription.hi,
],
@ -712,7 +682,7 @@ describe('subscription usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
c6cc5505189a301dcadc408034c21a2d: {
74cf03b67c3846231d04927b02e1fca45e727223: {
fields: [
Subscription.hi,
],
@ -830,7 +800,7 @@ describe('subscription usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
c6cc5505189a301dcadc408034c21a2d: {
74cf03b67c3846231d04927b02e1fca45e727223: {
fields: [
Subscription.hi,
],
@ -1002,7 +972,7 @@ describe('subscription usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
c6cc5505189a301dcadc408034c21a2d: {
74cf03b67c3846231d04927b02e1fca45e727223: {
fields: [
Subscription.hi,
],
@ -1175,7 +1145,7 @@ describe('incremental delivery usage reporting', () => {
.post('/usage', body => {
expect(body.map).toMatchInlineSnapshot(`
{
4c76cedb3f9db3810a8080b299e93f1a: {
b78b2367025b1253b17f5362d5f0b4d5b27c4a08: {
fields: [
Query.greetings,
],

View file

@ -15,7 +15,7 @@
},
"license": "MIT",
"engines": {
"node": ">=14.0.0"
"node": ">=16.0.0"
},
"main": "dist/cjs/index.js",
"module": "dist/esm/index.js",

View file

@ -1,5 +0,0 @@
import { createHash } from 'node:crypto';

/**
 * Hashes a GraphQL operation string into its hex-encoded MD5 digest.
 * The digest serves as a stable key for the operation map / cache.
 *
 * @param operation - the printed GraphQL operation text
 * @returns 32-character lowercase hex MD5 digest
 */
export function hashOperation(operation: string) {
  const hasher = createHash('md5');
  hasher.update(operation, 'utf8');
  return hasher.digest('hex');
}

View file

@ -1,2 +1 @@
export * from './normalize/operation.js';
export * from './hash.js';

View file

@ -14,7 +14,7 @@
"devDependencies": {
"@aws-sdk/client-s3": "3.556.0",
"@aws-sdk/s3-request-presigner": "3.556.0",
"@graphql-hive/core": "0.2.4",
"@graphql-hive/core": "workspace:*",
"@graphql-inspector/core": "5.1.0-alpha-20231208113249-34700c8a",
"@hive/cdn-script": "workspace:*",
"@hive/emails": "workspace:*",

View file

@ -10,7 +10,7 @@
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@graphql-hive/core": "0.2.4",
"@graphql-hive/core": "workspace:*",
"@hive/service-common": "workspace:*",
"@hive/usage-common": "workspace:*",
"@sentry/node": "7.110.1",

View file

@ -64,6 +64,40 @@ server.listen(4000, () => {
})
```
#### Cloudflare Workers
To use Hive with Cloudflare Workers, you can use
[GraphQL Yoga](https://the-guild.dev/graphql/yoga-server) (as shown below) or use the generic
[Hive client](../api-reference/client#custom-integration) with your own GraphQL server
implementation.
Here's an example of how to use Hive with GraphQL Yoga on Cloudflare Workers:
```typescript
import { useYogaHive, createYogaHive } from '@graphql-hive/client'
import { createYoga } from 'graphql-yoga'
export default {
async fetch(request, env, ctx) {
const hive = createYogaHive({
enabled: true, // Enable/Disable Hive Client
token: 'YOUR-TOKEN',
usage: true // Collects schema usage based on operations
});
const yoga = createYoga({
plugins: [
useYogaHive(hive)
]
});
const response = await yoga.fetch(request, env, ctx);
ctx.waitUntil(hive.dispose());
return response;
}
}
```
##### Client Information
You can associate a client name and version with any operation reported to Hive, by sending the

View file

@ -417,12 +417,12 @@ importers:
'@graphql-tools/utils':
specifier: ^10.0.0
version: 10.0.0(graphql@16.8.1)
'@whatwg-node/fetch':
specifier: 0.9.17
version: 0.9.17
async-retry:
specifier: 1.3.3
version: 1.3.3
axios:
specifier: ^1.6.0
version: 1.6.2
tiny-lru:
specifier: 8.0.2
version: 8.0.2
@ -474,8 +474,8 @@ importers:
specifier: 5.2.0
version: 5.2.0(graphql@16.8.1)
nock:
specifier: 13.5.4
version: 13.5.4
specifier: 14.0.0-beta.5
version: 14.0.0-beta.5
vitest:
specifier: 1.5.0
version: 1.5.0(@types/node@20.12.7)
@ -584,7 +584,7 @@ importers:
specifier: 3.556.0
version: 3.556.0
'@graphql-hive/core':
specifier: 0.2.4
specifier: workspace:*
version: link:../../libraries/core/dist
'@graphql-inspector/core':
specifier: 5.1.0-alpha-20231208113249-34700c8a
@ -1455,7 +1455,7 @@ importers:
packages/services/usage-ingestor:
devDependencies:
'@graphql-hive/core':
specifier: 0.2.4
specifier: workspace:*
version: link:../../libraries/core/dist
'@hive/service-common':
specifier: workspace:*
@ -16113,16 +16113,6 @@ packages:
- debug
dev: true
/axios@1.6.2:
resolution: {integrity: sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==}
dependencies:
follow-redirects: 1.15.2
form-data: 4.0.0
proxy-from-env: 1.1.0
transitivePeerDependencies:
- debug
dev: false
/axios@1.6.5:
resolution: {integrity: sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==}
dependencies:
@ -20788,16 +20778,6 @@ packages:
resolution: {integrity: sha512-Rwix9pBtC1Nuy5wysTmKy+UjbDJpIfg8eHjw0rjZ1mX4GNLz1Bmd16uDpI3Gk1i70Fgcs8Csg2lPm8HULFg9DQ==}
dev: false
/follow-redirects@1.15.2:
resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
dev: false
/follow-redirects@1.15.4(debug@4.3.4):
resolution: {integrity: sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==}
engines: {node: '>=4.0'}
@ -20884,6 +20864,7 @@ packages:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
dev: true
/format@0.2.2:
resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
@ -26555,15 +26536,12 @@ packages:
tslib: 2.6.2
dev: true
/nock@13.5.4:
resolution: {integrity: sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==}
engines: {node: '>= 10.13'}
/nock@14.0.0-beta.5:
resolution: {integrity: sha512-u255tf4DYvyErTlPZA9uTfXghiZZy+NflUOFONPVKZ5tP0yaHwKig28zyFOLhu8y5YcCRC+V5vDk4HHileh2iw==}
engines: {node: '>= 18'}
dependencies:
debug: 4.3.4(supports-color@8.1.1)
json-stringify-safe: 5.0.1
propagate: 2.0.1
transitivePeerDependencies:
- supports-color
dev: true
/node-abi@3.52.0: