migrate to vitest (#921)

Co-authored-by: Kamil Kisiela <kamil.kisiela@gmail.com>
This commit is contained in:
Dimitri POSTOLOV 2023-02-10 11:11:23 +01:00 committed by GitHub
parent d0ef64142d
commit de7ba835e4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 880 additions and 2493 deletions

View file

@ -40,8 +40,6 @@ module.exports = {
'packages/web/app/src/graphql/index.ts',
'packages/libraries/cli/src/sdk.ts',
'packages/services/storage/src/db/types.ts',
'babel.config.cjs',
'jest.config.js',
],
parserOptions: {
ecmaVersion: 2020,

View file

@ -58,7 +58,7 @@ jobs:
publish: false
secrets: inherit
# Unit tests using Jest
# Unit tests using Vitest
unit-tests:
name: test
uses: ./.github/workflows/tests-unit.yaml

View file

@ -46,7 +46,7 @@ jobs:
with:
cmd: yq -i 'del(.services.*.volumes)' docker/docker-compose.community.yml
- name: get cpu count for jest
- name: get cpu count for vitest
id: cpu-cores
uses: SimenB/github-actions-cpu-cores@v1
@ -66,7 +66,7 @@ jobs:
env:
HIVE_DEBUG: 1
run: |
pnpm --filter integration-tests test:integration --max-workers ${{ steps.cpu-cores.outputs.count }}
VITEST_MAX_THREADS=${{ steps.cpu-cores.outputs.count }} pnpm --filter integration-tests test:integration
- name: log dump
if: ${{ failure() }}

View file

@ -25,4 +25,5 @@ pnpm-lock.yaml
.changeset/
# temp volumes
.hive
.hive/
.hive-dev/

View file

@ -1,12 +0,0 @@
module.exports = {
presets: [
['@babel/preset-env', { targets: { node: process.versions.node.split('.')[0] } }],
'@babel/preset-typescript',
],
plugins: [
'babel-plugin-transform-typescript-metadata',
['@babel/plugin-proposal-decorators', { legacy: true }],
'babel-plugin-parameter-decorator',
'@babel/plugin-proposal-class-properties',
],
};

View file

@ -1,14 +1,14 @@
# Testing
## Unit tests
## Unit Tests
We are using Jest.
We are using Vitest.
Simply run `pnpm test` to run all the tests locally.
## Integration Tests
We are using Jest to test the following concerns:
We are using Vitest to test the following concerns:
1. Main application flows and integration of different services
2. Containerize execution of all services
@ -16,7 +16,7 @@ We are using Jest to test the following concerns:
Integration tests are based on pre-built Docker images, so you can run them in 2 modes:
#### Running from source code
#### Running from Source Code
To run integration tests locally, from the local source code, you need to build a valid Docker
image.
@ -28,7 +28,7 @@ To do so, follow these instructions:
4. Build source code: `pnpm build`
5. Set env vars:
```
```bash
export COMMIT_SHA="local"
export RELEASE="local"
export BRANCH_NAME="local"
@ -41,7 +41,7 @@ export DOCKER_TAG=":local"
7. Use Docker Compose to run the built containers (based on `community` compose file), along with
the extra containers:
```
```bash
export DOCKER_TAG=":local"
export DOCKER_REGISTRY=""
@ -50,7 +50,7 @@ docker compose -f ./docker/docker-compose.community.yml -f ./integration-tests/d
8. Run the tests: `pnpm --filter integration-tests test:integration`
#### Running from pre-built Docker image
#### Running from Pre-Built Docker Image
To run integration tests locally, from the pre-built Docker image, follow:
@ -62,7 +62,7 @@ To run integration tests locally, from the pre-build Docker image, follow:
is done successfully)
5. Set the needed env vars, and use Docker Compose to run all local services:
```
```bash
export DOCKER_REGISTRY="ghcr.io/kamilkisiela/graphql-hive/"
export DOCKER_TAG=":IMAGE_TAG_HERE"
@ -71,12 +71,12 @@ docker compose -f ./docker/docker-compose.community.yml -f ./integration-tests/d
6. Run the tests: `pnpm --filter integration-tests test:integration`
## e2e Tests
## E2E Tests
e2e Tests are based on Cypress, and match files that end with `.cy.ts`. The test flow runs from
a pre-built Docker image.
#### Running from source code
#### Running from Source Code
To run e2e tests locally, from the local source code, follow:
@ -87,7 +87,7 @@ To run e2e tests locally, from the local source code, follow:
4. Build source code: `pnpm build`
5. Set env vars:
```
```bash
export COMMIT_SHA="local"
export RELEASE="local"
export BRANCH_NAME="local"
@ -100,7 +100,7 @@ export DOCKER_TAG=":local"
`docker compose -f ./docker/docker-compose.community.yml --env-file ./integration-tests/.env up -d --wait`
8. Run Cypress: `pnpm test:e2e`
#### Running from pre-built Docker image
#### Running from Pre-Built Docker Image
To run integration tests locally, from the pre-built Docker image, follow:
@ -111,7 +111,7 @@ To run integration tests locally, from the pre-build Docker image, follow:
4. Build source code: `pnpm build`
5. Decide on the commit ID / Docker image tag you would like to use and set it as env var:
```
```bash
export DOCKER_REGISTRY="ghcr.io/kamilkisiela/graphql-hive/"
export DOCKER_TAG=":IMAGE_TAG_HERE"
```
@ -120,7 +120,7 @@ export DOCKER_TAG=":IMAGE_TAG_HERE"
`docker compose -f ./docker/docker-compose.community.yml --env-file ./integration-tests/.env up -d --wait`
7. Run Cypress: `pnpm test:e2e`
#### Docker Compose configuration
#### Docker Compose Configuration
Keep in mind that integration tests are running a combination of 2 Docker Compose files:
@ -137,7 +137,7 @@ Keep in mind that integration tests are running a combination of 2 Docker Compos
If you are having issues with running Docker images, follow these instructions:
1. Make sure you have latest Docker installed.
1. Make sure you have the latest Docker installed.
1. Make sure no containers are running (`docker ps` and then `docker stop CONTAINER_ID`).
1. Delete the local volume used for testing, it's located under `.hive` directory.
1. Try to run `docker system prune` to clean all the Docker images, containers, networks and caches.

View file

@ -1,26 +0,0 @@
import { readFileSync } from 'fs';
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { pathsToModuleNameMapper } from 'ts-jest';
const ROOT_DIR = dirname(fileURLToPath(import.meta.url));
const TSCONFIG = resolve(ROOT_DIR, 'tsconfig.json');
const tsconfig = JSON.parse(readFileSync(TSCONFIG, 'utf-8'));
export default {
preset: 'ts-jest',
extensionsToTreatAsEsm: ['.ts'],
testEnvironment: 'node',
rootDir: ROOT_DIR,
restoreMocks: true,
reporters: ['default'],
moduleNameMapper: {
...pathsToModuleNameMapper(tsconfig.compilerOptions.paths, {
prefix: `${ROOT_DIR}/`,
}),
'^(\\.{1,2}/.*)\\.js$': '$1',
},
testTimeout: 90_000,
setupFilesAfterEnv: ['dotenv/config'],
collectCoverage: false,
};

View file

@ -5,12 +5,11 @@
"private": true,
"scripts": {
"prepare:env": "cd ../ && pnpm build:libraries && pnpm build:services",
"test:integration": "jest"
"test:integration": "vitest ."
},
"dependencies": {
"@apollo/gateway": "2.3.1",
"@apollo/server": "4.3.3",
"@app/gql": "link:./testkit/gql",
"@aws-sdk/client-s3": "3.267.0",
"@esm2cjs/execa": "6.1.1-cjs.1",
"@graphql-hive/client": "workspace:*",
@ -26,8 +25,6 @@
"graphql": "16.6.0",
"human-id": "4.0.0",
"ioredis": "5.3.0",
"jest": "29.4.1",
"jest-expect-message": "1.1.3",
"slonik": "30.1.2",
"zod": "3.20.5"
},
@ -37,8 +34,7 @@
"@hive/server": "workspace:*",
"@types/dockerode": "3.3.14",
"@types/ioredis": "4.28.10",
"@types/jest": "29.4.0",
"ts-jest": "29.0.5",
"tslib": "2.5.0"
"tslib": "2.5.0",
"vitest": "0.28.4"
}
}

View file

@ -2,7 +2,6 @@ import { randomUUID } from 'node:crypto';
import { writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join, resolve } from 'node:path';
import 'jest-expect-message';
import { execaCommand } from '@esm2cjs/execa';
import { fetchLatestSchema, fetchLatestValidSchema } from './flow';
import { getServiceHost } from './utils';

View file

@ -1,5 +1,5 @@
import Docker from 'dockerode';
import humanId from 'human-id';
import { humanId } from 'human-id';
let docker: Docker | null = null;

View file

@ -115,47 +115,50 @@ function runArtifactsCDNTests(
getServiceHost(runtime.service, runtime.port).then(v => `http://${v}${runtime.path}`);
describe(`Artifacts CDN ${name}`, () => {
test.concurrent('legacy cdn access key can be used for accessing artifacts', async () => {
const endpointBaseUrl = await getBaseEndpoint();
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { target, createToken } = await createProject(ProjectType.Single);
const token = await createToken({
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
});
test.concurrent(
'legacy cdn access key can be used for accessing artifacts',
async ({ expect }) => {
const endpointBaseUrl = await getBaseEndpoint();
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { target, createToken } = await createProject(ProjectType.Single);
const token = await createToken({
targetScopes: [TargetAccessScope.RegistryRead, TargetAccessScope.RegistryWrite],
});
await token
.publishSchema({
author: 'Kamil',
commit: 'abc123',
sdl: `type Query { ping: String }`,
})
.then(r => r.expectNoGraphQLErrors());
await token
.publishSchema({
author: 'Kamil',
commit: 'abc123',
sdl: `type Query { ping: String }`,
})
.then(r => r.expectNoGraphQLErrors());
// manually generate CDN access token for legacy support
const legacyToken = generateLegacyToken(target.id);
const legacyTokenHash = await bcrypt.hash(legacyToken, await bcrypt.genSalt(10));
await putS3Object(s3Client, 'artifacts', `cdn-legacy-keys/${target.id}`, legacyTokenHash);
// manually generate CDN access token for legacy support
const legacyToken = generateLegacyToken(target.id);
const legacyTokenHash = await bcrypt.hash(legacyToken, await bcrypt.genSalt(10));
await putS3Object(s3Client, 'artifacts', `cdn-legacy-keys/${target.id}`, legacyTokenHash);
const url = buildEndpointUrl(endpointBaseUrl, target.id, 'sdl');
const response = await fetch(url, {
method: 'GET',
headers: {
'x-hive-cdn-key': legacyToken,
},
});
const url = buildEndpointUrl(endpointBaseUrl, target.id, 'sdl');
const response = await fetch(url, {
method: 'GET',
headers: {
'x-hive-cdn-key': legacyToken,
},
});
expect(response.status).toEqual(200);
expect(await response.text()).toMatchInlineSnapshot(`
"type Query {
ping: String
}"
expect(response.status).toBe(200);
expect(await response.text()).toMatchInlineSnapshot(`
type Query {
ping: String
}
`);
});
},
);
test.concurrent(
'legacy deleting cdn access token from s3 revokes artifact cdn access',
async () => {
async ({ expect }) => {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { createToken, target } = await createProject(ProjectType.Single);
@ -186,12 +189,12 @@ function runArtifactsCDNTests(
'x-hive-cdn-key': legacyToken,
},
});
expect(response.status).toEqual(200);
expect(response.status).toBe(200);
expect(await response.text()).toMatchInlineSnapshot(`
"type Query {
ping: String
}"
`);
type Query {
ping: String
}
`);
await deleteS3Object(s3Client, 'artifacts', [`cdn-legacy-keys/${target.id}`]);
@ -202,7 +205,7 @@ function runArtifactsCDNTests(
'x-hive-cdn-key': legacyToken,
},
});
expect(response.status).toEqual(403);
expect(response.status).toBe(403);
},
);
@ -210,7 +213,7 @@ function runArtifactsCDNTests(
const endpointBaseUrl = await getBaseEndpoint();
const url = buildEndpointUrl(endpointBaseUrl, 'i-do-not-exist', 'sdl');
const response = await fetch(url, { method: 'GET' });
expect(response.status).toEqual(400);
expect(response.status).toBe(400);
expect(response.headers.get('content-type')).toContain('application/json');
expect(await response.json()).toEqual({
code: 'MISSING_AUTH_KEY',
@ -218,7 +221,7 @@ function runArtifactsCDNTests(
description:
'Please refer to the documentation for more details: https://docs.graphql-hive.com/features/registry-usage',
});
expect(response.headers.get('location')).toEqual(null);
expect(response.headers.get('location')).toBe(null);
});
test.concurrent('access invalid credentials', async () => {
@ -230,7 +233,7 @@ function runArtifactsCDNTests(
'x-hive-cdn-key': 'skrrtbrrrt',
},
});
expect(response.status).toEqual(403);
expect(response.status).toBe(403);
expect(response.headers.get('content-type')).toContain('application/json');
expect(await response.json()).toEqual({
code: 'INVALID_AUTH_KEY',
@ -239,10 +242,10 @@ function runArtifactsCDNTests(
description:
'Please refer to the documentation for more details: https://docs.graphql-hive.com/features/registry-usage',
});
expect(response.headers.get('location')).toEqual(null);
expect(response.headers.get('location')).toBe(null);
});
test.concurrent('access SDL artifact with valid credentials', async () => {
test.concurrent('access SDL artifact with valid credentials', async ({ expect }) => {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { createToken, target } = await createProject(ProjectType.Single);
@ -259,7 +262,7 @@ function runArtifactsCDNTests(
})
.then(r => r.expectNoGraphQLErrors());
expect(publishSchemaResult.schemaPublish.__typename).toEqual('SchemaPublishSuccess');
expect(publishSchemaResult.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const cdnAccessResult = await writeToken.createCdnAccess();
const endpointBaseUrl = await getBaseEndpoint();
const url = buildEndpointUrl(endpointBaseUrl, target.id, 'sdl');
@ -271,8 +274,8 @@ function runArtifactsCDNTests(
redirect: 'manual',
});
expect(response.status).toMatchInlineSnapshot(`302`);
expect(await response.text()).toMatchInlineSnapshot(`"Found."`);
expect(response.status).toBe(302);
expect(await response.text()).toBe('Found.');
expect(response.headers.get('location')).toBeDefined();
const artifactContents = await fetchS3ObjectArtifact(
@ -280,13 +283,13 @@ function runArtifactsCDNTests(
`artifact/${target.id}/sdl`,
);
expect(artifactContents.body).toMatchInlineSnapshot(`
"type Query {
type Query {
ping: String
}"
}
`);
});
test.concurrent('access services artifact with valid credentials', async () => {
test.concurrent('access services artifact with valid credentials', async ({ expect }) => {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { createToken, target } = await createProject(ProjectType.Federation);
@ -305,7 +308,7 @@ function runArtifactsCDNTests(
})
.then(r => r.expectNoGraphQLErrors());
expect(publishSchemaResult.schemaPublish.__typename).toEqual('SchemaPublishSuccess');
expect(publishSchemaResult.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// check if artifact exists in bucket
const artifactContents = await fetchS3ObjectArtifact(
@ -313,7 +316,7 @@ function runArtifactsCDNTests(
`artifact/${target.id}/services`,
);
expect(artifactContents.body).toMatchInlineSnapshot(
`"[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]"`,
'[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]',
);
const cdnAccessResult = await writeToken.createCdnAccess();
@ -327,27 +330,27 @@ function runArtifactsCDNTests(
redirect: 'manual',
});
expect(response.status).toMatchInlineSnapshot(`302`);
expect(await response.text()).toMatchInlineSnapshot(`"Found."`);
expect(response.status).toBe(302);
expect(await response.text()).toBe('Found.');
const locationHeader = response.headers.get('location');
expect(locationHeader).toBeDefined();
const locationUrl = new URL(locationHeader!);
expect(locationUrl.protocol).toEqual('http:');
expect(locationUrl.hostname).toEqual('localhost');
expect(locationUrl.port).toEqual('8083');
expect(locationUrl.protocol).toBe('http:');
expect(locationUrl.hostname).toBe('localhost');
expect(locationUrl.port).toBe('8083');
response = await fetch(locationHeader!, {
method: 'GET',
redirect: 'manual',
});
const body = await response.text();
expect(response.status).toEqual(200);
expect(response.status).toBe(200);
expect(body).toMatchInlineSnapshot(
`"[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]"`,
'[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]',
);
});
test.concurrent('access services artifact with if-none-match header', async () => {
test.concurrent('access services artifact with if-none-match header', async ({ expect }) => {
const { createOrg } = await initSeed().createOwner();
const { createProject } = await createOrg();
const { createToken, target } = await createProject(ProjectType.Federation);
@ -367,7 +370,7 @@ function runArtifactsCDNTests(
})
.then(r => r.expectNoGraphQLErrors());
expect(publishSchemaResult.schemaPublish.__typename).toEqual('SchemaPublishSuccess');
expect(publishSchemaResult.schemaPublish.__typename).toBe('SchemaPublishSuccess');
// check if artifact exists in bucket
const artifactContents = await fetchS3ObjectArtifact(
@ -375,7 +378,7 @@ function runArtifactsCDNTests(
`artifact/${target.id}/services`,
);
expect(artifactContents.body).toMatchInlineSnapshot(
`"[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]"`,
'[{"name":"ping","sdl":"type Query { ping: String }","url":"ping.com"}]',
);
const cdnAccessResult = await writeToken.createCdnAccess();
@ -390,7 +393,7 @@ function runArtifactsCDNTests(
redirect: 'manual',
});
expect(response.status).toMatchInlineSnapshot(`304`);
expect(response.status).toBe(304);
});
test.concurrent('access services artifact with ApolloGateway and ApolloServer', async () => {
@ -407,13 +410,13 @@ function runArtifactsCDNTests(
.publishSchema({
author: 'Kamil',
commit: 'abc123',
sdl: `type Query { ping: String }`,
sdl: 'type Query { ping: String }',
service: 'ping',
url: 'ping.com',
})
.then(r => r.expectNoGraphQLErrors());
expect(publishSchemaResult.schemaPublish.__typename).toEqual('SchemaPublishSuccess');
expect(publishSchemaResult.schemaPublish.__typename).toBe('SchemaPublishSuccess');
const cdnAccessResult = await writeToken.createCdnAccess();
const gateway = new ApolloGateway({
@ -423,9 +426,7 @@ function runArtifactsCDNTests(
}),
});
const server = new ApolloServer({
gateway,
});
const server = new ApolloServer({ gateway });
try {
const { url } = await startStandaloneServer(server);
@ -451,7 +452,7 @@ function runArtifactsCDNTests(
}),
});
expect(response.status).toEqual(200);
expect(response.status).toBe(200);
const result = await response.json();
expect(result.data.__schema.types).toContainEqual({
name: 'Query',
@ -527,7 +528,7 @@ describe('CDN token', () => {
}).then(r => r.expectNoGraphQLErrors());
expect(result.target!.cdnAccessTokens.edges).toHaveLength(2);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toEqual(true);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toBe(true);
let endCursor = result.target!.cdnAccessTokens.pageInfo.endCursor;
result = await execute({
@ -544,7 +545,7 @@ describe('CDN token', () => {
}).then(r => r.expectNoGraphQLErrors());
expect(result.target!.cdnAccessTokens.edges).toHaveLength(2);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toEqual(true);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toBe(true);
endCursor = result.target!.cdnAccessTokens.pageInfo.endCursor;
result = await execute({
@ -561,7 +562,7 @@ describe('CDN token', () => {
}).then(r => r.expectNoGraphQLErrors());
expect(result.target!.cdnAccessTokens.edges).toHaveLength(1);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toEqual(false);
expect(result.target!.cdnAccessTokens.pageInfo.hasNextPage).toBe(false);
});
it('new created access tokens are added at the beginning of the connection', async () => {
@ -606,7 +607,7 @@ describe('CDN token', () => {
authToken: ownerToken,
}).then(r => r.expectNoGraphQLErrors());
expect(secondResult.target!.cdnAccessTokens.edges).toHaveLength(2);
expect(secondResult.target!.cdnAccessTokens.edges[1].node.id).toEqual(firstId);
expect(secondResult.target!.cdnAccessTokens.edges[1].node.id).toBe(firstId);
});
it('delete cdn access token', async () => {
@ -651,7 +652,7 @@ describe('CDN token', () => {
expect(deleteResult.deleteCdnAccessToken.ok).toBeDefined();
expect(deleteResult.deleteCdnAccessToken.error).toBeNull();
expect(deleteResult.deleteCdnAccessToken.ok!.deletedCdnAccessTokenId).toEqual(
expect(deleteResult.deleteCdnAccessToken.ok!.deletedCdnAccessTokenId).toBe(
paginatedResult.target!.cdnAccessTokens.edges[0].node.id,
);
@ -713,15 +714,15 @@ describe('CDN token', () => {
expect(deleteResult).toMatchInlineSnapshot(`
[
{
"locations": [
locations: [
{
"column": 3,
"line": 2,
column: 3,
line: 2,
},
],
"message": "No access (reason: "Missing target:settings permission")",
"path": [
"deleteCdnAccessToken",
message: No access (reason: "Missing target:settings permission"),
path: [
deleteCdnAccessToken,
],
},
]

View file

@ -99,7 +99,7 @@ describe('create', () => {
});
});
test.concurrent('error: non existing organization', async () => {
test.concurrent('error: non existing organization', async ({ expect }) => {
const { ownerToken } = await initSeed().createOwner();
const errors = await execute({
document: CreateOIDCIntegrationMutation,
@ -119,22 +119,22 @@ describe('create', () => {
expect(errors).toMatchInlineSnapshot(`
[
{
"locations": [
locations: [
{
"column": 3,
"line": 2,
column: 3,
line: 2,
},
],
"message": "No access (reason: "Missing organization:integrations permission")",
"path": [
"createOIDCIntegration",
message: No access (reason: "Missing organization:integrations permission"),
path: [
createOIDCIntegration,
],
},
]
`);
});
test.concurrent('error: too short clientId', async () => {
test.concurrent('error: too short clientId', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -155,24 +155,24 @@ describe('create', () => {
expect(result).toMatchInlineSnapshot(`
{
"createOIDCIntegration": {
"error": {
"details": {
"authorizationEndpoint": null,
"clientId": "Must be at least 3 characters long.",
"clientSecret": null,
"tokenEndpoint": null,
"userinfoEndpoint": null,
createOIDCIntegration: {
error: {
details: {
authorizationEndpoint: null,
clientId: Must be at least 3 characters long.,
clientSecret: null,
tokenEndpoint: null,
userinfoEndpoint: null,
},
"message": "Failed to create OIDC Integration.",
message: Failed to create OIDC Integration.,
},
"ok": null,
ok: null,
},
}
`);
});
test.concurrent('error: too long clientId', async () => {
test.concurrent('error: too long clientId', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -193,24 +193,24 @@ describe('create', () => {
expect(result).toMatchInlineSnapshot(`
{
"createOIDCIntegration": {
"error": {
"details": {
"authorizationEndpoint": null,
"clientId": "Can not be longer than 100 characters.",
"clientSecret": null,
"tokenEndpoint": null,
"userinfoEndpoint": null,
createOIDCIntegration: {
error: {
details: {
authorizationEndpoint: null,
clientId: Can not be longer than 100 characters.,
clientSecret: null,
tokenEndpoint: null,
userinfoEndpoint: null,
},
"message": "Failed to create OIDC Integration.",
message: Failed to create OIDC Integration.,
},
"ok": null,
ok: null,
},
}
`);
});
test.concurrent('error: too short clientSecret', async () => {
test.concurrent('error: too short clientSecret', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -231,24 +231,24 @@ describe('create', () => {
expect(result).toMatchInlineSnapshot(`
{
"createOIDCIntegration": {
"error": {
"details": {
"authorizationEndpoint": null,
"clientId": null,
"clientSecret": "Must be at least 3 characters long.",
"tokenEndpoint": null,
"userinfoEndpoint": null,
createOIDCIntegration: {
error: {
details: {
authorizationEndpoint: null,
clientId: null,
clientSecret: Must be at least 3 characters long.,
tokenEndpoint: null,
userinfoEndpoint: null,
},
"message": "Failed to create OIDC Integration.",
message: Failed to create OIDC Integration.,
},
"ok": null,
ok: null,
},
}
`);
});
test.concurrent('error: too long clientSecret', async () => {
test.concurrent('error: too long clientSecret', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -269,24 +269,24 @@ describe('create', () => {
expect(result).toMatchInlineSnapshot(`
{
"createOIDCIntegration": {
"error": {
"details": {
"authorizationEndpoint": null,
"clientId": null,
"clientSecret": "Can not be longer than 200 characters.",
"tokenEndpoint": null,
"userinfoEndpoint": null,
createOIDCIntegration: {
error: {
details: {
authorizationEndpoint: null,
clientId: null,
clientSecret: Can not be longer than 200 characters.,
tokenEndpoint: null,
userinfoEndpoint: null,
},
"message": "Failed to create OIDC Integration.",
message: Failed to create OIDC Integration.,
},
"ok": null,
ok: null,
},
}
`);
});
test.concurrent('error: invalid oauth api url', async () => {
test.concurrent('error: invalid oauth api url', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -307,18 +307,18 @@ describe('create', () => {
expect(result).toMatchInlineSnapshot(`
{
"createOIDCIntegration": {
"error": {
"details": {
"authorizationEndpoint": "Must be a valid OAuth API url.",
"clientId": null,
"clientSecret": null,
"tokenEndpoint": "Must be a valid OAuth API url.",
"userinfoEndpoint": "Must be a valid OAuth API url.",
createOIDCIntegration: {
error: {
details: {
authorizationEndpoint: Must be a valid OAuth API url.,
clientId: null,
clientSecret: null,
tokenEndpoint: Must be a valid OAuth API url.,
userinfoEndpoint: Must be a valid OAuth API url.,
},
"message": "Failed to create OIDC Integration.",
message: Failed to create OIDC Integration.,
},
"ok": null,
ok: null,
},
}
`);
@ -521,15 +521,15 @@ describe('delete', () => {
expect(errors).toMatchInlineSnapshot(`
[
{
"locations": [
locations: [
{
"column": 3,
"line": 2,
column: 3,
line: 2,
},
],
"message": "No access (reason: "Missing organization:integrations permission")",
"path": [
"deleteOIDCIntegration",
message: No access (reason: "Missing organization:integrations permission"),
path: [
deleteOIDCIntegration,
],
},
]
@ -538,7 +538,7 @@ describe('delete', () => {
test.concurrent(
'success: upon integration deletion oidc members are also deleted',
async () => {
async ({ expect }) => {
const seed = initSeed();
const { ownerToken, createOrg } = await seed.createOwner();
const { organization } = await createOrg();
@ -601,15 +601,15 @@ describe('delete', () => {
expect(refetchedMeResult).toMatchInlineSnapshot(`
[
{
"locations": [
locations: [
{
"column": 3,
"line": 2,
column: 3,
line: 2,
},
],
"message": "No access (reason: "User not found")",
"path": [
"me",
message: No access (reason: "User not found"),
path: [
me,
],
},
]
@ -701,7 +701,7 @@ describe('update', () => {
});
});
test.concurrent('error: user does not have permissions', async () => {
test.concurrent('error: user does not have permissions', async ({ expect }) => {
const { ownerToken, createOrg } = await initSeed().createOwner();
const { organization } = await createOrg();
@ -736,15 +736,15 @@ describe('update', () => {
expect(errors).toMatchInlineSnapshot(`
[
{
"locations": [
locations: [
{
"column": 3,
"line": 2,
column: 3,
line: 2,
},
],
"message": "No access (reason: "Missing organization:integrations permission")",
"path": [
"updateOIDCIntegration",
message: No access (reason: "Missing organization:integrations permission"),
path: [
updateOIDCIntegration,
],
},
]

View file

@ -17,7 +17,7 @@ function filterEmailsByOrg(orgName: string, emails: emails.Email[]) {
}));
}
test.only('rate limit approaching and reached for organization', async () => {
test('rate limit approaching and reached for organization', async () => {
const { createOrg, ownerToken, ownerEmail } = await initSeed().createOwner();
const { createProject, organization } = await createOrg();
const { createToken } = await createProject(ProjectType.Single);

View file

@ -0,0 +1,13 @@
import { defineConfig } from 'vitest/config';
export default defineConfig({
test: {
globals: true,
alias: {
'@app/gql/graphql': 'testkit/gql/graphql.ts',
'@app/gql': 'testkit/gql/index.ts',
},
setupFiles: ['dotenv/config', '../serializer.ts'],
testTimeout: 90_000,
},
});

View file

@ -1,25 +0,0 @@
import { readFileSync } from 'fs';
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { pathsToModuleNameMapper } from 'ts-jest';
const ROOT_DIR = dirname(fileURLToPath(import.meta.url));
const TSCONFIG = resolve(ROOT_DIR, 'tsconfig.json');
const tsconfig = JSON.parse(readFileSync(TSCONFIG, 'utf-8'));
export default {
extensionsToTreatAsEsm: ['.ts', '.tsx'],
testEnvironment: 'node',
rootDir: ROOT_DIR,
restoreMocks: true,
reporters: ['default'],
modulePathIgnorePatterns: ['dist', 'integration-tests', 'tmp', 'target'],
moduleNameMapper: {
...pathsToModuleNameMapper(tsconfig.compilerOptions.paths, {
prefix: `${ROOT_DIR}/`,
}),
'^(\\.{1,2}/.*)\\.js$': '$1',
},
setupFiles: [],
collectCoverage: false,
};

View file

@ -40,18 +40,13 @@
"prettier": "prettier --cache --write --list-different .",
"release": "pnpm build:libraries && changeset publish",
"seed": "node scripts/seed-local-env.js",
"test": "jest",
"test": "vitest .",
"test:e2e": "CYPRESS_BASE_URL=$HIVE_APP_BASE_URL cypress run",
"typecheck": "pnpm turbo typecheck --color",
"upload-sourcemaps": "./scripts/upload-sourcemaps.sh",
"workspace": "pnpm run --filter $1 $2"
},
"devDependencies": {
"@babel/core": "7.20.12",
"@babel/plugin-proposal-class-properties": "7.18.6",
"@babel/plugin-proposal-decorators": "7.20.13",
"@babel/preset-env": "7.20.2",
"@babel/preset-typescript": "7.18.6",
"@changesets/changelog-github": "0.4.8",
"@changesets/cli": "2.26.0",
"@graphql-codegen/add": "4.0.0",
@ -69,11 +64,7 @@
"@swc/core": "1.3.34",
"@theguild/eslint-config": "^0.7.0",
"@theguild/prettier-config": "^1.1.0",
"@types/jest": "29.4.0",
"@types/node": "18.13.0",
"babel-jest": "29.4.1",
"babel-plugin-parameter-decorator": "1.0.16",
"babel-plugin-transform-typescript-metadata": "0.3.2",
"bob-the-bundler": "5.0.1",
"cypress": "12.5.1",
"dotenv": "16.0.3",
@ -84,18 +75,18 @@
"glob": "8.1.0",
"graphql": "16.6.0",
"husky": "8.0.3",
"jest": "29.4.1",
"jest-snapshot-serializer-raw": "1.2.0",
"lint-staged": "13.1.1",
"prettier": "2.8.4",
"prettier-plugin-tailwindcss": "0.2.2",
"pretty-quick": "3.1.3",
"rimraf": "4.1.2",
"ts-jest": "29.0.5",
"ts-node": "10.9.1",
"tsup": "6.6.0",
"tsx": "3.12.3",
"turbo": "1.7.4",
"typescript": "4.9.5"
"typescript": "4.9.5",
"vitest": "0.28.4"
},
"husky": {
"hooks": {

View file

@ -2,8 +2,8 @@ import { createHive } from '../src/client';
test("should log that it's not enabled", async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
@ -26,16 +26,14 @@ test("should log that it's not enabled", async () => {
test("should not log that it's not enabled", async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
enabled: false,
debug: false,
agent: {
logger,
},
agent: { logger },
});
const result = await hive
@ -49,16 +47,14 @@ test("should not log that it's not enabled", async () => {
test('should not throw exception about missing token when disabled', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
enabled: false,
debug: false,
agent: {
logger,
},
agent: { logger },
});
const result = await hive

View file

@ -4,16 +4,14 @@ import { createHive } from '../src/client';
test('should not leak the exception', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
enabled: true,
debug: true,
agent: {
logger,
},
agent: { logger },
token: 'Token',
reporting: {
endpoint: 'http://empty.localhost',
@ -33,8 +31,8 @@ test('should not leak the exception', async () => {
test('should use selfHosting.graphqlEndpoint if provided', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
nock('http://localhost')

View file

@ -39,8 +39,8 @@ function handleProcess() {
test('GraphQL Yoga - should not interrupt the process', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const clean = handleProcess();
const hive = createHive({
@ -105,8 +105,8 @@ test('GraphQL Yoga - should not interrupt the process', async () => {
test('Apollo Server - should not interrupt the process', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const clean = handleProcess();
const apollo = new ApolloServer({

View file

@ -21,8 +21,8 @@ const headers = {
test('should not leak the exception', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
@ -67,8 +67,8 @@ test('should not leak the exception', async () => {
test('should send data to Hive', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -148,8 +148,8 @@ test('should send data to Hive', async () => {
test('should send data to Hive (deprecated endpoint)', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -226,8 +226,8 @@ test('should send data to Hive (deprecated endpoint)', async () => {
test('should send data to app.graphql-hive.com/graphql by default', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -297,8 +297,8 @@ test('should send data to app.graphql-hive.com/graphql by default', async () =>
test('should send data to Hive immediately', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -385,8 +385,8 @@ test('should send data to Hive immediately', async () => {
test('should send original schema of a federated (v1) service', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -449,8 +449,8 @@ test('should send original schema of a federated (v1) service', async () => {
test('should send original schema of a federated (v2) service', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const author = 'Test';
@ -513,8 +513,8 @@ test('should send original schema of a federated (v2) service', async () => {
test('should display SchemaPublishMissingServiceError', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const token = 'Token';
@ -575,8 +575,8 @@ test('should display SchemaPublishMissingServiceError', async () => {
test('should display SchemaPublishMissingUrlError', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const token = 'Token';

View file

@ -87,7 +87,7 @@ const op = parse(/* GraphQL */ `
`);
beforeEach(() => {
jest.restoreAllMocks();
vi.restoreAllMocks();
});
afterEach(() => {
@ -96,8 +96,8 @@ afterEach(() => {
test('should send data to Hive', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const token = 'Token';
@ -195,8 +195,8 @@ test('should send data to Hive', async () => {
test('should send data to Hive (deprecated endpoint)', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const token = 'Token';
@ -291,8 +291,8 @@ test('should send data to Hive (deprecated endpoint)', async () => {
test('should not leak the exception', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const hive = createHive({
@ -331,8 +331,8 @@ test('should not leak the exception', async () => {
test('sendImmediately should not stop the schedule', async () => {
const logger = {
error: jest.fn(),
info: jest.fn(),
error: vi.fn(),
info: vi.fn(),
};
const token = 'Token';

View file

@ -44,7 +44,7 @@ test('increasing the defaultBytesPerUnit', () => {
defaultBytesPerUnit,
resetAfter: 5000,
logger: {
info: jest.fn(),
info: vi.fn(),
} as any,
increaseBy() {
return 0.25;
@ -63,7 +63,7 @@ test('increasing the defaultBytesPerUnit should not go over 50% of original esti
defaultBytesPerUnit,
resetAfter: 5000,
logger: {
info: jest.fn(),
info: vi.fn(),
} as any,
increaseBy() {
return 0.6;
@ -82,7 +82,7 @@ test('teach estimator multiple times', () => {
defaultBytesPerUnit,
resetAfter: 5000,
logger: {
info: jest.fn(),
info: vi.fn(),
} as any,
increaseBy() {
return 0;
@ -112,7 +112,7 @@ test('reset after N milliseconds', async () => {
defaultBytesPerUnit,
resetAfter: 100,
logger: {
info: jest.fn(),
info: vi.fn(),
} as any,
increaseBy() {
return 0;

View file

@ -10,13 +10,13 @@ const defaultBytesPerUnit = eventHubLimitInBytes / bufferSize;
test('increase the defaultBytesPerOperation estimation by 5% when over 100 calls were made and 10% of them failed', async () => {
const logger = {
// info: jest.fn(console.info),
// error: jest.fn(console.error),
info: jest.fn(),
error: jest.fn(),
// info: vi.fn(console.info),
// error: vi.fn(console.error),
info: vi.fn(),
error: vi.fn(),
};
const flush = jest.fn();
const onRetry = jest.fn();
const flush = vi.fn();
const onRetry = vi.fn();
const interval = 200;
const size = {
successful: bufferSize / 2,
@ -135,10 +135,10 @@ test('increase the defaultBytesPerOperation estimation by 5% when over 100 calls
test('buffer should split the report into multiple reports when the estimated size is greater than the limit', async () => {
const logger = {
info: jest.fn(),
error: jest.fn(),
info: vi.fn(),
error: vi.fn(),
};
const flush = jest.fn();
const flush = vi.fn();
const interval = 200;
const buffer = createKVBuffer<{
id: string;
@ -220,13 +220,13 @@ test('buffer should split the report into multiple reports when the estimated si
test('buffer create two chunks out of one buffer when actual buffer size is too big', async () => {
const logger = {
info: jest.fn(),
error: jest.fn(),
// info: jest.fn(console.info),
// error: jest.fn(console.error),
info: vi.fn(),
error: vi.fn(),
// info: vi.fn(console.info),
// error: vi.fn(console.error),
};
const flush = jest.fn();
const split = jest.fn((report, numOfChunks) => {
const flush = vi.fn();
const split = vi.fn((report, numOfChunks) => {
const reports: Array<{
id: string;
size: number;
@ -249,7 +249,7 @@ test('buffer create two chunks out of one buffer when actual buffer size is too
return reports;
});
const onRetry = jest.fn();
const onRetry = vi.fn();
const interval = 200;
const buffer = createKVBuffer<{

File diff suppressed because it is too large Load diff

4
serializer.ts Normal file
View file

@ -0,0 +1,4 @@
import rawSnapshotSerializer from 'jest-snapshot-serializer-raw/always';
import { expect } from 'vitest';
expect.addSnapshotSerializer(rawSnapshotSerializer);

View file

@ -3,7 +3,7 @@
"module": "esnext",
"target": "esnext",
"lib": ["esnext", "dom"],
"types": ["vitest/globals"],
"baseUrl": ".",
"outDir": "dist",
"rootDir": "packages",

14
vite.config.ts Normal file
View file

@ -0,0 +1,14 @@
import { defaultExclude, defineConfig } from 'vitest/config';
export default defineConfig({
test: {
globals: true,
alias: {
'@hive/usage-common': 'packages/services/usage-common/src/index.ts',
'@hive/service-common': 'packages/services/service-common/src/index.ts',
'@graphql-hive/core': 'packages/libraries/core/src/index.ts',
},
exclude: [...defaultExclude, 'integration-tests'],
setupFiles: ['./serializer.ts'],
},
});