docs: hive gateway documentation and reorganize documentation (#5291)

Co-authored-by: Arda TANRIKULU <ardatanrikulu@gmail.com>
Co-authored-by: Dotan Simha <dotansimha@gmail.com>
Co-authored-by: Valentin Cocaud <v.cocaud@gmail.com>
Co-authored-by: YassinEldeeb <yassineldeeb94@gmail.com>
This commit is contained in:
Laurin Quast 2024-09-10 05:24:38 -07:00 committed by GitHub
parent ed360d5f23
commit b7c8f34af8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
148 changed files with 10632 additions and 546 deletions

View file

@ -110,7 +110,8 @@
"slonik@30.4.4": "patches/slonik@30.4.4.patch",
"@oclif/core@3.26.6": "patches/@oclif__core@3.26.6.patch",
"oclif@4.13.6": "patches/oclif@4.13.6.patch",
"@graphiql/react@1.0.0-alpha.3": "patches/@graphiql__react@1.0.0-alpha.3.patch"
"@graphiql/react@1.0.0-alpha.3": "patches/@graphiql__react@1.0.0-alpha.3.patch",
"@theguild/components": "patches/@theguild__components.patch"
}
}
}

View file

@ -1,6 +1,6 @@
# Hive Client for Apollo Server
[Documentation](https://the-guild.dev/graphql/hive/docs/integrations/apollo-server)
[Documentation](https://the-guild.dev/graphql/hive/docs/other-integrations/apollo-server)
---

View file

@ -1,7 +1,7 @@
# Hive CLI (Command Line Interface)
A CLI util to manage and control your GraphQL Hive. You can perform
[schema-registry actions](https://the-guild.dev/graphql/hive/docs/features/schema-registry#actions-on-schemas)
[schema-registry actions](https://the-guild.dev/graphql/hive/docs/schema-registry#actions-on-schemas)
on your Hive targets using the Hive CLI.
[![Version](https://img.shields.io/npm/v/@graphql-hive/cli.svg)](https://npmjs.org/package/@graphql-hive/cli)

View file

@ -200,7 +200,7 @@ export default class SchemaCheck extends Command<typeof SchemaCheck> {
if (!git.pullRequestNumber) {
this.warn(
"Could not resolve pull request number. Are you running this command on a 'pull_request' event?\n" +
'See https://the-guild.dev/graphql/hive/docs/integrations/ci-cd#github-workflow-for-ci',
'See https://the-guild.dev/graphql/hive/docs/other-integrations/ci-cd#github-workflow-for-ci',
);
}

View file

@ -1,6 +1,6 @@
# Hive Client for Envelop
[Documentation](https://the-guild.dev/graphql/hive/docs/integrations/envelop)
[Documentation](https://the-guild.dev/graphql/hive/docs/other-integrations/envelop)
---

View file

@ -1,6 +1,6 @@
# Hive Client for GraphQL Yoga
[Documentation](https://the-guild.dev/graphql/hive/docs/integrations/graphql-yoga)
[Documentation](https://the-guild.dev/graphql/hive/docs/other-integrations/graphql-yoga)
---

View file

@ -0,0 +1,4 @@
# Federation Demo Subgraphs
Some Apollo Federation compatible subgraphs we deploy to cloudflare workers for usage within our
getting started guides.

View file

@ -0,0 +1,17 @@
{
"name": "@hive/demo-federation",
"version": "1.0.0",
"type": "module",
"private": true,
"scripts": {
"deploy": "wrangler deploy src/main.ts"
},
"dependencies": {
"@apollo/subgraph": "2.8.4",
"graphql": "16.9.0",
"graphql-yoga": "5.6.0"
},
"devDependencies": {
"wrangler": "3.61.0"
}
}

View file

@ -0,0 +1,16 @@
import { yoga as yogaProducts } from './products';
import { yoga as yogaReviews } from './reviews';

// Routing table: URL path prefix -> the Yoga subgraph instance serving it.
const subgraphsByPrefix: ReadonlyArray<readonly [string, { fetch(req: Request): Response | Promise<Response> }]> = [
  ['/products', yogaProducts],
  ['/reviews', yogaReviews],
];

/**
 * Cloudflare Worker entry point: dispatches incoming requests to the
 * products or reviews subgraph based on the URL path prefix, and answers
 * 404 for anything else.
 */
export default {
  async fetch(request: Request) {
    const { pathname } = new URL(request.url);
    for (const [prefix, subgraph] of subgraphsByPrefix) {
      if (pathname.startsWith(prefix)) {
        return subgraph.fetch(request);
      }
    }
    return new Response('Not Found', { status: 404 });
  },
};

View file

@ -0,0 +1,70 @@
import { parse } from 'graphql';
import { createYoga } from 'graphql-yoga';
import { buildSubgraphSchema } from '@apollo/subgraph';

// Static demo catalog served by this subgraph.
const products = [
  { upc: '1', name: 'Table', price: 899, weight: 100 },
  { upc: '2', name: 'Couch', price: 1299, weight: 1000 },
  { upc: '3', name: 'Chair', price: 54, weight: 50 },
];

type Product = (typeof products)[number];

// Per-request GraphQL context shape.
type Context = {
  products: Product[];
};

// Federation-aware SDL: `@key(fields: "upc")` marks `upc` as the entity key.
const typeDefs = parse(/* GraphQL */ `
  extend type Query {
    topProducts(first: Int = 5): [Product]
  }
  type Product @key(fields: "upc") {
    upc: String!
    name: String
    price: Int
    weight: Int
  }
`);

const resolvers = {
  Product: {
    // Entity resolver: merge the incoming representation with the stored
    // product that shares its upc (spread of `undefined` is a no-op).
    __resolveReference(ref: Product, ctx: Context) {
      const stored = ctx.products.find(p => p.upc === ref.upc);
      return { ...ref, ...stored };
    },
  },
  Query: {
    // Returns the first `first` products (schema default: 5).
    topProducts(_parent: unknown, { first }: { first: number }, ctx: Context) {
      return ctx.products.slice(0, first);
    },
  },
};

/** GraphQL Yoga instance exposing the Products federation subgraph. */
export const yoga = createYoga<Context>({
  schema: buildSubgraphSchema([{ typeDefs, resolvers }]),
  context: () => ({ products }),
  landingPage: false,
  graphqlEndpoint: '/products',
  graphiql: { title: 'Products Subgraph' },
});

View file

@ -0,0 +1,108 @@
import { parse } from 'graphql';
import { createYoga } from 'graphql-yoga';
import { buildSubgraphSchema } from '@apollo/subgraph';

// Static demo users; reviews reference them via `authorID`.
const usernames = [
  { id: '1', username: '@ada' },
  { id: '2', username: '@complete' },
];

// Static demo reviews; `product.upc` links to the Products subgraph entity.
const reviews = [
  {
    id: '1',
    authorID: '1',
    product: { upc: '1' },
    body: 'Love it!',
  },
  {
    id: '2',
    authorID: '1',
    product: { upc: '2' },
    body: 'Too expensive.',
  },
  {
    id: '3',
    authorID: '2',
    product: { upc: '3' },
    body: 'Could be better.',
  },
  {
    id: '4',
    authorID: '2',
    product: { upc: '1' },
    body: 'Prefer something else.',
  },
];

type Review = (typeof reviews)[number];
type User = (typeof usernames)[number];

// Per-request GraphQL context shape.
type Context = {
  reviews: Review[];
  usernames: User[];
};

const typeDefs = parse(/* GraphQL */ `
  type Review @key(fields: "id") {
    id: ID!
    body: String
    author: User @provides(fields: "username")
    product: Product
  }
  extend type User @key(fields: "id") {
    id: ID! @external
    username: String @external
    reviews: [Review]
  }
  extend type Product @key(fields: "upc") {
    upc: String! @external
    reviews: [Review]
  }
`);

const resolvers = {
  Review: {
    // Entity resolver: merge the incoming representation with the stored
    // review of the same id (spread of `undefined` is a no-op).
    __resolveReference(review: Review, context: Context) {
      return {
        ...review,
        ...context.reviews.find(r => r.id === review.id),
      };
    },
    // Return a User entity stub; the gateway resolves the remaining fields.
    author(review: Review) {
      return { __typename: 'User', id: review.authorID };
    },
  },
  User: {
    __resolveReference(user: User, context: Context) {
      return { ...user, ...context.usernames.find(u => u.id === user.id) };
    },
    reviews(user: User, _: unknown, context: Context) {
      return context.reviews.filter(review => review.authorID === user.id);
    },
    // Consistency: read from context rather than the module-level array, like
    // the other resolvers (the context carries the same data).
    // NOTE(review): this field is not declared in `typeDefs` above — confirm
    // whether it should be added to the User extension.
    numberOfReviews(user: User, _: unknown, context: Context) {
      return context.reviews.filter(review => review.authorID === user.id).length;
    },
    username(user: User, _: unknown, context: Context) {
      const found = context.usernames.find(username => username.id === user.id);
      return found ? found.username : null;
    },
  },
  Product: {
    // BUG FIX: resolvers receive (parent, args, context, info). `context` was
    // previously declared in the `args` position, so `context.reviews` was
    // undefined and this resolver threw whenever Product.reviews was queried.
    reviews(product: { upc: string }, _: unknown, context: Context) {
      return context.reviews.filter(review => review.product.upc === product.upc);
    },
  },
};

/** GraphQL Yoga instance exposing the Reviews federation subgraph. */
export const yoga = createYoga<Context>({
  schema: buildSubgraphSchema([{ typeDefs, resolvers }]),
  context() {
    return { reviews, usernames };
  },
  landingPage: false,
  graphqlEndpoint: '/reviews',
  graphiql: {
    title: 'Reviews Subgraph',
  },
});

View file

@ -0,0 +1,9 @@
{
"extends": "../../../../tsconfig.json",
"compilerOptions": {
"target": "ES2020",
"module": "esnext",
"rootDir": "../../.."
},
"include": ["./src/**/*.ts"]
}

View file

@ -0,0 +1,6 @@
name = "federation-demo"
compatibility_date = "2024-04-03"
node_compat = true
[limits]
cpu_ms = 100

View file

@ -31,32 +31,42 @@ export default withGuildDocs({
},
{
source: '/docs/features/publish-schema',
destination: '/docs/features/schema-registry#publish-a-schema',
destination: '/docs/schema-registry#publish-a-schema',
permanent: true,
},
{
source: '/docs/features/checking-schema',
destination: '/docs/features/schema-registry#check-a-schema',
destination: '/docs/schema-registry#check-a-schema',
permanent: true,
},
{
source: '/docs/features/delete-schema',
destination: '/docs/features/schema-registry#delete-a-service',
destination: '/docs/schema-registry#delete-a-service',
permanent: true,
},
{
source: '/docs/features/registry-usage',
destination: '/docs/features/high-availability-cdn',
destination: '/docs/high-availability-cdn',
permanent: true,
},
{
source: '/docs/features/high-availability-cdn',
destination: '/docs/high-availability-cdn',
permanent: true,
},
{
source: '/docs/features/monitoring',
destination: '/docs/features/usage-reporting',
destination: '/docs/schema-registry/usage-reporting',
permanent: true,
},
{
source: '/docs/features/usage-reporting',
destination: '/docs/schema-registry/usage-reporting',
permanent: true,
},
{
source: '/docs/features/schema-history',
destination: '/docs/features/schema-registry#schema-history-and-changelog',
destination: '/docs/schema-registry#schema-history-and-changelog',
permanent: true,
},
{
@ -69,9 +79,14 @@ export default withGuildDocs({
destination: '/docs/management/projects#alerts-and-notifications',
permanent: true,
},
{
source: '/docs/management/external-schema-composition',
destination: '/docs/schema-registry/external-schema-composition',
permanent: true,
},
{
source: '/docs/features/external-schema-composition',
destination: '/docs/management/external-schema-composition',
destination: '/docs/schema-registry/external-schema-composition',
permanent: true,
},
{
@ -89,12 +104,47 @@ export default withGuildDocs({
destination: '/docs/management/sso-oidc-provider',
permanent: true,
},
{
source: '/docs/features/schema-registry',
destination: '/docs/schema-registry',
permanent: true,
},
{
source: '/docs/management/external-schema-composition',
destination: '/docs/schema-registry/external-schema-composition',
permanent: true,
},
{
source: '/docs/features/laboratory',
destination: '/docs/dashboard/laboratory',
permanent: true,
},
{
source: '/docs/management/contracts',
destination: '/docs/schema-registry/contracts',
permanent: true,
},
{
source: '/docs/features/schema-policy',
destination: '/docs/schema-registry/schema-policy',
permanent: true,
},
{
source: '/docs/features/app-deployments',
destination: '/docs/schema-registry/app-deployments',
permanent: true,
},
{
// SEO: Redirect to the new URL
source: '/docs/self-hosting/federation-2',
destination: '/docs/self-hosting/external-composition',
permanent: true,
},
{
source: '/docs/integrations/:path*',
destination: '/docs/other-integrations/:path*',
permanent: false,
},
],
swcMinify: true,
webpack: (config, { webpack }) => {

Binary file not shown.

After

Width:  |  Height:  |  Size: 162 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 303 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 163 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 340 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 208 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 208 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 199 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 173 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 145 KiB

View file

@ -0,0 +1,95 @@
#!/bin/sh
# Installer for the hive-gateway binary: resolves a version, downloads the
# matching prebuilt binary from the graphql-mesh GitHub releases, unpacks it,
# and makes it executable in the current directory.
#
# Usage: install.sh [version]
#   Without an argument, the latest @graphql-hive/gateway version on npm is used.
set -u
# Release coordinates: binaries are published as release assets on this repo.
GITHUB_OWNER="ardatan"
GITHUB_REPO="graphql-mesh"
BINARY_NAME="hive-gateway"
# Determine the package version
# Use the single positional argument when given; otherwise ask npm for the
# latest published version (stderr silenced so failures yield an empty string).
if [ "$#" -eq 1 ]; then
TARGET_VERSION=$1
else
echo "Version not provided. Retrieving the latest version..."
TARGET_VERSION=$(npm show @graphql-hive/gateway version 2> /dev/null)
if [ -z "$TARGET_VERSION" ]; then
echo "Could not retrieve the latest version of @graphql-hive/gateway."
exit 1
fi
echo "Using version: $TARGET_VERSION"
fi
# Download the gzipped binary for the detected platform, unpack it, and mark
# it executable. Exits the script on download or unzip failure; returns 1 on
# unsupported platforms.
fetch_and_prepare_binary() {
identify_architecture || return 1
architecture="$ARCH_DETECTED"
check_non_empty "$architecture" "architecture"
# Release assets are named "<binary>-<OS>-<CPU>.gz" under the tag "v<version>".
RELEASE_TAG="v$TARGET_VERSION"
DOWNLOAD_URL="https://github.com/$GITHUB_OWNER/$GITHUB_REPO/releases/download/$RELEASE_TAG/$BINARY_NAME-${architecture}.gz"
destination_file="./$BINARY_NAME-${architecture}.gz"
echo "Downloading $BINARY_NAME from $DOWNLOAD_URL ..."
# -f makes curl exit non-zero on HTTP errors so the check below catches them.
curl -sSfL "$DOWNLOAD_URL" -o "$destination_file"
if [ $? -ne 0 ]; then
echo "Download failed: $DOWNLOAD_URL"
exit 1
fi
echo "Unzipping $destination_file..."
# gunzip strips the .gz suffix, leaving "<binary>-<architecture>".
gunzip "$destination_file"
if [ $? -ne 0 ]; then
echo "Unzipping failed: $destination_file"
exit 1
fi
binary_path="./$BINARY_NAME"
# Rename the unpacked, architecture-suffixed file to the plain binary name.
mv "$BINARY_NAME-${architecture}" "$BINARY_NAME"
chmod +x "$BINARY_NAME"
echo "Binary downloaded and ready to use at $binary_path."
}
# Detects the platform and sets ARCH_DETECTED="<OS>-<CPU>" where OS is
# "Linux" or "macOS" and CPU is "X64" or "ARM64" (matching release asset
# names). Returns 1 for unsupported OS or CPU.
identify_architecture() {
os_type="$(uname -s)"
cpu_type="$(uname -m)"
case "$os_type" in
Linux)
os_type="Linux"
;;
Darwin)
os_type="macOS"
;;
*)
echo "No binaries available for OS: $os_type"
return 1
;;
esac
case "$cpu_type" in
x86_64 | x64 | amd64)
cpu_type="X64"
;;
arm64 | aarch64)
cpu_type="ARM64"
;;
*)
echo "No binaries available for CPU architecture: $cpu_type"
return 1
;;
esac
ARCH_DETECTED="$os_type-$cpu_type"
}
# Exits the script if $1 is empty; $2 is a human-readable name for the message.
check_non_empty() {
if [ -z "$1" ]; then
echo "Error: $2 is empty or undefined"
exit 1
fi
}
fetch_and_prepare_binary "$@" || exit 1

View file

@ -16,6 +16,11 @@ export const authors: Record<string, Author> = {
link: 'https://twitter.com/n1rual',
github: 'n1ru4l',
},
arda: {
name: 'Arda Tanrikulu',
link: 'https://twitter.com/ardatanrikulu',
github: 'ardatan',
},
aleksandra: {
name: 'Aleksandra Sikora',
link: 'https://twitter.com/aleksandrasays',

View file

@ -89,7 +89,7 @@ export function CompanyTestimonialsSection({ className }: { className?: string }
)}
>
<Heading as="h2" size="md">
Loved by developers, trusted by business
Loved by developers, trusted by businesses
</Heading>
<Tabs.Root
defaultValue={testimonials[0].company}

View file

@ -156,7 +156,7 @@ export function FeatureTabs({ className }: { className?: string }) {
<Feature
title="Schema Registry"
icon={<SchemaRegistryIcon />}
documentationLink="/docs/features/schema-registry"
documentationLink="/docs/schema-registry"
description="Publish schemas, compose federated services, and detect backward-incompatible changes with ease."
highlights={highlights['Schema Registry']}
setActiveHighlight={setActiveHighlight}
@ -166,7 +166,7 @@ export function FeatureTabs({ className }: { className?: string }) {
<Feature
title="GraphQL Observability"
icon={<GraphQLObservabilityIcon />}
documentationLink="/docs/features/usage-reporting"
documentationLink="/docs/schema-registry/usage-reporting"
description="Enhanced GraphQL Observability tools provide insights into API usage and user experience metrics."
highlights={highlights['GraphQL Observability']}
setActiveHighlight={setActiveHighlight}

View file

@ -8,20 +8,21 @@
- Can GraphQL Hive be self-hosted?
Yes, the on-premise version (identical to Hive Cloud) is free and open-source. You can read
“Self-Hosting Hive”(link) in our documentation.
Yes, the on-premise version (identical to Hive Cloud) is free and open-source.
[Read about Self-Hosting Hive in our documentation](/docs/self-hosting/get-started).
- What counts as GraphQL operation?
- What counts as a GraphQL operation?
Every GraphQL request that is processed by your GraphQL API and reported to GraphQL Hive. If your
server receives 1M GraphQL requests, all of them will be reported to Hive (assuming no sampling).
- Are you SOC-2 Type II compliant?
Were currently about to finish the process of being SOC-2 Type II complaint and getting the
We are currently about to finish the process of being SOC-2 Type II compliant and getting the
certificate.
- Do you have a gateway?
- Do you have a GraphQL Gateway?
Of course, we do! It is called Hive Gateway (previously known as GraphQL Mesh), we maintain it for
years already.
Of course, we do! It is called Hive Gateway (previously known as GraphQL Mesh) that supports
Apollo Federation and has been battle-tested by our clients for a few years now.
[Read more in our documentation](/docs/gateway).

View file

@ -175,19 +175,12 @@ function EnterpriseFocusedCards({ className }: { className?: string }) {
</InfoCard>
<InfoCard
as="li"
heading="Secure and efficient"
heading="Customizable User Roles and Permissions"
icon={<PerformanceListItemIcon />}
className="flex-1 px-0 sm:px-8 sm:py-0 md:px-8 md:py-0"
>
<a
href="https://the-guild.dev/graphql/hive/docs/features/app-deployments#publish-an-app-deployment"
target="_blank"
rel="noreferrer"
className="hover:text-blue-700 hover:underline"
>
Persisted Documents
</a>{' '}
secure and reduce traffic by hashing operations on app deployments.
Control user access with detailed, role-based permissions for enhanced security and
flexibility.
</InfoCard>
</ul>
</section>
@ -216,8 +209,8 @@ function UltimatePerformanceCards() {
icon={<PerformanceListItemIcon />}
className="flex-1 basis-full rounded-2xl md:basis-0 md:rounded-3xl"
>
Minimize unnecessary network calls that hinder your applications speed. Hive leverages
GraphQL to enhance responsiveness and scales these benefits across your enterprise.
Minimize unnecessary network calls that hinder your application's speed. Use GraphQL to
enhance responsiveness and scale these benefits across your enterprise.
</InfoCard>
<InfoCard
as="li"
@ -225,8 +218,8 @@ function UltimatePerformanceCards() {
icon={<PerformanceListItemIcon />}
className="flex-1 basis-full rounded-2xl md:rounded-3xl lg:basis-0"
>
Reduce latency effectively with Hive by enabling frontend teams to obtain all required
data in a single request, maximizing GraphQLs inherent performance benefits.
Streamline communication between frontend and backend by enabling precise data selection,
reducing unnecessary payloads and simplifying API interactions.
</InfoCard>
</ul>
</section>

View file

@ -1,10 +1,13 @@
export default {
index: 'Introduction',
'get-started': 'Get Started',
features: 'Features',
'api-reference': 'CLI/API Reference',
'schema-registry': 'Schema Registry',
'high-availability-cdn': 'High-Availability CDN',
dashboard: 'Dashboard',
gateway: 'Gateway',
management: 'Management',
integrations: 'Integrations and Guides',
'other-integrations': 'Other Integrations',
'api-reference': 'CLI/API Reference',
specs: 'Specifications',
'use-cases': 'Use Cases',
'self-hosting': 'Self-Hosting',

View file

@ -1,4 +1,6 @@
export default {
cli: 'Hive CLI',
'gateway-config': 'Hive Gateway Configuration',
'gateway-cli': 'Hive Gateway CLI',
client: 'Hive Client',
};

View file

@ -2,8 +2,8 @@ import { Callout, Tabs } from '@theguild/components'
# Hive CLI (Command Line Interface)
You can perform [schema-registry actions](/docs/features/schema-registry#actions-on-schemas) on your
Hive targets schemas using the Hive CLI.
You can perform [schema-registry actions](/docs/schema-registry#actions-on-schemas) on your Hive
targets schemas using the Hive CLI.
## Installation
@ -73,7 +73,7 @@ You can also download a specific version of the binary:
<Callout>
We recommend publishing the schema from your CI/CD pipeline. You can find more information in our
[CI/CD Integration guide](../integrations/ci-cd.mdx).
[CI/CD Integration guide](/docs/other-integrations/ci-cd).
</Callout>
Start by setting your Hive token in
@ -82,7 +82,7 @@ file, or set it as `HIVE_TOKEN` environment variable.
Further reading:
- [Publishing a schema to the Schema Registry](/docs/features/schema-registry#publish-a-schema)
- [Publishing a schema to the Schema Registry](/docs/schema-registry#publish-a-schema)
#### Single Schema Project
@ -111,8 +111,8 @@ hive schema:publish --service reviews --url http://my-service.com/graphql schema
Further reading:
- [`schema:publish` API Reference](/docs/api-reference/cli#hive-schemapublish-file)
- [Apollo Router integration](/docs/integrations/apollo-router)
- [Apollo-Server integration](/docs/integrations/apollo-server)
- [Apollo Router integration](/docs/other-integrations/apollo-router)
- [Apollo-Server integration](/docs/other-integrations/apollo-server)
#### Hive Metadata
@ -135,7 +135,7 @@ hive schema:publish schema.graphql --metadata '{ "someData": true }'
Further reading:
- [Fetching Hive Metadata from the CDN](/docs/features/high-availability-cdn)
- [Fetching Hive Metadata from the CDN](/docs/high-availability-cdn)
### Check a schema
@ -176,7 +176,7 @@ hive schema:check --contextId "pr-123" "src/*.graphql"
Further reading:
- [Publishing a schema to the Schema Registry](/docs/features/schema-registry#publish-a-schema)
- [Publishing a schema to the Schema Registry](/docs/schema-registry#publish-a-schema)
- [Conditional Breaking Changes](/docs/management/targets#conditional-breaking-changes)
### Delete a schema
@ -381,9 +381,8 @@ Sometimes it is useful to fetch a schema (SDL or Supergraph) from Hive, for exam
local development. This can be done using the `schema:fetch` command.
<Callout type="info">
Don't confuse this with the [high-availability CDN](/docs/features/high-availability-cdn.mdx).
This command is used to fetch a schema from the API where the CDN always represents the latest
valid schema.
Don't confuse this with the [high-availability CDN](/docs/high-availability-cdn). This command is
used to fetch a schema from the API where the CDN always represents the latest valid schema.
</Callout>
You can fetch a schema by using the action id (commit sha) that was used for publishing the schema
@ -408,7 +407,7 @@ You can fetch the GraphQL schema from the CDN using the `artifact:fetch` command
<Callout type="info">
You can learn how to create a CDN access token in the [High-Availability CDN
documentation](/docs/features/high-availability-cdn).
documentation](/docs/high-availability-cdn).
</Callout>
```bash
@ -436,8 +435,8 @@ hive schema:check schema.graphql --github
```
<Callout>
Check our [CI/CD Integration guide](../integrations/ci-cd.mdx) for more information and GitHub
workflow examples.
Check our [CI/CD Integration guide](/docs/other-integrations/ci-cd) for more information and
GitHub workflow examples.
</Callout>
## API Reference

View file

@ -14,11 +14,11 @@ import { Tabs } from '@theguild/components'
Here's a list of official libraries for JavaScript and NodeJS:
- `@graphql-hive/yoga` ([npm](https://npmjs.com/package/@graphql-hive/yoga),
[guide](../integrations/graphql-yoga)) - Integration with GraphQL Yoga.
[guide](/docs/other-integrations/graphql-yoga)) - Integration with GraphQL Yoga.
- `@graphql-hive/apollo` ([npm](https://npmjs.com/package/@graphql-hive/apollo),
[guide](../integrations/apollo-server)) - Integration with Apollo Server.
[guide](/docs/other-integrations/apollo-server)) - Integration with Apollo Server.
- `@graphql-hive/envelop` ([npm](https://npmjs.com/package/@graphql-hive/envelop),
[guide](../integrations/envelop)) - Integration with Envelop.
[guide](/docs/other-integrations/envelop)) - Integration with Envelop.
- `@graphql-hive/core` ([npm](https://npmjs.com/package/@graphql-hive/core)) - core library for
interacting with Hive's **Schema Registry** and **Usage Reporting**.
@ -26,12 +26,16 @@ You can refer to the following guides for getting started within your project, t
page for configuring the client to your needs.
<Cards>
<Cards.Card arrow title="GraphQL Yoga" href="/docs/integrations/graphql-yoga" />
<Cards.Card arrow title="Envelop" href="/docs/integrations/envelop" />
<Cards.Card arrow title="Schema-Stitching Gateway" href="/docs/integrations/schema-stitching" />
<Cards.Card arrow title="Apollo-Server" href="/docs/integrations/apollo-server" />
<Cards.Card arrow title="Apollo Gateway" href="/docs/integrations/apollo-gateway" />
<Cards.Card arrow title="GraphQL Mesh" href="/docs/integrations/graphql-mesh" />
<Cards.Card arrow title="Hive Gateway" href="/docs/gateway" />
<Cards.Card arrow title="GraphQL Yoga" href="/docs/other-integrations/graphql-yoga" />
<Cards.Card arrow title="Envelop" href="/docs/other-integrations/envelop" />
<Cards.Card
arrow
title="Schema-Stitching Gateway"
href="/docs/other-integrations/schema-stitching"
/>
<Cards.Card arrow title="Apollo-Server" href="/docs/other-integrations/apollo-server" />
<Cards.Card arrow title="Apollo Gateway" href="/docs/other-integrations/apollo-gateway" />
</Cards>
#### Configuration
@ -316,7 +320,7 @@ useHive({
#### Persisted Documents
Hive client supports resolving persisted documents. For getting started please refer to our
[App Deployments (Persisted Documents) documentation](/docs/features/app-deployments).
[App Deployments (Persisted Documents) documentation](/docs/schema-registry/app-deployments).
##### Basic Configuration
@ -429,7 +433,7 @@ The [`graphql-hive` gem](https://github.com/charlypoly/graphql-ruby-hive) allows
Refer to the following guides for integration with your project:
<Cards>
<Cards.Card arrow title="GraphQL-Ruby" href="/docs/integrations/graphql-ruby" />
<Cards.Card arrow title="GraphQL-Ruby" href="/docs/other-integrations/graphql-ruby" />
</Cards>
### PHP Client
@ -438,7 +442,7 @@ The [Lighthouse Hive](https://github.com/stayallive/lighthouse-graphql-hive) is
integration can be used to measure and collect data against all your GraphQL operations.
<Cards>
<Cards.Card arrow title="Lighthouse (Laravel)" href="/docs/integrations/lighthouse" />
<Cards.Card arrow title="Lighthouse (Laravel)" href="/docs/other-integrations/lighthouse" />
</Cards>
### Rust Client
@ -446,5 +450,5 @@ integration can be used to measure and collect data against all your GraphQL ope
Refer to the following guides for integration with your Rust project:
<Cards>
<Cards.Card arrow title="Apollo Router" href="/docs/integrations/apollo-router" />
<Cards.Card arrow title="Apollo Router" href="/docs/other-integrations/apollo-router" />
</Cards>

View file

@ -0,0 +1,75 @@
---
description: Hive Gateway CLI Reference
---
import { Callout } from '@theguild/components'
# Hive Gateway CLI Reference
An overview of all the CLI arguments and environment variables for the `hive-gateway` CLI.
[Get started with the CLI](/docs/gateway).
### Usage
You can get help with using the CLI by appending the `--help` argument:
```sh
hive-gateway --help
```
which will print out the following:
{/* IMPORTANT: please don't forget to update the following when arguments change. Simply run `node --import tsx packages/hive-gateway/src/bin.ts --help` and copy over the text */}
```
Usage: hive-gateway [options] [command]
serve GraphQL federated architecture for any API service(s)
Options:
--fork <count> count of workers to spawn. defaults to "os.availableParallelism()" when NODE_ENV is "production", otherwise only one (the main) worker
(default: 1 (env: FORK)
-c, --config-path <path> path to the configuration file. defaults to the following files respectively in the current working directory: gateway.config.ts,
gateway.config.mts, gateway.config.cts, gateway.config.js, gateway.config.mjs, gateway.config.cjs (env: CONFIG_PATH)
-h, --host <hostname> host to use for serving (default: "127.0.0.1" (default: "127.0.0.1")
-p, --port <number> port to use for serving (default: 4000 (env: PORT)
--polling <duration> schema polling interval in human readable duration (default: "10s") (env: POLLING)
--no-masked-errors don't mask unexpected errors in responses
--masked-errors mask unexpected errors in responses (default: true)
--hive-registry-token <token> Hive registry token for usage metrics reporting (env: HIVE_REGISTRY_TOKEN)
--apollo-graph-ref <graphRef> Apollo graph ref of the managed federation graph (<YOUR_GRAPH_ID>@<VARIANT>) (env: APOLLO_GRAPH_REF)
--apollo-key <apiKey> Apollo API key to use to authenticate with the managed federation up link (env: APOLLO_KEY)
--help display help for command
Commands:
supergraph [options] [schemaPathOrUrl] serve a Federation supergraph provided by a compliant composition tool such as GraphQL Mesh or Apollo Rover
subgraph [schemaPathOrUrl] serve a Federation subgraph that can be used with any Federation compatible router like Hive Gateway or Apollo Router
proxy [options] [endpoint] serve a proxy to a GraphQL API and add additional features such as monitoring/tracing, caching, rate limiting, security, and more
help [command] display help for command
```
<Callout>All arguments can also be configured in the config file.</Callout>
### Environment Variables
In addition to the env vars showcased in the [CLI usage](#usage), more are available.
These are usually used for easier usage with these two schema registry services. You don't need any
configuration file if you provide these environment variables.
#### Hive Registry
- `HIVE_CDN_ENDPOINT`: The endpoint of the Hive Registry CDN
- `HIVE_CDN_KEY`: The API key provided by Hive Registry to fetch the schema
- `HIVE_REGISTRY_TOKEN`: The token to push the metrics to Hive Registry
[Learn more about Hive Registry integration here](/docs/gateway/supergraph-proxy-source)
#### Apollo GraphOS
- `APOLLO_KEY`: The API key provided by Apollo GraphOS to fetch the supergraph.
- `APOLLO_GRAPH_REF`: The graph ref of the managed federation graph (`<YOUR_GRAPH_ID>@<VARIANT>`).
- `APOLLO_SCHEMA_CONFIG_DELIVERY_ENDPOINT`: The URL of the managed federation up link. By default,
it uses the first uplink in the list.
[Learn more about GraphOS integration here](/docs/gateway/supergraph-proxy-source)

View file

@ -0,0 +1,449 @@
---
description: Hive Gateway Configuration Reference
---
import { Callout } from '@theguild/components'
# Hive Gateway Configuration Reference
An overview of all the configuration options for the `gateway.config.ts` used by the `hive-gateway`
CLI.
Both TypeScript (`*.ts`) and JavaScript (`*.js`) config filetypes are supported.
## Default config files
The following list of files are loaded by default, sorted by priority:
- `gateway.config.ts` _(recommended)_
- `gateway.config.mts`
- `gateway.config.cts`
- `gateway.config.js`
- `gateway.config.mjs`
- `gateway.config.cjs`
### Supergraph Related
#### `supergraph`
You can provide `GraphQLSchema`, `DocumentNode` which has the AST of the supergraph or `string`
which is the `SDL` representation of the supergraph.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
supergraph: './supergraph.graphql',
// or
supergraph: 'https://example.com/supergraph.graphql',
// or you can provide a function that returns a promise of the schema
supergraph: () =>
fetch('https://example.com/unified.graphql', {
headers: {
Authorization: 'Bearer token'
}
}).then(res => res.text())
})
```
<Callout>
For Hive Registry and Apollo GraphOS, you probably don't need to provide the `supergraph` option.
- If you use Hive Registry, please refer to the dedicated section for
[Fetching Supergraph from Hive Registry CDN](/docs/gateway/supergraph-proxy-source)
- If you use Apollo GraphOS, please refer to the dedicated section for
[Fetching Supergraph from Apollo GraphOS](/docs/gateway/supergraph-proxy-source).
</Callout>
#### Polling
Your schema source may change over time — it could be a CDN, a schema registry, or a
local file. By enabling this option, Hive Gateway can poll the source and update the schema
automatically.
If a function is provided as in the example above, that function will be called every time the
polling interval is reached.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
pollingInterval: 5_000 // Polling interval in milliseconds
})
```
#### `additionalResolvers`
You can provide additional resolvers to the supergraph. This can be useful if you want to add a
custom resolver to the supergraph, or override the existing one.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
additionalResolvers: {
Query: {
hello: () => 'Hello World'
}
}
})
```
#### `transports` (Advanced usage only)
<Callout>
This is an advanced feature and should be used with caution. Use this only if you know what you
are doing.
</Callout>
Gateway Transports are the key component of the gateway runtime's execution. It allows the gateway
to communicate with the subgraph. For example `@graphql-mesh/transport-rest` is used to communicate
with the REST subgraphs generated by
[OpenAPI](https://the-guild.dev/graphql/mesh/v1/source-handlers/openapi) and
[JSON Schema](https://the-guild.dev/graphql/mesh/v1/source-handlers/json-schema) source handlers.
And GraphQL subgraphs use GraphQL HTTP Transport(`@graphql-mesh/transport-http`).
Gateway looks up the supergraph, and checks the kind of the subgraph, and loads it by checking the
`@graphql-mesh/transport-{kind}` package, then loads it to create an executor for the subgraph. You
can see how an example `@transport` definition looks like
[here](https://github.com/ardatan/graphql-mesh/blob/master/e2e/auto-type-merging/__snapshots__/auto-type-merging.test.ts.snap#L4).
And see the implementation of the default `transport` loading logic
[here](https://github.com/ardatan/graphql-mesh/blob/master/packages/fusion/runtime/src/utils.ts#L32).
You can replace this logic by providing your own `transports`.
### Subgraphs
If you want to serve a single subgraph, you can provide the subgraph configuration as well. You can
generate subgraphs by using [GraphQL Mesh](https://graphql-mesh.com/) or any other Federation
compliant tool listed
[here](https://www.apollographql.com/docs/federation/building-supergraphs/supported-subgraphs/).
#### `subgraph`
You can provide `GraphQLSchema`, `DocumentNode` which has the AST of the subgraph or `string` which
is the `SDL` representation of the subgraph
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
subgraph: './subgraph.graphql',
// or
subgraph: 'https://example.com/subgraph.graphql',
// or you can provide a function that returns a promise of the schema
subgraph: () =>
fetch('https://example.com/subgraph.graphql', {
headers: {
Authorization: 'Bearer token'
}
}).then(res => res.text())
})
```
<Callout>
The rest of the configuration options are the same as the supergraph configuration.
</Callout>
### Configure Hive Gateway as a GraphQL Proxy
#### `proxy`
HTTP executor options to proxy all incoming requests to another HTTP endpoint.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'https://example.com/graphql'
}
})
```
<Callout>
By default, Hive Gateway introspects the schema from the endpoint. And if it fails, it skips the
validation and schema aware features. But if Hive CDN endpoint and key have been provided in the
configuration, Hive Gateway will fetch the schema from the Hive CDN.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'https://example.com/graphql'
},
schema: {
type: 'hive',
endpoint: 'https://cdn.graphql-hive.com/artifacts/v1/0123-3434/sdl',
key: 'SOME_HIVE_KEY'
}
})
```
</Callout>
##### `endpoint`
The URL of the GraphQL endpoint to proxy requests to.
##### `headers`
Additional headers to include when querying the original schema. It can be a plain object or a
function that returns a plain object.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'https://example.com/graphql',
headers: execReq => ({
// This will pass the original request headers to the proxied request
authorization: execReq.context.headers.authorization
})
}
})
```
##### `useGETForQueries`
Whether to use the GET HTTP method for queries when querying the original schema. In that case, the
query will be sent as a query string parameter named `query`.
##### `method`
The HTTP method to use when querying the original schema. Default is `POST`.
##### `timeout`
The timeout in milliseconds for the request to the original schema. There is no timeout by default.
##### `retry`
Retry attempts in case of a failure. Default is 0.
##### `credentials`
Request Credentials (default: 'same-origin')
[Learn more](https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials)
#### `skipValidation`
By default, Hive Gateway validates the operations on the gateway against the introspected schema.
This is recommended to keep it enabled for security reasons. But it brings a performance overhead.
If you want to disable this validation and send the operations directly to the upstream service, you
can set this option to `true`.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'https://example.com/graphql'
},
skipValidation: true
})
```
### Configure Server
#### `sslCredentials` for HTTPS
This is the option to provide SSL Credentials for HTTPS Server. If this is provided, Hive Gateway
will be served via HTTPS instead of HTTP.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
sslCredentials: {
key_file_name: 'path/to/key.pem',
cert_file_name: 'path/to/cert.pem',
ca_file_name: 'path/to/ca.pem',
passphrase: 'passphrase',
dh_params_file_name: 'path/to/dhparams.pem',
ssl_ciphers: 'ECDHE-R',
// This translates to SSL_MODE_RELEASE_BUFFERS
ssl_prefer_low_memory_usage: false
}
})
```
#### `browser`
This is the option to open the browser automatically when the server is started.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
/**
* Path to the browser that will be used by `Hive Gateway` to open a playground window in development mode
* This feature can be disabled by passing `false`
*/
browser: true // or `google-chrome` or `firefox` or `safari` or `edge` or `opera` or `vivaldi` or `brave` or `chromium` or `msedge` or `false`
})
```
#### `port` and `host`
These are the options to configure the port and host of the server in the configuration file rather
than passing them as CLI arguments.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
port: 4000,
host: 'localhost'
})
```
#### `maxHeaderSize`
This is the option to configure the maximum header size of the server. By default, it is 16KB. If
longer headers are sent, the server will respond with a 431 status code.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
maxHeaderSize: 32 * 1024 // 32KB
})
```
#### `plugins`
This is the option to extend your Hive Gateway with plugins. Hive Gateway uses
[GraphQL Yoga](https://the-guild.dev/graphql/yoga-server/docs/features/envelop-plugins), and
[Envelop](https://the-guild.dev/graphql/envelop) plugin system which allows you to hook into the
different phases of the GraphQL execution to manipulate or track the entire workflow step-by-step.
[See dedicated plugins feature page for more information](/docs/gateway/other-features/custom-plugins)
#### `cors`
[See dedicated CORS feature page for more information](/docs/gateway/other-features/security/cors)
#### `graphiql`
You can configure GraphiQL playground that allows you to play with your GraphQL API.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
graphiql: {
defaultQuery: 'query { hello }'
}
})
```
[Learn more about available GraphiQL Options from the dedicated GraphQL Yoga page](https://the-guild.dev/graphql/yoga-server/docs/features/graphiql)
`TODO: Move those into a dedicated GraphiQL page under Features`
#### `landingPage`
If you want to disable the landing page, you can set this option to `false`.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
landingPage: false
})
```
#### `batching`
[See dedicated page](/docs/gateway/other-features/performance/request-batching)
#### `fetchAPI` (Advanced usage only)
<Callout>
This is an advanced feature and should be used with caution. Use this only if you know what you
  are doing. Use it at your own risk.
</Callout>
Hive Gateway heavily relies on WHATWG Fetch API not only as a HTTP Client but also for handling HTTP
Server components. So it uses [`@whatwg-node/fetch`](https://github.com/ardatan/whatwg-node) by
default which is a platform agnostic implementation of the Fetch API. If you want to use a different
Fetch API implementation, you can provide it here.
```ts filename="gateway.config.ts"
import fetch from 'node-fetch'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
fetchAPI: {
fetch
}
})
```
#### `logger`
By default, Hive Gateway uses a simple logger that logs to the console by using standard `console`
methods.
Using this option, you can do;
- Disable logging by providing `false`
- Provide your own logger instance
- Choose a log level
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
import { createLogger } from 'some-logger-library'
export const gatewayConfig = defineConfig({
logger: createLogger()
// or
logger: 'info' // or 'debug' or 'warn' or 'error'
// or
logger: false
})
```
[Hive Gateway uses the same logging mechanism of GraphQL Yoga](https://the-guild.dev/graphql/yoga-server/docs/features/logging-and-debugging)
#### `graphqlEndpoint`
This is the option to provide a custom GraphQL endpoint for the server. By default, it is
`/graphql`.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
graphqlEndpoint: '/my-graphql-endpoint'
})
```
#### `maskedErrors`
This is enabled by default for security reasons.
[Learn more about Error Masking](/docs/gateway/other-features/security/error-masking)
#### `cache`
Provide a cache storage for the server. By default, Hive Gateway uses an in-memory cache.
[Learn more about Caching](/docs/gateway/other-features/performance)
#### `pubsub`
Provide a PubSub instance for the server. By default, Hive Gateway uses an in-memory PubSub. In
order to have a better scalability, you can provide a custom PubSub.
[Learn more about Subscriptions and Webhooks to see if you need this option](/docs/gateway/subscriptions#subscriptions-using-http-callback)
#### `healthCheckEndpoint` and `readinessCheckEndpoint`
[Learn more about Health Check and Readiness Check](/docs/gateway/monitoring-tracing#healthcheck)

View file

@ -0,0 +1,5 @@
export default {
insights: 'Insights',
explorer: 'Explorer',
laboratory: 'Laboratory',
};

View file

@ -0,0 +1,31 @@
import NextImage from 'next/image'
import { Callout } from '@theguild/components'
import schemaExplorerImage from '../../../../public/docs/pages/features/schema-explorer.png'
# Schema Explorer
The Hive Schema Explorer is a useful tool that can provide you with a comprehensive understanding of
your GraphQL schema. Not only does it allow you to explore the different types and fields of your
schema, but it also enables you to gain a deeper understanding of the arguments and their respective
input types.
<NextImage
alt="Schema Explorer"
src={schemaExplorerImage}
className="mt-10 max-w-2xl rounded-lg drop-shadow-md"
/>
## Schema Usage and Coverage
With [Usage Reporting](/docs/schema-registry/usage-reporting) feature enabled, you'll be able to see
an overview of the schema usage and coverage (for types, fields and input types), based on the
GraphQL operations you report to Hive.
This feature is useful if you wish to understand how your GraphQL schema is being used and queried,
and understand the impact of changes you make to your schema.
<Callout type="info">
The maximum duration is defined by the retention of your [Hive
plan](/docs/management/organizations#subscription-and-billing), and depends on the data you
already sent before to Hive.
</Callout>

View file

@ -0,0 +1,14 @@
import NextImage from 'next/image'
import usageOperationsImage from '../../../../public/docs/pages/features/usage-operations.png'
# Insights
A list of all the GraphQL operations executed by your consumers, their performance metrics and total
count. By clicking on a specific query, you'll be able to see the full list of fields and arguments
used in the operation.
<NextImage
alt="Insights"
src={usageOperationsImage}
className="mt-10 max-w-2xl rounded-lg drop-shadow-md"
/>

View file

@ -64,7 +64,7 @@ curl -X POST -H "X-Hive-Key: HIVE_TOKEN_HERE" -H "Content-Type: application/json
<Callout>
We recommend using the CDN for consuming the GraphQL schema in your project. [See GraphQL Code
Generator Integration](/docs/integrations/graphql-code-generator).
Generator Integration](/docs/other-integrations/graphql-code-generator).
</Callout>
Since the Laboratory schema is a valid GraphQL schema, and supports introspection, you may use it

View file

@ -0,0 +1,12 @@
export default {
index: 'Getting Started',
'supergraph-proxy-source': 'Supergraph / Proxy Source',
'usage-reporting': 'Usage Reporting',
'persisted-documents': 'Persisted Documents',
'authorization-authentication': 'Authorization / Authentication',
'monitoring-tracing': 'Monitoring/Tracing',
'defer-stream': 'Incremental Delivery (Defer & Stream)',
subscriptions: 'Subscriptions',
'other-features': 'Other Features',
deployment: 'Deployment',
};

View file

@ -0,0 +1,733 @@
import { Callout, Tabs } from '@theguild/components'
# Authorization and Authentication
Hive Gateway supports Authentication and Authorization using JSON Web Tokens (JWT).
A [JSON Web Token (JWT)](https://jwt.io/) is a signed token containing arbitrary information,
commonly used for authentication. By being signed by the issuer of the token, it can be verified
that the token is valid and has not been tampered with.
Hive Gateway provides a plugin to easily integrate JWT into your API, allowing you to easily
validate, decode and use the token (for identity and authorization).
Once you have the JWT token extracted and validated, the JWT claims (and optionally, the full token)
are injected to the Hive Gateway execution context, and forwarded to upstream GraphQL subgraphs,
using the `extensions` field.
<Callout type="warning">
When JWT is enabled and claims are forwarded to the upstream GraphQL subgraphs, you might want to
use [HMAC Signature](/docs/gateway/other-features/security/hmac-signature) between your Hive Gateway
and the subgraphs. This will ensure that the requests to the subgraphs are trusted and signed by the
gateway, and no other entity can execute requests to the subgraph on behalf of the end-users.
</Callout>
<Callout>
You can refer to [Generic Auth plugin docs](https://www.npmjs.com/package/@envelop/generic-auth),
if you need a more customized auth setup without JWT.
</Callout>
## How to use?
Here's a minimal example for configuring the JWT plugin with a local signing key, and looking for
the token in the `authorization` header:
```ts filename="gateway.config.ts"
import {
createInlineSigningKeyProvider,
defineConfig,
extractFromHeader
} from '@graphql-hive/gateway'
const signingKey = 'my-secret-key'
export const gatewayConfig = defineConfig({
jwt: {
// Look and extract for the token in the 'authorization' header, with the 'Bearer' prefix.
lookupLocations: [extractFromHeader({ name: 'authorization', prefix: 'Bearer' })],
// Decode and validate the token using the provided signing key.
    signingKeyProviders: [createInlineSigningKeyProvider(signingKey)],
// Forward the verified token payload to the upstream GraphQL subgraphs.
    forwarded: {
      payload: true
    }
}
})
```
You can also pass
[additional configuration options to the Yoga plugin](https://the-guild.dev/graphql/yoga-server/docs/features/jwt#additional-configuration):
```ts filename="gateway.config.ts"
import { defineConfig, createInlineSigningKeyProvider, createRemoteJwksSigningKeyProvider, extractFromHeader, extractFromCookie } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
jwt: {
// Forward the extracted token and claims to the upstream GraphQL subgraphs.
forwarded: {
      payload: true, // optional, default is "true"
      token: false, // optional, default is "false"
      extensionsFieldName: "jwt", // optional, default is "jwt"
},
// Configure your signing providers: either a local signing-key or a remote JWKS are supported.
    signingKeyProviders: [
      createInlineSigningKeyProvider(signingKey),
      createRemoteJwksSigningKeyProvider({ jwksUri: 'https://example.com/.well-known/jwks.json' })
    ],
// Configure where to look for the JWT token: in the headers, or cookies.
// By default, the plugin will look for the token in the 'authorization' header only.
lookupLocations: [
extractFromHeader({ name: 'authorization', prefix: 'Bearer' }),
extractFromCookie({ name: 'auth' }),
],
// Configure your token issuers/audience/algorithms verification options.
// By default, the plugin will only verify the HS256/RS256 algorithms.
// Please note that this should match the JWT signer issuer/audience/algorithms.
tokenVerification: {
issuer: 'http://my-issuer.com',
audience: 'my-audience',
algorithms: ['HS256', 'RS256'],
},
// The plugin can reject the request if the token is missing or invalid (doesn't pass JWT `verify` flow).
// By default, the plugin will reject the request if the token is missing or invalid.
reject: {
missingToken: true,
invalidToken: true,
}
}
})
```
## Configuration Options
Please refer to the
[configuration options of the Yoga plugin](https://the-guild.dev/graphql/yoga-server/docs/features/jwt#additional-configuration)
for complete details and examples.
## Forwarding the JWT token and payload
The JWT token and payload can be forwarded to the upstream GraphQL subgraphs, using the `extensions`
field of the request body.
This workflow can allow you to easily delegate the authentication process to Hive Gateway, and allow
the subgraphs to deal only with the user identity and authorization.
```mermaid
flowchart LR
1(["End-user"]) --->|"query { comments { id author { id name }}}"| 2
subgraph Hive Gateway
2["Engine"]
3["JWT Plugin"]
4["Query Planner"]
2--->|"Bearer XYZ"|3
3--->|"{ sub: 123 }"|2
2--->4
end
subgraph "Users"
5["Users Subgraph"]
4--->|"query { _entities(representations: $r) { ... on User { name }} }\nextensions: { jwt: { payload: { sub: 123 }}}"|5
end
subgraph "Comments"
6["Comments Subgraph"]
4--->|"query { comments { id author { id }} }\nextensions: { jwt: { payload: { sub: 123 }}}"|6
end
```
To pass the full token payload, you can use the `forwarded.payload` option:
```ts
{
forwarded: {
    payload: true // optional, default is "true"
}
}
```
The token payload will be injected into `extensions.jwt.payload` of the upstream request body:
```json
{
"query": "{ comments { id author { id }} }",
"extensions": {
"jwt": {
"payload": {
"sub": 123
}
}
}
}
```
You can also pass the full token, using the `forwarded.token` option:
```ts
{
  forwarded: {
    payload: true, // optional, default is "true"
    token: true // optional, default is "false"
}
}
```
And the token and (optional) prefix will be injected into `extensions.jwt.token` of the upstream
HTTP request:
```json
{
"query": "{ comments { id author { id }} }",
"extensions": {
"jwt": {
"payload": {
"sub": 123
},
"token": {
"value": "XYZ",
"prefix": "Bearer"
}
}
}
}
```
Additionally, if you wish to change the name of the `jwt` field in the extensions, you can use the
`forwarded.extensionsFieldName` option to change it:
```ts
{
forwarded: {
    extensionsFieldName: 'myJwt' // optional, default is "jwt"
}
}
```
## Using the JWT token
### Within Gateway
The JWT plugin will inject the decoded token and payload into the context of Hive Gateway.
You can use the injected payload with other plugins, to implement things like authorization or
user-identity based logic.
For example, with a plugin like Operation Field Permissions, you can use the `jwt` property of the
context to access the decoded JWT token, and decide what permissions to allow to the user based on
identity or token claims:
```ts filename="gateway.config.ts"
import { useOperationFieldPermissions } from '@envelop/operation-field-permissions'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
},
plugins: () => [
useOperationFieldPermissions({
getPermissions: async context => {
const { jwt } = context
// Check based on identity / user-id.
if (jwt?.payload?.sub === '123') {
return new Set(['Query.*'])
}
// Check based on token payload
if (jwt?.payload?.role === 'admin') {
return new Set(['Query.*'])
}
// Default permissions
return new Set(['Query.greetings'])
}
})
]
})
```
### In upstream GraphQL subgraphs
<Tabs items={['GraphQL-Yoga', 'Apollo Server', 'Other GraphQL servers']}>
{/* GraphQL-Yoga */}
<Tabs.Tab>
The JWT token and claims are forwarded to the upstream GraphQL subgraphs, using the `extensions`
field.
To access the JWT token and claims in your upstream service resolvers/execution, you can use the
`extensions` field of the incoming GraphQL request.
If you are using [GraphQL-Yoga](https://the-guild.dev/graphql/yoga-server) for your upstream
subgraph implementation, you can use a built-in utility for extracting it for you in an easy way:
```ts filename="yoga-subgraph.ts"
import { useForwardedJWT } from '@graphql-hive/gateway'
const myYogaSubgraphServer = createYoga({
schema: mySchema,
plugins: [
useForwardedJWT({
// The name of the field in the extensions object, default is "jwt"
extensionsFieldName: 'jwt',
// The name of the field to inject into the local context object, default is "jwt"
extendContextFieldName: 'jwt'
})
]
})
```
With this plugin configured, you should be able to just access `context.jwt` in your subgraphs, just
like you would in the gateway.
This makes the process of integrating JWT easier, and streamlined across the whole flow of
execution.
</Tabs.Tab>
{/* Apollo Server */}
<Tabs.Tab>
With Apollo-Server, you can access the forwarded claims/token, using a custom Apollo-Server plugin
that extracts `extensions` and injects it into the context:
```ts filename="apollo-subgraph.ts"
import { ApolloServer, ApolloServerPlugin } from '@apollo/server'
const extractJwtPlugin = {
async requestDidStart({ request, contextValue }) {
contextValue.jwt = request.extensions?.jwt
}
} satisfies ApolloServerPlugin<{ jwt?: { payload: Record<string, any> } }>
const server = new ApolloServer({
// Now, in your schema resolvers, you can access the JWT token and claims using `context.jwt`.
plugins: [extractJwtPlugin]
// ...
})
```
</Tabs.Tab>
{/* Other GraphQL servers */}
<Tabs.Tab>
Other implementations for GraphQL subgraph servers can also access the JWT token and claims, by
looking at the `extensions` field of the incoming request.
The `extensions` field of the incoming request will contain the JWT token and claims, injected by
Hive Gateway, following this structure:
```json
{
"extensions": {
"jwt": {
"payload": {
"sub": 123
},
// optional, depends on the gateway plugin configuration
"token": {
"value": "XYZ",
"prefix": "Bearer"
}
}
}
}
```
</Tabs.Tab>
</Tabs>
## Additional Configuration
### Token lookup
The plugin can be configured to look for the JWT token in different locations:
<Tabs items={['HTTP Header', 'HTTP Cookie', 'Custom Function']}>
{/* HTTP Header */}
<Tabs.Tab>
By default, the plugin will look for the token in the `authorization` header. You can configure the
plugin to look for the token in a different header or with a different prefix.
The prefix is being validated along with the token (for example: `Bearer my-token`).
```ts
import { defineConfig, extractFromHeader } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
lookupLocations: [extractFromHeader({ name: 'x-auth-token', prefix: 'Bearer' })]
}
})
```
</Tabs.Tab>
{/* HTTP Cookie */}
<Tabs.Tab>
You can configure the plugin to look for the token in a cookie. To do so, you need to enable cookie
parsing in the gateway.
```ts
import { extractFromCookie, defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
    lookupLocations: [extractFromCookie({ name: 'my-cookie' })]
  },
// Make sure you enabled cookie parsing in the gateway
cookies: true,
})
```
</Tabs.Tab>
{/* Custom Function */}
<Tabs.Tab>
You can configure the plugin to use a custom function to look for the token:
```ts
import { defineConfig } from '@graphql-hive/gateway'
const getToken = ({ request, serverContext, url }) => {
return request.headers.get('x-my-token')
}
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
lookupLocations: [getToken]
}
})
```
</Tabs.Tab>
{/* Multiple locations */}
<Tabs.Tab>
You can configure the plugin to look for the token in multiple locations. The plugin will look for
the token in the order you provide.
```ts
import { defineConfig, extractFromCookie, extractFromHeader } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
lookupLocations: [
extractFromHeader({ name: 'authorization', prefix: 'Bearer' }),
extractFromHeader({ name: 'x-legacy-auth' }),
extractFromHeader({ name: 'x-api-key', prefix: 'API-Access' }),
extractFromCookie({ name: 'browserAuth' })
]
}
})
```
</Tabs.Tab>
</Tabs>
---
### Signing Key providers
The plugin can be configured to use different signing key providers:
<Tabs items={['Inline', 'Remote JWKS', 'Multiple providers']}>
{/* Inline */}
<Tabs.Tab>
You can provide the signing key directly in the configuration.
> Do not hardcode the signing key in your code. Use environment variables, local encrypted file or a
> secret store!
```ts
import { createInlineSigningKeyProvider, defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
    signingKeyProviders: [createInlineSigningKeyProvider(process.env.MY_JWT_SECRET)]
}
})
```
> In case you are using an inline signing key provider, all `keyid` / `kid` will be allowed in
> tokens.
</Tabs.Tab>
{/* Remote JWKS */}
<Tabs.Tab>
You can configure the plugin to fetch the signing key from a remote JWKS endpoint.
Provide a `jwksClientOptions` object (see
[node-jwks-rsa documentation](https://github.com/auth0/node-jwks-rsa/blob/master/EXAMPLES.md)).
```ts
import { createRemoteJwksSigningKeyProvider, defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
    signingKeyProviders: [
createRemoteJwksSigningKeyProvider({
jwksUri: 'https://example.com/.well-known/jwks.json'
})
]
}
})
```
</Tabs.Tab>
{/* Multiple providers */}
<Tabs.Tab>
When using multiple providers, the plugin will try to use the first available signing key.
```ts
import {
createInlineSigningKeyProvider,
createRemoteJwksSigningKeyProvider,
defineConfig
} from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
    signingKeyProviders: [
// In case your remote provider is not available, the plugin will try use the inline provider.
createRemoteJwksSigningKeyProvider({
jwksUri: 'https://example.com/.well-known/jwks.json'
}),
createInlineSigningKeyProvider(process.env.MY_JWT_SECRET)
]
}
})
```
</Tabs.Tab>
</Tabs>
---
### Token Verification
The plugin verification process can be customized to match the JWT token `issuer`, `audience`, and
algorithms.
> Note that the verification options should match the JWT signer's configuration.
You can find
[here the complete list of verification options](https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/jsonwebtoken/index.d.ts#L58-L77)
for this plugin.
```ts
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
tokenVerification: {
issuer: ['http://yoga'],
audience: 'my-audience',
algorithms: ['HS256', 'RS256']
}
}
})
```
### Execution Rejection
The plugin can be configured to reject the request if the token is missing or invalid.
By default, an authentication error will be thrown if the token is missing or invalid, and the
request will be rejected with status code `401`.
```ts
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// ...
reject: {
missingToken: true,
invalidToken: true
}
}
})
```
> In case you want to handle the error yourself, you can set
> `reject: { missingToken: false, invalidToken: false }` and handle the error in your resolvers. The
> `context.jwt` will be `undefined` in case of missing or invalid token.
## Granular Protection using Auth Directives (`@authenticated`, `@requiresScopes` and `@policy`)
### Configuration
By default, the JWT plugin protects the whole schema. If you want to use a granular protection by
using Federation directives such as `@authenticated`, `@requiresScopes` and `@policy`, you can use
the Generic Auth plugin to have a granular protection using with or without JWT.
With the following configuration, you can use the JWT plugin to extract the token and claims, and
then use the Generic Auth plugin to protect the schema with the Federation directives:
```ts
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// ...
jwt: {
// You have to disable the default rejection of the JWT plugin
reject: {
missingToken: false,
invalidToken: false
}
},
genericAuth: {
// Then set generic auth plugin to use granular mode
mode: 'protect-granular',
// Set where to extract the payload
resolveUser: ctx => ctx.jwt?.payload,
// If you want to continue execution even if some fields are rejected
rejectUnauthenticated: false
}
})
```
### Protect a field using a field `@authenticated`
In your GraphQL schema SDL, you can add `@authenticated` directive to your fields.
```graphql
# Import it from Federation spec
extend schema @link(url: "https://specs.apollo.dev/federation/v2.6", import: ["@authenticated"])
type Query {
me: User! @authenticated
protectedField: String @authenticated
# publicField: String
}
```
> You can apply that directive to any GraphQL `field` definition, not only to root fields.
### Role/scope based authentication (RBAC) with `@requiresScopes` directive
You can use the `@requiresScopes` directive to protect your schema based on the user's role or scope.
Here's an example of how you can use it:
```graphql
extend schema @link(url: "https://specs.apollo.dev/federation/v2.5", import: ["@requiresScopes"])
type Query {
me: User! @requiresScopes(scopes: [["read:user"]])
protectedField: String @requiresScopes(scopes: [["read:admin"]])
publicField: String
}
```
By default, the plugin will try to extract available scopes for the current payload from `scope`
property which is expected to be a string like `read:user read:admin`. However you can customize
this behavior by providing a custom `extractScopes` function.
```ts
{
resolveUserFn,
validateUser,
mode: 'protect-granular',
// Set where to extract the payload
resolveUser: ctx => ctx.jwt?.payload,
extractScopes: jwtPayload => jwtPayload?.scopes // Expected to return an array of strings
}
```
You can also apply `AND` or `OR` logic to the scopes:
```graphql
extend schema @link(url: "https://specs.apollo.dev/federation/v2.5", import: ["@requiresScopes"])
type Query {
# This field requires the user to have `read:user` OR `read:admin` scopes
me: User! @requiresScopes(scopes: [["read:user"], ["read:admin"]])
# This field requires the user to have `read:user` AND `read:admin` scopes
protectedField: String @requiresScopes(scopes: [["read:admin", "read:user"]])
publicField: String
}
```
### `@policy` directive to fetch the roles from a policy service
You can use the `@policy` directive to fetch the roles from a policy service. Here's an example of
how you can use it:
```graphql
extend schema @link(url: "https://specs.apollo.dev/federation/v2.5", import: ["@policy"])
type Query {
me: User! @policy(policies: [["read:user"]])
protectedField: String @policy(policies: [["read:admin"]])
publicField: String
}
```
It has the same logic as `@requiresScopes`, but it can asynchronously fetch the roles from a
source:
```ts
{
resolveUserFn,
validateUser,
mode: 'protect-granular',
fetchPolicies: async user => {
const res = await fetch('https://policy-service.com', {
headers: {
Authorization: `Bearer ${user.token}`
}
})
// Expected to return an array of strings
return res.json()
}
}
```

View file

@ -0,0 +1,41 @@
---
description:
Stream and defer are directives that allow you to improve latency for clients by sending the most
important data as soon as it's ready.
---
import { Callout } from '@theguild/components'
# Defer and Stream
Stream and defer are directives that allow you to improve latency for clients by sending the most
important data as soon as it's ready.
As applications grow, the GraphQL operation documents can get bigger. The server will only send the
response back once all the data requested in the query is ready. However, not all requested data is
of equal importance, and the client may not need all of the data at once. To remedy this, GraphQL
specification working group is working on
[introducing new `@defer` and `@stream` directives](https://github.com/graphql/graphql-wg/blob/main/rfcs/DeferStream.md)
which allows applications to request a subset of data which is critical and get the rest of the data
in subsequent responses from the server. This
[proposal](https://github.com/graphql/graphql-spec/pull/742) is in
[Stage 2](https://github.com/graphql/graphql-spec/blob/main/CONTRIBUTING.md#stage-2-draft), meaning
GraphQL libraries can start implementing this as experimental feature to provide feedback to the
working group.
<Callout>
Stream and Defer are **experimental** features and not yet stable. The implementation can and will
  change. Furthermore, there is not yet a stable specification for the incremental delivery protocol.
</Callout>
## Enabling in the configuration
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
deferStream: true
})
```
[See more](https://the-guild.dev/graphql/yoga-server/docs/features/defer-stream#using-defer)

View file

@ -0,0 +1,7 @@
// Sidebar metadata for the "Deployment" docs section: maps each route
// segment to the title shown in the navigation.
// NOTE(review): presumably consumed by the Nextra-based docs site — confirm.
export default {
  index: 'Overview',
  docker: 'Docker',
  serverless: 'Serverless / On the Edge',
  'node-frameworks': 'Node.js Frameworks',
  runtimes: 'Serverside JS Runtimes',
};

View file

@ -0,0 +1,346 @@
import { Callout } from '@theguild/components'
# Docker
Docker is a tool that allows you to package an application and its dependencies into a container
that can run on any system. This makes it easy to deploy applications in a consistent and
reproducible way, regardless of the underlying infrastructure.
To simplify running your GraphQL gateway, you can use the Docker image and the Docker Compose
template we provide. This setup allows you to easily configure and run the gateway without the need
to install Node.js and the required gateway npm packages.
## Prerequisites
Make sure you have Docker installed on your system.
You can follow [the official Docker Engine install manual](https://docs.docker.com/engine/install/)
in case you don't have Docker installed already.
## Configuration
### Arguments
Hive Gateway can be configured with [CLI arguments](/docs/api-reference/gateway-cli#arguments) even
when running the image!
For example, changing the supergraph to use the `my-schema.graphql` schema instead looks like this:
```sh
docker run \
-p 4000:4000 \
-v "$(pwd)/my-schema.graphql:/serve/my-schema.graphql" \
ghcr.io/ardatan/hive-gateway supergraph my-schema.graphql
```
For a full list of CLI arguments, please refer to the
[CLI arguments](/docs/api-reference/gateway-cli#arguments).
### Config File
Instead of configuring Hive Gateway with CLI arguments, we support configuring with a config file.
You're recommended to use the `gateway.config.ts` file to configure Hive Gateway. Simply mount the
config file when running the image.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'https://example.com/graphql'
}
})
```
```sh
docker run \
-p 4000:4000 \
-v "$(pwd)/gateway.config.ts:/serve/gateway.config.ts" \
ghcr.io/ardatan/hive-gateway proxy
```
For a full list of CLI arguments, please refer to the
[Config Reference](/docs/api-reference/gateway-config).
### Changing Port in Container
The default port where Hive Gateway listens is `4000`; however, maybe the container is running
inside a network (like when using
[Networking in Compose](https://docs.docker.com/compose/networking/)) and you wish to change the
port of Hive Gateway in the image.
You can use the `gateway.config.ts` to change the port, or simply pass in the `--port` argument when
running the image:
```sh
docker run \
-p 8080:8080 \
-v "$(pwd)/supergraph.graphql:/serve/supergraph.graphql" \
ghcr.io/ardatan/hive-gateway supergraph --port=8080
```
## Running
Having a `supergraph.graphql` already composed with [GraphQL Mesh](https://graphql-mesh.com/),
running the Docker image is as easy as:
```sh
docker run \
-p 4000:4000 \
-v "$(pwd)/supergraph.graphql:/serve/gateway.config.ts" \
ghcr.io/ardatan/hive-gateway supergraph
```
## Docker Compose
You may have an environment where you want to use [Docker Compose](https://docs.docker.com/compose/)
and would like to add Hive Gateway there.
Start by defining the `docker-compose.yml`
```yaml
services:
hive-gateway:
image: ghcr.io/ardatan/hive-gateway
command: supergraph
ports:
- '4000:4000'
# Add Hive Registry environment variables in case you use it
# environment:
# HIVE_CDN_ENDPOINT: <secret>
# HIVE_CDN_KEY: <secret>
# HIVE_REGISTRY_TOKEN: <secret>
volumes:
- ./gateway.config.ts:/serve/gateway.config.ts
```
And then simply start the services with:
```sh
docker compose up
```
## Extend Docker Image
### Install Plugin
You may want to add additional functionality, or plugins to the base image - you just need to create
a new Dockerfile basing the image off `ghcr.io/ardatan/hive-gateway`.
If you need only a handful of plugins (or some other dependencies), you can simply extend the image
and install the modules with `npm i`:
install the modules with `npm i`:
For example, adding
[Block Field Suggestions Plugin](/docs/gateway/other-features/security/block-field-suggestions) to
the container would look like this:
```dockerfile filename="Dockerfile"
FROM ghcr.io/ardatan/hive-gateway
RUN npm i @escape.tech/graphql-armor-block-field-suggestions
```
```sh
docker build -t hive-gateway-w-block-suggestions .
```
Configure to use the block field suggestions plugin:
```ts filename="gateway.config.ts"
import { blockFieldSuggestionsPlugin } from '@escape.tech/graphql-armor-block-field-suggestions'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: pluginCtx => [
blockFieldSuggestionsPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Mask applied to the error message | default: '[Suggestion hidden]'
mask: '[Suggestion hidden]'
})
]
})
```
And then simply start the new image with the config file mounted:
```sh
docker run \
-p 4000:4000 \
-v "$(pwd)/gateway.config.ts:/serve/gateway.config.ts" \
hive-gateway-w-block-suggestions supergraph
```
### Develop Plugin
However, you may be developing a plugin and have a setup with some dependencies and source code,
copying over your project's files is the way to go.
In the following example, we're developing a `useTiming` plugin that will add a human readable
execution duration to the GraphQL result `extensions` property.
```json filename="package.json"
{
"name": "my-timing",
"dependencies": {
"moment": "^2"
},
"devDependencies": {
"@graphql-hive/gateway": "latest",
"@graphql-hive/gateway": "latest"
}
}
```
```ts filename="my-timing.ts"
import moment from 'moment'
import type { GatewayPlugin } from '@graphql-hive/gateway'
export function useTiming(): GatewayPlugin {
return {
onExecute() {
const start = Date.now()
return {
onExecuteDone({ result, setResult }) {
const duration = moment.duration(Date.now() - start)
if (isAsyncIterable(result)) {
setResult(
mapAsyncIterator(result, result => ({
...result,
extensions: {
...result?.extensions,
duration: duration.humanize()
}
}))
)
return
}
setResult({
...result,
extensions: {
...result?.extensions,
duration: duration.humanize()
}
})
}
}
}
}
}
```
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
import { useTiming } from './my-timing'
export const gatewayConfig = defineConfig({
plugins: () => [useTiming()]
})
```
Your Dockerfile should then look something like this:
```dockerfile filename="Dockerfile"
FROM ghcr.io/ardatan/hive-gateway
# we don't install dev deps because:
# 1. they are needed for type checking only
# 2. Hive Gateway is already available in the docker image
COPY package*.json .
RUN npm i --omit=dev
COPY my-timing.ts .
COPY gateway.config.ts .
```
Then build your image:
```sh
docker build -t hive-gateway-w-my-timing .
```
And finally start it (the config file is in the image and doesn't need to be mounted):
```sh
docker run -p 4000:4000 hive-gateway-w-my-timing supergraph
```
<Callout>
For faster development, you can mount the source code as volumes so that you don't have to rebuild
the image on each run.
```sh
docker run -p 4000:4000 \
-v "$(pwd)/gateway.config.ts":/serve/gateway.config.ts \
-v "$(pwd)/my-timing.ts":/serve/my-timing.ts \
hive-gateway-w-my-timing supergraph
```
</Callout>
### Additional Resolvers
Alternatively, you may need to define additional resolvers that depend on other dependencies.
Similarly to the [Develop Plugin](#develop-plugin) approach, you can just copy the project code over
and build another image.
Say you have the following files:
```json filename="package.json"
{
"name": "my-time",
"dependencies": {
"moment": "^2"
},
"devDependencies": {
"@graphql-hive/gateway": "latest"
}
}
```
```js filename="my-time.ts"
import moment from 'moment'
export const additionalResolvers = {
Query: {
formattedToday() {
return moment().format('DD.MM.YYYY')
}
}
}
```
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
import { additionalResolvers } from './my-time'
export const gatewayConfig = defineConfig({ additionalResolvers })
```
Your Dockerfile should then look something like this:
```dockerfile filename="Dockerfile"
FROM ghcr.io/ardatan/hive-gateway
# we don't install dev deps because:
# 1. they are needed for type checking only
# 2. Hive Gateway is already available in the docker image
COPY package*.json .
RUN npm i --omit=dev
COPY my-time.ts .
COPY gateway.config.ts .
```
Then build your image:
```sh
docker build -t hive-gateway-w-add-res .
```
And finally start it (the config file is in the image and doesn't need to be mounted):
```sh
docker run -p 4000:4000 hive-gateway-w-add-res supergraph
```

View file

@ -0,0 +1,87 @@
# Run Anywhere - Deploy your Gateway
import { Callout } from '@theguild/components'
Once you have configured and tested your gateway, it is time to deploy it. Hive Gateway Runtime
uses Web Standards (WHATWG Fetch API) not only as an HTTP client but also for handling the
server-side. That gives us the ability to run the gateway in any environment that runs JavaScript.
Node.js is the most common server-side environment in JavaScript ecosystem but it doesn't use Web
Standards for handling HTTP requests. So we use a library called
[`@whatwg-node/server`](https://github.com/ardatan/whatwg-node/tree/master/packages/server#whatwg-node-generic-server-adapter)
that allows us to create a wrapper between `node:http` and Fetch API.
<Callout>
Check the following sections to see how to deploy your gateway in different environments on the left menu.
**If your environment is not listed here**, that doesn't mean you can't deploy your gateway. Thanks
to our adapter system, **you can create your own implementation for your environment**.
Feel free to contribute the documentation for your favorite server implementation if we don't have
it in the list.
</Callout>
## Other Environments (Custom)
Let's say you have an environment that is not listed here, you can still deploy your gateway. In
this case, we will show here how to pass the request information from your environment to Gateway,
then get the response for your environment back.
```ts
import { createGatewayRuntime } from '@graphql-hive/gateway'
import type {
ImaginaryEnvironmentRequest,
ImaginaryEnvironmentServerContext
} from '@imaginary-environment/types'
import { getMySupergraph } from './my-supergraph.js'
// First pass it to the runtime as a context
const gatewayRuntime = createGatewayRuntime<ImaginaryEnvironmentServerContext>({
supergraph: () => getMySupergraph()
})
// Let's say it needs a function exported
export async function gatewayEndpoint(
envRequest: ImaginaryEnvironmentRequest,
envContext: ImaginaryEnvironmentServerContext
) {
  // Gateway Runtime provides a fetch function which has exactly the same signature as the regular `fetch`
const res = await gatewayRuntime.fetch(
envRequest.url,
{
method: envRequest.method,
headers: envRequest.headers,
body: envRequest.body // Body can be a string or a ReadableStream or UInt8Array, see [BodyInit](https://developer.mozilla.org/en-US/docs/Web/API/BodyInit)
},
envContext
)
// You can create an object from [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object
const headersObj: Record<string, string> = {}
res.headers.forEach((value, key) => {
headersObj[key] = value
})
// It returns [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) object
// See the methods and properties of the Response object from the link
// You can get a string
const bodyText = await res.text()
// You can get a stream
const bodyStream = res.body
// You can get a buffer
const bodyBuffer = await res.arrayBuffer()
// You can get a JSON object
const bodyJson = await res.json()
// You can get a blob
const bodyBlob = await res.blob()
// You can get a form data
const bodyFormData = await res.formData()
// Then you can return the response to your environment
return {
status: res.status,
statusText: res.statusText,
headers: headersObj,
bodyText
}
}
```

View file

@ -0,0 +1,11 @@
// Sidebar metadata for the "Node.js Frameworks" docs section: maps each
// route segment to the title shown in the navigation.
// NOTE(review): presumably consumed by the Nextra-based docs site — confirm.
export default {
  index: 'Introduction',
  express: 'Express',
  fastify: 'Fastify',
  koa: 'Koa',
  hapi: 'Hapi',
  nestjs: 'NestJS',
  uwebsockets: 'µWebSockets.js',
  nextjs: 'Next.js',
  sveltekit: 'SvelteKit',
};

View file

@ -0,0 +1,97 @@
---
description:
Express is the most popular web framework for Node.js. It is a minimalist framework that provides
a robust set of features to handle HTTP on Node.js applications.
---
import { Callout } from '@theguild/components'
# Integration with Express
[Express is the most popular web framework for Node.js.](https://expressjs.com/) It is a minimalist
framework that provides a robust set of features to handle HTTP on Node.js applications. You can
easily integrate Hive Gateway into your Express application with a few lines of code.
## Example
```ts
import express from 'express'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const app = express()
const serveRuntime = createGatewayRuntime(/* Your configuration */)
// Bind Hive Gateway to the graphql endpoint to avoid rendering the playground on any path
app.use(serveRuntime.graphqlEndpoint, serveRuntime)
app.listen(4000, () => {
console.log('Running a GraphQL API server at http://localhost:4000/graphql')
})
```
## Using Helmet
If you are using [Helmet](https://helmetjs.github.io/) to set your
[Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP), you can use the
following configuration:
```ts
app.use(
helmet({
contentSecurityPolicy: {
directives: {
'style-src': ["'self'", 'unpkg.com'],
'script-src': ["'self'", 'unpkg.com', "'unsafe-inline'"],
'img-src': ["'self'", 'raw.githubusercontent.com']
}
}
})
)
```
### Isolate GraphiQL configuration
To avoid applying this configuration to other endpoints you may have on your Express server, you can
use `Express.Router` to create a new router instance and apply the configuration only to the Hive
Gateway endpoint.
```ts
import express from 'express'
import helmet from 'helmet'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const app = express()
const serveRuntime = createGatewayRuntime(/* Your configuration */)
const hiveGWRouter = express.Router()
// GraphiQL specific CSP configuration
hiveGWRouter.use(
helmet({
contentSecurityPolicy: {
directives: {
'style-src': ["'self'", 'unpkg.com'],
'script-src': ["'self'", 'unpkg.com', "'unsafe-inline'"],
'img-src': ["'self'", 'raw.githubusercontent.com']
}
}
})
)
hiveGWRouter.use(serveRuntime)
// By adding the Hive Gateway router before the global helmet middleware,
// you can be sure that the global CSP configuration will not be applied to the Hive Gateway endpoint
app.use(serveRuntime.graphqlEndpoint, hiveGWRouter)
// Add the global CSP configuration for the rest of your server.
app.use(helmet())
// You can now register your other endpoints that will not be affected by the GraphiQL CSP configuration
app.get('/hello', (req, res) => {
res.send('Hello World!')
})
app.listen(4000, () => {
console.log('Running a GraphQL API server at http://localhost:4000/graphql')
})
```

View file

@ -0,0 +1,79 @@
---
description:
Fastify is one of the popular HTTP server frameworks for Node.js. It is a very simple, yet
powerful framework that is easy to learn and use.
---
import { Callout } from '@theguild/components'
# Integration with Fastify
[Fastify is one of the popular HTTP server frameworks for Node.js.](https://www.fastify.io/) It is a
very simple, yet powerful framework that is easy to learn and use.
You can easily integrate Hive Gateway with Fastify.
So you can benefit from the powerful plugins of Fastify ecosystem with Hive Gateway.
[See the ecosystem](https://fastify.io/docs/latest/Guides/Ecosystem)
## Example
```ts
import fastify, { FastifyReply, FastifyRequest } from 'fastify'
import { createGatewayRuntime } from '@graphql-hive/gateway'
// This is the fastify instance you have created
const app = fastify({ logger: true })
const serveRuntime = createGatewayRuntime<{
req: FastifyRequest
reply: FastifyReply
}>({
// Integrate Fastify logger
logging: {
debug: (...args) => args.forEach(arg => app.log.debug(arg)),
info: (...args) => args.forEach(arg => app.log.info(arg)),
warn: (...args) => args.forEach(arg => app.log.warn(arg)),
error: (...args) => args.forEach(arg => app.log.error(arg))
}
})
/**
* We pass the incoming HTTP request to Hive Gateway
* and handle the response using Fastify's `reply` API
* Learn more about `reply` https://www.fastify.io/docs/latest/Reply/
**/
app.route({
// Bind to the Hive Gateway's endpoint to avoid rendering on any path
url: serveRuntime.graphqlEndpoint,
method: ['GET', 'POST', 'OPTIONS'],
handler: async (req, reply) => {
// Second parameter adds Fastify's `req` and `reply` to the GraphQL Context
const response = await serveRuntime.handleNodeRequestAndResponse(req, reply, {
req,
reply
})
response.headers.forEach((value, key) => {
reply.header(key, value)
})
reply.status(response.status)
reply.send(response.body)
return reply
}
})
app.listen(4000)
```
## Add dummy content type parser for File Uploads
Fastify needs to be aware that Hive Gateway will handle `multipart/form-data` requests, because
otherwise it will throw an error like `Unsupported media type`.
```ts
// This will allow Fastify to forward multipart requests to Hive Gateway
app.addContentTypeParser('multipart/form-data', {}, (req, payload, done) => done(null))
```

View file

@ -0,0 +1,67 @@
import { Callout } from '@theguild/components'
# Integration with Hapi
[Hapi](https://hapi.dev) allows you to build powerful, scalable applications, with minimal overhead
and full out-of-the-box functionality.
Hive Gateway can be integrated easily as a route to the existing Hapi application with a few lines
of code.
## Example
```ts
import http from 'node:http'
import { Readable } from 'node:stream'
import { createGatewayRuntime } from '@graphql-hive/gateway'
import Hapi from '@hapi/hapi'
import { schema } from './my-graphql-schema'
interface ServerContext {
req: Hapi.Request
h: Hapi.ResponseToolkit
}
const hiveGateway = createGatewayRuntime<ServerContext>(/* Your configuration */)
const server = Hapi.server({ port: 4000 })
server.route({
method: '*',
path: hiveGateway.graphqlEndpoint,
options: {
payload: {
// let hiveGateway handle the parsing
output: 'stream'
}
},
handler: async (req, h) => {
const { status, headers, body } = await hiveGateway.handleNodeRequestAndResponse(
req.raw.req,
req.raw.res,
{
req,
h
}
)
const res = h.response(
Readable.from(body, {
// hapi needs the stream not to be in object mode
objectMode: false
})
)
for (const [key, val] of headers) {
res.header(key, val)
}
return res.code(status)
}
})
server.start()
```
Hive Gateway should now be available at
[http://localhost:4000/graphql](http://localhost:4000/graphql).

View file

@ -0,0 +1,20 @@
import { Callout } from '@theguild/components'
# Node.js Frameworks
We highly recommend using Hive Gateway with the CLI in Node.js as described in
[Node.js guide](/docs/gateway/deployment/runtimes/nodejs). But if you want to use Hive Gateway with
a Node.js framework, you can use the `createGatewayRuntime` function from `@graphql-hive/gateway`
package.
In this case, you have to pass your serve configuration inside `createGatewayRuntime` instead of
exporting it `gatewayConfig` from `gateway.config.ts` file.
It handles Node.js request and response types which are
[IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage) and
[ServerResponse](https://nodejs.org/api/http.html#http_class_http_serverresponse).
If your framework has middlewares and so on, you can handle the response by yourself as in
[Fastify example](/docs/gateway/deployment/node-frameworks/fastify).
Choose your framework from the list on the left to see an example.

View file

@ -0,0 +1,52 @@
---
description:
Koa is a new web framework designed by the team behind Express, which aims to be a smaller, more
expressive, and more robust foundation for web applications and APIs.
---
import { Callout } from '@theguild/components'
# Integration with Koa
[Koa is a new web framework designed by the team behind Express, which aims to be a smaller, more expressive, and more robust foundation for web applications and APIs.](https://koajs.com)
Hive Gateway can be integrated easily as a route to the existing Koa application with a few lines of
code.
[So you can benefit middlewares written for Koa with Hive Gateway.](https://github.com/koajs/koa/wiki)
## Example
```ts
import Koa from 'koa'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const app = new Koa()
const gatewayRuntime = createGatewayRuntime<Koa.ParameterizedContext>()
// Bind Hive Gateway to `/graphql` endpoint
app.use(async ctx => {
// Second parameter adds Koa's context into GraphQL Context
const response = await gatewayRuntime.handleNodeRequestAndResponse(ctx.req, ctx.res, ctx)
// Set status code
ctx.status = response.status
// Set headers
response.headers.forEach((value, key) => {
ctx.append(key, value)
})
if (response.body) {
// Set body
ctx.body = response.body
}
})
app.listen(4000, () => {
console.log(
`Running a GraphQL API server at http://localhost:4000/${gatewayRuntime.graphqlEndpoint}`
)
})
```

View file

@ -0,0 +1,3 @@
import { Callout } from '@theguild/components'
# Deployment with NestJS

View file

@ -0,0 +1,34 @@
---
description:
Next.js is a web framework that allows you to build websites very quickly and Hive Gateway can be
integrated with Next.js easily as an API Route.
---
import { Callout } from '@theguild/components'
# Integration with Next.js
[Next.js](https://nextjs.org) is a web framework that allows you to build websites very quickly and
Hive Gateway can be integrated with Next.js easily as
[a custom route handler](https://nextjs.org/docs/app/building-your-application/routing/router-handlers).
## Example
```ts
// Next.js Custom Route Handler: https://nextjs.org/docs/app/building-your-application/routing/router-handlers
import { createGatewayRuntime } from '@graphql-hive/gateway'
const { handleRequest } = createGatewayRuntime({
/* Your configuration here before the following required settings */
// While using Next.js file convention for routing, we need to configure Hive Gateway to use the correct endpoint
graphqlEndpoint: '/api/graphql',
// Hive Gateway needs to know how to create a valid Next response
fetchAPI: { Response }
})
// Export the handler to be used with the following HTTP methods
export { handleRequest as GET, handleRequest as POST, handleRequest as OPTIONS }
```

View file

@ -0,0 +1,39 @@
---
description:
SvelteKit is a framework for rapidly developing robust, performant web applications using Svelte.
---
# Integration with SvelteKit
[SvelteKit](https://kit.svelte.dev/) is a framework for rapidly developing robust, performant web
applications using [Svelte](https://svelte.dev/). You can easily integrate Hive Gateway into your
SvelteKit powered application.
## Example
SvelteKit is typically used together with [Vite](https://vitejs.dev/) with the project structure
[looking like this](https://kit.svelte.dev/docs/project-structure). We also assume that you have
composed a `supergraph.graphql` with [GraphQL Mesh](https://graphql-mesh.com/).
In this example, we want to integrate Hive Gateway into Vite's routes, we'll therefore use the
runtime.
```sh npm2yarn
npm i @graphql-hive/gateway
```
Keeping the [aforementioned project layout](https://kit.svelte.dev/docs/project-structure) in mind,
create a new server route in `my-project/src/routes/graphql/+server.ts` to expose the GraphQL server
at `/graphql` and implement using the Hive Gateway runtime like this:
```ts filename="my-project/src/routes/graphql/+server.ts"
import { createGatewayRuntime } from '@graphql-hive/gateway'
const serve = createGatewayRuntime({
supergraph: 'supergraph.graphql', // working directory is root of the project
graphqlEndpoint: '/graphql', // matches the server route path
fetchAPI: { Response } // use the native `Response`
})
export { serve as GET, serve as POST }
```

View file

@ -0,0 +1,104 @@
---
description: µWebSockets.js is an HTTP/WebSocket server for Node.js.
---
import { Callout } from '@theguild/components'
# Integration with µWebSockets.js
[µWebSockets.js](https://github.com/uNetworking/uWebSockets.js) is an alternative to Node.js's
built-in HTTP server implementation. It is much faster than Node.js's `http` module as you can see
in the benchmarks in the
[GitHub repo](https://github.com/uNetworking/uWebSockets/tree/master/benchmarks#benchmark-driven-development).
Despite its name, it is not a WebSocket-only server, it does HTTP as well.
Since Hive Gateway is framework and environment agnostic, it supports µWebSockets.js out of the box
with a simple configuration.
<Callout>
If you use Gateway CLI within Node.js, it already uses µWebSockets.js. You don't need to do
anything extra within CLI. Use this guide only if you really need to use µWebSockets.js directly.
</Callout>
## Example
```ts filename="index.ts"
import { App, HttpRequest, HttpResponse } from 'uWebSockets.js'
import { createGatewayRuntime } from '@graphql-hive/gateway'
interface ServerContext {
req: HttpRequest
res: HttpResponse
}
export const gatewayRuntime = createGatewayRuntime<ServerContext>(/* Your configuration */)
App()
.any('/*', gatewayRuntime)
.listen('localhost', 4000, () => {
console.log(`Server is running on http://localhost:4000`)
})
```
## Subscriptions with WebSockets
You can also use WebSockets instead of SSE with `graphql-ws`;
```sh npm2yarn
npm i graphql-ws
```
```ts filename="index.ts"
import { execute, ExecutionArgs, subscribe } from 'graphql'
import { makeBehavior } from 'graphql-ws/lib/use/uWebSockets'
import { App, HttpRequest, HttpResponse } from 'uWebSockets.js'
import { createGatewayRuntime } from '@graphql-hive/gateway'
interface ServerContext {
req: HttpRequest
res: HttpResponse
}
export const serveRuntime = createGatewayRuntime<ServerContext>(/* Your configuration */)
// Hive Gateway's envelop may augment the `execute` and `subscribe` operations
// so we need to make sure we always use the freshest instance
type EnvelopedExecutionArgs = ExecutionArgs & {
rootValue: {
execute: typeof execute
subscribe: typeof subscribe
}
}
const wsHandler = makeBehavior({
execute: args => (args as EnvelopedExecutionArgs).rootValue.execute(args),
subscribe: args => (args as EnvelopedExecutionArgs).rootValue.subscribe(args),
onSubscribe: async (ctx, msg) => {
const { schema, execute, subscribe, contextFactory, parse, validate } =
serveRuntime.getEnveloped(ctx)
const args: EnvelopedExecutionArgs = {
schema,
operationName: msg.payload.operationName,
document: parse(msg.payload.query),
variableValues: msg.payload.variables,
contextValue: await contextFactory(),
rootValue: {
execute,
subscribe
}
}
const errors = validate(args.schema, args.document)
if (errors.length) return errors
return args
}
})
App()
  .any('/*', serveRuntime)
  .ws(serveRuntime.graphqlEndpoint, wsHandler)
  .listen('localhost', 4000, () => {
    console.log(`Server is running on http://localhost:4000`)
  })
```

View file

@ -0,0 +1,6 @@
// Sidebar metadata for the "Serverside JS Runtimes" docs section: maps each
// route segment to the title shown in the navigation.
// NOTE(review): presumably consumed by the Nextra-based docs site — confirm.
export default {
  index: 'Introduction',
  nodejs: 'Node.js',
  bun: 'Bun',
  deno: 'Deno',
};

View file

@ -0,0 +1,33 @@
---
description:
Hive Gateway provides you a cross-platform GraphQL Server. So you can easily integrate it into any
platform besides Node.js.
---
import { Callout } from '@theguild/components'
# Integration with Bun
Hive Gateway provides you a cross-platform GraphQL Server. So you can easily integrate it into any
platform besides Node.js. [Bun](https://bun.sh) is a modern JavaScript runtime like Node or Deno,
and it supports Fetch API as a first class citizen. So the configuration is really simple like any
other JS runtime with Hive Gateway;
The following code is a simple example of how to use Hive Gateway with Bun.
```ts
import { createGatewayRuntime } from '@graphql-hive/gateway'
const gatewayRuntime = createGatewayRuntime(/* Your configuration */)
const server = Bun.serve({
fetch: gatewayRuntime
})
console.info(
`Server is running on ${new URL(
    gatewayRuntime.graphqlEndpoint,
`http://${server.hostname}:${server.port}`
)}`
)
```

View file

@ -0,0 +1,51 @@
---
description:
Hive Gateway provides you a cross-platform GraphQL Server. So you can easily integrate it into any
platform besides Node.js.
---
import { Callout } from '@theguild/components'
# Integration with Deno
Hive Gateway provides you a cross-platform GraphQL Server. So you can easily integrate it into any
platform besides Node.js.
[Deno is a simple, modern and secure runtime for JavaScript and TypeScript that uses V8 and is built in Rust](https://deno.land/).
We will use `@graphql-hive/gateway` which has an agnostic HTTP handler using
[Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch)'s
[`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) and
[`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) objects.
## Example
Create a `deno.json` file.
[Learn more about import maps](https://deno.land/manual/basics/import_maps)
Create a `deno-hive-gateway.ts` file:
```json filename="deno.json" {3}
{
"imports": {
"@graphql-hive/gateway": "npm:@graphql-hive/gateway@^0.1.0"
}
}
```
```ts filename="deno-hive-gateway.ts"
import { serve } from 'https://deno.land/std@0.157.0/http/server.ts'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const gatewayRuntime = createGatewayRuntime(/* Your configuration */)
serve(gatewayRuntime, {
onListen({ hostname, port }) {
console.log(`Listening on http://${hostname}:${port}/${gatewayRuntime.graphqlEndpoint}`)
}
})
```
And run it:
```bash
deno run --allow-net deno-hive-gateway.ts
```

View file

@ -0,0 +1,13 @@
import { Callout } from '@theguild/components'
# Server-side JavaScript Environments
For Node.js and other Node-compliant environments, you can use Gateway CLI as described in the
[Node.js guide](/docs/gateway/deployment/runtimes/nodejs). But if you want to use Hive Gateway with
a server-side JavaScript environment that is not compatible with Node.js API, you can use the
`createGatewayRuntime` function from `@graphql-hive/gateway` package.
In this case, you have to pass your serve configuration inside `createGatewayRuntime` instead of
exporting it `gatewayConfig` from `gateway.config.ts` file.
See the guides on the left for examples with different server-side JavaScript environments.

View file

@ -0,0 +1,29 @@
import { Callout } from '@theguild/components'
# Node.js
Node.js is the most common runtime for JavaScript.
If you have a Node.js environment, we highly recommend using Hive Gateway with the CLI as described
in the [introduction](/docs/gateway). If you really want to use the runtime in a customized way, you
can use the `createGatewayRuntime` function from `@graphql-hive/gateway` package.
can use the `createGatewayRuntime` function from `@graphql-hive/gateway` package.
## Hive Gateway CLI
You can follow the introduction page directly to use Hive Gateway CLI. [See here](/docs/gateway)
## Hive Gateway Runtime (advanced-only)
Use this method only if you know what you are doing. It is recommended to use Hive Gateway CLI for
most cases.
```ts
import { createServer } from 'http'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const serveRuntime = createGatewayRuntime(/* Your configuration */)
const server = createServer(serveRuntime)
server.listen(4000, () => {
console.log(`Server is running on http://localhost:4000`)
})
```

View file

@ -0,0 +1,7 @@
// Sidebar metadata for the "Serverless / On the Edge" docs section: maps each
// route segment to the title shown in the navigation.
// NOTE(review): presumably consumed by the Nextra-based docs site — confirm.
export default {
  index: 'Introduction',
  'cloudflare-workers': 'Cloudflare Workers',
  'aws-lambda': 'AWS Lambda',
  'google-cloud-platform': 'Google Cloud Platform',
  'azure-functions': 'Azure Functions',
};

View file

@ -0,0 +1,49 @@
import { Callout } from '@theguild/components'
# Deploying Hive Gateway to AWS Lambda
AWS Lambda is a serverless computing platform that makes it easy to build applications that run on
the AWS cloud. Hive Gateway is platform agnostic so they can fit together easily.
<Callout>
Before you start, make sure you read the [Serverless / On the
Edge](/docs/gateway/deployment/serverless) page.
</Callout>
```ts
import { APIGatewayEvent, APIGatewayProxyResult, Context } from 'aws-lambda'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const serveRuntime = createGatewayRuntime(/* Your configuration */)
export async function handler(
event: APIGatewayEvent,
lambdaContext: Context
): Promise<APIGatewayProxyResult> {
const response = await serveRuntime.fetch(
event.path +
'?' +
new URLSearchParams((event.queryStringParameters as Record<string, string>) || {}).toString(),
{
method: event.httpMethod,
headers: event.headers as HeadersInit,
body: event.body
? Buffer.from(event.body, event.isBase64Encoded ? 'base64' : 'utf8')
: undefined
},
{
event,
lambdaContext
}
)
const responseHeaders = Object.fromEntries(response.headers.entries())
return {
statusCode: response.status,
headers: responseHeaders,
body: await response.text(),
isBase64Encoded: false
}
}
```

View file

@ -0,0 +1,23 @@
import { Callout } from '@theguild/components'
# Deploying Hive Gateway to Azure Functions
Azure Functions is a serverless environment that supports JavaScript. Hive Gateway is platform
agnostic and can be deployed to Azure Functions as well.
<Callout>
Before you start, make sure you read the [Serverless / On the
Edge](/docs/gateway/deployment/serverless) page.
</Callout>
```ts
import { app } from '@azure/functions'
import { createGatewayRuntime } from '@graphql-hive/gateway'
const handler = createGatewayRuntime(/* Your configuration */)
app.http('graphql', {
method: ['GET', 'POST'],
handler
})
```

View file

@ -0,0 +1,31 @@
import { Callout } from '@theguild/components'
# Deploying Hive Gateway to Cloudflare Workers
Hive Gateway provides you a cross-platform GraphQL Server, so you can easily integrate it into any
platform besides Node.js.
[Cloudflare Workers](https://developers.cloudflare.com/workers) provides a serverless execution
environment that allows you to create entirely new applications or augment existing ones without
configuring or maintaining infrastructure.
<Callout>
Before you start, make sure you read the [Serverless / On the
Edge](/docs/gateway/deployment/serverless) page.
</Callout>
```ts
import { createGatewayRuntime } from '@graphql-hive/gateway'
const gatewayRuntime = createGatewayRuntime({
// gatewayConfig
})
export default { fetch: gatewayRuntime }
```
<Callout>
If you want to use [Cloudflare KV
Cache](https://developers.cloudflare.com/workers/runtime-apis/kv) as a distributed cache, [see
here for Hive Gateway integration](/docs/gateway/other-features/performance#cloudflare-workers-kv)
</Callout>

View file

@ -0,0 +1,133 @@
---
description:
Google Cloud Platform (GCP) is a suite of cloud computing services powered by Google. It is easy
to use Hive Gateway with GCP.
---
import { Callout } from '@theguild/components'
# Deploying Hive Gateway to Google Cloud Platform
Google Cloud Platform (GCP) is a suite of cloud computing services powered by Google. It is easy to
use Hive Gateway with GCP.
## Prerequisites
You will first need to install the GCP command-line tool: `gcloud`.
[You can find instructions here](https://cloud.google.com/sdk/docs/install).
If you already have `gcloud` installed, make sure it is up to date with `gcloud components update`.
[Create a new project](https://cloud.google.com/resource-manager/docs/creating-managing-projects)
and make sure
[billing is enabled](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled).
<Callout type="warning">
Running these examples requires you to have billing enabled on your GCP account. It should not
cost more than a few cents, but don't forget to clean up your project after you are done to avoid
unexpected charges.
</Callout>
## Cloud Functions
Cloud Functions is a serverless execution environment for building and connecting cloud services.
With Cloud Functions, you write simple, single-purpose functions that are attached to events, such
as an HTTP request.
It is probably the most straightforward way to deploy a Hive Gateway to GCP.
<Callout>
Before you start, make sure you read the [Serverless / On the
Edge](/docs/gateway/deployment/serverless) page.
</Callout>
### Installation
```sh npm2yarn
npm i @google-cloud/functions-framework @graphql-hive/gateway graphql
```
<Callout type="info">
Don't forget to add the `main` field to your `package.json`. Google Cloud Functions rely on it to
know which file to run.
</Callout>
<Callout type="info">
This example uses ESM syntax, so you should set `"type": "module"` in your `package.json`.
</Callout>
### Usage
```js filename=index.js
import { createGatewayRuntime } from '@graphql-hive/gateway'
export const graphql = createGatewayRuntime(/* Configuration */)
```
You can now deploy your function with `gcloud` CLI:
```bash
$ gcloud functions deploy graphql --runtime nodejs18 --trigger-http --allow-unauthenticated
```
You can now test your function by using the URL found in the `httpsTrigger.url` property returned by
the previous command or by using the `gcloud` CLI:
```bash
gcloud functions describe graphql
```
## Cloud Run
Cloud Run is the Platform as a Service by Google. It is straightforward to use Hive Gateway with it.
### Installation
Create a new Node project and add Hive Gateway to its dependencies.
```sh npm2yarn
npm i @graphql-hive/gateway graphql
```
<Callout type="info">
This example uses ESM syntax, so you should set `"type": "module"` in your `package.json`.
</Callout>
Add a `start` script to your `package.json`. Cloud Run needs to know how to start your application.
<Callout>You can use Gateway CLI as usual with `gateway.config.ts`</Callout>
```json
{
"name": "hive-gateway-cloud-run-guide",
"version": "1.0.0",
"type": "module",
"scripts": {
"start": "hive-gateway supergraph"
},
"dependencies": {
"graphql": "latest",
"@graphql-hive/gateway": "latest"
}
}
```
You can now deploy to Cloud Run. You can use all default values, except the last one, which allows
unauthenticated access to your service.
```bash
$ gcloud run deploy --source .
```
<Callout type="info">
If this is your first time using Cloud Run, enabling the service can take up to a few minutes to
be fully effective. If you encounter any `403 Forbidden` errors, please wait for 2 minutes and try
again.
</Callout>
You can now access your API using the URL provided by `gcloud`. The default GraphQL endpoint is
`/graphql`.
If you need to use TypeScript or any other tool that requires a build phase, such as code
generation, add a Dockerfile to the root of your project so that Cloud Run can build a custom image
for you.

View file

@ -0,0 +1,64 @@
import { Callout } from '@theguild/components'
# Serverless / On the Edge
Hive Gateway can be deployed on the edge. This means that you can deploy your Hive Gateway to a
serverless environment like AWS Lambda, Cloudflare Workers, or Azure Functions.
For Serverless environments, you cannot use Gateway CLI `hive-gateway` but you can use the
`createGatewayRuntime` function from `@graphql-hive/gateway` package.
The gateway configuration goes into `createGatewayRuntime` function instead of `gatewayConfig`
export in `gateway.config.ts` file.
## Distributed Caching
You need to be aware of the limitations of these environments. For example, in-memory caching is
not possible in these environments, so you have to set up a distributed cache like Redis or
Memcached.
[See here to configure cache storage](/docs/gateway/other-features/performance).
## Bundling problem
Hive Gateway cannot import the required dependencies manually, and load the supergraph from the file
system. So if you are not using a schema registry such as Hive Registry or Apollo GraphOS, you need
to save the supergraph as a code file (`supergraph.js` or `supergraph.ts`) and import it.
### Loading the supergraph from a file
For example, in GraphQL Mesh you need to save the supergraph as a TypeScript file:
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-mesh/compose-cli'
export const composeConfig = defineConfig({
output: 'supergraph.ts',
subgraph: [
//...
]
})
```
In `supergraph.ts` file, you need to export the supergraph:
```ts
export default /* GraphQL */ `
#...
`
```
Then you need to import the supergraph in your serverless function:
```ts
import { createGatewayRuntime, WSTransport } from '@graphql-hive/gateway'
// Let's say you are using WS transport
import supergraph from './supergraph.js'
const serveRuntime = createGatewayRuntime({
supergraph,
transports: {
ws: WSTransport
}
})
```

View file

@ -0,0 +1,345 @@
---
description:
Hive Gateway is the Apollo Federation Gateway and/or Proxy Gateway for your GraphQL services.
---
import { Callout, Cards, Tabs } from '@theguild/components'
# Hive Gateway
Hive Gateway is a fully open-source MIT-licensed GraphQL gateway that can act as an Apollo
Federation Gateway or a Proxy Gateway for any GraphQL services.
The Hive Gateway can be run as a standalone binary, a Docker Image, or as a JavaScript package (e.g.
within Node.js, Bun, Deno, Google Cloud Functions, Azure Functions or Cloudflare Workers)
Hive Gateway provides the following features on top of your Federated GraphQL schema or proxied
GraphQL schema:
- [GraphQL Subscriptions](/docs/gateway/subscriptions) with WebSockets, HTTP Callbacks or SSE
- Automatic query and request batching to reduce the number of HTTP requests to your subgraph
- [JSON Web Tokens (JWT)](/docs/gateway/authorization-authentication) authentication between client
and gateway also between gateway and subgraph
- [Authorization](/docs/gateway/authorization-authentication) on specific fields and types using
Federation Auth directives like `@authenticated` and `@skipAuth`
- [Role-based Access Control (RBAC)](/docs/gateway/authorization-authentication) either
programmatically or with directives `@requiresScope` and `@policy`
- [Response Caching](/docs/gateway/other-features/performance/response-caching) based on either in
memory or Redis for distributed caching optionally with `@cacheControl` directive
- [Security](/docs/gateway/other-features/security) features such as safelisting, depth limit etc
- [Rate Limiting](/docs/gateway/other-features/security/rate-limiting) on specific fields and types
either programmatically or declaratively with `@rateLimit` directive
- [Prometheus and OpenTelemetry integration](/docs/gateway/monitoring-tracing) with fully
customizable spans and attributes
- [Persisted Documents](/docs/gateway/persisted-documents) backed by either Hive Registry or a
selfhosting storage
- [E2E HTTP Compression](/docs/gateway/other-features/performance/compression) from the client to
the subgraph for better performance and resource management
- [And more](/docs/gateway/other-features)
## Installation
Hive Gateway can be installed in different ways depending on your preference.
<Tabs items={["Binary", "Docker", "JavaScript Package"]} >
{/* Binary */}
<Tabs.Tab>
This command will download the appropriate binary for your operating system.
```sh
curl -sSL https://graphql-hive.com/install-gateway.sh | sh
```
</Tabs.Tab>
{/* Docker */}
<Tabs.Tab>
You can use the official Docker image to run Hive Gateway.
```sh
docker pull ghcr.io/ardatan/hive-gateway
```
</Tabs.Tab>
{/* JavaScript Package */}
<Tabs.Tab>
To use the NPM package, you need to have [Node.js](https://nodejs.org) installed in your
environment. Then, you can install Hive Gateway CLI with your preferred package manager.
```sh npm2yarn
npm i @graphql-hive/gateway
```
</Tabs.Tab>
</Tabs>
## Starting the Gateway
Hive Gateway supports two different modes:
- **Apollo Federation.** Serve a supergraph provided by a schema registry like
  [Hive Registry](https://the-guild.dev/graphql/hive/docs/schema-registry) or
  [Apollo GraphOS](https://www.apollographql.com/docs/graphos/), or composed by a
  Federation-compliant composition tool such as
  [Apollo Rover](https://www.apollographql.com/docs/rover/) or
  [GraphQL Mesh](https://graphql-mesh.com/)
- **Proxy a GraphQL API.** Hive Gateway can also act as a proxy to an existing GraphQL API.
<Tabs items={["Apollo Federation", "Proxy"]}>
{/* Apollo Federation */}
<Tabs.Tab>
To serve an Apollo Federation Gateway, we need to point the Gateway to either a local supergraph file
or a supergraph served by our schema registry. For this example, we will serve a supergraph from the
Hive schema registry.
<Tabs items={["Binary", "Docker", "JavaScript Package"]}>
{/* Binary */}
<Tabs.Tab>
```sh filename="Run Apollo Federation Gateway with the Hive Gateway Binary"
hive-gateway supergraph \
http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/supergraph \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
{/* Docker */}
<Tabs.Tab>
```sh filename="Run Apollo Federation Gateway with the Hive Gateway Docker Image"
docker run --rm --name hive-gateway -p 4000:4000 \
ghcr.io/ardatan/hive-gateway supergraph \
http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/supergraph \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
{/* JavaScript Package */}
<Tabs.Tab>
If you installed the JavaScript package, you can use `npx` for running the CLI.
```sh filename="Run Apollo Federation Gateway with npx"
npx hive-gateway supergraph \
http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/supergraph \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
</Tabs>
</Tabs.Tab>
{/* Proxy */}
<Tabs.Tab>
In order to proxy a GraphQL API, we need to provide the URL of the API when starting our Gateway.
Optionally, we can also provide a schema file from either a local file or a schema registry, which
will be used instead of introspecting the proxied API.
<Tabs items={["Binary", "Docker", "JavaScript Package"]}>
{/* Binary */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary"
hive-gateway proxy https://localhost:3000/graphql \
--hive-cdn-endpoint http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/sdl \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
{/* Docker */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary"
docker run --rm --name hive-gateway -p 4000:4000 \
ghcr.io/ardatan/hive-gateway proxy https://localhost:3000/graphql \
--hive-cdn-endpoint http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/sdl \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
{/* JavaScript Package */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary"
npx hive-gateway proxy https://localhost:3000/graphql \
--hive-cdn-endpoint http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/sdl \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
</Tabs>
</Tabs.Tab>
</Tabs>
By default, Hive Gateway will start a server on port 4000. You can customize that behavior. For that
please refer to our [CLI Reference](/docs/api-reference/gateway/cli).
## Configuration File
The Hive Gateway config file `gateway.config.ts` is used for enabling additional features such as
authorization, authentication, caching, rate limiting, and more. The recommended language for the
configuration file is TypeScript.
We can also provide the CLI configuration parameters via the configuration file.
<Tabs items={["Apollo Federation", "Proxy"]}>
{/* Apollo Federation */}
<Tabs.Tab>
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
supergraph: {
type: 'hive',
endpoint:
'http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/supergraph',
key: 'YOUR HIVE CDN KEY'
}
})
```
Hive Gateway will automatically load the default config file and apply the settings.
<Tabs items={["Binary", "Docker", "JavaScript Package"]}>
{/* Binary */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary using configuration file"
hive-gateway supergraph
```
</Tabs.Tab>
{/* Docker */}
<Tabs.Tab>
For docker, we need to mount the configuration file into the container.
```sh filename="Run Proxy Gateway with the Hive Gateway Binary" {2}
docker run --rm --name hive-gateway -p 4000:4000 \
-v $(pwd)/gateway.config.ts:/serve/gateway.config.ts \
ghcr.io/ardatan/hive-gateway supergraph
```
</Tabs.Tab>
{/* JavaScript Package */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary"
npx hive-gateway supergraph
```
</Tabs.Tab>
</Tabs>
</Tabs.Tab>
{/* Proxy */}
<Tabs.Tab>
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
proxy: {
endpoint: 'http://localhost:3000/graphql'
}
})
```
Hive Gateway will automatically load the default config file and apply the settings.
<Tabs items={["Binary", "Docker", "JavaScript Package"]}>
{/* Binary */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary using configuration file"
hive-gateway proxy
```
</Tabs.Tab>
{/* Docker */}
<Tabs.Tab>
For docker, we need to mount the configuration file into the container.
```sh filename="Run Proxy Gateway with the Hive Gateway Binary" {2}
docker run --rm --name hive-gateway -p 4000:4000 \
-v $(pwd)/gateway.config.ts:/serve/gateway.config.ts \
ghcr.io/ardatan/hive-gateway proxy https://localhost:3000/graphql \
--hive-cdn-endpoint http://cdn.graphql-hive.com/artifacts/v1/12713322-4f6a-459b-9d7c-8aa3cf039c2e/sdl \
--hive-cdn-key "YOUR HIVE CDN KEY"
```
</Tabs.Tab>
{/* JavaScript Package */}
<Tabs.Tab>
```sh filename="Run Proxy Gateway with the Hive Gateway Binary"
npx hive-gateway proxy
```
</Tabs.Tab>
</Tabs>
</Tabs.Tab>
</Tabs>
## Next steps
After learning the first steps of Hive Gateway, you can explore the following topics.
<Cards>
<Cards.Card arrow title="Set up Usage Reporting" href="/docs/gateway/usage-reporting" />
<Cards.Card arrow title="Set up Persisted Documents" href="/docs/gateway/persisted-documents" />
</Cards>

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,7 @@
export default {
index: 'Overview',
performance: 'Performance/Cache',
security: 'Security',
testing: 'Testing & Debugging',
'custom-plugins': 'Custom Plugins',
};

View file

@ -0,0 +1,246 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Custom Plugins
Hive Gateway uses
[GraphQL Yoga](https://the-guild.dev/graphql/yoga-server/docs/features/envelop-plugins), and it uses
[Envelop](https://the-guild.dev/graphql/envelop) plugin system which allows you to hook into the
different phases of the GraphQL execution to manipulate or track the entire workflow step-by-step.
<Callout>
You can use Yoga, Envelop or Gateway plugins with your GraphQL Gateway.
But you should always opt for the Hive Gateway variant of a plugin, then the Yoga one, then the Envelop one, because each of them has progressively more control over the execution.
For example, the Yoga variant of a plugin can leverage HTTP hooks, and the Hive Gateway one can leverage more hooks and more control over the context.
We'd recommend to check the features of the gateway first, and if you can't find what you are
looking for, then you can use this option on your own to add plugins from either GraphQL Yoga or
[Envelop's Plugin Hub](https://the-guild.dev/graphql/envelop/plugins).
</Callout>
You can provide those plugins as an array of objects,
```ts filename="gateway.config.ts" {7}
import { useGraphQLJit } from '@envelop/graphql-jit'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [useGraphQLJit()]
})
```
## Writing Plugins
Sometimes you might want to build your own plugins. You can write your own gateway plugin and even
share it with other people by publishing it to `npm`.
<Callout>
A good entry-point for discovering how to write Gateway plugins is to look at the source code of
the existing plugins maintained by us.
</Callout>
Most hooks for Hive Gateway originate from the Envelop and Yoga plugin systems.
[Please refer to the Envelop Plugin Lifecycle documentation for more information.](https://the-guild.dev/graphql/envelop/docs/plugins/lifecycle)
and
[Yoga Plugin Lifecycle documentation](https://the-guild.dev/graphql/yoga-server/docs/features/envelop-plugins).
In addition, Yoga adds more HTTP-specific hooks while Hive Gateway adds more related to the subgraph
execution. Gateway plugins also use
[Explicit Resource Management](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-5-2.html),
so all the resources are cleaned up gracefully when Hive Gateway is shut down. You can see
`Symbol.asyncDispose` below.
### Plugin Lifecycle
The following diagram shows the plugin lifecycle of Hive Gateway. For a detailed description of each
hook, please refer to the detail sections of each hook. Please check Yoga and Envelop documentation
for more information about the hooks except `onSubgraphExecute`.
```mermaid
stateDiagram-v2
[*] --> onRequest
state onRequest_if_state <<choice>>
onRequest --> onRequest_if_state
onRequest_if_state --> onResponse: Is not a GraphQL Request
onRequest_if_state --> GraphQLRequest: Is GraphQL Request
GraphQLRequest: GraphQL Request
state GraphQLRequest {
[*] --> onRequestParse
onRequestParse --> onParams
onParams --> onParse
onParse --> onValidate
onValidate --> onContextBuilding
onContextBuilding --> onExecute
onContextBuilding --> onSubscribe
onExecute --> onSubgraphExecute
onSubscribe --> onSubgraphExecute
onSubgraphExecute --> onFetch
onFetch --> onSubgraphExecuteDone
onSubgraphExecuteDone --> onExecuteDone
onSubgraphExecuteDone --> onSubscribeDone
onExecuteDone --> onResultProcess
onSubscribeDone --> onResultProcess
onResultProcess --> [*]
}
GraphQLRequest --> onResponse
onResponse --> [*]
```
#### `onSubgraphExecute`
This hook is invoked for ANY request that is sent to the subgraph.
**Example actions in this hook:**
- Manipulate the request
- Add a custom auth header
- Monitor the subgraph request
You can see [Prometheus plugin](/docs/gateway/authorization-authentication) for an example of how to
use this hook.
#### `onFetch`
This hook is invoked every time the gateway sends an outgoing HTTP request to an upstream service.
**Example actions in this hook:**
- Manipulate HTTP [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object
- Manipulate HTTP [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) object
- Change [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) implementation
- Add custom headers
- Monitor the HTTP request
**Example plugins:**
- [Prometheus plugin](/docs/gateway/authorization-authentication)
##### `API`
- `supergraph`: The unified graph
- `subgraph`: The subgraph
- `subgraphName`: The name of the subgraph
- `transportEntry`: The transport entry for the subgraph including the configuration for the
upstream communication, and details.
- `executionRequest`: The execution request object that is sent to the subgraph, that includes
`document`, `variables`, `contextValue`, `operationName`, and etc.
- `setExecutionRequest`: A function to replace the execution request object that will be sent to the
subgraph.
- `executor`: The executor function that will be used to execute the request to the subgraph, and it
takes the execution request object.
- `setExecutor`: A function to replace the executor function
- `logger`: The logger instance for the specific request that includes the details of the request
and the response.
#### `Symbol.asyncDispose` or `Symbol.dispose`
In order to clean up resources when Hive Gateway is shut down, you can use `Symbol.asyncDispose` or
`Symbol.dispose`.
```ts
export const useMyPlugin = () => {
return {
async [Symbol.asyncDispose]() {
// Clean up resources
stopConnection()
}
}
}
```
You can learn more about
[Explicit Resource Management](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-5-2.html#using-declarations-and-explicit-resource-management)
here.
### Plugin Context
Hive Gateway comes with a ready-to-use `logger`, `fetch`, cache storage, etc. that are shared across
different components. We'd highly recommend using those available context values instead of
creating your own for a specific plugin.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins({
fetch, // WHATWG compatible Fetch implementation.
logger, // Logger instance used by Hive Gateway
cwd, // Current working directory
pubsub, // PubSub instance used by Hive Gateway
cache // Cache storage used by Hive Gateway
}) {
return [
useMyPlugin({ logger, fetch }) // So the plugin can use the shared logger and fetch
]
}
})
```
## Example Additional Plugin (SOFA)
GraphQL SOFA allows you to generate a fully documented REST API from your GraphQL schema. This is
useful when you need to support REST clients or when you want to expose a REST API to the public.
- **Don't choose between REST and GraphQL**
- Get most of the **benefits of GraphQL** on the backend and frontend, while using and **exposing
REST**
- **Support all your existing clients** with REST while improving your backend stack with GraphQL
- Create custom, perfectly client-aligned REST endpoints for your frontend simply by naming a route
and attaching a query
- The other way around (REST to GraphQL) you won't get the best of both worlds, instead just less
  powerful, harder-to-maintain server implementations with some of the benefits of GraphQL. It can
  be a good and fast start for a migration though.
- Fully **generated documentation** that is always up-to-date
- **GraphQL Subscriptions as Webhooks**
## Installation
```sh npm2yarn
npm i @graphql-yoga/plugin-sofa
```
## Quick Start
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
import { useSOFA } from '@graphql-yoga/plugin-sofa'
export const gatewayConfig = defineConfig({
plugins: pluginCtx => [
useSOFA({
// The path where the REST API will be served
basePath: '/rest',
// The path where the Swagger UI will be served
swaggerUIEndpoint: '/rest/docs',
// OpenAPI Document details
info: {
title: 'My API',
description: 'My API Description',
version: '1.0.0'
}
})
]
})
```
[Learn more about SOFA](https://the-guild.dev/graphql/sofa-api/docs)
<Callout>
You can consume the API using [feTS Client](https://the-guild.dev/openapi/fets/client/quick-start)
without any code generation!
</Callout>

View file

@ -0,0 +1,21 @@
---
searchable: false
description: Extend the capabilities of your GraphQL API such as rate limiting, caching, and more.
---
import { Callout } from '@theguild/components'
# Extend Your Gateway
Hive Gateway Runtime can be extended with feature flags and additional plugins. This allows you to
add more features to your Hive Gateway, such as rate limiting, caching, and more. This documentation
section covers most of the possible features that you can add with simple additions.
Hive Gateway also allows you to hook into the different phases of the lifecycle from the client to
the upstream services by using custom plugins that you can
[learn more here](/docs/gateway/other-features/custom-plugins).
<Callout>
We'd recommend to check the available features and plugins on the left sidebar, because we believe
some of them can be useful for your use case.
</Callout>

View file

@ -0,0 +1,12 @@
export default {
index: 'Overview',
'response-caching': 'Response Caching',
compression: 'Compression in HTTP',
'parsing-and-validation-caching': 'Parsing and Validation Caching',
'execution-cancellation': 'Execution Cancellation',
'upstream-cancellation': 'Upstream Cancellation',
'http-caching': 'Upstream HTTP Caching',
'deduplicate-request': 'Deduplicate HTTP Requests',
'automatic-persisted-queries': 'Automatic Persisted Queries',
'request-batching': 'Request Batching',
};

View file

@ -0,0 +1,152 @@
---
description:
Automatic Persisted Queries is a protocol for reducing the overhead of sending the same GraphQL
documents to the server over and over again.
searchable: false
---
import { Callout } from '@theguild/components'
# Automatic Persisted Queries
Automatic Persisted Queries is a protocol for reducing the overhead of sending the same GraphQL
documents to the server over and over again. Thus reducing client to server upstream traffic.
Since the upload speed can be the bottleneck from client to server, reducing the payload size can
improve the performance especially for huge GraphQL documents.
The Automatic Persisted Queries plugin follows
[the APQ Specification of Apollo](https://github.com/apollographql/apollo-link-persisted-queries#apollo-engine).
<Callout>
Automatic Persisted Queries do not provide any security features, the benefit
of using them is to reduce network overhead. If you want to avoid executing
arbitrary GraphQL operations please use [Persisted
Operations](/docs/gateway/persisted-documents).
Furthermore, a potential DDoS attacker could spam your GraphQL API with persisted operation
registrations, thus completely disabling the advantages you would get from APQ and even
decreasing the performance of your GraphQL API.
</Callout>
## Installation
## Quick Start
Using Automatic Persisted Queries requires installing a separate package.
```sh npm2yarn
npm i @graphql-yoga/plugin-apq
```
```ts filename="Automatic Persisted Queries Gateway setup" {3, 13}
import { defineConfig } from '@graphql-hive/gateway'
import { useAPQ } from '@graphql-yoga/plugin-apq'
export const gatewayConfig = defineConfig({
plugins: pluginCtx => [useAPQ()]
})
```
Start your Hive Gateway and send a request to prime the cache (register the operation).
```bash filename="Execute GraphQL Operation to prime the cache"
curl -X POST -H 'Content-Type: application/json' http://localhost:4000/graphql \
-d '{"query":"{__typename}","extensions":{"persistedQuery":{"version":1,"sha256Hash":"ecf4edb46db40b5132295c0291d62fb65d6759a9eedfa4d5d612dd5ec54a6b38"}}}'
```
Then afterwards we can send the same payload again, but this time omit the `query` field.
```bash filename="Execute GraphQL Operation without query payload"
curl -X POST -H 'Content-Type: application/json' http://localhost:4000/graphql \
-d '{"extensions":{"persistedQuery":{"version":1,"sha256Hash":"ecf4edb46db40b5132295c0291d62fb65d6759a9eedfa4d5d612dd5ec54a6b38"}}}'
```
Especially for big GraphQL document strings, the subsequent payload can be much smaller.
## Client Usage
GraphQL clients such as `Apollo Client` and `Urql` support Automatic Persisted Queries out of the box.
Check the corresponding documentation for more information.
- [Apollo Client](https://www.apollographql.com/docs/apollo-server/performance/apq/#step-2-enable-automatic-persisted-queries)
- [Urql](https://formidable.com/open-source/urql/docs/advanced/persistence-and-uploads/)
## Custom Store
By default all the documents strings are stored in memory with an LRU cache that holds up to 1000
unique entries.
A custom store implementation can be provided via the `store` option.
```ts filename="Automatic Persisted Operations with a custom store" {16}
import { useAPQ, type APQStore } from '@graphql-yoga/plugin-apq'
// Note: this store grows infinitely, so it is not a good idea to use it in production.
const store: APQStore = new Map()
useAPQ({ store })
```
For external stores the `set` and `get` properties on the store can also return a `Promise`.
<Callout>
In production, it's recommended to capture the errors from any store that could stop functioning.
Instead of raising an error, returning undefined or null will allow the server to continue to
respond to requests if the store goes down.
```ts filename="Automatic Persisted Operations with a redis store" {16}
import Keyv from 'keyv'
const store = new Keyv('redis://user:pass@localhost:6379')
useAPQ({
store: {
async get(key) {
try {
return await store.get(key)
} catch (e) {
console.error(`Error while fetching the operation: ${key}`, e)
}
},
async set(key, value) {
try {
return await store.set(key, value)
} catch (e) {
console.error(`Error while saving the operation: ${key}`, e)
}
}
}
})
```
</Callout>
## Configure Error responses
By default, responses for missing or mismatching query will include `extensions` property with HTTP
status code.
For example:
```ts {4}
{
extensions: {
http: {
status: 404
},
code: 'PERSISTED_QUERY_NOT_FOUND'
}
}
```
You can force the error responses to use 200 OK status code:
```ts filename="Force 200 OK status code for error responses" {18-20}
useAPQ({
responseConfig: {
forceStatusCodeOk: true
}
})
```

View file

@ -0,0 +1,242 @@
---
searchable: false
---
# Compression in HTTP
import { Callout } from '@theguild/components'
Compression is a technique used to reduce the size of the data that is being transferred between the
server and the client. This is done by compressing the data before sending it and then decompressing
it on the client side. This can help reduce the amount of data that needs to be transferred, which
can improve the performance of your website.
`Content-Encoding` and `Accept-Encoding` HTTP headers are used for this behavior. The
`Content-Encoding` header is used to specify the compression algorithm that was used to compress the
data, while the `Accept-Encoding` header is used to specify the compression algorithms that the
client supports.
[Learn more about compression in HTTP](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding)
Hive Gateway is capable of handling compression in the following directions. We can selectively
enable or disable compression in each direction.
```mermaid
graph TD
client --> gateway
gateway --> subgraphA
subgraphA --> gateway
gateway --> subgraphB
subgraphB --> gateway
gateway --> client
```
<Callout>
<b>Caution!</b>
<br />
Please take a look at each direction, because even if they look similar, they have different
configurations and behaviors. While configuring the compression, make sure each side supports the
compression algorithm that the other side supports. Otherwise, it will end up with unexpected
errors.
</Callout>
## From the gateway to the client
When the client sends a request to the gateway, it can specify the compression algorithm that it
supports using the `Accept-Encoding` header. Then the gateway can compress the response using the
specified algorithm before sending it back to the client with the `Content-Encoding` header, so that
the client can decompress it.
```mermaid
graph TD
Client --> AddAcceptEncodingHeader
AddAcceptEncodingHeader --> Gateway
Gateway --> GWCompressor
GWCompressor --> AddContentEncodingHeader
AddContentEncodingHeader --> ClientDecompressor
ClientDecompressor --> Client
```
In the following example, we say that the client supports the `gzip` algorithm for compression. Then
the gateway compresses the response using the `gzip` algorithm before sending it back to the client.
So the client can decompress the response using the `gzip` algorithm.
```ts
const res = await fetch('http://localhost:4000/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept-Encoding': 'gzip'
},
body: JSON.stringify({
query: `
query {
hello
}
`
})
})
console.assert(res.headers.get('Content-Encoding') === 'gzip', 'Response is compressed')
```
You need to configure the gateway for this feature. [See here](#configuration-on-gateway)
## From the client to the gateway
When the client sends a request to the gateway, it can compress the request using the specified
algorithm before sending it to the gateway. Then the gateway can decompress the request before
processing it.
```mermaid
graph TD
Client --> ClientCompressor
ClientCompressor --> AddContentEncodingHeader
AddContentEncodingHeader --> GatewayDecompressor
GatewayDecompressor --> Gateway
Gateway --> Client
```
In the following example, we compress the request using the `gzip` algorithm before sending it to
the gateway. Then the gateway decompresses the request before processing it.
```ts
const res = await fetch('http://localhost:4000/graphql', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Encoding': 'gzip'
},
// Compress the request body
body: gzip(
JSON.stringify({
query: `
query {
hello
}
`
})
)
})
```
Here we are using the `gzip` function to compress the request body before sending it to the gateway.
We assume that the `gzip` function is a function that compresses the data using the gzip algorithm.
<Callout>
<b>Caution!</b>
<br />
When this feature is not enabled as described below, the gateway won't be able to process the
client request body. Then it will fail with a `400 Bad Request` response. Because there is no way
to check if the server supports compression from the consumer side. Before configuring this
feature on the client side, make sure that the gateway supports the compression algorithm that the
client supports.
</Callout>
### Configuration on Gateway
In your gateway configuration, you need to enable the compression for the gateway.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
contentEncoding: true
})
```
Now gateway will respect the `Accept-Encoding` header from the client and compress the response
accordingly.
## From the subgraph to the gateway
When the subgraph sends a response to the gateway, it can compress the response using the specified
algorithm before sending it to the gateway. Then the gateway can decompress the response before
sending it to the client.
It has the same principle as the previous example, but here the gateway is acting like a client
against a subgraph.
```mermaid
graph TD
Gateway --> AddAcceptEncodingHeader
AddAcceptEncodingHeader --> Subgraph
Subgraph --> SubgraphCompressor
SubgraphCompressor --> AddContentEncodingHeader
AddContentEncodingHeader --> GatewayDecompressor
GatewayDecompressor --> Gateway
```
<Callout>
You don't need to configure anything on the gateway side for this feature. Because the HTTP Client
implementation is based on [`@whatwg-node/fetch`](https://github.com/ardatan/whatwg-node) which
automatically sends the `Accept-Encoding` headers to the upstream APIs, and decompresses the
response based on the sent `Content-Encoding` headers.
</Callout>
### Configuration on Subgraph
You should configure your subgraph to respect the `Accept-Encoding` header and compress the response
accordingly. For example if you have a GraphQL subgraph using
[GraphQL Yoga](https://the-guild.dev/graphql/yoga-server) server you can use `useContentEncoding`
plugin to enable this:
```npm2yarn
npm i @whatwg-node/server
```
```ts
import { createYoga } from 'graphql-yoga'
import { useContentEncoding } from '@whatwg-node/server'
const server = createYoga({
schema,
plugins: [useContentEncoding()]
})
```
<Callout>
If you use [`feTS`](https://the-guild.dev/openapi/fets) or any other
[`@whatwg-node/server`](https://github.com/ardatan/whatwg-node) based server implementation in
your non GraphQL subgraph, you can still use the same plugin.
</Callout>
## From the gateway to the subgraph
When the gateway sends a request to the subgraph, it can compress the request using the specified
algorithm before sending it to the subgraph. Then the subgraph can decompress the request before
processing it.
```mermaid
graph TD
Gateway --> GatewayCompressor
GatewayCompressor --> SubgraphDecompressor
SubgraphDecompressor --> Subgraph
Subgraph --> Gateway
```
In this case, gateway will always send a compressed request to the defined subgraphs with
`Content-Encoding` header.
<Callout>
<b>Caution!</b>
<br />
If the subgraph does not support compression, the gateway will receive an unexpected error. So
make sure that the subgraph supports the compression algorithm that the gateway supports. Because
there is no way to check the subgraph's support for compression since the gateway is acting like a
client here.
</Callout>
### Configuration on Gateway
In your gateway configuration, you need to enable the compression.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
contentEncoding: {
subgraphs: ['*'] // Enable compression for all subgraphs
// subgraphs: ['subgraph1', 'subgraph2'] // Enable compression for specific subgraphs
}
})
```

View file

@ -0,0 +1,29 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Deduplicate HTTP Requests
Most of the time, your Hive Gateway will receive multiple requests for the same data. This can
happen when multiple clients request the same data, or when a single client sends multiple requests
for the same data.
To reduce the load on your downstream services, you can deduplicate the requests. This means that if
multiple requests for the same data are received at the same time, only one request will be sent to
the downstream service, and the responses will be shared among the clients.
You can enable request deduplication by using the `useRequestDeduplication` plugin.
```ts filename="gateway.config.ts"
import { defineConfig, useRequestDeduplication } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: pluginCtx => [
useRequestDeduplication({
...pluginCtx
})
]
})
```

View file

@ -0,0 +1,44 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Execution Cancellation
In the real world, a lot of HTTP requests are dropped or canceled. This can happen due to a flaky
internet connection, navigation to a new view or page within a web or native app or the user simply
closing the app. In this case, the server can stop processing the request and save resources.
That is why Hive Gateway comes with the support for canceling the GraphQL execution upon request
cancellation.
So any extra calls to the services can be stopped and the resources can be saved.
## Enable Execution Cancellation
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
executionCancellation: true
})
```
That is all you need to do to enable execution cancellation in your Hive Gateway. Theoretically, you
can enable this and immediately benefit from it without making any other adjustments within your
GraphQL schema implementation.
If you want to understand how it works and how you can adjust your resolvers to properly cancel
pending promises (e.g. database reads or HTTP requests), you can continue with the next section.
<Callout>
You can also use the same plugin with your subgraph configuration if you use GraphQL Yoga. [See
more](https://the-guild.dev/graphql/yoga-server/docs/features/execution-cancellation)
</Callout>
<Callout>
Also this can be combined with [Upstream
Cancellation](/docs/gateway/other-features/performance/upstream-cancellation) that does this
cancellation not only in the execution level but also on the upstream HTTP level.
</Callout>

View file

@ -0,0 +1,31 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# HTTP caching
Most of JavaScript runtimes except browsers don't respect HTTP caching headers by default. But you
can enable HTTP caching in your Hive Gateway by using the HTTP caching plugin. This allows you to
cache the responses when possible, and reduce the server load. It uses
[`http-cache-semantics`](https://www.npmjs.com/package/http-cache-semantics) under the hood.
<Callout>
You need to set your cache storage in your gateway configuration to enable response caching. See
[Cache Storage](/docs/gateway/other-features/performance#providing-cache-storage) for more
information.
</Callout>
```ts filename="gateway.config.ts"
import { defineConfig, useHttpCache } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
  cache, // Your cache storage here
  plugins: pluginCtx => [
    useHttpCache({
      ...pluginCtx
    })
  ]
})
```

View file

@ -0,0 +1,141 @@
---
description:
Performance is a critical aspect of any application. Hive Gateway Runtime provides a set of
features to help you optimize the performance of your gateway.
searchable: false
---
import { Callout } from '@theguild/components'
# Performance & Caching
Hive Gateway provides a set of features to help you optimize the performance of your GraphQL
gateway. Hive Gateway provides a shared caching storage that can be used across plugins, transforms
and subgraph execution.
## Providing Cache Storage
In order to enable features that need a storage to keep the data, you need to define a cache storage
implementation, and pass it to the `gatewayConfig`.
You can choose the best-fit cache storage for your use case.
### LocalForage
LocalForage is a library that improves the existing storage mechanism in the browser by using
`IndexedDB`, `WebSQL` and `localStorage`, [see more](https://github.com/localForage/localForage).
Even if it is known as a browser storage, Hive Gateway provides you as a platform-agnostic cache
storage to leverage the well-known storage APIs that are available in most JavaScript environments.
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
  cache: {
    type: 'localforage',
    // All of the following options are listed with default values, you don't need to provide them
    driver: ['WEBSQL', 'INDEXEDDB', 'LOCALSTORAGE'], // The order of the drivers to use
    name: 'HiveGateway', // The name of the database
    version: 1.0, // The version of the database
    size: 4980736, // The size of the database
    storeName: 'keyvaluepairs', // The name of the store
    description: 'Cache storage for Hive Gateway' // The description of the database
  },
  responseCaching: {
    session: () => null
  }
})
```
### Redis
Redis is an in-memory data structure store, used as a database, cache, and message broker. You can
use Redis as a cache storage for your Hive Gateway.
<Callout>The Redis cache currently only works in Node.js environments.</Callout>
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
cache: {
type: 'redis',
host: 'localhost', // The host of the Redis server
port: 6379, // The port of the Redis server
password: undefined, // The password of the Redis server
lazyConnect: true, // If true, the connection will be established when the first operation is executed
// or
url: 'redis://localhost:6379' // The URL of the Redis server
},
responseCaching: {
session: () => null
}
})
```
### Cloudflare Workers KV
Cloudflare Workers KV is a distributed, eventually consistent key-value store available in the
Cloudflare Workers runtime. You can use Cloudflare Workers KV as a cache storage for your Hive
Gateway. [Learn more about KV](https://developers.cloudflare.com/workers/runtime-apis/kv/)
<Callout>
This is only available for Cloudflare Workers runtime. If you want to learn how to deploy your
Hive Gateway to Cloudflare Workers, you can check the [deployment
documentation](/docs/gateway/deployment/serverless/cloudflare-workers).
</Callout>
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
  cache: {
    type: 'cfw-kv',
    namespace: 'HiveGateway' // The namespace of the KV
  },
  responseCaching: {
    session: () => null
  }
})
```
## Custom Cache Storage
You can also implement your own cache storage by extending the `CacheStorage` class. It needs to
match the `KeyValueCache` interface from `@graphql-hive/gateway`.
```ts filename="my-cache-storage.ts"
import { LRUCache } from 'lru-cache'
import { KeyValueCache } from '@graphql-hive/gateway'
export class MyKeyValueCache<V = any> implements KeyValueCache<V> {
// Your cache implementation here
private cache = new LRUCache<string, V>()
// Get the value of the key
async get(key: string) {
return this.cache.get(key)
}
// Set the key with the value and optional options
async set(key: string, value: V, options?: { ttl?: number }) {
this.cache.set(key, value, options?.ttl)
}
// Delete the key from the cache
async delete(key: string) {
this.cache.del(key)
}
// Get all keys that match the given prefix
async getKeysByPrefix(prefix: string) {
return Array.from(this.cache.keys()).filter(key => key.startsWith(prefix))
}
// This should be implemented if you want to clear the cache on shutdown
[Symbol.asyncDispose]() {
this.cache.reset()
}
}
```

View file

@ -0,0 +1,59 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Parsing & Validation Caching
By default, Hive Gateway maintains a parsing and validation cache. If requests contain documents
that have been executed before, they will not be parsed and validated again.
Using the parser cache can improve performance up to ~60%, and using the validation cache up to ~50%
(based on benchmarks).
This behavior is built-in and can be optionally disabled using the `parserAndValidationCache`
options:
```ts filename="gateway.config.ts" {4,5}
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
// disable parse and validate caching
parserAndValidationCache: false
})
```
<Callout>
Due to the restrictions of the GraphQL execution flow, we cannot use an async cache storage as we
use in other caching plugins. So the cache storage for the parser and validation cache should be
synchronous, and it is an in-memory store by default.
</Callout>
Furthermore, you can provide your own cache store to both of these plugins by implementing the
following interface:
```ts
interface CacheStore<T> {
get(key: string): T | undefined
set(key: string, value: T): void
}
```
You can then pass your cache store to the `parserAndValidationCache` options:
```ts filename="gateway.config.ts" {9-13}
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
parserAndValidationCache: {
documentCache: documentCacheStore as CacheStore<DocumentNode>,
errorCache: errorCacheStore as CacheStore<Error>,
validationCache: validationCacheStore as CacheStore<typeof validate>
}
})
```
<Callout>
We'd recommend to keep the default behavior as-is since it's already optimized for performance.
</Callout>

View file

@ -0,0 +1,64 @@
---
description:
Request Batching is the process of taking a group of requests, combining them into one, and making
a single request with the same data that all of the other queries would have made.
searchable: false
---
import { Callout } from '@theguild/components'
# Request Batching
Batching is the process of taking a group of requests, combining them into one, and making a single
request with the same data that all of the other queries would have made. This is a way to reduce
the number of requests that your application makes to the server.
The Batching functionality is described via the
[`Batching RFC`](https://github.com/graphql/graphql-over-http/blob/main/rfcs/Batching.md).
## Enable Batching
Batching is disabled by default, but you can enable it by setting the `batching` option to `true`:
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
batching: true
})
```
```bash filename="Execute batched operation"
curl -X POST -H 'Content-Type: application/json' http://localhost:4000/graphql \
-d '[{"query": "{ hee: __typename }"}, {"query": "{ ho: __typename }"}]'
```
## Limit the amount of Batched Requests
By default up to 10 GraphQL requests are allowed within a single HTTP request. If this amount is
exceeded an error will be raised. You can customize this option by passing an object to the
`batching` configuration option:
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
batching: {
limit: 2
}
})
```
```bash filename="Execute batched operation (exceed limit)"
curl -X POST -H 'Content-Type: application/json' -i http://localhost:4000/graphql \
-d '[{"query": "{ hee: __typename }"}, {"query": "{ ho: __typename }"}, {"query": "{ holla: __typename }"}]'
```
When exceeding the batching limit the HTTP status code will be
[`413` (Payload Too Large)](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/413).
```json filename="GraphQL Error response"
{
"errors": [{ "message": "Batching is limited to 2 operations per request." }]
}
```

View file

@ -0,0 +1,415 @@
---
searchable: false
---
import { Code } from 'nextra/components'
import { Callout, Tabs } from '@theguild/components'
# Response Caching
**_GraphQL Response Caching_** is a feature that allows you to cache the response of a GraphQL
query. This is useful when you want to reduce the number of requests to your sources. For example,
if you have a GraphQL query that fetches a list of products, you can cache the response of this
query so that the next time the same query is made, the response is fetched from the cache instead
of making a request to the underlying sources.
<Callout>
You need to set your cache storage in your gateway configuration to enable response caching. See
[Cache Storage](/docs/gateway/other-features/performance#providing-cache-storage) for more
information.
</Callout>
## How to use?
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
responseCaching: {
// global cache
session: () => null
}
})
```
After starting the server we can execute a GraphQL Query operation, that selects the `Query.slow`
field.
```sh filename="Execute slow GraphQL Query Operation with cURL"
curl -X POST http://localhost:4000/graphql \
-H 'Content-Type: application/json' \
-d '{ "query" : "{ slow }" }' \
-w '\nTotal time : %{time_total}'
```
The output will look similar to the following:
```sh filename="Initial Request time"
{"data":{"slow":"I am slow."}}
Total time:5.026632
```
After executing the same curl statement a second time, the duration is significantly lower.
```sh filename="Cached Request time"
{"data":{"slow":"I am slow."}}
Total time:0.007571
```
## Configuration
The behaviour of this plugin can be configured by passing an object at the gateway level or by using
the `@cacheControl` directive at schema definition level.
The `@cacheControl` directive can be used to give to subgraphs the control over the cache behavior
for the fields and types they are defining. You can add this directive during composition.
- [See here for Federation to learn more about the `@cacheControl` directive](https://www.apollographql.com/docs/federation/performance/caching/#using-cache-hints-with-subgraphs)
- [See here for GraphQL Mesh to learn more about the `@cacheControl` in subgraph definitions](/docs/gateway/other-features/performance/response-caching)
## Session based caching
If your GraphQL API returns specific data depending on the viewer's session, you can use the session
option to cache the response per session. Usually, the session is determined by an HTTP header, e.g.
an user id within the encoded access token.
<Callout type="warning">
Don't forget to validate the authentication token before using it as a session key.
Allowing cached responses to be returned with unverified tokens can lead to data leaks.
Please see the [Authorization/Authentication](/docs/gateway/authorization-authentication) section
for more information.
</Callout>
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
responseCaching: {
// cache based on the authentication header
session: request => request.headers.get('authentication')
}
})
```
### Enforce session based caching
In some cases, a type or a field should only be cached if there is a session. For this, you can use
the `scope` to indicate that the cache should only be used if a session is present.
This can be useful to prevent exposure of sensitive data to unauthorized users.
<Tabs items={[
"Programatically using options",
<span>Declaratively using <Code>@cacheControl</Code></span>
]}>
<Tabs.Tab>
```ts filename="Response Cache configuration with scope"
defineConfig({
  responseCaching: {
    // cache based on the authentication header
    session: request => request.headers.get('authentication'),
    // You can use a configuration object to define the scope
    scopePerSchemaCoordinate: {
      'Query.me': 'PRIVATE', // on a field
      User: 'PRIVATE' // or a type
    }
  }
})
```
</Tabs.Tab >
<Tabs.Tab>
```graphql filename="GraphQL schema with @cacheControl directive"
type Query {
me: User @cacheControl(scope: PRIVATE) # on a field
}
type User @cacheControl(scope: PRIVATE) { # on a type
id: ID!
name: String!
}
```
</Tabs.Tab>
</Tabs>
### Group based caching
The `session` option can also be used to cache responses for a group of users. This can be
useful if data exposed by your API is the same for a group of users sharing the same characteristic.
For example, if data returned by an API is always the same for every user with the same role, you
can use the role as a session key.
```ts filename="Role based caching"
defineConfig({
responseCaching: {
session: request => request.headers.get('x-user-role')
}
})
```
## Time to Live (TTL)
By default, all cached operations are stored indefinitely. This can lead to stale data being
returned.
It is possible to give cached operations a time to live. Either globally, based on
[schema coordinates](https://github.com/graphql/graphql-wg/blob/main/rfcs/SchemaCoordinates.md) or
object types.
If a query operation result contains multiple objects of the same or different types, the lowest TTL
is picked.
<Tabs items={[
"Programatically using options",
<span>Declaratively using <Code>@cacheControl</Code></span>
]}>
<Tabs.Tab>
```ts filename="Response Cache configuration with TTL"
defineConfig({
  responseCaching: {
    session: () => null,
    // by default cache all operations for 2 seconds
    ttl: 2_000,
    ttlPerSchemaCoordinate: {
      // only cache query operations containing User for 500ms
      User: 500,
      // cache operations selecting Query.lazy for 10 seconds
      'Query.lazy': 10_000
    }
  }
})
```
</Tabs.Tab>
<Tabs.Tab>
```graphql filename="GraphQL schema with @cacheControl directive"
type Query {
me: User @cacheControl(maxAge: 500) # only cache query operations containing User for 500ms
}
type User @cacheControl(maxAge: 10000) { # cache operations selecting Query.lazy for 10 seconds
id: ID!
name: String!
}
```
</Tabs.Tab>
</Tabs>
## Control which responses are cached
By default, all successful operations influence the cache.
You can globally disable caching using the `enabled` option. This can be useful for local
development.
```ts filename="Disabling caching"
defineConfig({
responseCaching: {
session: request => null,
enabled: () => process.env.NODE_ENV !== 'development'
}
})
```
### Ignore a specific request
You can entirely disable caching (both caching and invalidation) for a specific request by using the
`enabled` option.
Be aware that this means that if the response contains entities that are part of other cached
responses, those responses will not be invalidated.
```ts filename="Disabling caching for a specific request"
defineConfig({
responseCaching: {
session: request => null,
enabled: request => request.headers.get('x-no-cache') !== 'true'
}
})
```
### Disable caching of specific types and fields
Some types or fields contain data that should never be cached. For example, a field that returns
the current time.
You can disable caching for specific types or fields by setting its TTL to `0`. This will prevent
the response from being cached, but will not prevent cache invalidation for other entities contained
in the response.
<Tabs items={[
"Programatically using options",
<span>Declaratively using <Code>@cacheControl</Code></span>
]}>
<Tabs.Tab title="Programmatically using options">
```ts filename="Disabling caching for a specific type"
defineConfig({
  responseCaching: {
    session: request => null,
    ttlPerSchemaCoordinate: {
      // for an entire type
      Date: 0,
      // for a specific field
      'Query.time': 0
    }
  }
})
```
</Tabs.Tab>
<Tabs.Tab title="Declaratively using @cacheControl">
```graphql filename="GraphQL schema with @cacheControl directive"
type Query {
time: String @cacheControl(maxAge: 0) # on a field
}
type Date @cacheControl(maxAge: 0) { # on a type
day: Int!
month: Int!
year: Int!
}
```
</Tabs.Tab>
</Tabs>
## Invalidations via Mutation
When executing a mutation operation the cached query results that contain type entities within the
Mutation result will automatically be invalidated.
```graphql filename="GraphQL mutation operation"
mutation UpdateUser {
updateUser(id: 1, newName: "John") {
__typename
id
name
}
}
```
```json filename="GraphQL operation execution result"
{
"data": {
"updateLaunch": {
"__typename": "User",
"id": "1",
"name": "John"
}
}
}
```
For the given GraphQL operation and execution result, all cached query results that contain the type
`User` with the id `1` will be invalidated.
This behavior can be disabled by setting the `invalidateViaMutation` option to `false`.
```ts filename="Disabling mutation invalidation"
defineConfig({
responseCaching: {
session: request => null,
invalidateViaMutation: false
}
})
```
### Entity identity
Automatic cache invalidation works by inspecting the result of each query and mutation operation,
and keeping track of the entities that are part of it.
By default, the identity of entities is based on the `id` field.
You can customize the identity field by setting the `idFields` options.
```ts filename="Customizing entity identity"
defineConfig({
responseCaching: {
session: request => null,
idFields: ['id', 'email']
}
})
```
```graphql
type User {
email: String!
username: String!
profile: Profile!
}
type Profile {
id: ID!
bio: String
picture: String
}
```
In this example, `User`'s identity will be based on `email` field, and `Profile`'s identity will be
based on `id` field.
## HTTP Caching
Response Caching plugin sends `ETag` headers to the client, and respects `If-None-Match` headers in
the HTTP request.
If the client sends an `If-None-Match` header with the same value as the `ETag` header, the server
will respond with a `304 Not Modified` status code without any content, which allows you to reduce
the server load.
Most of the browsers and some HTTP clients support this behavior, so you can use it to improve the
performance of your frontend application.
[Learn more about `ETag` and `If-None-Match` headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag).
### Example with `curl`
First we send a request to the GraphQL server, and we can see that the response contains the headers
```bash filename="Get ETag and Last-Modified headers"
curl -H 'Content-Type: application/json' \
"http://localhost:4000/graphql?query={me{id name}}" -v
```
Then the server will respond a data something the following with the `ETag` and `Last-Modified`
headers:
- `ETag` is the key that is used to identify the cached response.
- `Last-Modified` is used to determine if the cached response is still valid.
```bash filename="Response with ETag and Last-Modified headers"
> GET /graphql?query={me{id,name}} HTTP/1.1
> Host: localhost:4000
> User-Agent: curl/7.68.0
> Accept: application/json
>
* Mark bundle as not supporting multiuse
< HTTP/1.1 200 OK
< access-control-allow-origin: *
< content-length: 130
< content-type: application/json; charset=utf-8
< etag: 2c0ebfe7b2b0273029f2fa23a99d213b56f4838756b3ef7b323c04de1e836be3
< last-modified: Wed Feb 15 2023 15:23:55 GMT+0300 (GMT+03:00)
< Date: Wed, 15 Feb 2023 12:23:55 GMT
< Connection: keep-alive
< Keep-Alive: timeout=5
<
{"data":{"me":{"id":"1","name":"Bob"}}}
```
In the next calls, we can use the `ETag` header as the `If-None-Match` header together with
`Last-Modified` header as `If-Modified-Since` to check if the cached response is still valid.
```bash filename="Use the headers to check if the cached response is still valid"
curl -H "Accept: application/json" \
-H "If-None-Match: 2c0ebfe7b2b0273029f2fa23a99d213b56f4838756b3ef7b323c04de1e836be3" \
-H "If-Modified-Since: Wed Feb 15 2023 15:23:55 GMT" \
"http://localhost:4000/graphql?query=\{me\{id,name\}\}" -v
```
Then the server will return `304: Not Modified` status code with no content.

View file

@ -0,0 +1,25 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Upstream Cancellation
This feature enables you to cancel the upstream HTTP requests when the client cancels the downstream
GraphQL request. This can be useful when you want to save resources on your server and the services.
There is also
[Execution Cancellation](/docs/gateway/other-features/performance/execution-cancellation) that stops
the execution, but it doesn't stop ongoing HTTP requests. This separately allows you to stop the
HTTP requests by hooking into [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).
## Enable Upstream Cancellation
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
upstreamCancellation: true
})
```

View file

@ -0,0 +1,18 @@
export default {
index: 'Overview',
'error-masking': 'Error Masking',
cors: 'CORS',
'csrf-prevention': 'CSRF Prevention',
'rate-limiting': 'Rate Limiting',
'disable-introspection': 'Introspection',
https: 'HTTPS',
'hmac-signature': 'HMAC Signature',
'audit-documents': 'Audit Documents',
'block-field-suggestions': 'Block Field Suggestions',
'character-limit': 'Character Limit',
'cost-limit': 'Cost Limit',
'max-aliases': 'Max Aliases',
'max-depth': 'Max Depth',
'max-directives': 'Max Directives',
'max-tokens': 'Max Tokens',
};

View file

@ -0,0 +1,30 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Audit Documents
Audit your documents for useful metrics such as query depth, directive count and alias count. This
is useful if you want to introduce security rules on your GraphQL server (e.g. via graphql-armor)
and need to figure out the values for doing so.
## How to use?
You need to install the `@graphql-inspector/cli` package:
```sh npm2yarn
npm i @graphql-inspector/cli
```
Then you can point documents to the `audit` command:
```sh
graphql-inspector audit DOCUMENTS
```
<Callout>
Learn more about this command, and [GraphQL Inspector
here](https://the-guild.dev/graphql/inspector/docs/commands/audit#audit---usage)
</Callout>

View file

@ -0,0 +1,39 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Block Field Suggestions
This is a feature that allows you to prevent **returning field suggestions** and **leaking your
schema** to unauthorized actors provided by
[GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/block-field-suggestions/)
In production, this can lead to a schema leak even if introspection is disabled.
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-block-field-suggestions
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { blockFieldSuggestionsPlugin } from '@escape.tech/graphql-armor-block-field-suggestions'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
blockFieldSuggestionsPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Mask applied to the error message | default: '[Suggestion hidden]'
mask: '[Suggestion hidden]'
})
]
})
```

View file

@ -0,0 +1,42 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Character Limit
**Limit** number of **characters** in a GraphQL query document.
This plugin helps you to prevent **DoS attacks** by hard-limiting the size of the query document.
[Provided by GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/character-limit)
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-character-limit
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { characterLimitPlugin } from '@escape.tech/graphql-armor-character-limit'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
characterLimitPlugin({
maxLength: 15000 // Number of characters allowed | Default: 15000
})
]
})
```
<Callout>
- We recommend using the [Max Tokens Plugin](/docs/gateway/other-features/security/max-tokens)
instead of this one. - This plugin does not limit the size of input variables. - This plugin
relies on a parser plugin to work and accesses the query document through the context.
</Callout>

View file

@ -0,0 +1,114 @@
---
description:
CORS stands for Cross Origin Resource Sharing. In a nutshell, as a security measure, browsers
aren't allowed to access resources outside their own domain.
searchable: false
---
import { Callout } from '@theguild/components'
# CORS
CORS stands for Cross Origin Resource Sharing. In a nutshell, as a security measure, browsers aren't
allowed to access resources outside their own domain.
If your api and web apps are deployed to different domains (or subdomains), you'll have to worry
about CORS. For example, if your web client is deployed to example.com but your Hive Gateway is
api.example.com. For security reasons your browser will not allow XHR requests (like the kind that
the GraphQL client makes) to a domain other than the one currently in the browser's address bar.
To deal with this you have two options:
_1. Avoid CORS by proxying your requests_ e.g. If you setup a proxy or redirect to forward requests
from `example.com/api/*` to `api.example.com`, you avoid CORS issues all together.
_2. Configure the gateway to send back CORS headers_ Hive Gateway comes with CORS support out of the
box - CORS can be configured when creating the server either by passing a CORSOptions object, or a
builder function that returns the CORSOptions object.
```ts
export type CORSOptions =
| {
origin?: string[] | string
methods?: string[]
allowedHeaders?: string[]
exposedHeaders?: string[]
credentials?: boolean
maxAge?: number
}
| false
```
### Example configuration using CORSOptions
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
cors: {
origin: 'http://localhost:4000',
credentials: true,
allowedHeaders: ['X-Custom-Header'],
methods: ['POST']
}
})
```
This will return the following headers:
```
Access-Control-Allow-Origin: 'http://localhost:4000'
Access-Control-Allow-Credentials: true
Access-Control-Allow-Methods: POST
Access-Control-Allow-Headers: X-Custom-Header
```
### Example configuration using builder function
You can also pass a function to the cors property, that takes your request and constructs the
options
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
cors: request => {
return {
origin: 'http://localhost:4000',
credentials: true,
allowedHeaders: ['X-Custom-Header'],
methods: ['POST']
}
}
})
```
This will return the same headers as the previous example, but take the `origin` of the request, and
return it in the `Access-Control-Allow-Origin` header.
## Default CORS setting
By default, Hive Gateway will return `Access-Control-Allow-Origin: *` when preflight requests are
made.
This means cross origin requests from browsers work out of the box - however it may be appropriate
to lock to a specific domain before deploying to production.
## Disabling CORS
You can disable CORS on your gateway by simply passing `false` as the cors property
**For example:**
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
cors: false
})
```
<Callout>
If you disable CORS, you may run into issues with your web client not being able to access the
Hive Gateway. This is because of the browser's security policy.
</Callout>

View file

@ -0,0 +1,52 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Cost Limit
**Limit** the **complexity** of a GraphQL document by using
[GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/cost-limit)
## How to use?
```sh npm2yarn
npm install @escape.tech/graphql-armor-cost-limit
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import {defineConfig} from '@graphql-hive/gateway';
import { costLimitPlugin } from '@escape.tech/graphql-armor-cost-limit';
export const gatewayConfig = defineConfig({
plugins: () => [
costLimitPlugin({
// Toggle the plugin | default: true
enabled: true,
// Cost allowed for the query | default: 5000
maxCost: 5000,
// Static cost of an object | default: 2
objectCost: 2,
// Static cost of a field | default: 1
scalarCost: 1,
// Factorial applied to nested operator | default: 1.5
depthCostFactor: 1.5,
// Ignore the cost of introspection queries | default: true
ignoreIntrospection: true,
// Do you want to propagate the rejection to the client? | default: true
propagateOnRejection: true,
/* Advanced options (use here on your own risk) */
// Callbacks that are ran whenever a Query is accepted
onAccept: [],
// Callbacks that are ran whenever a Query is rejected
onReject: []
}),
]
});
```

View file

@ -0,0 +1,32 @@
---
description:
If you have CORS enabled, almost all requests coming from the browser will have a preflight
request - however, some requests are deemed "simple" and don't make a preflight.
searchable: false
---
import { Callout } from '@theguild/components'
# CSRF Prevention
If you have CORS enabled, almost all requests coming from the browser will have a preflight
request - however, some requests are deemed "simple" and don't make a preflight. One example of such
a request is a good ol' GET request without any headers, this request can be marked as "simple" and
have preflight CORS checks skipped therefore skipping the CORS check.
This attack can be mitigated by saying: "all GET requests must have a custom header set". This would
force all clients to manipulate the headers of GET requests, marking them as "_not_-simple" and
therefore always executing a preflight request. Apollo does this when using the
[`csrfPrevention = true` option](https://www.apollographql.com/docs/apollo-server/api/apollo-server/#csrfprevention).
## Quick Start
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
csrfPrevention: {
requestHeaders: ['x-gateway-csrf']
}
})
```

View file

@ -0,0 +1,87 @@
---
description:
Learn how to disable GraphQL schema introspection and the "did you mean x" suggestion feature.
searchable: false
---
import { Callout } from '@theguild/components'
# Introspection
A powerful feature of GraphQL is schema introspection. This feature is used by GraphiQL for
exploring the schema and also by tooling such as
[GraphQL Code Generator](https://the-guild.dev/graphql/codegen) for generating type-safe
client/frontend code.
GraphQL schema introspection is also a feature that allows clients to ask a GraphQL server what
GraphQL features it supports (e.g. defer/stream or subscriptions).
## Disabling Introspection
<Callout>
If your goal is to avoid unknown actors from reverse-engineering your GraphQL
schema and executing arbitrary operations, it is highly recommended to use
persisted operations.
[Learn more about persisted operations.](/docs/gateway/persisted-documents)
</Callout>
## Disable Introspection based on the GraphQL Request
Sometimes you want to allow introspection for certain users. You can access the `Request` object
and determine based on that whether introspection should be enabled or not. E.g. you can check the
headers.
```ts filename="gateway.config.ts" {7}
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
disableIntrospection: {
isDisabled: request => request.headers.get('x-allow-introspection') !== 'secret-access-key'
}
})
```
## Disabling Field Suggestions
<Callout>
The [`graphql-armor`](https://github.com/Escape-Technologies/graphql-armor) plugin is a security layer that help you protect your GraphQL server from malicious queries.
It allows you to configure various security features such as character limit or blocking field suggestions.
For more information about `graphql-armor` features, you can refer to the [documentation for the plugin](/docs/gateway/other-features/security/block-field-suggestions).
Here is an example of how to use `graphql-armor` to disable introspection and block field
suggestions.
</Callout>
When executing invalid GraphQL operation the GraphQL engine will try to construct smart suggestions
that hint typos in the executed GraphQL document. This can be considered a security issue, as it can
leak information about the GraphQL schema, even if introspection is disabled.
<Callout>
If your goal is to avoid unknown actors from reverse-engineering your GraphQL
schema and executing arbitrary operations, it is highly recommended to use
persisted operations.
[Learn more about persisted operations.](/docs/gateway/persisted-documents)
</Callout>
Disabling the "did you mean x" suggestion feature can be achieved via the
`blockFieldSuggestionsPlugin` from
[`graphql-armor`](https://github.com/Escape-Technologies/graphql-armor).
```sh npm2yarn
npm i @escape.tech/graphql-armor-block-field-suggestions
```
```ts filename="Disabling the 'did you mean x' suggestion feature with a plugin" {2, 7}
import { blockFieldSuggestionsPlugin } from '@escape.tech/graphql-armor-block-field-suggestions'
import { defineConfig, useDisableIntrospection } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
disableIntrospection: true,
plugins: pluginCtx => [blockFieldSuggestionsPlugin()]
})
```

View file

@ -0,0 +1,18 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Error Masking
Hive Gateway automatically masks unexpected errors and prevents leaking sensitive information to
clients.
Unexpected errors can be caused by failed connections to remote services such as databases or HTTP
APIs. Nobody external needs to know that your database server is not reachable. Exposing such
information to the outside world can make you vulnerable for targeted attacks.
In order to build secure applications, it is crucial to understand this concept.
{/* `TODO: Consider how to explain subgraph errors etc` */}

View file

@ -0,0 +1,226 @@
---
searchable: false
---
import { Callout, Steps } from '@theguild/components'
# HMAC Signature
HMAC (Hash-based Message Authentication Code) is a mechanism for calculating a message
authentication code involving a hash function in combination with a secret key. It can be used to
verify the integrity and authenticity of a message.
This Gateway plugin implements HMAC signing for requests between Hive Gateway and the upstream
GraphQL subgraph. It also provides HMAC verification plugin for the incoming requests in the
subgraph services.
By activating this plugin, you can ensure that the requests sent to GraphQL subgraphs are trusted and
signed by the Hive Gateway. In case of any missing signature, tampering or unauthorized access, the
subgraph services will reject the request.
```mermaid
flowchart LR
1(["End-user"]) --->|"query { comments { id author { id name }}}"| 2
subgraph Hive Gateway
2["Engine"]
3["useHmacUpstreamSignature"]
4["Query Planner"]
2--->3
2--->4
end
subgraph "Users Subgraph"
5["useHmacSignatureValidation"]
4--->|"query { _entities(representations: $r) { ... on User { name }} }\nextensions: { hmac-signature: AbC123 }"|5
end
subgraph "Comments Subgraph"
6["useHmacSignatureValidation"]
4--->|"query { comments { id author { id }} }\nextensions: { hmac-signature: AbC123 }"|6
end
```
## How to use?
<Steps>
### Step 1: Gather your secret key
Before you start, you need to have a secret key that will be used for HMAC signing and verification.
The secret key should be a random, opaque string, that will be shared between the Hive Gateway and
the subgraphs validating the HMAC signature.
### Step 2: HMAC Signing in Hive Gateway
```ts
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
hmacUpstreamSignature: {
secret: myHMACSecret // see step 1 for the secret key
}
// ...
})
```
Now, every GraphQL request sent to the upstream GraphQL subgraphs will be signed with the HMAC and
the `extensions` of the upstream request will contain the HMAC signature.
To configure the subgraph verification of the HMAC signature, please follow the next step.
### Step 3: HMAC Verification in Subgraph services
The next step is to perform a verification over the sent HMAC signature in the subgraph services:
#### With GraphQL Yoga
If you are using Yoga, you can use the gateway package:
```sh npm2yarn
npm i @graphql-hive/gateway
```
```ts
import { createYoga } from 'graphql-yoga'
import { useHmacSignatureValidation } from '@graphql-hive/gateway'
const myYogaSubgraphServer = createYoga({
// ...
plugins: [
useHmacSignatureValidation({
secret: myHMACSecret // see step 1 for the secret key
})
// other Yoga plugins
// ...
]
})
```
<Callout>
Make sure to add `useHmacSignatureValidation` first in the plugins list in your Yoga
configuration. This will ensure the request is verified before processing the other plugins.
</Callout>
#### With Apollo Server
If you are using Apollo-Server for your subgraph services, you can implement a custom plugin to
verify the HMAC signature. You can still use the utilities from the `@graphql-hive/gateway` library
to serialize the request parameters and verify the HMAC signature in a stable way.
Start by installing the `@graphql-hive/gateway` package:
```sh npm2yarn
npm i @graphql-hive/gateway
```
Now, configure your Apollo Server with the HMAC verification plugin:
```ts filename="apollo-subgraph.ts"
import { createHmac } from 'crypto'
import { ApolloServer, ApolloServerPlugin } from '@apollo/server'
import { defaultParamsSerializer } from '@graphql-hive/gateway'
const verifyHmacPlugin = {
async requestDidStart({ request, contextValue }) {
const signature = request.extensions?.['hmac-signature']
if (!signature) {
throw new Error('HMAC signature is missing')
}
const serializedParams = defaultParamsSerializer({
query: request.query,
variables: request.variables
})
const incomingReqSignature = createHmac('sha256', HMAC_SIGNING_SECRET)
.update(serializedParams)
.digest('base64')
if (incomingReqSignature !== signature) {
throw new Error('HMAC signature is invalid')
}
}
} satisfies ApolloServerPlugin<{}>
const server = new ApolloServer({
plugins: [
verifyHmacPlugin
// ... other Apollo plugins
]
})
```
#### Other GraphQL servers
To implement HMAC verification in other GraphQL servers, you should implement a HMAC verification
using the following specification:
- The incoming request to your server will contain an `extensions` field with a `hmac-signature`
key.
- The `hmac-signature` value is a `base64` encoded HMAC signature of the request parameters, using
the SHA-256 algorithm.
- The request parameters should be serialized in a stable way, so the signature can be verified
correctly. It should consist of the GraphQL `query` and `variables`:
```json
{
"query": "query { comments { id author { id name } } }",
"variables": {}
}
```
- The HMAC signature should be calculated using the secret key shared between the Hive Gateway and
the subgraph services.
Here's an example of an incoming subgraph request with the HMAC signature:
```json
{
"query": "query { comments { id author { id name } } }",
"variables": {},
"extensions": {
"hmac-signature": "AbC123"
}
}
```
> The signature is produced by the Hive Gateway using the shared secret key, and the serialized
> request (query and variables).
</Steps>
## Configuration
### `hmacUpstreamSignature`
The `hmacUpstreamSignature` flag allows you to customize the HMAC signing behavior in the Hive
Gateway:
- `secret`: The secret key used for HMAC signing and verification. It should be a random, opaque
string shared between the Hive Gateway and the subgraph services.
- `extensionName` (optional, default: `hmac-signature`): The key name used in the `extensions` field
of the outgoing requests to store the HMAC signature.
- `serializeExecutionRequest` - A function to customize the way the incoming request is serialized
before calculating the HMAC signature. By default, it uses
[stable JSON hash](https://www.npmjs.com/package/json-stable-stringify) of the GraphQL `query` and
`variables`.
- `shouldSign`: A function to determine if the request should be signed or not. By default, it signs
all requests.
### `useHmacSignatureValidation`
The `useHmacSignatureValidation` plugin allows you to customize the HMAC verification behavior in the
subgraph.
- `secret`: The secret key used for HMAC signing and verification. It should be a random, opaque
string shared between the Hive Gateway and the subgraph services.
- `extensionName` (optional, default: `hmac-signature`): The key name used in the `extensions` field
  of the incoming requests where the HMAC signature is stored.
- `serializeParams` - A function to customize the way the incoming request is serialized before
calculating the HMAC signature. By default, it uses
[stable JSON hash](https://www.npmjs.com/package/json-stable-stringify) of the GraphQL `query` and
`variables`.

View file

@ -0,0 +1,82 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Secure HTTP Connection (HTTPS)
HTTPS (HyperText Transfer Protocol Secure) is an encrypted version of the HTTP protocol. It uses TLS
to encrypt all communication between a client and a server.
There are different ways to secure the connection. It is either between the client and Hive Gateway
or between Hive Gateway and the subgraphs.
<Callout>This is only available on Node environment.</Callout>
## Subgraph - Gateway Connection
Hive Gateway acts as a client to the subgraphs, so if you want to have a secure connection
between Hive Gateway and the subgraphs, you can use HTTPS.
### Using Self-Signed Certificates
However, if you use self-signed certificates, Hive Gateway cannot verify them by default,
so you need to provide those certificates to Hive Gateway.
#### Environment Variables
Hive Gateway's default HTTP Client implementation respects Node's environment variables related to
this;
- `NODE_TLS_REJECT_UNAUTHORIZED` - If set to `0`, it disables the rejection of self-signed
certificates.
- `NODE_EXTRA_CA_CERTS` - If set, it provides a path to a CA certificate file.
Below is an example of how to use self-signed certificates with Hive Gateway;
```sh
NODE_EXTRA_CA_CERTS=/path/to/ca.crt hive-gateway supergraph <path-to-supergraph-config>
```
#### Configuration File
The only way to configure HTTPS programmatically is to use a custom agent like below;
```ts
import { readFileSync } from 'fs'
import { Agent } from 'https'
import { defineConfig } from '@graphql-hive/gateway'
const agent = new Agent({
ca: readFileSync('/path/to/ca.crt')
// or
rejectUnauthorized: false
})
export const gatewayConfig = defineConfig({
// This function will be called for each URL to determine if the custom agent should be used
customAgent: ({ url }) =>
url === 'https://example.com'
? agent
: undefined
})
```
## Client - Gateway Connection
You can also configure Hive Gateway to listen on HTTPS. You can provide the path to the key and
certificate files in the configuration file;
```ts
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
sslCredentials: {
key_file_name: 'path/to/key.pem',
cert_file_name: 'path/to/cert.pem',
passphrase: 'passphrase'
},
port: 443
})
```

View file

@ -0,0 +1,204 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Secure your Gateway
Building a secure GraphQL API is hard by design because of the “Graph” nature of GraphQL. Libraries
for making different aspects of a GraphQL server secure have existed since the early days of
GraphQL. However, combining those tools is often cumbersome and results in messy code. With envelop
securing your server is now as easy as pie! Hive Gateway has a built-in security layer that helps
you to secure your Gateway. However, in most cases, this security layer is not enough or needs to be
customized for your use case.
## Protection against Malicious GraphQL Operations
One of the main benefits of GraphQL is that data can be requested individually. However, this also
introduces the possibility for attackers to send operations with deeply nested selection sets that
could block other requests being processed. Fortunately, infinite loops are not possible by design
as a fragment cannot self-reference itself. Unfortunately, that still does not prevent possible
attackers from sending selection sets that are hundreds of levels deep.
The following schema:
```graphql
type Query {
author(id: ID!): Author!
}
type Author {
id: ID!
posts: [Post!]!
}
type Post {
id: ID!
author: Author!
}
```
Would allow sending and executing queries such as:
```graphql
query {
author(id: 42) {
posts {
author {
posts {
author {
posts {
author {
posts {
author {
posts {
author {
posts {
author {
id
}
}
}
}
}
}
}
}
}
}
}
}
}
}
```
There are a few measurements you can use for preventing the execution of such operations.
A handy tool for analyzing your existing GraphQL operations and finding the best defaults for your
use case is [`graphql-inspector`](https://www.the-guild.dev/graphql/inspector).
Learn more about `graphql-inspector audit`
[here](https://the-guild.dev/graphql/inspector/docs/essentials/audit).
### Persisted Operations
Instead of allowing any arbitrary GraphQL operation in production usage, we could use an allow-list
of operations that the server is allowed to execute. We can collect such a list by scanning the
code-base and extracting the list of operations.
[Learn more how to configure persisted operations](/docs/gateway/persisted-documents)
### Reject Malicious Operation Documents
Parsing a GraphQL operation document is a very expensive and compute-intensive operation that
blocks the JavaScript event loop. If an attacker sends a very complex operation document with slight
variations over and over again, they can easily degrade the performance of the GraphQL server. Because
of the variations simply having an LRU cache for parsed operation documents is not enough.
A potential solution is to limit the maximal allowed count of tokens within a GraphQL document.
In computer science, lexical analysis, lexing or tokenization is the process of converting a
sequence of characters into a sequence of lexical tokens.
E.g. given the following GraphQL operation.
```graphql
query {
me {
id
user
}
}
```
The tokens are `query`, `{`, `me`, `{`, `id`, `user`, `}` and `}`. Having a total count of 8 tokens.
The optimal maximum token count for your application depends on the complexity of the GraphQL
operations and documents. Usually 800-2000 tokens seems like a sane default.
You can limit the amount of allowed tokens per operation and automatically abort any further
processing of a GraphQL operation document that exceeds the limit with the
[Max Tokens Plugin](/docs/gateway/other-features/security/max-tokens).
Also this can be combined with
[Character Limit](/docs/gateway/other-features/security/character-limit) that limits the number of
characters in the query and mutation documents.
### Gateway -> Subgraph HMAC Signing
When you have multiple subgraphs and a gateway, you might want to ensure that the requests to the
subgraphs are trusted and signed by the gateway. This is handy in case you want to ensure that the
requests to the subgraphs are trusted and signed by the gateway, and no other entity can execute
requests to the subgraph.
In case of any missing signature, tampering or unauthorized access, the subgraph services will
reject the request.
We recommend using HMAC signing for requests between the Hive Gateway and the upstream in cases
where authentication plugins are involved, in order to ensure the gateway is the only entity that
can execute requests to the subgraph on behalf of the end-users.
You can use the [HMAC Signature plugin](/docs/gateway/other-features/security/hmac-signature) to
perform requesting signing and verification.
### Query Depth Limiting
Sometimes persisted operations cannot be used. E.g. if you are building an API that is used by third
party users. However, we can still apply some protection.
[Learn more about Max Depth plugin here](/docs/gateway/other-features/security/max-depth)
This can prevent malicious API users executing GraphQL operations with deeply nested selection sets.
You need to tweak the maximum depth an operation selection set is allowed to have based on your
schema and needs, as it could vary between users.
### Rate Limiting
Rate-limiting is a common practice with APIs, and with GraphQL it gets more complicated because of
the flexibility of the graph and the ability to choose what fields to query.
The [Rate Limit Plugin](/docs/gateway/other-features/security/rate-limiting) can be used to limit
access to resources by field level.
## Prevent unwanted HTTP requests
### CORS (Cross-Origin Resource Sharing) (enabled by default)
Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to tell
browsers to give a web application running at one origin, access to selected resources from a
different origin. A web application makes a cross-origin HTTP request when it requests a resource
that has a different origin (domain, protocol, or port) from its own.
[Learn more about CORS plugin here](/docs/gateway/other-features/security/cors)
### CSRF Prevention
Cross-Site Request Forgery (CSRF) is an attack that forces an end user to execute unwanted actions
on a web application in which they're currently authenticated.
[Learn more about CSRF Prevention plugin here](/docs/gateway/other-features/security/csrf-prevention)
## Prevent Leaking Sensitive Information
### Disable Schema Introspection
If your schema includes sensitive information that you want to hide from the outside world,
disabling the schema introspection is a possible solution. The
[Disable Introspection Plugin](/docs/gateway/other-features/security/disable-introspection) plugin
solves that in a single line of code!
### Block Field Suggestions
Field suggestions are a feature of GraphQL that allows the client to request the server to suggest
fields that can be queried. This is a very useful feature for developers using GraphQL, but it can
also be used by attackers to discover the schema of the server.
You can block field suggestions with the
[Block Field Suggestions Plugin](/docs/gateway/other-features/security/block-field-suggestions).
### Error Masking (enabled by default)
In most GraphQL servers any thrown error or rejected promise will result in the original error
leaking to the outside world. Some frameworks have custom logic for catching unexpected errors and
mapping them to an unexpected error instead. In Hive Gateway, this is enabled by default.
[Learn more about Error Masking](/docs/gateway/other-features/security/error-masking)

View file

@ -0,0 +1,51 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Max Aliases
**Limit** the number of **aliases** in a GraphQL document.
It is used to prevent **DOS attack** or **heap overflow**.
[Provided by GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/max-aliases)
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-max-aliases
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { maxAliasesPlugin } from '@escape.tech/graphql-armor-max-aliases'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
maxAliasesPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Number of aliases allowed | Default: 5
maxAliases: 5,
// Do you want to propagate the rejection to the client? | default: true
propagateOnRejection: true,
// List of queries that are allowed to bypass the plugin
allowList: [],
/* Advanced options (use here on your own risk) */
// Callbacks that are ran whenever a Query is accepted
onAccept: [],
// Callbacks that are ran whenever a Query is rejected
onReject: []
})
]
})
```

View file

@ -0,0 +1,55 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Max Depth
**Limit** the **depth** of a GraphQL document.
It is used to prevent overly large queries that could lead to overfetching or a **DOS attack**.
[Provided by GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/max-depth)
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-max-depth
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { maxDepthPlugin } from '@escape.tech/graphql-armor-max-depth'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
maxDepthPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Depth threshold | default: 6
n: 6,
// Do you want to propagate the rejection to the client? | default: true
propagateOnRejection: true,
// List of queries that are allowed to bypass the plugin
allowList: [],
/* Advanced options (use at your own risk) */
// Callbacks that are run whenever a Query is accepted
onAccept: [],
// Callbacks that are run whenever a Query is rejected
onReject: []
})
]
})
```
## References
- https://github.com/advisories/GHSA-mh3m-8c74-74xh

View file

@ -0,0 +1,56 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Max Directives
**Limit** the number of **directives** in a GraphQL document.
It is used to prevent **DOS attacks**, **heap overflows** or **server overloading**.
[Provided by GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/max-directives)
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-max-directives
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { maxDirectivesPlugin } from '@escape.tech/graphql-armor-max-directives'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
maxDirectivesPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Number of directives allowed | Default: 10
n: 10,
// Do you want to propagate the rejection to the client? | default: true
propagateOnRejection: true,
// List of queries that are allowed to bypass the plugin
allowList: [],
/* Advanced options (use at your own risk) */
// Callbacks that are run whenever a Query is accepted
onAccept: [],
// Callbacks that are run whenever a Query is rejected
onReject: []
})
]
})
```
## References
- https://github.com/graphql-java/graphql-java/issues/2888
- https://github.com/graphql-java/graphql-java/pull/2892

View file

@ -0,0 +1,59 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Max Tokens
**Limit** the number of **tokens** in a GraphQL document.
It is used to prevent **DOS attacks**, **heap overflows** or **server overloading**.
The number of tokens in a document is often limited by the GraphQL parser, but this is not always
the case, and an unbounded document could lead to a fatal heap overflow.
[Provided by GraphQL Armor](https://escape.tech/graphql-armor/docs/plugins/max-tokens)
## How to use?
Install the plugin:
```sh npm2yarn
npm install @escape.tech/graphql-armor-max-tokens
```
Then, add it to your plugins:
```ts filename="gateway.config.ts"
import { maxTokensPlugin } from '@escape.tech/graphql-armor-max-tokens'
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
plugins: () => [
maxTokensPlugin({
// Toggle the plugin | Default: true
enabled: true,
// Number of tokens allowed | Default: 5000
n: 5000,
// Do you want to propagate the rejection to the client? | default: true
propagateOnRejection: true,
// List of queries that are allowed to bypass the plugin
allowList: [],
/* Advanced options (use at your own risk) */
// Callbacks that are run whenever a Query is accepted
onAccept: [],
// Callbacks that are run whenever a Query is rejected
onReject: []
})
]
})
```
## References
- https://github.com/graphql/graphql-js/pull/3684
- https://github.com/advisories/GHSA-p4qx-6w5p-4rj2

View file

@ -0,0 +1,93 @@
---
searchable: false
---
import { Callout } from '@theguild/components'
# Rate Limiting
Rate limiting is a technique for reducing server load by limiting the number of requests that can be
made to a subgraph.
You can use the rate limiting feature to limit the rate at which queries and mutations are called.
## Programmatic Configuration
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
rateLimiting: {
rules: [
{
type: 'Query',
field: 'foo',
max: 5, // requests limit for a time period
ttl: 5000, // time period
// You can use any value from the context
identifier: '{context.headers.authorization}'
}
]
}
})
```
## Rate Limiting through `@rateLimit` directive
```ts filename="gateway.config.ts"
import { defineConfig } from '@graphql-hive/gateway'
export const gatewayConfig = defineConfig({
rateLimiting: true
})
```
This approach follows the pattern of
[`graphql-rate-limit`](https://github.com/teamplanes/graphql-rate-limit/blob/master/README.md#field-config).
To set rate limit hints in your subgraph schema, the `@rateLimit` directive definition should be
included in the subgraph schema:
```graphql
# Import the directive for Federation
extend schema
@link(url: "https://specs.apollo.dev/link/v1.0")
@link(url: "https://specs.apollo.dev/federation/v2.3", import: ["@composeDirective"])
@link(url: "https://the-guild.dev/graphql/mesh/spec/v1.0", import: ["@rateLimit"])
@composeDirective(name: "@rateLimit")
directive @rateLimit(
max: Int
window: String
message: String
identityArgs: [String]
arrayLengthField: String
) on FIELD_DEFINITION
```
Then in the subgraph schema, you can use the `@rateLimit` directive to set rate limit hints on
fields:
```graphql
type Query {
getItems: [Item] @rateLimit(window: "1s", max: 5, message: "You are doing that too often.")
}
```
## Field Configuration
- `window`: Specify a time interval window within which the field can be accessed at most the max
  number of times. The window argument is parsed with [Vercel's `ms`](https://github.com/vercel/ms).
- `max`: Define the max number of calls to the given field per window.
- `identityArgs`: If you wanted to limit the requests to a field per id, per user, use identityArgs
  to define how the request should be identified. For example you'd provide just ["id"] if you
  wanted to rate limit the access to a field by id. Nested identity args are accessed with
  [Lodash's `get`](https://lodash.com/docs#get).
- `message`: A custom message per field. Note you can also use formatError to customise the default
error message if you don't want to define a single message per rate limited field.
- `arrayLengthField`: Limit calls to the field, using the length of the array as the number of calls
to the field.

View file

@ -0,0 +1,6 @@
export default {
index: 'Overview',
mocking: 'Mocking',
debugging: 'Debugging',
snapshot: 'Upstream HTTP Snapshot',
};

Some files were not shown because too many files have changed in this diff Show more