mirror of
https://github.com/graphql-hive/console
synced 2026-04-21 14:37:17 +00:00
Co-authored-by: Saurav Tapader <stapader@expediagroup.com>
This commit is contained in:
parent
1454ea3513
commit
c0672ff416
7 changed files with 83 additions and 47 deletions
|
|
@ -29,7 +29,7 @@ ENVIRONMENT=local
|
|||
- Alternatively,
|
||||
[configure hive to use your own Auth0 Application](#setting-up-auth0-app-for-developing)
|
||||
- Open the UI (`http://localhost:3000` by default) and sign in with any of the identity providers
|
||||
- Once this is done, you should be able to login and use the project
|
||||
- Once this is done, you should be able to log in and use the project
|
||||
- Once you generate the token against your organization/personal account in hive, the same can be
|
||||
added locally to `hive.json` within `packages/libraries/cli` which can be used to interact via the
|
||||
hive cli with the registry
|
||||
|
|
|
|||
|
|
@ -4,24 +4,27 @@ This service takes care of feeding usage data into the ClickHouse instance.
|
|||
|
||||
## Configuration
|
||||
|
||||
| Name | Required | Description | Example Value |
|
||||
| ----------------------------------- | ---------------------------------------------- | ------------------------------------------------------------------------------------- | ---------------------------------------------------- |
|
||||
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
|
||||
| `KAFKA_TOPIC` | **Yes** | The kafka topic. | `usage_reports_v2` |
|
||||
| `KAFKA_BROKER` | **Yes** | The address of the Kafka broker. | `127.0.0.1:29092` |
|
||||
| `KAFKA_CONCURRENCY` | **Yes** | The concurrency of the Kafka connection. | `3` |
|
||||
| `KAFKA_SSL` | No | Whether an SSL connection should be established to the kafka service. | `1` (enabled) or `0` (disabled) |
|
||||
| `KAFKA_SASL_MECHANISM` | No | The mechanism used for doing SASL authentication | `plain` or `scram-sha-256` or `scram-sha-512` |
|
||||
| `KAFKA_SASL_USERNAME` | No (Yes, if `KAFKA_SASL_MECHANISM` is defined) | The username for the SASL authentication | `letmein` |
|
||||
| `KAFKA_SASL_PASSWORD`              | No (Yes, if `KAFKA_SASL_MECHANISM` is defined) | The password for the SASL authentication                                              | `letmein`                                            |
|
||||
| `CLICKHOUSE_PROTOCOL` | **Yes** | The ClickHouse protocol. | `http` or `https` |
|
||||
| `CLICKHOUSE_HOST` | **Yes** | The ClickHouse host. | `127.0.0.1` |
|
||||
| `CLICKHOUSE_PORT` | **Yes** | The ClickHouse port. | `8443` |
|
||||
| `CLICKHOUSE_USERNAME` | **Yes** | The username for accessing ClickHouse. | `letmein` |
|
||||
| `CLICKHOUSE_PASSWORD` | **Yes** | The password for accessing ClickHouse. | `letmein` |
|
||||
| `HEARTBEAT_ENDPOINT` | No | The endpoint for a heartbeat. | `http://127.0.0.1:6969/heartbeat` |
|
||||
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
|
||||
| `SENTRY_DSN` | No (Yes if `SENTRY` is defined) | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
|
||||
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `usage-ingestor` |
|
||||
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
|
||||
| Name | Required | Description | Example Value |
|
||||
| ----------------------------------- | ---------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
|
||||
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
|
||||
| `KAFKA_TOPIC` | **Yes** | The kafka topic. | `usage_reports_v2` |
|
||||
| `KAFKA_BROKER` | **Yes** | The address of the Kafka broker. | `127.0.0.1:29092` |
|
||||
| `KAFKA_CONCURRENCY` | **Yes** | The concurrency of the Kafka connection. | `3` |
|
||||
| `KAFKA_SSL` | No | Whether an SSL connection should be established to the kafka service. | `1` (enabled) or `0` (disabled) |
|
||||
| `KAFKA_SSL_CA_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_ca` |
|
||||
| `KAFKA_SSL_CERT_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_cert` |
|
||||
| `KAFKA_SSL_KEY_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_key` |
|
||||
| `KAFKA_SASL_MECHANISM` | No | The mechanism used for doing SASL authentication | `plain` or `scram-sha-256` or `scram-sha-512` |
|
||||
| `KAFKA_SASL_USERNAME` | No (Yes, if `KAFKA_SASL_MECHANISM` is defined) | The username for the SASL authentication | `letmein` |
|
||||
| `KAFKA_SASL_PASSWORD`              | No (Yes, if `KAFKA_SASL_MECHANISM` is defined) | The password for the SASL authentication                                                                                                              | `letmein`                                            |
|
||||
| `CLICKHOUSE_PROTOCOL` | **Yes** | The ClickHouse protocol. | `http` or `https` |
|
||||
| `CLICKHOUSE_HOST` | **Yes** | The ClickHouse host. | `127.0.0.1` |
|
||||
| `CLICKHOUSE_PORT` | **Yes** | The ClickHouse port. | `8443` |
|
||||
| `CLICKHOUSE_USERNAME` | **Yes** | The username for accessing ClickHouse. | `letmein` |
|
||||
| `CLICKHOUSE_PASSWORD` | **Yes** | The password for accessing ClickHouse. | `letmein` |
|
||||
| `HEARTBEAT_ENDPOINT` | No | The endpoint for a heartbeat. | `http://127.0.0.1:6969/heartbeat` |
|
||||
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
|
||||
| `SENTRY_DSN` | No (Yes if `SENTRY` is defined) | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
|
||||
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `usage-ingestor` |
|
||||
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import * as fs from 'fs';
|
||||
import zod from 'zod';
|
||||
|
||||
const isNumberString = (input: unknown) => zod.string().regex(/^\d+$/).safeParse(input).success;
|
||||
|
|
@ -37,6 +38,9 @@ const SentryModel = zod.union([
|
|||
const KafkaBaseModel = zod.object({
|
||||
KAFKA_BROKER: zod.string(),
|
||||
KAFKA_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
|
||||
KAFKA_SSL_CA_PATH: zod.string().optional(),
|
||||
KAFKA_SSL_CERT_PATH: zod.string().optional(),
|
||||
KAFKA_SSL_KEY_PATH: zod.string().optional(),
|
||||
KAFKA_CONCURRENCY: NumberFromString,
|
||||
KAFKA_CONSUMER_GROUP: zod.string(),
|
||||
KAFKA_TOPIC: zod.string(),
|
||||
|
|
@ -146,7 +150,18 @@ export const env = {
|
|||
consumerGroup: kafka.KAFKA_CONSUMER_GROUP,
|
||||
connection: {
|
||||
broker: kafka.KAFKA_BROKER,
|
||||
isSSL: kafka.KAFKA_SSL === '1',
|
||||
ssl:
|
||||
kafka.KAFKA_SSL === '1'
|
||||
? kafka.KAFKA_SSL_CA_PATH != null &&
|
||||
kafka.KAFKA_SSL_CERT_PATH != null &&
|
||||
kafka.KAFKA_SSL_KEY_PATH != null
|
||||
? {
|
||||
ca: fs.readFileSync(kafka.KAFKA_SSL_CA_PATH),
|
||||
cert: fs.readFileSync(kafka.KAFKA_SSL_CERT_PATH),
|
||||
key: fs.readFileSync(kafka.KAFKA_SSL_KEY_PATH),
|
||||
}
|
||||
: true
|
||||
: false,
|
||||
sasl:
|
||||
kafka.KAFKA_SASL_MECHANISM != null
|
||||
? {
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ export function createIngestor(config: {
|
|||
const kafka = new Kafka({
|
||||
clientId: 'usage-ingestor',
|
||||
brokers: [config.kafka.connection.broker],
|
||||
ssl: config.kafka.connection.isSSL,
|
||||
ssl: config.kafka.connection.ssl,
|
||||
sasl:
|
||||
config.kafka.connection.sasl?.mechanism === 'plain'
|
||||
? {
|
||||
|
|
|
|||
|
|
@ -7,24 +7,27 @@ The data is written to a Kafka broker, from Kafka the data is fed into ClickHouse
|
|||
|
||||
## Configuration
|
||||
|
||||
| Name | Required | Description | Example Value |
|
||||
| ----------------------------------- | -------- | ------------------------------------------------------------------------------------- | ---------------------------------------------------- |
|
||||
| `PORT` | No | The port this service is running on. | `4001` |
|
||||
| `TOKENS_ENDPOINT` | **Yes** | The endpoint of the tokens service. | `http://127.0.0.1:6001` |
|
||||
| `RATE_LIMIT_ENDPOINT` | No | The endpoint of the rate limiting service. | `http://127.0.0.1:4012` |
|
||||
| `KAFKA_TOPIC` | **Yes** | The kafka topic. | `usage_reports_v2` |
|
||||
| `KAFKA_CONSUMER_GROUP` | **Yes** | The kafka consumer group. | `usage_reports_v2` |
|
||||
| `KAFKA_BROKER` | **Yes** | The address of the Kafka broker. | `127.0.0.1:29092` |
|
||||
| `KAFKA_SSL` | No | Whether an SSL connection should be established to the kafka service. | `1` (enabled) or `0` (disabled) |
|
||||
| `KAFKA_SASL_MECHANISM` | No | The mechanism used for doing SASL authentication | `plain` or `scram-sha-256` or `scram-sha-512` |
|
||||
| `KAFKA_SASL_USERNAME` | No | The username for the SASL authentication | `letmein` |
|
||||
| `KAFKA_SASL_PASSWORD`              | No       | The password for the SASL authentication                                              | `letmein`                                            |
|
||||
| `KAFKA_BUFFER_SIZE` | No | The buffer size ??? | `12` |
|
||||
| `KAFKA_BUFFER_INTERVAL` | No | The buffer interval ??? | `1` |
|
||||
| `KAFKA_BUFFER_DYNAMIC`             | No       | Whether the buffer is dynamic ???                                                     | `1`                                                  |
|
||||
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
|
||||
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
|
||||
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `usage-service` |
|
||||
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
|
||||
| Name | Required | Description | Example Value |
|
||||
| ----------------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------- |
|
||||
| `PORT` | No | The port this service is running on. | `4001` |
|
||||
| `TOKENS_ENDPOINT` | **Yes** | The endpoint of the tokens service. | `http://127.0.0.1:6001` |
|
||||
| `RATE_LIMIT_ENDPOINT` | No | The endpoint of the rate limiting service. | `http://127.0.0.1:4012` |
|
||||
| `KAFKA_TOPIC` | **Yes** | The kafka topic. | `usage_reports_v2` |
|
||||
| `KAFKA_CONSUMER_GROUP` | **Yes** | The kafka consumer group. | `usage_reports_v2` |
|
||||
| `KAFKA_BROKER` | **Yes** | The address of the Kafka broker. | `127.0.0.1:29092` |
|
||||
| `KAFKA_SSL` | No | Whether an SSL connection should be established to the kafka service. | `1` (enabled) or `0` (disabled) |
|
||||
| `KAFKA_SSL_CA_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_ca` |
|
||||
| `KAFKA_SSL_CERT_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_cert` |
|
||||
| `KAFKA_SSL_KEY_PATH` | No | Refer to [TLS create secure context](https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options) for more information | `./path_to_key` |
|
||||
| `KAFKA_SASL_MECHANISM` | No | The mechanism used for doing SASL authentication | `plain` or `scram-sha-256` or `scram-sha-512` |
|
||||
| `KAFKA_SASL_USERNAME` | No | The username for the SASL authentication | `letmein` |
|
||||
| `KAFKA_SASL_PASSWORD`              | No       | The password for the SASL authentication                                                                                                              | `letmein`                                            |
|
||||
| `KAFKA_BUFFER_SIZE` | No | The buffer size ??? | `12` |
|
||||
| `KAFKA_BUFFER_INTERVAL` | No | The buffer interval ??? | `1` |
|
||||
| `KAFKA_BUFFER_DYNAMIC`             | No       | Whether the buffer is dynamic ???                                                                                                                     | `1`                                                  |
|
||||
| `ENVIRONMENT` | No | The environment of your Hive app. (**Note:** This will be used for Sentry reporting.) | `staging` |
|
||||
| `SENTRY_DSN` | No | The DSN for reporting errors to Sentry. | `https://dooobars@o557896.ingest.sentry.io/12121212` |
|
||||
| `SENTRY` | No | Whether Sentry error reporting should be enabled. | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS` | No | Whether Prometheus metrics should be enabled | `1` (enabled) or `0` (disabled) |
|
||||
| `PROMETHEUS_METRICS_LABEL_INSTANCE` | No | The instance label added for the prometheus metrics. | `usage-service` |
|
||||
| `REQUEST_LOGGING` | No | Log http requests | `1` (enabled) or `0` (disabled) |
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import * as fs from 'fs';
|
||||
import zod from 'zod';
|
||||
|
||||
const isNumberString = (input: unknown) => zod.string().regex(/^\d+$/).safeParse(input).success;
|
||||
|
|
@ -39,6 +40,9 @@ const KafkaBaseModel = zod.object({
|
|||
KAFKA_BROKER: zod.string(),
|
||||
KAFKA_TOPIC: zod.string(),
|
||||
KAFKA_SSL: emptyString(zod.union([zod.literal('1'), zod.literal('0')]).optional()),
|
||||
KAFKA_SSL_CA_PATH: zod.string().optional(),
|
||||
KAFKA_SSL_CERT_PATH: zod.string().optional(),
|
||||
KAFKA_SSL_KEY_PATH: zod.string().optional(),
|
||||
KAFKA_BUFFER_SIZE: NumberFromString,
|
||||
KAFKA_BUFFER_INTERVAL: NumberFromString,
|
||||
KAFKA_BUFFER_DYNAMIC: zod.union([zod.literal('1'), zod.literal('0')]),
|
||||
|
|
@ -143,7 +147,18 @@ export const env = {
|
|||
topic: kafka.KAFKA_TOPIC,
|
||||
connection: {
|
||||
broker: kafka.KAFKA_BROKER,
|
||||
isSSL: kafka.KAFKA_SSL === '1',
|
||||
ssl:
|
||||
kafka.KAFKA_SSL === '1'
|
||||
? kafka.KAFKA_SSL_CA_PATH != null &&
|
||||
kafka.KAFKA_SSL_CERT_PATH != null &&
|
||||
kafka.KAFKA_SSL_KEY_PATH != null
|
||||
? {
|
||||
ca: fs.readFileSync(kafka.KAFKA_SSL_CA_PATH),
|
||||
cert: fs.readFileSync(kafka.KAFKA_SSL_CERT_PATH),
|
||||
key: fs.readFileSync(kafka.KAFKA_SSL_KEY_PATH),
|
||||
}
|
||||
: true
|
||||
: false,
|
||||
sasl:
|
||||
kafka.KAFKA_SASL_MECHANISM != null
|
||||
? {
|
||||
|
|
|
|||
|
|
@ -114,7 +114,7 @@ export function createUsage(config: {
|
|||
const kafka = new Kafka({
|
||||
clientId: 'usage',
|
||||
brokers: [config.kafka.connection.broker],
|
||||
ssl: config.kafka.connection.isSSL,
|
||||
ssl: config.kafka.connection.ssl,
|
||||
sasl:
|
||||
config.kafka.connection.sasl?.mechanism === 'plain'
|
||||
? {
|
||||
|
|
|
|||
Loading…
Reference in a new issue