ToolJet/plugins/packages/bigquery/lib/index.ts
Siddharth Pundir 1a75b1d46d
Implemented the list tables and list dataset for the big query (#15806)
* Implemented the list tables and dataset for the big query

* Implemented the list table method
2026-04-08 00:39:12 +05:30

433 lines
No EOL
14 KiB
TypeScript

import { QueryError, QueryResult, QueryService, ConnectionTestResult } from '@tooljet-plugins/common';
import { SourceOptions, QueryOptions } from './types';
import { BigQuery } from '@google-cloud/bigquery';
const JSON5 = require('json5');
const _ = require('lodash');
export default class Bigquery implements QueryService {
  /**
   * Dispatches the operation selected in `queryOptions.operation` to the matching
   * BigQuery client call and returns a normalized `QueryResult`.
   *
   * NOTE(security): dataset/table/view/column identifiers and the `condition`
   * clause from `queryOptions` are interpolated directly into SQL strings.
   * BigQuery cannot bind identifiers as query parameters, so these values must
   * come from a trusted source (the app builder, not end users).
   *
   * @param sourceOptions datasource credentials/configuration
   * @param queryOptions  operation name plus operation-specific fields
   * @param dataSourceId  unused here; part of the QueryService contract
   * @throws QueryError with reason/suggestion details on any BigQuery failure
   */
  async run(sourceOptions: SourceOptions, queryOptions: QueryOptions, dataSourceId: string): Promise<QueryResult> {
    const operation = queryOptions.operation;
    const client = await this.getConnection(sourceOptions);
    let result = {};
    try {
      switch (operation) {
        case 'list_datasets': {
          const [datasets] = await client.getDatasets();
          // Keep only the dataset reference from each dataset object.
          result = this.sanitizeResponse(datasets, ['metadata.datasetReference']);
          break;
        }
        case 'get_dataset_info': {
          const [metadata] = await client.dataset(queryOptions.datasetId).getMetadata();
          result = this.sanitizeResponse(metadata, [
            'datasetReference',
            'location',
            'description',
            'creationTime',
            'lastModifiedTime',
            'labels',
          ]);
          break;
        }
        case 'list_tables': {
          const [tables] = await client.dataset(queryOptions.datasetId).getTables();
          result = this.sanitizeResponse(tables, ['metadata.tableReference']);
          break;
        }
        case 'create_table': {
          const [table] = await client
            .dataset(queryOptions.datasetId)
            .createTable(queryOptions.tableId, this.parseJSON(queryOptions.options));
          result = { tableId: table.id };
          break;
        }
        case 'delete_table': {
          await client.dataset(queryOptions.datasetId).table(queryOptions.tableId).delete();
          result = `Table ${queryOptions.tableId} deleted.`;
          break;
        }
        case 'create_view': {
          // SQL whitespace is insignificant; identifiers are interpolated (see class note).
          const query = `CREATE VIEW ${queryOptions.datasetId}.${queryOptions.view_name} AS
            SELECT ${queryOptions.viewcolumns}
            FROM ${queryOptions.datasetId}.${queryOptions.tableId}
            ${queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'}`;
          result = await this.runQueryJob(client, queryOptions, query);
          break;
        }
        case 'query': {
          result = await this.runQueryJob(client, queryOptions, queryOptions.query);
          break;
        }
        case 'delete_record': {
          const query = `DELETE FROM ${queryOptions.datasetId}.${queryOptions.tableId} ${
            queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'
          }`;
          result = await this.runQueryJob(client, queryOptions, query);
          break;
        }
        case 'insert_record': {
          // Parse the rows payload once (the original parsed it twice — once for
          // the insert and once for the record count).
          const parsedRows = this.parseJSON(queryOptions.rows) as Record<string, unknown>[];
          const insertResponse = await client
            .dataset(queryOptions.datasetId)
            .table(queryOptions.tableId)
            .insert(parsedRows);
          result = { ...insertResponse[0], records: parsedRows.length };
          break;
        }
        case 'update_record': {
          const columnString = await this.columnBuilder(queryOptions);
          const query = `UPDATE ${queryOptions.datasetId}.${queryOptions.tableId} SET ${columnString} ${
            queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'
          }`;
          result = await this.runQueryJob(client, queryOptions, query);
          break;
        }
        // Unknown operations fall through and return an empty result (preserved behavior).
      }
    } catch (error) {
      console.error(error);
      const errorMessage = error.message || 'An unknown error occurred.';
      const errorDetails: any = {};
      // Human-readable hints keyed by the BigQuery error "reason" code.
      const errorSuggestions = {
        notFound: 'Check if the table or dataset exists in the specified location.',
        accessDenied: 'Verify that the service account has the necessary permissions.',
        invalidQuery: 'Check the SQL syntax and ensure that all referenced columns exist.',
        rateLimitExceeded: 'You are making too many requests. Try again after some time.',
        backendError: 'BigQuery encountered an internal error. Retry the request after some time.',
        quotaExceeded: 'You have exceeded your quota limits. Consider upgrading your plan or reducing query size.',
        duplicate: 'A resource with this name already exists. Try using a different name.',
        badRequest: 'Check the request parameters and ensure they are correctly formatted.',
      };
      if (error instanceof Error) {
        const bigqueryError = error as any;
        errorDetails.error = bigqueryError;
        // BigQuery attaches job metadata to error.response; all access is optional-chained
        // because not every failure carries a job reference.
        const reason = bigqueryError.response?.status?.errorResult?.reason || 'unknownError';
        errorDetails.reason = reason;
        errorDetails.message = errorMessage;
        errorDetails.jobId = bigqueryError.response?.jobReference?.jobId;
        errorDetails.location = bigqueryError.response?.jobReference?.location;
        errorDetails.query = bigqueryError.response?.configuration?.query?.query;
        errorDetails.suggestion = errorSuggestions[reason];
      }
      throw new QueryError('Query could not be completed', errorMessage, errorDetails);
    }
    return {
      status: 'ok',
      data: result,
    };
  }

  // Runs `query` as a BigQuery job using the caller-supplied job/result options
  // and returns the resulting rows. Shared by query/create_view/delete_record/update_record.
  private async runQueryJob(client: any, queryOptions: QueryOptions, query: string): Promise<any[]> {
    const [job] = await client.createQueryJob({
      ...this.parseJSON(queryOptions.queryOptions),
      query: query,
    });
    const [rows] = await job.getQueryResults(this.parseJSON(queryOptions.queryResultsOptions));
    return rows;
  }

  /**
   * Builds the `SET` clause of an UPDATE statement from `queryOptions.columns`.
   * String values are single-quoted with embedded quotes escaped so a value
   * containing `'` no longer produces broken SQL (bug fix — the original did
   * not escape). Non-string values are emitted verbatim.
   */
  async columnBuilder(queryOptions: any): Promise<string> {
    const assignments: string[] = [];
    for (const [column, value] of Object.entries(queryOptions.columns)) {
      const sqlValue = typeof value === 'string' ? `'${value.replace(/'/g, "\\'")}'` : value;
      assignments.push(`${column}=${sqlValue}`);
    }
    return assignments.join(',');
  }

  /**
   * Entry point for UI helper methods (dataset/table pickers).
   * Supported methods: listDatasets, listTables, getTables.
   * @throws QueryError for unrecognized method names
   */
  async invokeMethod(
    methodName: string,
    _context: { user?: any; app?: any },
    sourceOptions: SourceOptions,
    args?: any
  ): Promise<any> {
    if (methodName === 'listDatasets') {
      return await this._fetchDatasets(sourceOptions, args?.search, args?.page, args?.limit);
    }
    if (methodName === 'listTables') {
      const datasetId = args?.values?.datasetId || '';
      return await this._fetchTables(sourceOptions, datasetId, args?.search, args?.page, args?.limit);
    }
    if (methodName === 'getTables') {
      const datasetId = args?.values?.datasetId || '';
      // A limit implies the caller wants a paginated { items, totalCount } shape.
      const isPaginated = !!args?.limit;
      const result = await this.listTables(sourceOptions, '', '', {
        datasetId,
        search: args?.search,
        page: args?.page,
        limit: args?.limit,
      });
      const payload = (result as any)?.data ?? [];
      if (isPaginated) {
        const rows = (payload as any)?.rows ?? [];
        const totalCount = (payload as any)?.totalCount ?? 0;
        return { items: rows.map((row: any) => this.formatTableOption(row)), totalCount };
      }
      const rows = Array.isArray(payload) ? payload : [];
      return { status: 'ok', data: rows.map((row: any) => this.formatTableOption(row)) };
    }
    throw new QueryError(
      'Method not found',
      `Method ${methodName} is not supported`,
      {}
    );
  }

  // Maps a { table_name, dataset_id } row into the dropdown option shape
  // (previously duplicated inline in both branches of invokeMethod).
  private formatTableOption(row: any): { label: string; value: string; dataset_id: string } {
    return {
      label: String(row.table_name),
      value: String(row.table_name),
      dataset_id: String(row.dataset_id),
    };
  }

  /**
   * Lists tables, either from one dataset (queryOptions.datasetId) or from every
   * dataset in the project. Search and pagination are applied in memory over
   * the combined list.
   *
   * @returns { rows, totalCount } when `limit` is set, otherwise a plain array
   *          of { table_name, dataset_id }
   * @throws QueryError when BigQuery calls fail
   */
  async listTables(
    sourceOptions: SourceOptions,
    dataSourceId: string,
    dataSourceUpdatedAt: string,
    queryOptions?: { datasetId?: string; search?: string; page?: number; limit?: number }
  ): Promise<QueryResult> {
    try {
      const client = await this.getConnection(sourceOptions);
      const search = queryOptions?.search || '';
      const page = queryOptions?.page || 1;
      const limit = queryOptions?.limit;
      const datasetId = queryOptions?.datasetId || '';
      // Decide which datasets to query: the one given, or all of them.
      let datasetIds: string[] = [];
      if (datasetId) {
        datasetIds = [datasetId];
      } else {
        const [datasets] = await client.getDatasets();
        datasetIds = datasets.map((d: any) => d.id);
      }
      // Fetch tables from all selected datasets in parallel.
      const allTablesRaw = await Promise.all(
        datasetIds.map(async (dsId: string) => {
          const [tables] = await client.dataset(dsId).getTables();
          return tables.map((t: any) => ({
            table_name: t.id,
            dataset_id: dsId,
          }));
        })
      );
      let allTables = allTablesRaw.flat();
      // Case-insensitive substring search across the combined table list.
      if (search) {
        const searchLower = search.toLowerCase();
        allTables = allTables.filter((t) => String(t.table_name ?? '').toLowerCase().includes(searchLower));
      }
      const totalCount = allTables.length;
      // Pagination is applied only when a limit is requested.
      if (limit) {
        const offset = (page - 1) * limit;
        return {
          status: 'ok',
          data: { rows: allTables.slice(offset, offset + limit), totalCount },
        };
      }
      return {
        status: 'ok',
        data: allTables,
      };
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch tables', errorMessage, {});
    }
  }

  /**
   * Fetches datasets as dropdown options, with optional in-memory search and
   * pagination. Returns { items, totalCount } when `limit` is set, otherwise a
   * plain options array.
   * @throws QueryError when the BigQuery call fails
   */
  private async _fetchDatasets(sourceOptions: SourceOptions, search = '', page?: number, limit?: number): Promise<Array<{ value: string; label: string }> | { items: Array<{ value: string; label: string }>; totalCount: number }> {
    try {
      const client = await this.getConnection(sourceOptions);
      const [datasets] = await client.getDatasets();
      const searchLower = search.toLowerCase();
      // Guard against a nullish id so filtering never throws.
      const filtered = search
        ? datasets.filter((d: any) => String(d.id ?? '').toLowerCase().includes(searchLower))
        : datasets;
      const totalCount = filtered.length;
      if (limit) {
        const offset = ((page || 1) - 1) * limit;
        const paged = filtered.slice(offset, offset + limit);
        return {
          items: paged.map((d: any) => ({ value: d.id, label: d.id })),
          totalCount,
        };
      }
      return filtered.map((d: any) => ({ value: d.id, label: d.id }));
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch datasets', errorMessage, {});
    }
  }

  /**
   * Fetches tables of one dataset as dropdown options, with optional in-memory
   * search and pagination. Each option also carries table_name/dataset_id for
   * callers that need the raw identifiers.
   * @throws QueryError when the BigQuery call fails
   */
  private async _fetchTables(
    sourceOptions: SourceOptions,
    datasetId: string,
    search = '',
    page?: number,
    limit?: number
  ): Promise<Array<{ value: string; label: string }> | { items: Array<{ value: string; label: string }>; totalCount: number }> {
    try {
      const client = await this.getConnection(sourceOptions);
      const [tables] = await client.dataset(datasetId).getTables();
      const searchLower = search.toLowerCase();
      // Guard against a nullish id so filtering never throws.
      const filtered = search
        ? tables.filter((t: any) => String(t.id ?? '').toLowerCase().includes(searchLower))
        : tables;
      const totalCount = filtered.length;
      const toOption = (t: any) => ({ value: t.id, label: t.id, table_name: t.id, dataset_id: datasetId });
      if (limit) {
        const offset = ((page || 1) - 1) * limit;
        return {
          items: filtered.slice(offset, offset + limit).map(toOption),
          totalCount,
        };
      }
      return filtered.map(toOption);
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch tables', errorMessage, {});
    }
  }

  // Splits a whitespace-separated OAuth scope string into an array.
  // Returns [] for missing or non-string input (previously duplicated in
  // getConnection and testConnection).
  private parseScopes(scope: unknown): string[] {
    if (!scope || typeof scope !== 'string') return [];
    return scope.trim().split(/\s+/).filter(Boolean);
  }

  /**
   * Builds a BigQuery client from the service-account JSON in
   * `sourceOptions.private_key` and the optional `scope` string.
   */
  async getConnection(sourceOptions: any, _options?: object): Promise<any> {
    const privateKey = this.getPrivateKey(sourceOptions?.private_key);
    const scopes = this.parseScopes(sourceOptions?.scope);
    return new BigQuery({
      projectId: privateKey?.project_id,
      credentials: {
        client_email: privateKey?.client_email,
        private_key: privateKey?.private_key,
      },
      ...(scopes.length > 0 ? { scopes: scopes } : {}),
    });
  }

  /**
   * Verifies the credentials by making a lightweight authenticated call.
   * Reuses getConnection instead of duplicating client construction (the
   * original also had a dead `if (!client)` check — a constructor call never
   * returns a falsy value).
   * @throws on invalid credentials or insufficient permissions
   */
  async testConnection(sourceOptions: SourceOptions): Promise<ConnectionTestResult> {
    const client = await this.getConnection(sourceOptions);
    await client.getDatasets();
    return {
      status: 'ok',
    };
  }

  // Lenient JSON parsing (JSON5 allows comments, trailing commas, unquoted
  // keys). Returns {} for empty/missing input.
  private parseJSON(json?: string): object {
    if (!json) return {};
    return JSON5.parse(json);
  }

  // Parses the service-account key JSON; only the fields used for
  // authentication are declared in the return type.
  private getPrivateKey(configs?: string): {
    project_id?: string;
    client_email?: string;
    private_key?: string;
  } {
    return this.parseJSON(configs);
  }

  // Projects a response object (or array of objects) down to the given lodash
  // paths. A single path collapses to the picked value itself; multiple paths
  // yield an array of picked values.
  private sanitizeResponse(response: object | [], pickFields: string[]): object | [] {
    if (!response) return response;
    if (Array.isArray(response)) {
      return response.map((item) => this.sanitizeResponse(item, pickFields));
    }
    const pickedKeyValue = pickFields.map((field) => _.result(response, field));
    if (pickedKeyValue.length === 1) {
      return pickedKeyValue[0];
    }
    return pickedKeyValue;
  }
}