import { QueryError, QueryResult, QueryService, ConnectionTestResult } from '@tooljet-plugins/common';
import { SourceOptions, QueryOptions } from './types';
import { BigQuery } from '@google-cloud/bigquery';

const JSON5 = require('json5');
const _ = require('lodash');

export default class Bigquery implements QueryService {
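  // Runs the selected BigQuery operation (dataset/table listing, DDL, DML, or raw SQL)
  // against the configured project and wraps the result in a QueryResult.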
  async run(sourceOptions: SourceOptions, queryOptions: QueryOptions, dataSourceId: string): Promise<QueryResult> {
    const operation = queryOptions.operation;
    const client = await this.getConnection(sourceOptions);
    let result = {};

    try {
      switch (operation) {
        case 'list_datasets': {
          const [datasets] = await client.getDatasets();
          result = this.sanitizeResponse(datasets, ['metadata.datasetReference']);
          break;
        }

        case 'get_dataset_info': {
          const [metadata] = await client.dataset(queryOptions.datasetId).getMetadata();
          result = this.sanitizeResponse(metadata, [
            'datasetReference',
            'location',
            'description',
            'creationTime',
            'lastModifiedTime',
            'labels',
          ]);
          break;
        }

        case 'list_tables': {
          const [tables] = await client.dataset(queryOptions.datasetId).getTables();
          result = this.sanitizeResponse(tables, ['metadata.tableReference']);
          break;
        }

        case 'create_table': {
          const [table] = await client
            .dataset(queryOptions.datasetId)
            .createTable(queryOptions.tableId, this.parseJSON(queryOptions.options));
          result = { tableId: table.id };
          break;
        }

        case 'delete_table': {
          await client.dataset(queryOptions.datasetId).table(queryOptions.tableId).delete();
          result = `Table ${queryOptions.tableId} deleted.`;
          break;
        }

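        // Illustrative example (hypothetical names): with datasetId 'sales', view_name 'recent_orders',
        // viewcolumns 'id, amount', tableId 'orders', and condition "amount > 100", the generated SQL is:
        //   CREATE VIEW sales.recent_orders AS SELECT id, amount FROM sales.orders WHERE amount > 100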
        case 'create_view': {
          const query = `CREATE VIEW ${queryOptions.datasetId}.${queryOptions.view_name} AS
            SELECT ${queryOptions.viewcolumns}
            FROM ${queryOptions.datasetId}.${queryOptions.tableId}
            ${queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'}`;

          const [job] = await client.createQueryJob({
            ...this.parseJSON(queryOptions.queryOptions),
            query: query,
          });
          const [rows] = await job.getQueryResults(this.parseJSON(queryOptions.queryResultsOptions));
          result = rows;
          break;
        }

        case 'query': {
          const [job] = await client.createQueryJob({
            ...this.parseJSON(queryOptions.queryOptions),
            query: queryOptions.query,
          });
          const [rows] = await job.getQueryResults(this.parseJSON(queryOptions.queryResultsOptions));
          result = rows;
          break;
        }

        case 'delete_record': {
          const query = `DELETE FROM ${queryOptions.datasetId}.${queryOptions.tableId} ${
            queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'
          }`;
          const [job] = await client.createQueryJob({
            ...this.parseJSON(queryOptions.queryOptions),
            query: query,
          });
          const [rows] = await job.getQueryResults(this.parseJSON(queryOptions.queryResultsOptions));
          result = rows;
          break;
        }

        case 'insert_record': {
          const rows = await client
            .dataset(queryOptions.datasetId)
            .table(queryOptions.tableId)
            .insert(this.parseJSON(queryOptions.rows));
          result = { ...rows[0], records: (this.parseJSON(queryOptions.rows) as []).length };
          break;
        }

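        // Illustrative example (hypothetical names): with datasetId 'sales', tableId 'orders',
        // columns { status: 'shipped' }, and condition "order_id = 42", the generated SQL is:
        //   UPDATE sales.orders SET status='shipped' WHERE order_id = 42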
        case 'update_record': {
          const columnString = await this.columnBuilder(queryOptions);
          const query = `UPDATE ${queryOptions.datasetId}.${queryOptions.tableId} SET ${columnString} ${
            queryOptions.condition ? `WHERE ${queryOptions.condition}` : 'WHERE TRUE'
          }`;

          const [job] = await client.createQueryJob({
            ...this.parseJSON(queryOptions.queryOptions),
            query: query,
          });
          const [rows] = await job.getQueryResults(this.parseJSON(queryOptions.queryResultsOptions));
          result = rows;
          break;
        }
      }
    } catch (error) {
      console.log(error);
      const errorMessage = error.message || 'An unknown error occurred.';
      const errorDetails: any = {};

      const errorSuggestions = {
        notFound: 'Check if the table or dataset exists in the specified location.',
        accessDenied: 'Verify that the service account has the necessary permissions.',
        invalidQuery: 'Check the SQL syntax and ensure that all referenced columns exist.',
        rateLimitExceeded: 'You are making too many requests. Try again after some time.',
        backendError: 'BigQuery encountered an internal error. Retry the request after some time.',
        quotaExceeded: 'You have exceeded your quota limits. Consider upgrading your plan or reducing query size.',
        duplicate: 'A resource with this name already exists. Try using a different name.',
        badRequest: 'Check the request parameters and ensure they are correctly formatted.',
      };

      if (error && error instanceof Error) {
        const bigqueryError = error as any;
        errorDetails.error = bigqueryError;

        const reason = bigqueryError.response?.status?.errorResult?.reason || 'unknownError';
        errorDetails.reason = reason;
        errorDetails.message = errorMessage;
        errorDetails.jobId = bigqueryError.response?.jobReference?.jobId;
        errorDetails.location = bigqueryError.response?.jobReference?.location;
        errorDetails.query = bigqueryError.response?.configuration?.query?.query;

        const suggestion = errorSuggestions[reason];
        errorDetails.suggestion = suggestion;
      }

      throw new QueryError('Query could not be completed', errorMessage, errorDetails);
    }

    return {
      status: 'ok',
      data: result,
    };
  }

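  // Converts queryOptions.columns into the comma-separated SET clause fragment of an
  // UPDATE statement, quoting string values as SQL literals.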
  async columnBuilder(queryOptions: any): Promise<string> {
    const columnString = [];
    const columns = queryOptions.columns;
    for (const [key, value] of Object.entries(columns)) {
      // Quote string values; leave numbers and booleans as-is.
      const formattedValue = typeof value === 'string' ? `'${value}'` : value;
      columnString.push(`${key}=${formattedValue}`);
    }
    return columnString.join(',');
  }

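  // Handles dynamic method calls ('listDatasets', 'listTables', 'getTables'), used to
  // populate dataset and table dropdowns; results are shaped as { label, value } options,
  // with pagination metadata when a limit is supplied.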
  async invokeMethod(
    methodName: string,
    _context: { user?: any; app?: any },
    sourceOptions: SourceOptions,
    args?: any
  ): Promise<any> {
    if (methodName === 'listDatasets') {
      return await this._fetchDatasets(sourceOptions, args?.search, args?.page, args?.limit);
    }

    if (methodName === 'listTables') {
      const datasetId = args?.values?.datasetId || '';
      return await this._fetchTables(sourceOptions, datasetId, args?.search, args?.page, args?.limit);
    }

    if (methodName === 'getTables') {
      const datasetId = args?.values?.datasetId || '';
      const isPaginated = !!args?.limit;

      const result = await this.listTables(sourceOptions, '', '', {
        datasetId,
        search: args?.search,
        page: args?.page,
        limit: args?.limit,
      });

      const payload = (result as any)?.data ?? [];

      if (isPaginated) {
        const rows = (payload as any)?.rows ?? [];
        const totalCount = (payload as any)?.totalCount ?? 0;
        const formattedTables = rows.map((row: any) => ({
          label: String(row.table_name),
          value: String(row.table_name),
          dataset_id: String(row.dataset_id),
        }));
        return { items: formattedTables, totalCount };
      }

      const rows = Array.isArray(payload) ? payload : [];
      const formattedTables = rows.map((row: any) => ({
        label: String(row.table_name),
        value: String(row.table_name),
        dataset_id: String(row.dataset_id),
      }));

      return { status: 'ok', data: formattedTables };
    }

    throw new QueryError('Method not found', `Method ${methodName} is not supported`, {});
  }

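  // Lists tables from one dataset (when datasetId is given) or from every dataset in the
  // project, with optional case-insensitive search and page/limit pagination.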
  async listTables(
    sourceOptions: SourceOptions,
    dataSourceId: string,
    dataSourceUpdatedAt: string,
    queryOptions?: { datasetId?: string; search?: string; page?: number; limit?: number }
  ): Promise<QueryResult> {
    try {
      const client = await this.getConnection(sourceOptions);
      const search = queryOptions?.search || '';
      const page = queryOptions?.page || 1;
      const limit = queryOptions?.limit;
      const datasetId = queryOptions?.datasetId || '';

      // Decide which datasets to query
      let datasetIds: string[] = [];
      if (datasetId) {
        datasetIds = [datasetId];
      } else {
        // No datasetId provided; fetch from all datasets
        const [datasets] = await client.getDatasets();
        datasetIds = datasets.map((d: any) => d.id);
      }

      // Fetch tables from all datasets in parallel
      const allTablesRaw = await Promise.all(
        datasetIds.map(async (dsId: string) => {
          const [tables] = await client.dataset(dsId).getTables();
          return tables.map((t: any) => ({
            table_name: t.id,
            dataset_id: dsId,
          }));
        })
      );

      // Flatten all tables into a single array
      let allTables = allTablesRaw.flat();

      // Apply search across all tables
      if (search) {
        const searchLower = search.toLowerCase();
        allTables = allTables.filter((t) => t.table_name.toLowerCase().includes(searchLower));
      }

      const totalCount = allTables.length;

      // Apply pagination
      if (limit) {
        const offset = (page - 1) * limit;
        const paged = allTables.slice(offset, offset + limit);
        return {
          status: 'ok',
          data: { rows: paged, totalCount },
        };
      }

      return {
        status: 'ok',
        data: allTables,
      };
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch tables', errorMessage, {});
    }
  }

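  // Returns dataset ids as { value, label } options, optionally filtered by search and
  // paginated (items plus totalCount) when a limit is provided.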
  private async _fetchDatasets(
    sourceOptions: SourceOptions,
    search = '',
    page?: number,
    limit?: number
  ): Promise<Array<{ value: string; label: string }> | { items: Array<{ value: string; label: string }>; totalCount: number }> {
    try {
      const client = await this.getConnection(sourceOptions);
      const [datasets] = await client.getDatasets();

      const searchLower = search.toLowerCase();
      const filtered = search ? datasets.filter((d: any) => d.id.toLowerCase().includes(searchLower)) : datasets;
      const totalCount = filtered.length;

      if (limit) {
        const offset = ((page || 1) - 1) * limit;
        const paged = filtered.slice(offset, offset + limit);
        return {
          items: paged.map((d: any) => ({ value: d.id, label: d.id })),
          totalCount,
        };
      }

      return filtered.map((d: any) => ({ value: d.id, label: d.id }));
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch datasets', errorMessage, {});
    }
  }

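  // Returns the tables of a single dataset as option objects
  // ({ value, label, table_name, dataset_id }), with the same search and pagination behavior.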
  private async _fetchTables(
    sourceOptions: SourceOptions,
    datasetId: string,
    search = '',
    page?: number,
    limit?: number
  ): Promise<Array<{ value: string; label: string }> | { items: Array<{ value: string; label: string }>; totalCount: number }> {
    try {
      const client = await this.getConnection(sourceOptions);
      const [tables] = await client.dataset(datasetId).getTables();

      const searchLower = search.toLowerCase();
      const filtered = search ? tables.filter((t: any) => t.id.toLowerCase().includes(searchLower)) : tables;
      const totalCount = filtered.length;

      if (limit) {
        const offset = ((page || 1) - 1) * limit;
        const paged = filtered.slice(offset, offset + limit);
        return {
          items: paged.map((t: any) => ({ value: t.id, label: t.id, table_name: t.id, dataset_id: datasetId })),
          totalCount,
        };
      }

      return filtered.map((t: any) => ({ value: t.id, label: t.id, table_name: t.id, dataset_id: datasetId }));
    } catch (error) {
      const errorMessage = error.message || 'An unknown error occurred';
      throw new QueryError('Could not fetch tables', errorMessage, {});
    }
  }

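  // Creates a BigQuery client from the service-account key JSON configured on the data source.
  // Custom OAuth scopes (e.g. 'https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/drive')
  // are forwarded only when the user configured them.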
  async getConnection(sourceOptions: any, _options?: object): Promise<any> {
    const privateKey = this.getPrivateKey(sourceOptions?.private_key);

    // Optional OAuth scopes can be supplied as a whitespace-separated string.
    let scopes: string[] = [];
    if (sourceOptions?.scope) {
      scopes = typeof sourceOptions?.scope === 'string' ? sourceOptions?.scope.trim().split(/\s+/).filter(Boolean) : [];
    }

    return new BigQuery({
      projectId: privateKey?.project_id,
      credentials: {
        client_email: privateKey?.client_email,
        private_key: privateKey?.private_key,
      },
      ...(scopes.length > 0 ? { scopes: scopes } : {}),
    });
  }

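  // Verifies the credentials by constructing a client and listing datasets for the project.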
  async testConnection(sourceOptions: SourceOptions): Promise<ConnectionTestResult> {
    const privateKey = this.getPrivateKey(sourceOptions?.private_key);
    let scopes: string[] = [];
    if (sourceOptions?.scope) {
      scopes = typeof sourceOptions?.scope === 'string' ? sourceOptions?.scope.trim().split(/\s+/).filter(Boolean) : [];
    }

    const client = new BigQuery({
      projectId: privateKey?.project_id,
      credentials: {
        client_email: privateKey?.client_email,
        private_key: privateKey?.private_key,
      },
      ...(scopes.length > 0 ? { scopes: scopes } : {}),
    });

    if (!client) {
      throw new Error('Invalid credentials');
    }

    // Issue a lightweight call to confirm the credentials can reach the project.
    await client.getDatasets();

    return {
      status: 'ok',
    };
  }

  private parseJSON(json?: string): object {
    if (!json) return {};
    // JSON5 tolerates unquoted keys, trailing commas, and comments in user-supplied options.
    return JSON5.parse(json);
  }

  private getPrivateKey(configs?: string): {
    project_id?: string;
    client_email?: string;
    private_key?: string;
  } {
    return this.parseJSON(configs);
  }

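  // Picks only the requested fields from BigQuery API objects (dot paths resolved with
  // lodash's _.result), recursing over arrays; a single pick field returns the bare value.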
  private sanitizeResponse(response: object | [], pickFields: string[]): object | [] {
    if (!response) return response;

    if (Array.isArray(response)) {
      return response.map((item) => this.sanitizeResponse(item, pickFields));
    }

    const pickedKeyValue = pickFields.map((field) => _.result(response, field));

    if (pickedKeyValue.length === 1) {
      return pickedKeyValue[0];
    }

    return pickedKeyValue;
  }
}