fix(editor): Add data to a data table by csv upload (#26495)

Co-authored-by: Ricardo Espinoza <ricardo@n8n.io>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Committed by Nikhil Kuriakose on 2026-03-18 00:56:57 +05:30 (via GitHub).
parent 4eb815c4ee
commit 5fa8855a1f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 1403 additions and 204 deletions

View file

@ -21,6 +21,7 @@ const esmDependencies = [
'jose',
'p-retry',
'is-network-error',
'uuid',
// Add other ESM dependencies that need to be transformed here
];

View file

@ -0,0 +1,7 @@
import { z } from 'zod';
import { Z } from '../../zod-class';
/**
 * Request payload for importing CSV data into a data table.
 * `fileId` must be a non-empty string — presumably the id of a previously
 * uploaded CSV file on the server (TODO confirm against the upload endpoint).
 */
export class ImportCsvToDataTableDto extends Z.class({
	fileId: z.string().min(1),
}) {}

View file

@ -148,6 +148,7 @@ export { AddDataTableColumnDto } from './data-table/add-data-table-column.dto';
export { MoveDataTableColumnDto } from './data-table/move-data-table-column.dto';
export { RenameDataTableColumnDto } from './data-table/rename-data-table-column.dto';
export { DownloadDataTableCsvQueryDto } from './data-table/download-data-table-csv-query.dto';
export { ImportCsvToDataTableDto } from './data-table/import-csv-to-data-table.dto';
export {
OAuthClientResponseDto,

View file

@ -32,6 +32,7 @@ export const LOG_SCOPES = [
'source-control',
'dynamic-credentials',
'workflow-history-compaction',
'data-table-csv-import',
'ssrf-protection',
] as const;

View file

@ -1,78 +1,144 @@
import { testModules } from '@n8n/backend-test-utils';
import type { GlobalConfig } from '@n8n/config';
import { mkdtempSync, writeFileSync, rmSync } from 'fs';
import { mock } from 'jest-mock-extended';
import { tmpdir } from 'os';
import { join } from 'path';
import { CsvParserService } from '../csv-parser.service';
beforeAll(async () => {
await testModules.loadModules(['data-table']);
});
describe('CsvParserService', () => {
let tempDir: string;
let csvParserService: CsvParserService;
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), 'csv-test-'));
const globalConfig = mock<GlobalConfig>({
dataTable: { uploadDir: tempDir },
});
csvParserService = new CsvParserService(globalConfig);
});
afterEach(() => {
rmSync(tempDir, { recursive: true, force: true });
});
const writeCsv = (filename: string, content: string) => {
writeFileSync(join(tempDir, filename), content);
return filename;
};
describe('parseFile', () => {
it('should not allow path traversal when parsing CSV file metadata', async () => {
const globalConfig = mock<GlobalConfig>({
dataTable: {
uploadDir: '/safe/upload/dir',
},
dataTable: { uploadDir: '/safe/upload/dir' },
});
const service = new CsvParserService(globalConfig);
const csvParserService = new CsvParserService(globalConfig);
const maliciousFileId = '../some/other/directory/malicious-file.csv';
await expect(csvParserService.parseFile(maliciousFileId)).rejects.toThrowError(
'Path traversal detected',
);
await expect(
service.parseFile('../some/other/directory/malicious-file.csv'),
).rejects.toThrowError('Path traversal detected');
});
it('should try to access file if it is within upload directory', async () => {
const globalConfig = mock<GlobalConfig>({
dataTable: {
uploadDir: '/safe/upload/dir',
},
});
it('should parse CSV with headers and infer column types', async () => {
const fileId = writeCsv('test.csv', 'name,age,active\nAlice,30,true\nBob,25,false\n');
const csvParserService = new CsvParserService(globalConfig);
const safeFileId = 'valid-file.csv';
const result = await csvParserService.parseFile(fileId, true);
// Since we are not actually testing file reading here, just ensure no error is thrown
await expect(csvParserService.parseFile(safeFileId)).rejects.toThrowError(
"ENOENT: no such file or directory, open '/safe/upload/dir/valid-file.csv",
);
expect(result.rowCount).toBe(2);
expect(result.columnCount).toBe(3);
expect(result.columns).toEqual([
expect.objectContaining({ name: 'name', type: 'string' }),
expect.objectContaining({ name: 'age', type: 'number' }),
expect.objectContaining({ name: 'active', type: 'boolean' }),
]);
});
it('should trim header names', async () => {
const fileId = writeCsv('test.csv', ' name , age \nAlice,30\n');
const result = await csvParserService.parseFile(fileId, true);
expect(result.columns[0].name).toBe('name');
expect(result.columns[1].name).toBe('age');
});
it('should generate column names when no headers', async () => {
const fileId = writeCsv('test.csv', 'Alice,30,true\nBob,25,false\n');
const result = await csvParserService.parseFile(fileId, false);
expect(result.rowCount).toBe(2);
expect(result.columns[0].name).toBe('Column_1');
expect(result.columns[1].name).toBe('Column_2');
expect(result.columns[2].name).toBe('Column_3');
});
});
describe('parseFileData', () => {
it('should not allow path traversal when parsing CSV file data', async () => {
const globalConfig = mock<GlobalConfig>({
dataTable: {
uploadDir: '/safe/upload/dir',
},
dataTable: { uploadDir: '/safe/upload/dir' },
});
const service = new CsvParserService(globalConfig);
const csvParserService = new CsvParserService(globalConfig);
const maliciousFileId = '../some/other/directory/malicious-file.csv';
await expect(csvParserService.parseFileData(maliciousFileId)).rejects.toThrowError(
'Path traversal detected',
);
await expect(
service.parseFileData('../some/other/directory/malicious-file.csv'),
).rejects.toThrowError('Path traversal detected');
});
it('should try to access file if it is within upload directory', async () => {
const globalConfig = mock<GlobalConfig>({
dataTable: {
uploadDir: '/safe/upload/dir',
},
});
it('should return rows as objects with header keys', async () => {
const fileId = writeCsv('test.csv', 'name,age\nAlice,30\nBob,25\n');
const csvParserService = new CsvParserService(globalConfig);
const safeFileId = 'valid-file.csv';
const rows = await csvParserService.parseFileData(fileId, true);
// Since we are not actually testing file reading here, just ensure no error is thrown
await expect(csvParserService.parseFileData(safeFileId)).rejects.toThrowError(
"ENOENT: no such file or directory, open '/safe/upload/dir/valid-file.csv",
);
expect(rows).toEqual([
{ name: 'Alice', age: '30' },
{ name: 'Bob', age: '25' },
]);
});
it('should return rows with generated column names when no headers', async () => {
const fileId = writeCsv('test.csv', 'Alice,30\nBob,25\n');
const rows = await csvParserService.parseFileData(fileId, false);
expect(rows).toEqual([
{ Column_1: 'Alice', Column_2: '30' },
{ Column_1: 'Bob', Column_2: '25' },
]);
});
});
describe('parseFileWithData', () => {
it('should return both metadata and rows in a single pass', async () => {
const fileId = writeCsv('test.csv', 'name,age\nAlice,30\nBob,25\n');
const result = await csvParserService.parseFileWithData(fileId, true);
expect(result.metadata.rowCount).toBe(2);
expect(result.metadata.columnCount).toBe(2);
expect(result.metadata.columns).toEqual([
{ name: 'name', type: 'string' },
{ name: 'age', type: 'string' },
]);
expect(result.rows).toEqual([
{ name: 'Alice', age: '30' },
{ name: 'Bob', age: '25' },
]);
});
it('should handle CSV without headers', async () => {
const fileId = writeCsv('test.csv', 'Alice,30\nBob,25\n');
const result = await csvParserService.parseFileWithData(fileId, false);
expect(result.metadata.columns[0].name).toBe('Column_1');
expect(result.rows[0]).toEqual({ Column_1: 'Alice', Column_2: '30' });
});
});
});

View file

@ -0,0 +1,271 @@
import { mockInstance, testModules } from '@n8n/backend-test-utils';
import { Logger } from '@n8n/backend-common';
import type { DataTableColumn } from '../data-table-column.entity';
import { DataTableCsvImportService } from '../data-table-csv-import.service';
import { CsvParserService } from '../csv-parser.service';
import { DataTableFileCleanupService } from '../data-table-file-cleanup.service';
import { DataTableValidationError } from '../errors/data-table-validation.error';
import { FileUploadError } from '../errors/data-table-file-upload.error';
// Unit tests for DataTableCsvImportService. All collaborators (CSV parser,
// file cleanup service, logger) are mocked, so these tests exercise only the
// column matching / row transformation logic of the service itself.
describe('DataTableCsvImportService', () => {
	let service: DataTableCsvImportService;
	let mockCsvParserService: jest.Mocked<CsvParserService>;
	let mockFileCleanupService: jest.Mocked<DataTableFileCleanupService>;
	let mockLogger: jest.Mocked<Logger>;

	beforeAll(async () => {
		await testModules.loadModules(['data-table']);
	});

	beforeEach(() => {
		mockCsvParserService = mockInstance(CsvParserService);
		mockFileCleanupService = mockInstance(DataTableFileCleanupService);
		mockLogger = mockInstance(Logger);
		// The service calls logger.scoped(...) in its constructor; return the
		// same mock so scoped logging still lands on mockLogger.
		mockLogger.scoped = jest.fn().mockReturnValue(mockLogger);
		service = new DataTableCsvImportService(
			mockCsvParserService,
			mockFileCleanupService,
			mockLogger,
		);
		// NOTE(review): clearAllMocks() wipes call history from construction but
		// keeps mockReturnValue implementations, so the scoped() stub survives.
		jest.clearAllMocks();
	});

	describe('validateAndBuildRowsForExistingTable', () => {
		const fileId = 'test-file-id';
		// Existing table schema the CSV is validated against: name, age, email.
		const tableColumns: DataTableColumn[] = [
			{
				id: 'col-1',
				name: 'name',
				type: 'string',
				index: 0,
				dataTableId: 'dt-1',
			} as DataTableColumn,
			{
				id: 'col-2',
				name: 'age',
				type: 'number',
				index: 1,
				dataTableId: 'dt-1',
			} as DataTableColumn,
			{
				id: 'col-3',
				name: 'email',
				type: 'string',
				index: 2,
				dataTableId: 'dt-1',
			} as DataTableColumn,
		];

		it('should transform matching CSV columns into rows', async () => {
			mockCsvParserService.parseFileWithData.mockResolvedValue({
				metadata: {
					rowCount: 2,
					columnCount: 3,
					columns: [
						{ name: 'name', type: 'string' },
						{ name: 'age', type: 'number' },
						{ name: 'email', type: 'string' },
					],
				},
				rows: [
					{ name: 'Alice', age: '30', email: 'alice@test.com' },
					{ name: 'Bob', age: '25', email: 'bob@test.com' },
				],
			});

			const result = await service.validateAndBuildRowsForExistingTable(fileId, tableColumns);

			expect(result.rows).toHaveLength(2);
			expect(result.rows[0]).toEqual({ name: 'Alice', age: '30', email: 'alice@test.com' });
			expect(result.systemColumnsIgnored).toEqual([]);
		});

		it('should ignore system columns and return them in systemColumnsIgnored', async () => {
			mockCsvParserService.parseFileWithData.mockResolvedValue({
				metadata: {
					rowCount: 1,
					columnCount: 4,
					columns: [
						{ name: 'id', type: 'number' },
						{ name: 'name', type: 'string' },
						{ name: 'createdAt', type: 'date' },
						{ name: 'updatedAt', type: 'date' },
					],
				},
				rows: [{ id: '1', name: 'Alice', createdAt: '2024-01-01', updatedAt: '2024-01-01' }],
			});

			const result = await service.validateAndBuildRowsForExistingTable(fileId, tableColumns);

			expect(result.rows).toHaveLength(1);
			// System column values must not leak into the imported row.
			expect(result.rows[0]).toEqual({ name: 'Alice' });
			expect(result.systemColumnsIgnored).toEqual(['id', 'createdAt', 'updatedAt']);
		});

		it('should throw DataTableValidationError when CSV has unrecognized columns', async () => {
			mockCsvParserService.parseFileWithData.mockResolvedValue({
				metadata: {
					rowCount: 1,
					columnCount: 3,
					columns: [
						{ name: 'name', type: 'string' },
						{ name: 'unknown_col', type: 'string' },
						{ name: 'another_unknown', type: 'string' },
					],
				},
				rows: [{ name: 'Alice', unknown_col: 'x', another_unknown: 'y' }],
			});

			// Checked twice: once for the error type, once for the message text.
			await expect(
				service.validateAndBuildRowsForExistingTable(fileId, tableColumns),
			).rejects.toThrow(DataTableValidationError);
			await expect(
				service.validateAndBuildRowsForExistingTable(fileId, tableColumns),
			).rejects.toThrow(
				'CSV contains columns not found in the data table: unknown_col, another_unknown',
			);
		});

		it('should throw DataTableValidationError when zero non-system columns match', async () => {
			mockCsvParserService.parseFileWithData.mockResolvedValue({
				metadata: {
					rowCount: 1,
					columnCount: 2,
					columns: [
						{ name: 'id', type: 'number' },
						{ name: 'createdAt', type: 'date' },
					],
				},
				rows: [{ id: '1', createdAt: '2024-01-01' }],
			});

			await expect(
				service.validateAndBuildRowsForExistingTable(fileId, tableColumns),
			).rejects.toThrow(DataTableValidationError);
			await expect(
				service.validateAndBuildRowsForExistingTable(fileId, tableColumns),
			).rejects.toThrow('No matching columns found');
		});

		it('should convert empty and missing CSV values to null', async () => {
			mockCsvParserService.parseFileWithData.mockResolvedValue({
				metadata: {
					rowCount: 2,
					columnCount: 2,
					columns: [
						{ name: 'name', type: 'string' },
						{ name: 'age', type: 'number' },
					],
				},
				// Second row omits 'age' entirely; both shapes must normalize to null.
				rows: [{ name: 'Alice', age: '' }, { name: '' }],
			});

			const result = await service.validateAndBuildRowsForExistingTable(fileId, tableColumns);

			expect(result.rows[0]).toEqual({ name: 'Alice', age: null });
			expect(result.rows[1]).toEqual({ name: null, age: null });
		});

		it('should wrap unexpected errors in FileUploadError', async () => {
			mockCsvParserService.parseFileWithData.mockRejectedValue(new Error('disk read failed'));

			await expect(
				service.validateAndBuildRowsForExistingTable(fileId, tableColumns),
			).rejects.toThrow(FileUploadError);
		});
	});

	describe('buildRowsForNewTable', () => {
		const fileId = 'test-file-id';
		// Freshly created table schema; note the display-style column names.
		const tableColumns: DataTableColumn[] = [
			{
				id: 'col-1',
				name: 'First Name',
				type: 'string',
				index: 0,
				dataTableId: 'dt-1',
			} as DataTableColumn,
			{
				id: 'col-2',
				name: 'Age',
				type: 'number',
				index: 1,
				dataTableId: 'dt-1',
			} as DataTableColumn,
		];

		it('should map CSV columns by index when no csvColumnName provided', async () => {
			mockCsvParserService.parseFile.mockResolvedValue({
				rowCount: 1,
				columnCount: 2,
				columns: [
					{ name: 'name', type: 'string' },
					{ name: 'age', type: 'number' },
				],
			});
			mockCsvParserService.parseFileData.mockResolvedValue([{ name: 'Alice', age: '30' }]);

			const result = await service.buildRowsForNewTable(fileId, true, tableColumns);

			expect(result).toHaveLength(1);
			// CSV column 0 -> 'First Name', CSV column 1 -> 'Age'.
			expect(result[0]).toEqual({ 'First Name': 'Alice', Age: '30' });
		});

		it('should map CSV columns by csvColumnName when provided', async () => {
			const dtoColumns = [
				{ name: 'First Name', type: 'string' as const, csvColumnName: 'csv_name' },
				{ name: 'Age', type: 'number' as const, csvColumnName: 'csv_age' },
			];
			// parseFile is not stubbed here: the explicit-name path must not need it.
			mockCsvParserService.parseFileData.mockResolvedValue([{ csv_name: 'Alice', csv_age: '30' }]);

			const result = await service.buildRowsForNewTable(fileId, true, tableColumns, dtoColumns);

			expect(result).toHaveLength(1);
			expect(result[0]).toEqual({ 'First Name': 'Alice', Age: '30' });
		});

		it('should convert empty and missing CSV values to null', async () => {
			mockCsvParserService.parseFile.mockResolvedValue({
				rowCount: 1,
				columnCount: 2,
				columns: [
					{ name: 'name', type: 'string' },
					{ name: 'age', type: 'number' },
				],
			});
			mockCsvParserService.parseFileData.mockResolvedValue([
				{ name: 'Alice', age: '' },
				{ name: '' },
			]);

			const result = await service.buildRowsForNewTable(fileId, true, tableColumns);

			expect(result[0]).toEqual({ 'First Name': 'Alice', Age: null });
			expect(result[1]).toEqual({ 'First Name': null, Age: null });
		});

		it('should wrap unexpected errors in FileUploadError', async () => {
			mockCsvParserService.parseFile.mockRejectedValue(new Error('file not found'));

			await expect(service.buildRowsForNewTable(fileId, true, tableColumns)).rejects.toThrow(
				FileUploadError,
			);
		});
	});

	describe('cleanupFile', () => {
		it('should delegate to file cleanup service', async () => {
			mockFileCleanupService.deleteFile.mockResolvedValue(undefined);

			await service.cleanupFile('test-file-id');

			expect(mockFileCleanupService.deleteFile).toHaveBeenCalledWith('test-file-id');
		});
	});
});

View file

@ -4,10 +4,9 @@ import { Logger } from '@n8n/backend-common';
import { ProjectRelationRepository, type User } from '@n8n/db';
import type { DataTableInfoById } from 'n8n-workflow';
import { CsvParserService } from '../csv-parser.service';
import type { DataTableColumn } from '../data-table-column.entity';
import { DataTableColumnRepository } from '../data-table-column.repository';
import { DataTableFileCleanupService } from '../data-table-file-cleanup.service';
import { DataTableCsvImportService } from '../data-table-csv-import.service';
import { DataTableRowsRepository } from '../data-table-rows.repository';
import { DataTableSizeValidator } from '../data-table-size-validator.service';
import type { DataTable } from '../data-table.entity';
@ -15,6 +14,7 @@ import { DataTableRepository } from '../data-table.repository';
import { DataTableService } from '../data-table.service';
import { DataTableColumnNotFoundError } from '../errors/data-table-column-not-found.error';
import { DataTableNotFoundError } from '../errors/data-table-not-found.error';
import { DataTableValidationError } from '../errors/data-table-validation.error';
import { RoleService } from '@/services/role.service';
describe('DataTableService', () => {
@ -26,8 +26,7 @@ describe('DataTableService', () => {
let mockDataTableSizeValidator: jest.Mocked<DataTableSizeValidator>;
let mockProjectRelationRepository: jest.Mocked<ProjectRelationRepository>;
let mockRoleService: jest.Mocked<RoleService>;
let mockCsvParserService: jest.Mocked<CsvParserService>;
let mockFileCleanupService: jest.Mocked<DataTableFileCleanupService>;
let mockCsvImportService: jest.Mocked<DataTableCsvImportService>;
beforeAll(async () => {
await testModules.loadModules(['data-table']);
@ -41,8 +40,7 @@ describe('DataTableService', () => {
mockDataTableSizeValidator = mockInstance(DataTableSizeValidator);
mockProjectRelationRepository = mockInstance(ProjectRelationRepository);
mockRoleService = mockInstance(RoleService);
mockCsvParserService = mockInstance(CsvParserService);
mockFileCleanupService = mockInstance(DataTableFileCleanupService);
mockCsvImportService = mockInstance(DataTableCsvImportService);
// Mock the logger.scoped method to return the logger itself
mockLogger.scoped = jest.fn().mockReturnValue(mockLogger);
@ -55,8 +53,7 @@ describe('DataTableService', () => {
mockDataTableSizeValidator,
mockProjectRelationRepository,
mockRoleService,
mockCsvParserService,
mockFileCleanupService,
mockCsvImportService,
);
jest.clearAllMocks();
@ -472,4 +469,133 @@ describe('DataTableService', () => {
);
});
});
describe('importCsvToExistingTable', () => {
const projectId = 'test-project-id';
const dataTableId = 'test-data-table-id';
const fileId = 'test-file-id';
const mockDataTable: DataTable = {
id: dataTableId,
name: 'Test Table',
projectId,
} as DataTable;
const tableColumns: DataTableColumn[] = [
{ id: 'col-1', name: 'name', type: 'string', index: 0, dataTableId } as DataTableColumn,
{ id: 'col-2', name: 'age', type: 'number', index: 1, dataTableId } as DataTableColumn,
{ id: 'col-3', name: 'email', type: 'string', index: 2, dataTableId } as DataTableColumn,
];
beforeEach(() => {
mockDataTableSizeValidator.validateSize.mockResolvedValue(undefined);
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.getColumns.mockResolvedValue(tableColumns);
mockCsvImportService.cleanupFile.mockResolvedValue(undefined);
// Mock insertRows transaction
Object.defineProperty(mockDataTableColumnRepository, 'manager', {
value: {
transaction: jest.fn(async (fn) => fn({} as any)),
},
writable: true,
configurable: true,
});
mockDataTableRowsRepository.insertRows.mockResolvedValue({
success: true,
insertedRows: 2,
});
mockDataTableSizeValidator.reset = jest.fn();
mockDataTableRepository.touchUpdatedAt.mockResolvedValue(undefined);
});
it('should import rows returned by csv import service', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockResolvedValue({
rows: [
{ name: 'Alice', age: '30', email: 'alice@test.com' },
{ name: 'Bob', age: '25', email: 'bob@test.com' },
],
systemColumnsIgnored: [],
});
const result = await dataTableService.importCsvToExistingTable(
dataTableId,
projectId,
fileId,
);
expect(result.importedRowCount).toBe(2);
expect(result.systemColumnsIgnored).toEqual([]);
expect(mockCsvImportService.validateAndBuildRowsForExistingTable).toHaveBeenCalledWith(
fileId,
tableColumns,
);
});
it('should pass through systemColumnsIgnored from csv import service', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockResolvedValue({
rows: [{ name: 'Alice' }],
systemColumnsIgnored: ['id', 'createdAt', 'updatedAt'],
});
const result = await dataTableService.importCsvToExistingTable(
dataTableId,
projectId,
fileId,
);
expect(result.importedRowCount).toBe(1);
expect(result.systemColumnsIgnored).toEqual(['id', 'createdAt', 'updatedAt']);
});
it('should propagate validation errors from csv import service', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockRejectedValue(
new DataTableValidationError('CSV contains columns not found in the data table'),
);
await expect(
dataTableService.importCsvToExistingTable(dataTableId, projectId, fileId),
).rejects.toThrow(DataTableValidationError);
expect(mockCsvImportService.cleanupFile).toHaveBeenCalledWith(fileId);
});
it('should skip insertRows when csv import service returns 0 rows', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockResolvedValue({
rows: [],
systemColumnsIgnored: [],
});
const result = await dataTableService.importCsvToExistingTable(
dataTableId,
projectId,
fileId,
);
expect(result.importedRowCount).toBe(0);
expect(mockDataTableRowsRepository.insertRows).not.toHaveBeenCalled();
});
it('should clean up file after successful import', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockResolvedValue({
rows: [{ name: 'Alice' }],
systemColumnsIgnored: [],
});
await dataTableService.importCsvToExistingTable(dataTableId, projectId, fileId);
expect(mockCsvImportService.cleanupFile).toHaveBeenCalledWith(fileId);
});
it('should clean up file even when import fails', async () => {
mockCsvImportService.validateAndBuildRowsForExistingTable.mockRejectedValue(
new Error('parse error'),
);
await expect(
dataTableService.importCsvToExistingTable(dataTableId, projectId, fileId),
).rejects.toThrow();
expect(mockCsvImportService.cleanupFile).toHaveBeenCalledWith(fileId);
});
});
});

View file

@ -25,92 +25,127 @@ export class CsvParserService {
this.uploadDir = this.globalConfig.dataTable.uploadDir;
}
private processRowWithoutHeaders(
row: string[],
columnNames: string[],
): { rowObject: Record<string, string>; columnNames: string[] } {
let updatedColumnNames = columnNames;
if (updatedColumnNames.length === 0) {
updatedColumnNames = row.map((_, index) => `${this.DEFAULT_COLUMN_PREFIX}${index + 1}`);
}
private generateColumnNames(columnCount: number): string[] {
return Array.from({ length: columnCount }, (_, i) => `${this.DEFAULT_COLUMN_PREFIX}${i + 1}`);
}
private mapValuesToColumns(row: string[], columnNames: string[]): Record<string, string> {
const rowObject: Record<string, string> = {};
row.forEach((value, index) => {
rowObject[updatedColumnNames[index]] = value;
rowObject[columnNames[index]] = value;
});
return { rowObject, columnNames: updatedColumnNames };
return rowObject;
}
private readonly TYPE_INFERENCE_SAMPLE_SIZE = 100;
private createParserOptions(hasHeaders: boolean) {
return {
columns: hasHeaders ? true : (false as const),
skip_empty_lines: true,
bom: true,
};
}
private trimColumnNames(columns: string[]): string[] {
return columns.map((h) => h.trim());
}
private normalizeRow(
row: Record<string, string> | string[],
hasHeaders: boolean,
columnNames: string[],
): Record<string, string> | null {
if (!hasHeaders && Array.isArray(row)) {
return this.mapValuesToColumns(row, columnNames);
} else if (!Array.isArray(row)) {
return row;
}
return null;
}
private collectTypeSamples(
rowObject: Record<string, string>,
columnNames: string[],
firstNonEmptyValues: Map<string, string>,
) {
for (const colName of columnNames) {
if (!firstNonEmptyValues.has(colName)) {
const value = rowObject[colName];
if (value?.trim()) {
firstNonEmptyValues.set(colName, value);
}
}
}
}
private buildColumnMetadata(columnNames: string[], firstNonEmptyValues: Map<string, string>) {
return columnNames.map((columnName) => {
const detectedType = this.inferColumnType(firstNonEmptyValues.get(columnName));
return {
name: columnName,
type: detectedType,
compatibleTypes: this.getCompatibleTypes(detectedType),
};
});
}
private async parseCsvFile<T>(
fileId: string,
hasHeaders: boolean,
onRow: (rowObject: Record<string, string>, columnNames: string[], rowNumber: number) => void,
onEnd: (columnNames: string[], totalRows: number) => T,
): Promise<T> {
const filePath = safeJoinPath(this.uploadDir, fileId);
let columnNames: string[] = [];
let rowCount = 0;
return await new Promise((resolve, reject) => {
const parser = parse({
...this.createParserOptions(hasHeaders),
...(hasHeaders && {
columns: (header: string[]) => {
columnNames = this.trimColumnNames(header);
return columnNames;
},
}),
})
.on('data', (row: Record<string, string> | string[]) => {
rowCount++;
if (!hasHeaders && Array.isArray(row) && columnNames.length === 0) {
columnNames = this.generateColumnNames(row.length);
}
const rowObject = this.normalizeRow(row, hasHeaders, columnNames);
if (!rowObject) return;
onRow(rowObject, columnNames, rowCount);
})
.on('end', () => resolve(onEnd(columnNames, rowCount)))
.on('error', reject);
createReadStream(filePath).on('error', reject).pipe(parser);
});
}
/**
* Parses a CSV file and returns metadata including row count, column count, and inferred column types.
* Samples up to 100 rows to find the first non-empty value per column for type inference.
*/
async parseFile(fileId: string, hasHeaders: boolean = true): Promise<CsvMetadata> {
const filePath = safeJoinPath(this.uploadDir, fileId);
let rowCount = 0;
let columnNames: string[] = [];
const firstNonEmptyValues = new Map<string, string>();
return await new Promise((resolve, reject) => {
const parser = parse({
columns: hasHeaders
? (header: string[]) => {
columnNames = header;
return header;
}
: false,
skip_empty_lines: true,
})
.on('data', (row: Record<string, string> | string[]) => {
rowCount++;
let rowObject: Record<string, string>;
if (!hasHeaders && Array.isArray(row)) {
const processed = this.processRowWithoutHeaders(row, columnNames);
columnNames = processed.columnNames;
rowObject = processed.rowObject;
} else if (!Array.isArray(row)) {
rowObject = row;
} else {
return;
}
// Collect first non-empty value per column (sample up to N rows).
// `columnNames` is already populated by the `columns` header callback (which fires
// before any `data` events) when hasHeaders=true, or built incrementally by
// processRowWithoutHeaders on the first row otherwise.
if (rowCount <= this.TYPE_INFERENCE_SAMPLE_SIZE) {
for (const colName of columnNames) {
if (!firstNonEmptyValues.has(colName)) {
const value = rowObject[colName];
if (value?.trim()) {
firstNonEmptyValues.set(colName, value);
}
}
}
}
})
.on('end', () => {
const columns = columnNames.map((columnName) => {
const detectedType = this.inferColumnType(firstNonEmptyValues.get(columnName));
return {
name: columnName,
type: detectedType,
compatibleTypes: this.getCompatibleTypes(detectedType),
};
});
resolve({
rowCount,
columnCount: columns.length,
columns,
});
})
.on('error', reject);
createReadStream(filePath).on('error', reject).pipe(parser);
});
return await this.parseCsvFile(
fileId,
hasHeaders,
(rowObject, colNames, rowNumber) => {
if (rowNumber <= this.TYPE_INFERENCE_SAMPLE_SIZE) {
this.collectTypeSamples(rowObject, colNames, firstNonEmptyValues);
}
},
(colNames, totalRows) => {
const columns = this.buildColumnMetadata(colNames, firstNonEmptyValues);
return { rowCount: totalRows, columnCount: columns.length, columns };
},
);
}
/**
@ -120,32 +155,38 @@ export class CsvParserService {
fileId: string,
hasHeaders: boolean = true,
): Promise<Array<Record<string, string>>> {
const filePath = safeJoinPath(this.uploadDir, fileId);
const rows: Array<Record<string, string>> = [];
let columnNames: string[] = [];
return await new Promise((resolve, reject) => {
const parser = parse({
columns: hasHeaders ? true : false,
skip_empty_lines: true,
})
.on('data', (row: Record<string, string> | string[]) => {
if (!hasHeaders && Array.isArray(row)) {
const processed = this.processRowWithoutHeaders(row, columnNames);
columnNames = processed.columnNames;
rows.push(processed.rowObject);
} else if (!Array.isArray(row)) {
rows.push(row);
}
})
.on('end', () => {
resolve(rows);
})
.on('error', reject);
return await this.parseCsvFile(
fileId,
hasHeaders,
(rowObject) => rows.push(rowObject),
() => rows,
);
}
createReadStream(filePath).on('error', reject).pipe(parser);
});
/**
* Parses a CSV file in a single pass, returning column names and all rows.
* Skips type inference since callers only need column names for matching.
*/
async parseFileWithData(
fileId: string,
hasHeaders: boolean = true,
): Promise<{ metadata: CsvMetadata; rows: Array<Record<string, string>> }> {
const rows: Array<Record<string, string>> = [];
return await this.parseCsvFile(
fileId,
hasHeaders,
(rowObject) => rows.push(rowObject),
(colNames) => {
const columns = colNames.map((name) => ({ name, type: 'string' as const }));
return {
metadata: { rowCount: rows.length, columnCount: columns.length, columns },
rows,
};
},
);
}
/**

View file

@ -0,0 +1,169 @@
import type { CreateDataTableDto } from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { Service } from '@n8n/di';
import type { DataTableRow } from 'n8n-workflow';
import { DATA_TABLE_SYSTEM_COLUMNS } from 'n8n-workflow';
import type { CsvColumnMetadata } from './csv-parser.service';
import { CsvParserService } from './csv-parser.service';
import type { DataTableColumn } from './data-table-column.entity';
import { DataTableFileCleanupService } from './data-table-file-cleanup.service';
import { FileUploadError } from './errors/data-table-file-upload.error';
import { DataTableValidationError } from './errors/data-table-validation.error';
@Service()
export class DataTableCsvImportService {
private readonly logger: Logger;
constructor(
private readonly csvParserService: CsvParserService,
private readonly fileCleanupService: DataTableFileCleanupService,
logger: Logger,
) {
this.logger = logger.scoped('data-table-csv-import');
}
/**
* Builds transformed rows for importing CSV data into a newly created table.
* Supports column renaming via dtoColumns.csvColumnName and falls back to
* index-based mapping when no csvColumnName is provided.
*/
async buildRowsForNewTable(
fileId: string,
hasHeaders: boolean,
tableColumns: DataTableColumn[],
dtoColumns?: CreateDataTableDto['columns'],
): Promise<DataTableRow[]> {
try {
const columnMapping = await this.buildColumnMappingForNewTable(
fileId,
hasHeaders,
tableColumns,
dtoColumns,
);
const csvRows = await this.csvParserService.parseFileData(fileId, hasHeaders);
return this.transformRows(csvRows, columnMapping);
} catch (error) {
this.logger.error('Failed to import data from CSV file', { error, fileId });
throw new FileUploadError(error instanceof Error ? error.message : 'Failed to read CSV file');
}
}
/**
* Validates CSV columns against an existing table and builds transformed rows.
* System columns are silently skipped. Unrecognized columns cause an error.
* Empty/missing values are converted to null.
*/
async validateAndBuildRowsForExistingTable(
fileId: string,
tableColumns: DataTableColumn[],
): Promise<{ rows: DataTableRow[]; systemColumnsIgnored: string[] }> {
try {
const tableColumnNames = new Set(tableColumns.map((col) => col.name));
const { metadata: csvMetadata, rows: csvRows } =
await this.csvParserService.parseFileWithData(fileId);
const { matchedColumns, systemColumnsIgnored } = this.matchColumns(
csvMetadata.columns,
tableColumnNames,
);
const mapping = new Map(matchedColumns.map((col) => [col, col]));
return {
rows: this.transformRows(csvRows, mapping),
systemColumnsIgnored,
};
} catch (error) {
if (error instanceof DataTableValidationError) throw error;
this.logger.error('Failed to import CSV to existing table', { error, fileId });
throw new FileUploadError(
error instanceof Error ? error.message : 'Failed to import CSV file',
);
}
}
async cleanupFile(fileId: string) {
await this.fileCleanupService.deleteFile(fileId);
}
/**
 * Partitions CSV header names into table matches vs. ignorable system columns.
 *
 * @param csvColumns Column metadata parsed from the CSV header.
 * @param tableColumnNames Names of the target table's user columns.
 * @returns Matched column names and any system columns to silently skip.
 * @throws DataTableValidationError when the CSV contains unknown columns, or
 *   when not a single CSV column matches the table.
 */
private matchColumns(
	csvColumns: CsvColumnMetadata[],
	tableColumnNames: Set<string>,
): { matchedColumns: string[]; systemColumnsIgnored: string[] } {
	const matchedColumns: string[] = [];
	const systemColumnsIgnored: string[] = [];
	const unknown: string[] = [];

	for (const { name } of csvColumns) {
		if (DATA_TABLE_SYSTEM_COLUMNS.includes(name)) {
			systemColumnsIgnored.push(name);
		} else if (tableColumnNames.has(name)) {
			matchedColumns.push(name);
		} else {
			unknown.push(name);
		}
	}

	if (unknown.length > 0) {
		throw new DataTableValidationError(
			`CSV contains columns not found in the data table: ${unknown.join(', ')}. Remove them and try again.`,
		);
	}
	if (matchedColumns.length === 0) {
		throw new DataTableValidationError(
			'No matching columns found between CSV and data table. CSV columns must match table column names exactly.',
		);
	}

	return { matchedColumns, systemColumnsIgnored };
}
/**
 * Converts parsed CSV rows into data-table rows using the given column mapping.
 * Only mapped columns are carried over; absent and empty cells become null.
 */
private transformRows(
	csvRows: Array<Record<string, string>>,
	columnMapping: Map<string, string>,
): DataTableRow[] {
	const mappingEntries = [...columnMapping.entries()];
	return csvRows.map((row) => {
		const out: DataTableRow = {};
		for (const [sourceName, targetName] of mappingEntries) {
			const raw = row[sourceName];
			// Normalize missing and empty cells to explicit nulls.
			out[targetName] = raw === undefined || raw === '' ? null : raw;
		}
		return out;
	});
}
private async buildColumnMappingForNewTable(
fileId: string,
hasHeaders: boolean,
tableColumns: DataTableColumn[],
dtoColumns?: CreateDataTableDto['columns'],
): Promise<Map<string, string>> {
const columnMapping = new Map<string, string>();
const hasCsvColumnNames = dtoColumns?.some((c) => c.csvColumnName);
if (hasCsvColumnNames) {
const tableColByName = new Map(tableColumns.map((tc) => [tc.name, tc.name]));
for (const dtoCol of dtoColumns!) {
if (dtoCol.csvColumnName) {
const tableName = tableColByName.get(dtoCol.name);
if (tableName) {
columnMapping.set(dtoCol.csvColumnName, tableName);
}
}
}
} else {
const csvMetadata = await this.csvParserService.parseFile(fileId, hasHeaders);
csvMetadata.columns.forEach((csvColumn, index) => {
if (tableColumns[index]) {
columnMapping.set(csvColumn.name, tableColumns[index].name);
}
});
}
return columnMapping;
}
}

View file

@ -4,6 +4,7 @@ import {
CreateDataTableDto,
DeleteDataTableRowsDto,
DownloadDataTableCsvQueryDto,
ImportCsvToDataTableDto,
ListDataTableContentQueryDto,
ListDataTableQueryDto,
MoveDataTableColumnDto,
@ -41,6 +42,7 @@ import { DataTableNameConflictError } from './errors/data-table-name-conflict.er
import { DataTableNotFoundError } from './errors/data-table-not-found.error';
import { DataTableSystemColumnNameConflictError } from './errors/data-table-system-column-name-conflict.error';
import { DataTableValidationError } from './errors/data-table-validation.error';
import { FileUploadError } from './errors/data-table-file-upload.error';
import { ProjectService } from '@/services/project.service.ee';
import { SourceControlPreferencesService } from '@/modules/source-control.ee/source-control-preferences.service.ee';
@ -114,6 +116,8 @@ export class DataTableController {
throw new ConflictError(e.message);
} else if (e instanceof DataTableValidationError) {
throw new BadRequestError(e.message);
} else if (e instanceof FileUploadError) {
throw new BadRequestError(e.message);
} else {
throw new InternalServerError(e.message, e);
}
@ -370,6 +374,36 @@ export class DataTableController {
}
}
// Imports a previously uploaded CSV file into an existing data table.
// Requires row-write scope on the project; maps domain errors to HTTP errors.
@Post('/:dataTableId/import-csv')
@ProjectScope('dataTable:writeRow')
async importCsvToDataTable(
req: AuthenticatedRequest<{ projectId: string }>,
_res: Response,
@Param('dataTableId') dataTableId: string,
@Body dto: ImportCsvToDataTableDto,
) {
// Rejects the request on read-only instances (e.g. source-control protected).
this.checkInstanceWriteAccess();
try {
return await this.dataTableService.importCsvToExistingTable(
dataTableId,
req.params.projectId,
dto.fileId,
);
} catch (e: unknown) {
// Translate domain errors into the matching HTTP status codes.
if (e instanceof DataTableNotFoundError) {
throw new NotFoundError(e.message);
} else if (e instanceof DataTableValidationError) {
throw new BadRequestError(e.message);
} else if (e instanceof FileUploadError) {
throw new BadRequestError(e.message);
} else if (e instanceof Error) {
throw new InternalServerError(e.message, e);
} else {
// Non-Error throwables are rethrown untouched.
throw e;
}
}
}
@Post('/:dataTableId/upsert')
@ProjectScope('dataTable:writeRow')
async upsertDataTableRow(

View file

@ -30,16 +30,14 @@ import type {
} from 'n8n-workflow';
import { DATA_TABLE_SYSTEM_COLUMN_TYPE_MAP, validateFieldType } from 'n8n-workflow';
import { CsvParserService } from './csv-parser.service';
import { DataTableColumn } from './data-table-column.entity';
import { DataTableColumnRepository } from './data-table-column.repository';
import { DataTableFileCleanupService } from './data-table-file-cleanup.service';
import { DataTableCsvImportService } from './data-table-csv-import.service';
import { DataTableRowsRepository } from './data-table-rows.repository';
import { DataTableSizeValidator } from './data-table-size-validator.service';
import { DataTableRepository } from './data-table.repository';
import { columnTypeToFieldType } from './data-table.types';
import { DataTableColumnNotFoundError } from './errors/data-table-column-not-found.error';
import { FileUploadError } from './errors/data-table-file-upload.error';
import { DataTableNameConflictError } from './errors/data-table-name-conflict.error';
import { DataTableNotFoundError } from './errors/data-table-not-found.error';
import { DataTableValidationError } from './errors/data-table-validation.error';
@ -57,8 +55,7 @@ export class DataTableService {
private readonly dataTableSizeValidator: DataTableSizeValidator,
private readonly projectRelationRepository: ProjectRelationRepository,
private readonly roleService: RoleService,
private readonly csvParserService: CsvParserService,
private readonly fileCleanupService: DataTableFileCleanupService,
private readonly csvImportService: DataTableCsvImportService,
) {
this.logger = this.logger.scoped('data-table');
}
@ -79,17 +76,22 @@ export class DataTableService {
if (dto.fileId) {
try {
await this.importDataFromFile(
projectId,
result.id,
const tableColumns = await this.getColumns(result.id, projectId);
const rows = await this.csvImportService.buildRowsForNewTable(
dto.fileId,
dto.hasHeaders ?? true,
tableColumns,
dto.columns,
);
await this.fileCleanupService.deleteFile(dto.fileId);
if (rows.length > 0) {
await this.insertRows(result.id, projectId, rows);
}
} catch (error) {
await this.deleteDataTable(result.id, projectId);
throw error;
} finally {
await this.csvImportService.cleanupFile(dto.fileId);
}
}
@ -98,60 +100,29 @@ export class DataTableService {
return result;
}
private async importDataFromFile(
projectId: string,
async importCsvToExistingTable(
dataTableId: string,
projectId: string,
fileId: string,
hasHeaders: boolean,
dtoColumns?: CreateDataTableDto['columns'],
) {
): Promise<{ importedRowCount: number; systemColumnsIgnored: string[] }> {
await this.validateDataTableSize();
await this.validateDataTableExists(dataTableId, projectId);
try {
const tableColumns = await this.getColumns(dataTableId, projectId);
const { rows, systemColumnsIgnored } =
await this.csvImportService.validateAndBuildRowsForExistingTable(fileId, tableColumns);
// Build mapping from CSV column name → table column name.
// When dtoColumns carry csvColumnName (i.e. a column was renamed or some
// columns were discarded), use that for a name-based mapping. Otherwise
// fall back to the legacy index-based mapping.
const columnMapping = new Map<string, string>();
const hasCsvColumnNames = dtoColumns?.some((c) => c.csvColumnName);
if (hasCsvColumnNames && dtoColumns) {
for (const dtoCol of dtoColumns) {
if (dtoCol.csvColumnName) {
const tableCol = tableColumns.find((tc) => tc.name === dtoCol.name);
if (tableCol) {
columnMapping.set(dtoCol.csvColumnName, tableCol.name);
}
}
}
} else {
const csvMetadata = await this.csvParserService.parseFile(fileId, hasHeaders);
csvMetadata.columns.forEach((csvColumn, index) => {
if (tableColumns[index]) {
columnMapping.set(csvColumn.name, tableColumns[index].name);
}
});
if (rows.length > 0) {
await this.insertRows(dataTableId, projectId, rows);
}
const csvRows = await this.csvParserService.parseFileData(fileId, hasHeaders);
const transformedRows = csvRows.map((csvRow) => {
const transformedRow: DataTableRow = {};
for (const [csvColName, value] of Object.entries(csvRow)) {
const tableColName = columnMapping.get(csvColName);
if (tableColName) {
transformedRow[tableColName] = value;
}
}
return transformedRow;
});
if (transformedRows.length > 0) {
await this.insertRows(dataTableId, projectId, transformedRows);
}
} catch (error) {
this.logger.error('Failed to import data from CSV file', { error, fileId, dataTableId });
throw new FileUploadError(error instanceof Error ? error.message : 'Failed to read CSV file');
return {
importedRowCount: rows.length,
systemColumnsIgnored,
};
} finally {
await this.csvImportService.cleanupFile(fileId);
}
}

View file

@ -4037,6 +4037,17 @@
"dataTable.download.modal.includeSystemColumns": "Include system columns (id, createdAt, updatedAt)",
"dataTable.download.modal.confirm": "Download",
"dataTable.download.modal.cancel": "Cancel",
"dataTable.importCsv": "Import CSV",
"dataTable.importCsv.title": "Import CSV to data table",
"dataTable.importCsv.description": "Column names must match the table's column names. System columns (id, createdAt, updatedAt) found in CSV will be ignored.",
"dataTable.importCsv.columnMismatch": "The following CSV columns do not match the table: {unrecognized}. The following table columns are missing from CSV: {missing}.",
"dataTable.importCsv.unrecognizedColumnsOnly": "The following CSV columns do not match the table: {columns}. Remove them and try again.",
"dataTable.importCsv.missingColumnsOnly": "The following table columns are missing from CSV and will be set to null: {columns}.",
"dataTable.importCsv.noMatchingColumns": "No matching columns found. CSV column names must exactly match the table's column names.",
"dataTable.importCsv.readyToImport": "Ready to import {count} row | Ready to import {count} rows",
"dataTable.importCsv.success": "Successfully imported {count} row | Successfully imported {count} rows",
"dataTable.importCsv.error": "Failed to import CSV",
"dataTable.importCsv.importButton": "Import",
"dataTable.rename.error": "Error renaming data table",
"dataTable.getDetails.error": "Error fetching data table details",
"dataTable.notFound": "Data table not found",

View file

@ -112,6 +112,10 @@ const onAddColumn = async (column: DataTableColumnCreatePayload): Promise<AddCol
return await dataTableTableRef.value.addColumn(column);
};
const onCsvImported = async () => {
await dataTableTableRef.value?.fetchDataTableRows();
};
const handleSourceControlPull = async () => {
// Bypass cache and fetch fresh data from API after pull
loading.value = true;
@ -156,7 +160,11 @@ onBeforeUnmount(() => {
</div>
<div v-else-if="dataTable">
<div :class="$style.header">
<DataTableBreadcrumbs :data-table="dataTable" :read-only="readOnlyEnv" />
<DataTableBreadcrumbs
:data-table="dataTable"
:read-only="readOnlyEnv"
@imported="onCsvImported"
/>
<div v-if="saving" :class="$style.saving">
<N8nSpinner />
<N8nText>{{ i18n.baseText('generic.saving') }}...</N8nText>

View file

@ -6,6 +6,7 @@ import { MODAL_CONFIRM } from '@/app/constants';
import {
DATA_TABLE_CARD_ACTIONS,
DOWNLOAD_DATA_TABLE_MODAL_KEY,
IMPORT_CSV_MODAL_KEY,
} from '@/features/core/dataTable/constants';
import { useDataTableStore } from '@/features/core/dataTable/dataTable.store';
@ -17,6 +18,7 @@ import { computed } from 'vue';
import { N8nActionToggle } from '@n8n/design-system';
import { useUIStore } from '@/app/stores/ui.store';
import DownloadDataTableModal from './DownloadDataTableModal.vue';
import ImportCsvModal from './ImportCsvModal.vue';
type Props = {
dataTable: DataTable;
isReadOnly?: boolean;
@ -35,6 +37,7 @@ const emit = defineEmits<{
},
];
onDeleted: [];
imported: [];
}>();
const dataTableStore = useDataTableStore();
@ -46,9 +49,15 @@ const toast = useToast();
const telemetry = useTelemetry();
const downloadModalKey = computed(() => `${DOWNLOAD_DATA_TABLE_MODAL_KEY}-${props.dataTable.id}`);
const importCsvModalKey = computed(() => `${IMPORT_CSV_MODAL_KEY}-${props.dataTable.id}`);
const actions = computed<Array<UserAction<IUser>>>(() => {
const availableActions = [
{
label: i18n.baseText('dataTable.importCsv'),
value: DATA_TABLE_CARD_ACTIONS.IMPORT_CSV,
disabled: !dataTableStore.projectPermissions.dataTable.writeRow || props.isReadOnly,
},
{
label: i18n.baseText('dataTable.download.csv'),
value: DATA_TABLE_CARD_ACTIONS.DOWNLOAD_CSV,
@ -81,6 +90,10 @@ const onAction = async (action: string) => {
});
break;
}
case DATA_TABLE_CARD_ACTIONS.IMPORT_CSV: {
uiStore.openModal(importCsvModalKey.value);
break;
}
case DATA_TABLE_CARD_ACTIONS.DOWNLOAD_CSV: {
uiStore.openModal(downloadModalKey.value);
break;
@ -157,5 +170,11 @@ const deleteDataTable = async () => {
@confirm="downloadDataTableCsv"
@close="() => uiStore.closeModal(downloadModalKey)"
/>
<ImportCsvModal
:modal-name="importCsvModalKey"
:data-table="dataTable"
@imported="emit('imported')"
@close="() => uiStore.closeModal(importCsvModalKey)"
/>
</div>
</template>

View file

@ -21,6 +21,10 @@ type Props = {
const props = defineProps<Props>();
defineEmits<{
imported: [];
}>();
const renameInput = useTemplateRef<{ forceFocus: () => void }>('renameInput');
const dataTableStore = useDataTableStore();
@ -136,6 +140,7 @@ watch(
location="breadcrumbs"
@rename="onRename"
@on-deleted="onDelete"
@imported="$emit('imported')"
/>
</div>
</div>

View file

@ -0,0 +1,97 @@
import { createComponentRenderer } from '@/__tests__/render';
import { createTestingPinia } from '@pinia/testing';
import { vi } from 'vitest';
import ImportCsvModal from '@/features/core/dataTable/components/ImportCsvModal.vue';
import type { DataTable } from '@/features/core/dataTable/dataTable.types';
const ModalStub = {
template: `
<div>
<slot name="header" />
<slot name="title" />
<slot name="content" />
<slot name="footer" />
</div>
`,
};
vi.mock('@n8n/i18n', async (importOriginal) => ({
...(await importOriginal()),
useI18n: () => ({
baseText: (
key: string,
options?: { interpolate?: Record<string, string>; adjustToNumber?: number },
) => {
const interpolate = options?.interpolate ?? {};
const texts: Record<string, string> = {
'dataTable.importCsv.title': 'Import CSV to data table',
'dataTable.importCsv.description':
"Column names must match the table's column names. System columns (id, createdAt, updatedAt) found in CSV will be ignored.",
'dataTable.upload.dropOrClick': 'Drop file here or click to upload',
'dataTable.upload.uploading': 'Uploading...',
'dataTable.importCsv.columnMismatch': `The following CSV columns do not match the table: ${interpolate.unrecognized}. The following table columns are missing from CSV: ${interpolate.missing}.`,
'dataTable.importCsv.unrecognizedColumnsOnly': `The following CSV columns do not match the table: ${interpolate.columns}. Remove them and try again.`,
'dataTable.importCsv.missingColumnsOnly': `The following table columns are missing from CSV and will be set to null: ${interpolate.columns}.`,
'dataTable.importCsv.noMatchingColumns': 'No matching columns found.',
'dataTable.importCsv.readyToImport': `Ready to import ${interpolate.count} rows`,
'dataTable.importCsv.success': `Successfully imported ${interpolate.count} rows`,
'dataTable.importCsv.error': 'Failed to import CSV',
'dataTable.importCsv.importButton': 'Import',
'generic.cancel': 'Cancel',
};
return texts[key] ?? key;
},
}),
}));
const mockDataTable: DataTable = {
id: 'dt-1',
name: 'Test Table',
projectId: 'proj-1',
sizeBytes: 0,
columns: [
{ id: 'col-1', name: 'name', type: 'string', index: 0 },
{ id: 'col-2', name: 'age', type: 'number', index: 1 },
],
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
};
const renderComponent = createComponentRenderer(ImportCsvModal, {
props: {
modalName: 'import-csv-modal',
dataTable: mockDataTable,
},
global: {
stubs: {
Modal: ModalStub,
},
},
});
describe('ImportCsvModal', () => {
beforeEach(() => {
vi.clearAllMocks();
createTestingPinia();
});
it('should render the upload area', () => {
const { getByText } = renderComponent();
expect(getByText('Drop file here or click to upload')).toBeInTheDocument();
});
it('should have the import button disabled initially', () => {
const { getByTestId } = renderComponent();
const importButton = getByTestId('import-csv-confirm');
expect(importButton).toBeDisabled();
});
it('should emit close when cancel is clicked', async () => {
const { getByTestId, emitted } = renderComponent();
const { default: userEvent } = await import('@testing-library/user-event');
await userEvent.click(getByTestId('import-csv-cancel'));
expect(emitted().close).toBeTruthy();
});
});

View file

@ -0,0 +1,348 @@
<script lang="ts" setup>
import { useI18n } from '@n8n/i18n';
import { ref, computed, watch } from 'vue';
import { useDataTableStore } from '@/features/core/dataTable/dataTable.store';
import { useUIStore } from '@/app/stores/ui.store';
import { useToast } from '@/app/composables/useToast';
import { useTelemetry } from '@/app/composables/useTelemetry';
import { DATA_TABLE_SYSTEM_COLUMNS } from 'n8n-workflow';
import { N8nButton, N8nIcon, N8nText, N8nCallout } from '@n8n/design-system';
import Modal from '@/app/components/Modal.vue';
import { ElUpload } from 'element-plus';
import type { UploadFile } from 'element-plus';
import type { DataTable } from '@/features/core/dataTable/dataTable.types';
type Props = {
modalName: string;
dataTable: DataTable;
};
const props = defineProps<Props>();
const emit = defineEmits<{
imported: [];
close: [];
}>();
const dataTableStore = useDataTableStore();
const uiStore = useUIStore();
const i18n = useI18n();
const toast = useToast();
const telemetry = useTelemetry();
const selectedFile = ref<File | null>(null);
const uploadedFileId = ref<string | null>(null);
const csvRowCount = ref(0);
const isUploading = ref(false);
const isImporting = ref(false);
const isUploadHovered = ref(false);
const matchedColumns = ref<string[]>([]);
const unrecognizedColumns = ref<string[]>([]);
const uploaded = computed(() => uploadedFileId.value !== null);
const tableColumnNames = computed(() => new Set(props.dataTable.columns.map((col) => col.name)));
const missingTableColumns = computed(() => {
if (!uploaded.value) return [];
return props.dataTable.columns
.filter((col) => !matchedColumns.value.includes(col.name))
.map((col) => col.name);
});
const canImport = computed(() => {
return (
uploaded.value &&
matchedColumns.value.length > 0 &&
unrecognizedColumns.value.length === 0 &&
!isImporting.value
);
});
const handleFileChange = (uploadFile: UploadFile) => {
if (uploadFile.raw) {
selectedFile.value = uploadFile.raw;
void processUpload();
}
};
const processUpload = async () => {
if (!selectedFile.value) return;
isUploading.value = true;
try {
const response = await dataTableStore.uploadCsvFile(selectedFile.value, true);
uploadedFileId.value = response.id;
csvRowCount.value = response.rowCount;
matchedColumns.value = [];
unrecognizedColumns.value = [];
for (const csvCol of response.columns) {
if (DATA_TABLE_SYSTEM_COLUMNS.includes(csvCol.name)) {
// System columns are silently skipped during import
} else if (tableColumnNames.value.has(csvCol.name)) {
matchedColumns.value.push(csvCol.name);
} else {
unrecognizedColumns.value.push(csvCol.name);
}
}
} catch (error) {
toast.showError(error, i18n.baseText('dataTable.upload.error'));
reset();
} finally {
isUploading.value = false;
}
};
const onImport = async () => {
if (!uploadedFileId.value || !canImport.value) return;
isImporting.value = true;
try {
const result = await dataTableStore.importCsvToDataTable(
props.dataTable.id,
props.dataTable.projectId,
uploadedFileId.value,
);
toast.showMessage({
title: i18n.baseText('dataTable.importCsv.success', {
adjustToNumber: result.importedRowCount,
interpolate: { count: String(result.importedRowCount) },
}),
type: 'success',
});
telemetry.track('User imported CSV to data table', {
data_table_id: props.dataTable.id,
data_table_project_id: props.dataTable.projectId,
imported_row_count: result.importedRowCount,
system_columns_ignored: result.systemColumnsIgnored,
});
uiStore.closeModal(props.modalName);
emit('imported');
} catch (error) {
toast.showError(error, i18n.baseText('dataTable.importCsv.error'));
} finally {
isImporting.value = false;
}
};
const reset = () => {
selectedFile.value = null;
uploadedFileId.value = null;
csvRowCount.value = 0;
matchedColumns.value = [];
unrecognizedColumns.value = [];
};
const isModalOpen = computed(() => uiStore.modalsById[props.modalName]?.open);
watch(isModalOpen, (open) => {
if (!open) {
reset();
}
});
const onClose = () => {
reset();
emit('close');
};
</script>
<template>
<Modal
:name="props.modalName"
:title="i18n.baseText('dataTable.importCsv.title')"
:center="true"
width="540px"
:event-bus="undefined"
>
<template #content>
<div :class="$style.content">
<N8nText color="text-light" size="small">
{{ i18n.baseText('dataTable.importCsv.description') }}
</N8nText>
<ElUpload
:class="$style.uploadDemo"
drag
:auto-upload="false"
:show-file-list="false"
accept=".csv"
:on-change="handleFileChange"
data-test-id="import-csv-upload"
@mouseenter="isUploadHovered = true"
@mouseleave="isUploadHovered = false"
>
<N8nIcon
icon="file"
:size="24"
:color="isUploadHovered ? 'text-dark' : 'text-light'"
:class="$style.uploadIcon"
/>
<N8nText v-if="selectedFile" :color="isUploadHovered ? 'text-dark' : 'text-light'">
{{ selectedFile.name }}
</N8nText>
<N8nText v-else size="medium" :color="isUploadHovered ? 'text-dark' : 'text-light'">
{{ i18n.baseText('dataTable.upload.dropOrClick') }}
</N8nText>
</ElUpload>
<div v-if="isUploading" :class="$style.uploadingMessage">
{{ i18n.baseText('dataTable.upload.uploading') }}
</div>
<div v-if="uploaded && !isUploading" :class="$style.columnResults">
<N8nCallout
v-if="unrecognizedColumns.length > 0 && missingTableColumns.length > 0"
theme="danger"
data-test-id="import-csv-column-mismatch"
>
{{
i18n.baseText('dataTable.importCsv.columnMismatch', {
interpolate: {
unrecognized: unrecognizedColumns.join(', '),
missing: missingTableColumns.join(', '),
},
})
}}
</N8nCallout>
<N8nCallout
v-else-if="unrecognizedColumns.length > 0"
theme="danger"
data-test-id="import-csv-unrecognized-columns"
>
{{
i18n.baseText('dataTable.importCsv.unrecognizedColumnsOnly', {
interpolate: { columns: unrecognizedColumns.join(', ') },
})
}}
</N8nCallout>
<N8nCallout
v-else-if="missingTableColumns.length > 0"
theme="info"
data-test-id="import-csv-missing-columns"
>
{{
i18n.baseText('dataTable.importCsv.missingColumnsOnly', {
interpolate: { columns: missingTableColumns.join(', ') },
})
}}
</N8nCallout>
<N8nCallout
v-if="matchedColumns.length === 0 && unrecognizedColumns.length === 0"
theme="danger"
data-test-id="import-csv-no-matching-columns"
>
{{ i18n.baseText('dataTable.importCsv.noMatchingColumns') }}
</N8nCallout>
<N8nText
v-if="canImport"
size="small"
:class="$style.readyToImport"
data-test-id="import-csv-ready-to-import"
>
{{
i18n.baseText('dataTable.importCsv.readyToImport', {
adjustToNumber: csvRowCount,
interpolate: { count: String(csvRowCount) },
})
}}
</N8nText>
</div>
</div>
</template>
<template #footer>
<div :class="$style.footer">
<N8nButton
variant="subtle"
size="large"
:label="i18n.baseText('generic.cancel')"
data-test-id="import-csv-cancel"
@click="onClose"
/>
<N8nButton
size="large"
:label="i18n.baseText('dataTable.importCsv.importButton')"
:disabled="!canImport"
:loading="isImporting"
data-test-id="import-csv-confirm"
@click="onImport"
/>
</div>
</template>
</Modal>
</template>
<style module lang="scss">
.content {
display: flex;
flex-direction: column;
gap: var(--spacing--sm);
}
.uploadDemo {
width: 100%;
:global(.el-upload) {
width: 100%;
border-radius: var(--radius--lg);
}
:global(.el-upload-dragger) {
width: 100%;
padding: var(--spacing--2xl) var(--spacing--lg);
border: 1px solid var(--color--foreground);
background-color: var(--color--background);
border-radius: var(--radius--lg);
transition: all 0.2s ease;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
&:hover {
background-color: var(--color--background);
}
}
:global(input[type='file']) {
display: none !important;
}
}
.uploadIcon {
margin-bottom: var(--spacing--sm);
}
.uploadingMessage {
padding: var(--spacing--lg);
text-align: center;
color: var(--color--text--tint-1);
}
.columnResults {
display: flex;
flex-direction: column;
gap: var(--spacing--xs);
}
.readyToImport {
font-weight: var(--font-weight--bold);
}
.footer {
display: flex;
justify-content: flex-end;
gap: var(--spacing--xs);
}
</style>

View file

@ -169,6 +169,7 @@ watch(
defineExpose({
addRow: dataTableOperations.onAddRowClick,
addColumn: dataTableOperations.onAddColumn,
fetchDataTableRows: dataTableOperations.fetchDataTableRows,
});
</script>

View file

@ -24,10 +24,12 @@ export const DATA_TABLE_CARD_ACTIONS = {
DELETE: 'delete',
CLEAR: 'clear',
DOWNLOAD_CSV: 'download-csv',
IMPORT_CSV: 'import-csv',
};
export const ADD_DATA_TABLE_MODAL_KEY = 'addDataTableModal';
export const DOWNLOAD_DATA_TABLE_MODAL_KEY = 'downloadDataTableModal';
export const IMPORT_CSV_MODAL_KEY = 'importCsvModal';
export const DEFAULT_ID_COLUMN_NAME = 'id';

View file

@ -266,6 +266,20 @@ export const downloadDataTableCsvApi = async (
filename,
};
};
export const importCsvToDataTableApi = async (
context: IRestApiContext,
dataTableId: string,
projectId: string,
fileId: string,
) => {
return await makeRestApiRequest<{ importedRowCount: number; systemColumnsIgnored: string[] }>(
context,
'POST',
`/projects/${projectId}/data-tables/${dataTableId}/import-csv`,
{ fileId },
);
};
export const uploadCsvFileApi = async (
context: IRestApiContext,
file: File,

View file

@ -17,6 +17,7 @@ import {
deleteDataTableRowsApi,
fetchDataTableGlobalLimitInBytes,
downloadDataTableCsvApi,
importCsvToDataTableApi,
uploadCsvFileApi,
} from '@/features/core/dataTable/dataTable.api';
import type {
@ -126,6 +127,10 @@ export const useDataTableStore = defineStore(DATA_TABLE_STORE, () => {
return await uploadCsvFileApi(rootStore.restApiContext, file, hasHeaders);
};
const importCsvToDataTable = async (dataTableId: string, projectId: string, fileId: string) => {
return await importCsvToDataTableApi(rootStore.restApiContext, dataTableId, projectId, fileId);
};
const deleteDataTable = async (dataTableId: string, projectId: string) => {
const deleted = await deleteDataTableApi(rootStore.restApiContext, dataTableId, projectId);
if (deleted) {
@ -378,6 +383,7 @@ export const useDataTableStore = defineStore(DATA_TABLE_STORE, () => {
maxSizeMB,
createDataTable,
uploadCsvFile,
importCsvToDataTable,
deleteDataTable,
updateDataTable,
fetchDataTableDetails,