Merge remote-tracking branch 'origin/main' into canary

This commit is contained in:
lobehubbot 2026-04-16 11:27:19 +00:00
commit 94b6827580
11 changed files with 15241 additions and 2 deletions

View file

@ -507,6 +507,20 @@ table chat_groups_agents {
}
}
table document_histories {
id varchar(255) [pk, not null]
document_id varchar(255) [not null]
user_id text [not null]
editor_data jsonb [not null]
save_source text [not null]
saved_at "timestamp with time zone" [not null]
indexes {
(document_id, saved_at) [name: 'document_histories_document_id_saved_at_idx']
(user_id, saved_at) [name: 'document_histories_user_id_saved_at_idx']
}
}
table documents {
id varchar(255) [pk, not null]
title text
@ -1976,6 +1990,10 @@ ref: unstructured_chunks.file_id - files.id
ref: document_chunks.document_id > documents.id
ref: document_histories.document_id > documents.id
ref: document_histories.user_id - users.id
ref: documents.file_id > files.id
ref: file_chunks.file_id - files.id

View file

@ -0,0 +1,16 @@
-- 0098_add_document_history: snapshot storage for a document's editor state.
-- One row per save; "saved_at" orders a document's history timeline.
CREATE TABLE IF NOT EXISTS "document_histories" (
	"id" varchar(255) PRIMARY KEY NOT NULL,
	"document_id" varchar(255) NOT NULL,
	"user_id" text NOT NULL,
	"editor_data" jsonb NOT NULL,
	"save_source" text NOT NULL,
	"saved_at" timestamp with time zone NOT NULL
);
--> statement-breakpoint
-- Idempotent FK setup: the DROPs are no-ops on a fresh apply (the table was
-- just created) but make re-running this migration safe. Both FKs cascade so
-- deleting a document or user removes its history rows.
ALTER TABLE "document_histories" DROP CONSTRAINT IF EXISTS "document_histories_document_id_documents_id_fk";--> statement-breakpoint
ALTER TABLE "document_histories" ADD CONSTRAINT "document_histories_document_id_documents_id_fk" FOREIGN KEY ("document_id") REFERENCES "public"."documents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_histories" DROP CONSTRAINT IF EXISTS "document_histories_user_id_users_id_fk";--> statement-breakpoint
ALTER TABLE "document_histories" ADD CONSTRAINT "document_histories_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- Single-column lookup indexes for per-document, per-user, and time-ordered
-- queries.
CREATE INDEX IF NOT EXISTS "document_histories_document_id_idx" ON "document_histories" USING btree ("document_id");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "document_histories_user_id_idx" ON "document_histories" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "document_histories_saved_at_idx" ON "document_histories" USING btree ("saved_at");

File diff suppressed because it is too large Load diff

View file

@ -686,7 +686,14 @@
"when": 1774548140282,
"tag": "0097_add_agent_onboarding",
"breakpoints": true
},
{
"idx": 98,
"version": "7",
"when": 1776234919716,
"tag": "0098_add_document_history",
"breakpoints": true
}
],
"version": "6"
}
}

View file

@ -2,7 +2,7 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { getTestDB } from '../../core/getTestDB';
import { documents, files, users } from '../../schemas';
import { documentHistories, documents, files, users } from '../../schemas';
import type { LobeChatDatabase } from '../../type';
import { DocumentModel } from '../document';
import { FileModel } from '../file';
@ -24,6 +24,7 @@ beforeEach(async () => {
afterEach(async () => {
await serverDB.delete(users);
await serverDB.delete(files);
await serverDB.delete(documentHistories);
await serverDB.delete(documents);
});

View file

@ -0,0 +1,275 @@
// @vitest-environment node
import { eq } from 'drizzle-orm';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { getTestDB } from '../../core/getTestDB';
import { documentHistories, documents, files, users } from '../../schemas';
import type { LobeChatDatabase } from '../../type';
import { DocumentModel } from '../document';
import { DocumentHistoryModel } from '../documentHistory';
import { FileModel } from '../file';
// Shared test database plus two distinct users, so cross-user isolation can be
// asserted with parallel model instances bound to each user.
const serverDB: LobeChatDatabase = await getTestDB();

const userId = 'document-history-model-test-user-id';
const userId2 = 'document-history-model-test-user-id-2';

const documentModel = new DocumentModel(serverDB, userId);
const documentModel2 = new DocumentModel(serverDB, userId2);
const historyModel = new DocumentHistoryModel(serverDB, userId);
const historyModel2 = new DocumentHistoryModel(serverDB, userId2);
const fileModel = new FileModel(serverDB, userId);
const fileModel2 = new FileModel(serverDB, userId2);

beforeEach(async () => {
  // Recreate both users fresh for every test; deleting users first is assumed
  // to cascade to their dependent rows — TODO confirm files/documents FKs.
  await serverDB.delete(users);
  await serverDB.insert(users).values([{ id: userId }, { id: userId2 }]);
});

afterEach(async () => {
  // Delete child tables before their parents (histories before documents) so
  // the cleanup never relies on cascade behavior.
  await serverDB.delete(documentHistories);
  await serverDB.delete(documents);
  await serverDB.delete(files);
  await serverDB.delete(users);
});
/**
 * Seeds a plain-text file through the given user's FileModel, then creates a
 * document backed by that file via the given DocumentModel.
 *
 * @returns the id of the newly created document.
 * @throws when the freshly created file cannot be read back.
 */
const createTestDocument = async (model: DocumentModel, fModel: FileModel, content: string) => {
  const createdFile = await fModel.create({
    fileType: 'text/plain',
    name: 'test.txt',
    size: 100,
    url: 'https://example.com/test.txt',
  });

  // Re-read the file so the document references the stored record's fields.
  const fileRecord = await fModel.findById(createdFile.id);
  if (!fileRecord) throw new Error('File not found after creation');

  const lines = content.split('\n');
  const document = await model.create({
    content,
    fileId: fileRecord.id,
    fileType: 'text/plain',
    source: fileRecord.url,
    sourceType: 'file',
    totalCharCount: content.length,
    totalLineCount: lines.length,
  });

  return document.id;
};
// Integration tests for DocumentHistoryModel: ownership enforcement on create,
// savedAt-descending listing and keyset pagination, latest-row lookup, and
// user-scoped deletion/isolation.
describe('DocumentHistoryModel', () => {
  describe('create', () => {
    it('should create a new history row', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');

      const created = await historyModel.create({
        documentId,
        editorData: { blocks: [] },
        saveSource: 'autosave',
        savedAt: new Date('2026-04-11T00:00:00.000Z'),
      });

      // create() stamps the current user's id onto the stored row.
      expect(created).toMatchObject({
        documentId,
        saveSource: 'autosave',
        userId,
      });

      const stored = await historyModel.findById(created.id);
      expect(stored).toMatchObject({
        documentId,
        editorData: { blocks: [] },
      });
    });

    it('should reject history rows for documents owned by another user', async () => {
      const otherDocumentId = await createTestDocument(documentModel2, fileModel2, 'Other content');

      await expect(
        historyModel.create({
          documentId: otherDocumentId,
          editorData: { blocks: [] },
          saveSource: 'manual',
          savedAt: new Date('2026-04-11T00:00:00.000Z'),
        }),
      ).rejects.toThrow('Document not found');

      // A failed create must not leave a partial row behind.
      const stored = await serverDB
        .select()
        .from(documentHistories)
        .where(eq(documentHistories.documentId, otherDocumentId));
      expect(stored).toHaveLength(0);
    });
  });

  describe('list', () => {
    it('should return document history rows ordered by savedAt descending', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');

      // Three snapshots one second apart; tags encode insertion order.
      await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'autosave',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      await historyModel.create({
        documentId,
        editorData: { tag: 2 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:02.000Z'),
      });
      await historyModel.create({
        documentId,
        editorData: { tag: 3 },
        saveSource: 'restore',
        savedAt: new Date('2026-04-11T00:00:03.000Z'),
      });

      const rows = await historyModel.list({ documentId });
      // Newest first.
      expect(rows.map((row) => (row.editorData as any).tag)).toEqual([3, 2, 1]);
      expect(rows[0]).toMatchObject({ saveSource: 'restore' });
    });

    it('should support pagination via beforeSavedAt and limit', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');

      await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'autosave',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      await historyModel.create({
        documentId,
        editorData: { tag: 2 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:02.000Z'),
      });
      await historyModel.create({
        documentId,
        editorData: { tag: 3 },
        saveSource: 'restore',
        savedAt: new Date('2026-04-11T00:00:03.000Z'),
      });

      // Anchor strictly before 00:00:03 → the newest remaining row is tag 2.
      const anchored = await historyModel.list({
        beforeSavedAt: new Date('2026-04-11T00:00:03.000Z'),
        documentId,
        limit: 1,
      });
      expect(anchored).toHaveLength(1);
      expect((anchored[0]?.editorData as any).tag).toBe(2);
    });
  });

  describe('findLatestByDocumentId', () => {
    it('should return the most recent history row by savedAt', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');

      await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      const latest = await historyModel.create({
        documentId,
        editorData: { tag: 2 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:02.000Z'),
      });

      const row = await historyModel.findLatestByDocumentId(documentId);
      expect(row?.id).toBe(latest.id);
    });
  });

  describe('delete', () => {
    it('should delete a history row for the current user only', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');
      const otherDocumentId = await createTestDocument(documentModel2, fileModel2, 'Other content');

      const created = await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      const otherCreated = await historyModel2.create({
        documentId: otherDocumentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });

      await historyModel.delete(created.id);

      // Only the calling user's row is removed; the other user's row survives.
      const deleted = await historyModel.findById(created.id);
      const otherRow = await historyModel2.findById(otherCreated.id);
      expect(deleted).toBeUndefined();
      expect(otherRow).toBeDefined();
    });

    it('should delete all history rows for one document without affecting others', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');
      const otherDocumentId = await createTestDocument(documentModel2, fileModel2, 'Other content');

      await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      await historyModel.create({
        documentId,
        editorData: { tag: 2 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:02.000Z'),
      });
      await historyModel2.create({
        documentId: otherDocumentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });

      await historyModel.deleteByDocumentId(documentId);

      const rows = await historyModel.list({ documentId });
      const otherRows = await historyModel2.list({ documentId: otherDocumentId });
      expect(rows).toHaveLength(0);
      expect(otherRows).toHaveLength(1);
    });
  });

  describe('schema assumptions', () => {
    it('should keep user scoped history rows isolated', async () => {
      const documentId = await createTestDocument(documentModel, fileModel, 'Initial content');
      const otherDocumentId = await createTestDocument(documentModel2, fileModel2, 'Other content');

      const first = await historyModel.create({
        documentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });
      await historyModel2.create({
        documentId: otherDocumentId,
        editorData: { tag: 1 },
        saveSource: 'manual',
        savedAt: new Date('2026-04-11T00:00:01.000Z'),
      });

      // Each user sees exactly their own row.
      const rows = await historyModel.list({ documentId });
      const otherRows = await historyModel2.list({ documentId: otherDocumentId });
      expect(rows).toHaveLength(1);
      expect(rows[0]?.id).toBe(first.id);
      expect(otherRows).toHaveLength(1);
    });
  });
});

View file

@ -0,0 +1,117 @@
import { and, desc, eq, lt } from 'drizzle-orm';
import type { DocumentHistoryItem, NewDocumentHistory } from '../schemas';
import { documentHistories, documents } from '../schemas';
import type { LobeChatDatabase } from '../type';
/** Filters for querying one document's history rows. */
export interface QueryDocumentHistoryParams {
  /** Only return rows with savedAt strictly before this timestamp (pagination anchor). */
  beforeSavedAt?: Date;
  /** The document whose history is queried. */
  documentId: string;
  /** Maximum rows to return; consumers supply a default when omitted. */
  limit?: number;
}
/**
 * Data-access layer for `document_histories`, scoped to a single user.
 * Every read and write filters on the owning user's id, so one user can never
 * see or mutate another user's history rows.
 */
export class DocumentHistoryModel {
  constructor(
    private readonly db: LobeChatDatabase,
    private readonly userId: string,
  ) {}

  /**
   * Inserts a history snapshot for a document owned by the current user.
   * Throws `Error('Document not found')` — inserting nothing — when the
   * document does not exist or belongs to a different user.
   */
  create = async (params: Omit<NewDocumentHistory, 'userId'>): Promise<DocumentHistoryItem> => {
    // Ownership gate: the document must exist under this user before writing.
    const owned = await this.db
      .select({ id: documents.id })
      .from(documents)
      .where(and(eq(documents.id, params.documentId), eq(documents.userId, this.userId)))
      .limit(1);

    if (owned.length === 0) throw new Error('Document not found');

    const inserted = await this.db
      .insert(documentHistories)
      .values({ ...params, userId: this.userId })
      .returning();

    return inserted[0]!;
  };

  /** Deletes a single history row by id, restricted to the current user. */
  delete = async (id: string) =>
    this.db
      .delete(documentHistories)
      .where(and(eq(documentHistories.id, id), eq(documentHistories.userId, this.userId)));

  /** Deletes every history row of one document for the current user. */
  deleteByDocumentId = async (documentId: string) =>
    this.db
      .delete(documentHistories)
      .where(
        and(
          eq(documentHistories.documentId, documentId),
          eq(documentHistories.userId, this.userId),
        ),
      );

  /** Deletes all of the current user's history rows across all documents. */
  deleteAll = async () =>
    this.db.delete(documentHistories).where(eq(documentHistories.userId, this.userId));

  /** Fetches one history row by id; undefined when missing or not owned. */
  findById = async (id: string): Promise<DocumentHistoryItem | undefined> => {
    const rows = await this.db
      .select()
      .from(documentHistories)
      .where(and(eq(documentHistories.id, id), eq(documentHistories.userId, this.userId)))
      .limit(1);

    return rows[0];
  };

  /**
   * Returns a document's newest history row (ordered by savedAt, with id as a
   * tie-break), or undefined when the document has no history for this user.
   */
  findLatestByDocumentId = async (documentId: string): Promise<DocumentHistoryItem | undefined> => {
    const rows = await this.db
      .select()
      .from(documentHistories)
      .where(
        and(
          eq(documentHistories.documentId, documentId),
          eq(documentHistories.userId, this.userId),
        ),
      )
      .orderBy(desc(documentHistories.savedAt), desc(documentHistories.id))
      .limit(1);

    return rows[0];
  };

  /**
   * Lists a document's history rows, newest first. When `beforeSavedAt` is
   * given, only rows saved strictly earlier are returned (keyset pagination).
   *
   * NOTE(review): anchoring on savedAt alone skips rows that share the anchor
   * timestamp even though ordering tie-breaks on id — confirm callers never
   * store two snapshots with an identical savedAt, or extend the anchor to
   * the (savedAt, id) pair.
   */
  list = async ({
    beforeSavedAt,
    documentId,
    limit = 50,
  }: QueryDocumentHistoryParams): Promise<DocumentHistoryItem[]> => {
    const filters = and(
      eq(documentHistories.documentId, documentId),
      eq(documentHistories.userId, this.userId),
      ...(beforeSavedAt === undefined ? [] : [lt(documentHistories.savedAt, beforeSavedAt)]),
    );

    return this.db
      .select()
      .from(documentHistories)
      .where(filters)
      .orderBy(desc(documentHistories.savedAt), desc(documentHistories.id))
      .limit(limit);
  };

  /** Alias for {@link list}. */
  query = async (params: QueryDocumentHistoryParams): Promise<DocumentHistoryItem[]> =>
    this.list(params);

  /** Convenience wrapper over {@link list} taking just a document id and limit. */
  listByDocumentId = async (documentId: string, limit = 50): Promise<DocumentHistoryItem[]> =>
    this.list({ documentId, limit });
}

View file

@ -0,0 +1,36 @@
import { index, jsonb, pgTable, text } from 'drizzle-orm/pg-core';
import { createNanoId } from '../utils/idGenerator';
import { timestamptz, varchar255 } from './_helpers';
import { documents } from './file';
import { users } from './user';
/**
 * Point-in-time snapshots of a document's editor state.
 * One row per save; `savedAt` orders a document's history timeline.
 */
export const documentHistories = pgTable(
  'document_histories',
  {
    // App-generated 18-character nano id.
    id: varchar255('id')
      .$defaultFn(() => createNanoId(18)())
      .primaryKey(),
    // Owning document; rows are removed when the document is deleted.
    documentId: varchar255('document_id')
      .references(() => documents.id, { onDelete: 'cascade' })
      .notNull(),
    // Owning user; rows are removed when the user is deleted.
    userId: text('user_id')
      .references(() => users.id, { onDelete: 'cascade' })
      .notNull(),
    // Raw editor document payload captured at save time.
    editorData: jsonb('editor_data').$type<Record<string, any>>().notNull(),
    // What triggered the snapshot.
    saveSource: text('save_source', {
      enum: ['autosave', 'manual', 'restore', 'system', 'llm_call'],
    }).notNull(),
    // Timestamp of the save; supplied by the writer, not a DB default.
    savedAt: timestamptz('saved_at').notNull(),
  },
  // NOTE(review): the DBML docs list composite (document_id, saved_at) and
  // (user_id, saved_at) indexes, while this schema (and the 0098 migration)
  // declare three single-column indexes — confirm which is intended.
  (table) => [
    index('document_histories_document_id_idx').on(table.documentId),
    index('document_histories_user_id_idx').on(table.userId),
    index('document_histories_saved_at_idx').on(table.savedAt),
  ],
);

// Row types inferred from the table definition.
export type DocumentHistoryItem = typeof documentHistories.$inferSelect;
export type NewDocumentHistory = typeof documentHistories.$inferInsert;

View file

@ -9,6 +9,7 @@ export * from './apiKey';
export * from './asyncTask';
export * from './betterAuth';
export * from './chatGroup';
export * from './documentHistory';
export * from './file';
export * from './generation';
export * from './message';

View file

@ -12,6 +12,7 @@ import {
} from './agentEvals';
import { asyncTasks } from './asyncTask';
import { chatGroups, chatGroupsAgents } from './chatGroup';
import { documentHistories } from './documentHistory';
import { documents, files, knowledgeBases } from './file';
import { generationBatches, generations, generationTopics } from './generation';
import { messageGroups, messages, messagesFiles, messageTranslates } from './message';
@ -246,6 +247,18 @@ export const documentsRelations = relations(documents, ({ one, many }) => ({
}),
topics: many(topicDocuments),
chunks: many(documentChunks),
histories: many(documentHistories),
}));
// Relational metadata for document_histories: each history row belongs to
// exactly one document and one user (consumed by drizzle's relational query API).
export const documentHistoriesRelations = relations(documentHistories, ({ one }) => ({
  document: one(documents, {
    fields: [documentHistories.documentId],
    references: [documents.id],
  }),
  user: one(users, {
    fields: [documentHistories.userId],
    references: [users.id],
  }),
}));
export const topicDocumentsRelations = relations(topicDocuments, ({ one }) => ({

View file

@ -14,6 +14,7 @@ overrides:
pdfjs-dist: 5.4.530
react: 19.2.4
react-dom: 19.2.4
'@react-pdf/image': 3.0.4
patchedDependencies:
'@upstash/qstash': patches/@upstash__qstash.patch