diff --git a/.eslintrc.js b/.eslintrc.js
index cc14828d0c..4aa82d9c6e 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -30,6 +30,7 @@ config.overrides = [
files: ['*.mdx'],
rules: {
'@typescript-eslint/no-unused-vars': 1,
+ 'micromark-extension-mdx-jsx': 0,
'no-undef': 0,
'react/jsx-no-undef': 0,
'react/no-unescaped-entities': 0,
diff --git a/.i18nrc.js b/.i18nrc.js
index 0e77a6bbe3..e67ef5bde4 100644
--- a/.i18nrc.js
+++ b/.i18nrc.js
@@ -33,18 +33,13 @@ module.exports = defineConfig({
},
markdown: {
reference:
- '你需要保持 mdx 的组件格式,输出文本不需要在最外层包裹任何代码块语法。\n' +
+ 'You need to maintain the component format of the mdx file; the output text does not need to be wrapped in any code block syntax on the outermost layer.\n' +
fs.readFileSync(path.join(__dirname, 'docs/glossary.md'), 'utf-8'),
- entry: ['./README.zh-CN.md', './contributing/**/*.zh-CN.md', './docs/**/*.zh-CN.mdx'],
- entryLocale: 'zh-CN',
- outputLocales: ['en-US'],
+ entry: ['./README.md', './docs/**/*.md', './docs/**/*.mdx'],
+ entryLocale: 'en-US',
+ outputLocales: ['zh-CN'],
includeMatter: true,
- exclude: [
- './src/**/*',
- './contributing/_Sidebar.md',
- './contributing/_Footer.md',
- './contributing/Home.md',
- ],
+ exclude: ['./README.zh-CN.md', './docs/**/*.zh-CN.md', './docs/**/*.zh-CN.mdx'],
outputExtensions: (locale, { filePath }) => {
if (filePath.includes('.mdx')) {
if (locale === 'en-US') return '.mdx';
diff --git a/.remarkrc.mdx.js b/.remarkrc.mdx.js
index 22af1dc8a1..8da09727c3 100644
--- a/.remarkrc.mdx.js
+++ b/.remarkrc.mdx.js
@@ -2,5 +2,5 @@ const config = require('@lobehub/lint').remarklint;
module.exports = {
...config,
- plugins: ['remark-mdx', ...config.plugins],
+ plugins: ['remark-mdx', ...config.plugins, ['remark-lint-file-extension', false]],
};
diff --git a/README.md b/README.md
index 3dd17094d4..4fc208e9d1 100644
--- a/README.md
+++ b/README.md
@@ -422,11 +422,13 @@ Regardless of which database you choose, LobeChat can provide you with an excell
### [Support Multi-User Management][docs-feat-auth]
-LobeChat supports multi-user management and provides flexible user authentication solutions:
+LobeChat supports multi-user management and provides two main user authentication and management solutions to meet different needs:
-- **Better Auth**: LobeChat integrates `Better Auth`, a modern and flexible authentication library that supports multiple authentication methods, including OAuth, email login, credential login, magic link, and more. With `Better Auth`, you can easily implement user registration, login, session management, social login, multi-factor authentication (MFA), and other functions to ensure the security and privacy of user data.
+- **next-auth**: LobeChat integrates `next-auth`, a flexible and powerful authentication library that supports multiple authentication methods, including OAuth, email login, credential login, etc. With `next-auth`, you can easily implement user registration, login, session management, social login, and other features to ensure the security and privacy of user data.
-- **next-auth**: LobeChat also supports `next-auth`, a widely-used identity verification library with extensive OAuth provider support and flexible session management options.
+- [**Clerk**](https://go.clerk.com/exgqLG0): For users who need more advanced user management features, LobeChat also supports `Clerk`, a modern user management platform. `Clerk` provides richer features, such as multi-factor authentication (MFA), user profile management, login activity monitoring, etc. With `Clerk`, you can get higher security and flexibility, and easily handle complex user management needs.
+
+Regardless of which user management solution you choose, LobeChat can provide you with an excellent user experience and powerful feature support.
diff --git a/docs/.cdn.cache.json b/docs/.cdn.cache.json
index 92b1655b88..c8b60b2aff 100644
--- a/docs/.cdn.cache.json
+++ b/docs/.cdn.cache.json
@@ -1,26 +1,429 @@
{
+ "https://file.rene.wang/clipboard-1768907980491-9cc0669fc3a38.png": "/blog/assets8be3a46c8f9c5d3b61bc541f44b7f245.webp",
+ "https://file.rene.wang/clipboard-1768908081787-ed9eb1cb78bdb.png": "/blog/assetsab009b79dd794f02aec24b7607f342e8.webp",
+ "https://file.rene.wang/clipboard-1768908121691-b3517bf882633.png": "/blog/assetsd3cae44cba0d3f57df6440b46246e5e7.webp",
+ "https://file.rene.wang/clipboard-1768908209289-9d3ecff50142f.png": "/blog/assets75a5cf08b3e432d2477899d30acc9d47.webp",
+ "https://file.rene.wang/clipboard-1768908230723-3fce0ae5baf9b.png": "/blog/assets8e9b164fa30c795850ce8fa8ef7e7c24.webp",
+ "https://file.rene.wang/clipboard-1768908420554-e3b90ce1a2e5.png": "/blog/assets98cddf4b80b8bac0c250a5236062d198.webp",
+ "https://file.rene.wang/clipboard-1768908630618-30748e3c30adf.png": "/blog/assetsdd913561927c64d32bd390cee6846f9a.webp",
+ "https://file.rene.wang/clipboard-1768908653789-cc68b35708f2b.png": "/blog/assets3c160860feef0bd7c653eeb46f683445.webp",
+ "https://file.rene.wang/clipboard-1768908678216-fb89263572506.png": "/blog/assets974acc551878f2f395518a3fbb9bd924.webp",
+ "https://file.rene.wang/clipboard-1768908724824-2812deaf9e521.png": "/blog/assets62f82cba03d5dcad5465ec6c626aeb05.webp",
+ "https://file.rene.wang/clipboard-1768908820019-98c5a12b19fd6.png": "/blog/assets99a30932374a5f6193de7c842a34850f.webp",
+ "https://file.rene.wang/clipboard-1768908871983-94def40e520bb.png": "/blog/assets8b75f09941172c3a8620617cddfb7a4b.webp",
+ "https://file.rene.wang/clipboard-1768908943850-00bc4e05bc6bb.png": "/blog/assets7caf7e0d83b8a4f3d177283bb0bc55d1.webp",
+ "https://file.rene.wang/clipboard-1768960626260-35c3384ade91c.png": "/blog/assetsbcd98b0913d2dfc30d5a2b5523115d33.webp",
+ "https://file.rene.wang/clipboard-1768961800651-151a9b076745c.png": "/blog/assets6ebefe8183f31de4de5bac1a921fb153.webp",
+ "https://file.rene.wang/clipboard-1768961895831-d380de3507b63.png": "/blog/assets27b2bf8596f8e65d545322c66a0d81a2.webp",
+ "https://file.rene.wang/clipboard-1768961929559-f1d7d74c54ca1.png": "/blog/assets07820089deb72e5636024ae7e3d1855f.webp",
+ "https://file.rene.wang/clipboard-1768962259734-dc28b56340cb1.png": "/blog/assetseb9b70814679291052dfa4618a44a856.webp",
+ "https://file.rene.wang/clipboard-1768962364687-81d03308f7b3f.png": "/blog/assetse14ddb728d66905c164664b8b5e044d3.webp",
+ "https://file.rene.wang/clipboard-1768962379267-f580e519a03f2.png": "/blog/assetsd6eba2b1881977c9533ba86c1cd3dfce.webp",
+ "https://file.rene.wang/clipboard-1768963219814-ba3d14d1facf3.png": "/blog/assets2964497066067ca0588a7767eb4c1709.webp",
+ "https://file.rene.wang/clipboard-1768963283361-5b62ccd3a1862.png": "/blog/assetsbcd98b0913d2dfc30d5a2b5523115d33.webp",
+ "https://file.rene.wang/clipboard-1768976299511-a09372a7e6fd9.png": "/blog/assets60bf3667e56862024d047444d9b4c2fb.webp",
+ "https://file.rene.wang/clipboard-1768976422506-24d64ffd3fd26.png": "/blog/assets7bf0102f1cae47bf24aeb01eaa2796d9.webp",
+ "https://file.rene.wang/clipboard-1768976706650-e810aaaa86f44.png": "/blog/assets7caf7e0d83b8a4f3d177283bb0bc55d1.webp",
+ "https://file.rene.wang/clipboard-1769000274218-d02c4c8024709.png": "/blog/assets3cdf933016e6f53bca12b8cedb17061f.webp",
+ "https://file.rene.wang/clipboard-1769000328858-48f0503640245.png": "/blog/assets04d6fae3d9aa3c33697028f1cc9f4706.webp",
+ "https://file.rene.wang/clipboard-1769001225111-af0244fff25f3.png": "/blog/assetsb811f2aae8e8346aef16793d6bd10f88.webp",
+ "https://file.rene.wang/clipboard-1769049968016-4a62699ad0b36.png": "/blog/assetsa88afc117d283790187c366f29d03284.webp",
+ "https://file.rene.wang/clipboard-1769050621749-3709f9939d7d4.png": "/blog/assetsc89931ace11a936d87d0b87a6bca1069.webp",
+ "https://file.rene.wang/clipboard-1769050642741-4b72fd17934cf.png": "/blog/assetsdc6362a2c6db476fc1b7b9d6bf443af2.webp",
+ "https://file.rene.wang/clipboard-1769050808722-bd6e724a5b54b.png": "/blog/assets55bea1dc3e938580591d75d72908835a.webp",
+ "https://file.rene.wang/clipboard-1769050853107-750be5f83cbe3.png": "/blog/assetse6139c4d5b1b26b05f41a579d98fc6f3.webp",
+ "https://file.rene.wang/clipboard-1769052898732-b7bb78ae1f1f8.png": "/blog/assetsafa74c85aafea8a057e6047b0823e280.webp",
+ "https://file.rene.wang/clipboard-1769056077960-cac34bc157a65.png": "/blog/assetsa8e173bec038d1d21d413f6fa0ace342.webp",
+ "https://file.rene.wang/clipboard-1769155711708-710967bee57bc.png": "/blog/assets7f3b38c1d76cceb91edb29d6b1eb60db.webp",
+ "https://file.rene.wang/clipboard-1769155737647-1b4fc6558f029.png": "/blog/assets3a7f0b29839603336e39e923b423409b.webp",
+ "https://file.rene.wang/clipboard-1769155791342-7f43b72cc6b42.png": "/blog/assets35e6aa692b0c16009c61964279514166.webp",
+ "https://file.rene.wang/clipboard-1769155818070-7eb403550b6c7.png": "/blog/assetsce5d6dc93676f974be2e162e8ace03f0.webp",
+ "https://file.rene.wang/clipboard-1769155880302-272fbd2c5290b.png": "/blog/assetsdf48eed9de76b7e37c269b294285f09d.webp",
+ "https://file.rene.wang/clipboard-1769155935435-93dab92dd0f44.png": "/blog/assets902eb746fe2042fc2ea831c71002be72.webp",
+ "https://file.rene.wang/clipboard-1769155973881-ff1ee142d5b8f.png": "/blog/assets5cc27b8cae995074da20d4ffe06a1460.webp",
+ "https://file.rene.wang/clipboard-1769156005535-c2e79e11f4b56.png": "/blog/assets2a36d86a4eed6e7938dd6e9c684701ed.webp",
+ "https://file.rene.wang/clipboard-1769156036607-2b4fe37c4b56c.png": "/blog/assetsc0efdb82443556ae3acefe00099b3f23.webp",
+ "https://file.rene.wang/clipboard-1769156050787-ecf4f48474ae2.png": "/blog/assetse743f0a47127390dde766a0a790476db.webp",
+ "https://file.rene.wang/lobehub/467951f5-ad65-498d-aea9-fca8f35a4314.png": "/blog/assets907ea775d228958baca38e2dbb65939a.webp",
+ "https://file.rene.wang/lobehub/58d91528-373a-4a42-b520-cf6cb1f8ce1e.png": "/blog/assets7dccdd4df55aede71001da649639437f.webp",
+ "https://file.rene.wang/lobehub/ee700103-3c08-41dc-9ddf-c7705bb7bc6a.png": "/blog/assets196d679bc7071abbf71f2a8566f05aa3.webp",
+ "https://file.rene.wang/lobehub/image-2.png": "/blog/assets58737403bd41f2f0ea70bdea609e9169.webp",
+ "https://file.rene.wang/lobehub/image-3.png": "/blog/assets9e2139a302264b278eb3f4296640fe8a.webp",
+ "https://file.rene.wang/lobehub/image-4.png": "/blog/assets095af3a0a0f850fc206fc3bbc19a4095.webp",
+ "https://github.com/lobehub/lobe-chat/assets/13883964/48a0b702-05bd-4ce4-a007-a8ad00a36e5a": "/blog/assets13883964/48a0b702-05bd-4ce4-a007-a8ad00a36e5a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/13883964/4f9d83bd-b3fc-4abc-bcf4-ccbad65c219d": "/blog/assets13883964/4f9d83bd-b3fc-4abc-bcf4-ccbad65c219d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/13883964/c9d66fa0-158c-4bd3-a1fa-969e638259d2": "/blog/assets13883964/c9d66fa0-158c-4bd3-a1fa-969e638259d2.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/0c2c399f-2ed3-44b5-97c8-53e007e8c095": "/blog/assets17870709/0c2c399f-2ed3-44b5-97c8-53e007e8c095.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/1148639c-2687-4a9c-9950-8ca8672f34b6": "/blog/assets17870709/1148639c-2687-4a9c-9950-8ca8672f34b6.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/15e09e71-5899-4805-9c5e-1f7c57be04ae": "/blog/assets17870709/15e09e71-5899-4805-9c5e-1f7c57be04ae.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/1d840e27-fa74-4e71-b777-330bf41d6dff": "/blog/assets17870709/1d840e27-fa74-4e71-b777-330bf41d6dff.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/21b94782-875b-4dee-a572-3c5843f3e1e3": "/blog/assets17870709/21b94782-875b-4dee-a572-3c5843f3e1e3.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/385f663f-cae2-4383-9bb0-52c45e5d7d7a": "/blog/assets17870709/385f663f-cae2-4383-9bb0-52c45e5d7d7a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/3f31bc33-509f-4ad2-ba81-280c2a6ec5fa": "/blog/assets17870709/3f31bc33-509f-4ad2-ba81-280c2a6ec5fa.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/4e56e080-9b8c-42e1-87e1-11123dbb9067": "/blog/assets17870709/4e56e080-9b8c-42e1-87e1-11123dbb9067.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/4fae3e6f-e680-4471-93c4-987c19d7170a": "/blog/assets17870709/4fae3e6f-e680-4471-93c4-987c19d7170a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/51f8f8f6-5d8a-4cf0-a2e5-d96c69fe05b8": "/blog/assets17870709/51f8f8f6-5d8a-4cf0-a2e5-d96c69fe05b8.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/5efa34c2-6523-43e6-9ade-70ab5d802e13": "/blog/assets17870709/5efa34c2-6523-43e6-9ade-70ab5d802e13.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/620b956b-dcb2-442a-8bb1-9aa22681dfa4": "/blog/assets17870709/620b956b-dcb2-442a-8bb1-9aa22681dfa4.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/63d9f6d4-5b78-4c65-8cd1-ff8b7f143406": "/blog/assets17870709/63d9f6d4-5b78-4c65-8cd1-ff8b7f143406.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/65d2dd2a-fdcf-4f3f-a6af-4ed5164a510d": "/blog/assets17870709/65d2dd2a-fdcf-4f3f-a6af-4ed5164a510d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/6cdc5c0e-0508-44ed-a283-03f6b538ed8a": "/blog/assets17870709/6cdc5c0e-0508-44ed-a283-03f6b538ed8a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/79faa59a-dfc0-4365-a679-5fc12c12bc70": "/blog/assets17870709/79faa59a-dfc0-4365-a679-5fc12c12bc70.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/80e22593-dc0f-482c-99bf-69acdb62d952": "/blog/assets17870709/80e22593-dc0f-482c-99bf-69acdb62d952.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/82cf4f5c-be5c-4126-a475-3a03468a9c39": "/blog/assets17870709/82cf4f5c-be5c-4126-a475-3a03468a9c39.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/a3f9f63a-48f8-4567-b960-7f3636c0d4ed": "/blog/assets17870709/a3f9f63a-48f8-4567-b960-7f3636c0d4ed.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/a77b0fb2-87d7-4527-a804-2f7ad3634aa5": "/blog/assets17870709/a77b0fb2-87d7-4527-a804-2f7ad3634aa5.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ab94a7b5-6bc4-41e0-97bc-724ee8e315db": "/blog/assets17870709/ab94a7b5-6bc4-41e0-97bc-724ee8e315db.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ac10d9dd-a977-43fb-8397-b2bbdee6a1a1": "/blog/assets17870709/ac10d9dd-a977-43fb-8397-b2bbdee6a1a1.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/b4a01219-e7b1-48a0-888c-f0271b18e3a6": "/blog/assets17870709/b4a01219-e7b1-48a0-888c-f0271b18e3a6.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ba8e688a-e0c1-4567-9013-94205f83fc60": "/blog/assets17870709/ba8e688a-e0c1-4567-9013-94205f83fc60.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/c9e5eafc-ca22-496b-a88d-cc0ae53bf720": "/blog/assets17870709/c9e5eafc-ca22-496b-a88d-cc0ae53bf720.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ddb44517-8696-4492-acd9-25b590f6069c": "/blog/assets17870709/ddb44517-8696-4492-acd9-25b590f6069c.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/e1b5f84f-015e-437c-98cc-a3431fa3b077": "/blog/assets17870709/e1b5f84f-015e-437c-98cc-a3431fa3b077.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/e6a429cb-96e1-4e85-9aa3-1334ffcad8c0": "/blog/assets17870709/e6a429cb-96e1-4e85-9aa3-1334ffcad8c0.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/eb7273f8-f0ed-4b9b-884e-96d29c406cb7": "/blog/assets17870709/eb7273f8-f0ed-4b9b-884e-96d29c406cb7.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/f579b39b-e771-402c-a1d1-620e57a10c75": "/blog/assets17870709/f579b39b-e771-402c-a1d1-620e57a10c75.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ff7ebacf-27f0-42d7-810b-00314499a084": "/blog/assets17870709/ff7ebacf-27f0-42d7-810b-00314499a084.webp",
+ "https://github.com/lobehub/lobe-chat/assets/17870709/ff9c3eb8-412b-4275-80be-177ae7b7acbc": "/blog/assets17870709/ff9c3eb8-412b-4275-80be-177ae7b7acbc.webp",
+ "https://github.com/lobehub/lobe-chat/assets/1845053/fe34fdfe-c2e4-4d6a-84d7-4ebc61b2516a": "/blog/assets1845053/fe34fdfe-c2e4-4d6a-84d7-4ebc61b2516a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/0249ea56-ab17-4aa9-a56c-9ebd556c2645": "/blog/assets28616219/0249ea56-ab17-4aa9-a56c-9ebd556c2645.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/034a328c-8465-4499-8f93-fdcdb03343cd": "/blog/assets28616219/034a328c-8465-4499-8f93-fdcdb03343cd.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/1c689738-809b-4199-b305-ba5770d39da7": "/blog/assets28616219/1c689738-809b-4199-b305-ba5770d39da7.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/1c82d707-cb6f-4924-b246-a5235a919864": "/blog/assets28616219/1c82d707-cb6f-4924-b246-a5235a919864.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/1ed8b13d-046e-47c8-bd61-116ffdf5d01b": "/blog/assets28616219/1ed8b13d-046e-47c8-bd61-116ffdf5d01b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/2bfa13df-6e20-4768-97c0-4dad06c85a2f": "/blog/assets28616219/2bfa13df-6e20-4768-97c0-4dad06c85a2f.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/2ceb210c-eca0-4439-ba27-8734d4ebb3ee": "/blog/assets28616219/2ceb210c-eca0-4439-ba27-8734d4ebb3ee.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/31b999e5-2a74-45fc-935b-f036e72a684d": "/blog/assets28616219/31b999e5-2a74-45fc-935b-f036e72a684d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/31e5f625-8dc4-4a5f-a5fd-d28d0457782d": "/blog/assets28616219/31e5f625-8dc4-4a5f-a5fd-d28d0457782d.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/3b607482-4d99-455a-bc10-3090dd4fe3c5": "/blog/assets28616219/3b607482-4d99-455a-bc10-3090dd4fe3c5.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/41f7f677-0153-4a96-b849-5ac9b7ebefee": "/blog/assets28616219/41f7f677-0153-4a96-b849-5ac9b7ebefee.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/4e81decc-776c-43b8-9a54-dfb43e9f601a": "/blog/assets28616219/4e81decc-776c-43b8-9a54-dfb43e9f601a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/54b3696b-5b13-4761-8c1b-1e664867b2dd": "/blog/assets28616219/54b3696b-5b13-4761-8c1b-1e664867b2dd.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/5fdc9479-007f-46ab-9d6e-a9603e949116": "/blog/assets28616219/5fdc9479-007f-46ab-9d6e-a9603e949116.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/69414c79-642e-4323-9641-bfa43a74fcc8": "/blog/assets28616219/69414c79-642e-4323-9641-bfa43a74fcc8.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/6c3968a8-fbbb-4268-a587-edaced2d96af": "/blog/assets28616219/6c3968a8-fbbb-4268-a587-edaced2d96af.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/7049a811-a08b-45d3-8491-970f579c2ebd": "/blog/assets28616219/7049a811-a08b-45d3-8491-970f579c2ebd.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/763b18f9-2b5f-44bb-a479-9b56d46f7397": "/blog/assets28616219/763b18f9-2b5f-44bb-a479-9b56d46f7397.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/7b0ea46c-5157-40a8-888f-f47664a4884f": "/blog/assets28616219/7b0ea46c-5157-40a8-888f-f47664a4884f.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/7f9a9a9f-fd91-4f59-aac9-3f26c6d49a1e": "/blog/assets28616219/7f9a9a9f-fd91-4f59-aac9-3f26c6d49a1e.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/89883703-7a1a-4a11-b944-5d804544e57c": "/blog/assets28616219/89883703-7a1a-4a11-b944-5d804544e57c.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/95828c11-0ae5-4dfa-84ed-854124e927a6": "/blog/assets28616219/95828c11-0ae5-4dfa-84ed-854124e927a6.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/9c0d184c-3169-40fa-9115-011cfffb9ca7": "/blog/assets28616219/9c0d184c-3169-40fa-9115-011cfffb9ca7.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/9cb5150d-6e1e-4c59-9a18-4e418dce1a5d": "/blog/assets28616219/9cb5150d-6e1e-4c59-9a18-4e418dce1a5d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/a7fd85d5-fd32-4756-814e-ff7ab7567fe1": "/blog/assets28616219/a7fd85d5-fd32-4756-814e-ff7ab7567fe1.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/ab008be7-26b2-4b78-8bd9-24301bf34d23": "/blog/assets28616219/ab008be7-26b2-4b78-8bd9-24301bf34d23.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/b04723eb-64ad-4028-a901-dc4e4ee2d0c1": "/blog/assets28616219/b04723eb-64ad-4028-a901-dc4e4ee2d0c1.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/b3a78112-adc8-4837-b4e3-48f67058f16e": "/blog/assets28616219/b3a78112-adc8-4837-b4e3-48f67058f16e.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/b6b8226b-183f-4249-8255-663a5e9f5af4": "/blog/assets28616219/b6b8226b-183f-4249-8255-663a5e9f5af4.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/bb9cd00f-b20c-4d7b-9c60-b921d350e319": "/blog/assets28616219/bb9cd00f-b20c-4d7b-9c60-b921d350e319.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/bdeb678e-6502-4667-86b1-504221ee7ded": "/blog/assets28616219/bdeb678e-6502-4667-86b1-504221ee7ded.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/be0c95c0-6693-44ee-a490-7e8dfaa8b34d": "/blog/assets28616219/be0c95c0-6693-44ee-a490-7e8dfaa8b34d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/c1f945d1-f3e2-4100-b6bb-24d4cb13c438": "/blog/assets28616219/c1f945d1-f3e2-4100-b6bb-24d4cb13c438.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/c32b56db-c6a1-4876-9bc3-acbd37ec0c0c": "/blog/assets28616219/c32b56db-c6a1-4876-9bc3-acbd37ec0c0c.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/c9c58141-5ec6-43f1-8d97-0a84a04dcdba": "/blog/assets28616219/c9c58141-5ec6-43f1-8d97-0a84a04dcdba.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/c9f74ec4-ce63-4ce9-b9e2-34bda6fda10b": "/blog/assets28616219/c9f74ec4-ce63-4ce9-b9e2-34bda6fda10b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/cd74152d-0ae8-44fd-b815-3307c56a3c18": "/blog/assets28616219/cd74152d-0ae8-44fd-b815-3307c56a3c18.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/d4a710cd-6404-4196-90d0-cd08ca385074": "/blog/assets28616219/d4a710cd-6404-4196-90d0-cd08ca385074.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/d6f5a918-7b50-4d6e-83a6-3894ab930ddf": "/blog/assets28616219/d6f5a918-7b50-4d6e-83a6-3894ab930ddf.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/da84edc3-46f7-4e2b-a0cd-dc33a98bf5cb": "/blog/assets28616219/da84edc3-46f7-4e2b-a0cd-dc33a98bf5cb.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/dfcc2cb3-2958-4498-a8a4-51bec584fe7d": "/blog/assets28616219/dfcc2cb3-2958-4498-a8a4-51bec584fe7d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/e0608cca-f62f-414a-bc55-28a61ba21f14": "/blog/assets28616219/e0608cca-f62f-414a-bc55-28a61ba21f14.webp",
"https://github.com/lobehub/lobe-chat/assets/28616219/f29475a3-f346-4196-a435-41a6373ab9e2": "/blog/assets/28616219/f29475a3-f346-4196-a435-41a6373ab9e2.mp4",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/f3885537-6d43-422f-b1b8-e70732401025": "/blog/assets28616219/f3885537-6d43-422f-b1b8-e70732401025.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/f50f47fb-5e8e-4930-bf4e-8cf6f5b8afb9": "/blog/assets28616219/f50f47fb-5e8e-4930-bf4e-8cf6f5b8afb9.webp",
+ "https://github.com/lobehub/lobe-chat/assets/28616219/fab4abb2-584b-49de-9340-813382951635": "/blog/assets28616219/fab4abb2-584b-49de-9340-813382951635.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/0beda150-d0b6-43cf-a9f1-fce928b83a96": "/blog/assets30863298/0beda150-d0b6-43cf-a9f1-fce928b83a96.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/3e0082df-9b6f-46f3-b67f-bdc79e1eb2cc": "/blog/assets30863298/3e0082df-9b6f-46f3-b67f-bdc79e1eb2cc.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/62fbd09f-a69a-4460-949b-0f6285fa65b9": "/blog/assets30863298/62fbd09f-a69a-4460-949b-0f6285fa65b9.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/880749a6-5ba4-4e20-a968-b583a54de7fa": "/blog/assets30863298/880749a6-5ba4-4e20-a968-b583a54de7fa.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/9891347e-a338-4aa9-8714-f16c8dbcfcec": "/blog/assets30863298/9891347e-a338-4aa9-8714-f16c8dbcfcec.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/df4cea85-616a-46f5-b2de-42725d9b82a6": "/blog/assets30863298/df4cea85-616a-46f5-b2de-42725d9b82a6.webp",
+ "https://github.com/lobehub/lobe-chat/assets/30863298/f068190f-0027-4d3b-8667-d632e43d5a86": "/blog/assets30863298/f068190f-0027-4d3b-8667-d632e43d5a86.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/0275a552-f189-42b5-bf40-f9891c428b3d": "/blog/assets34400653/0275a552-f189-42b5-bf40-f9891c428b3d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/094d701f-ce80-464a-bbbc-0a5ecc8d08e3": "/blog/assets34400653/094d701f-ce80-464a-bbbc-0a5ecc8d08e3.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/0cc6c9b8-4688-472b-a80f-f84c5ebbc719": "/blog/assets34400653/0cc6c9b8-4688-472b-a80f-f84c5ebbc719.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/11442ce4-a615-49c4-937a-ca2ae93dd27c": "/blog/assets34400653/11442ce4-a615-49c4-937a-ca2ae93dd27c.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/164b34b5-671e-418d-b34a-3b70f1156d06": "/blog/assets34400653/164b34b5-671e-418d-b34a-3b70f1156d06.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/198217a6-84fa-441c-bcbe-8cded1106d6c": "/blog/assets34400653/198217a6-84fa-441c-bcbe-8cded1106d6c.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/22ce5a72-bc46-41f3-b402-bda6dee90184": "/blog/assets34400653/22ce5a72-bc46-41f3-b402-bda6dee90184.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/28d89add-cb18-4b86-9807-f2a5ed65ceba": "/blog/assets34400653/28d89add-cb18-4b86-9807-f2a5ed65ceba.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/28e025dd-367b-4add-85b6-499f4aacda61": "/blog/assets34400653/28e025dd-367b-4add-85b6-499f4aacda61.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/2afffe79-1d37-423c-9363-f09605d5e640": "/blog/assets34400653/2afffe79-1d37-423c-9363-f09605d5e640.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/33d8ce3b-0083-48aa-9a66-3825e726c4de": "/blog/assets34400653/33d8ce3b-0083-48aa-9a66-3825e726c4de.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/34d92da4-783f-4f16-8c4a-9d8e9a03c8da": "/blog/assets34400653/34d92da4-783f-4f16-8c4a-9d8e9a03c8da.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/3c64b747-f6f1-4ed2-84bc-bfa8e5d90966": "/blog/assets34400653/3c64b747-f6f1-4ed2-84bc-bfa8e5d90966.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/40520a43-ac03-4954-8a4d-282fbb946066": "/blog/assets34400653/40520a43-ac03-4954-8a4d-282fbb946066.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/4485fbc3-c309-4c4e-83ee-cb82392307a1": "/blog/assets34400653/4485fbc3-c309-4c4e-83ee-cb82392307a1.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/470e5669-650b-46cf-8024-a1476c166059": "/blog/assets34400653/470e5669-650b-46cf-8024-a1476c166059.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/497e3b20-57ca-4963-b6f4-897c9710c16e": "/blog/assets34400653/497e3b20-57ca-4963-b6f4-897c9710c16e.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/4e057b43-1e3e-4e96-a948-7cdbff303dcb": "/blog/assets34400653/4e057b43-1e3e-4e96-a948-7cdbff303dcb.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/4e0e87d1-4970-45c5-a9ef-287098f6a198": "/blog/assets34400653/4e0e87d1-4970-45c5-a9ef-287098f6a198.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/4f8d0102-7ca7-4f23-b96f-3fc5cf2cd66e": "/blog/assets34400653/4f8d0102-7ca7-4f23-b96f-3fc5cf2cd66e.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/50b73232-01fc-4ef0-939a-3e06354d1b5a": "/blog/assets34400653/50b73232-01fc-4ef0-939a-3e06354d1b5a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/5707b392-1ee6-4db6-95cb-9d6c902747d2": "/blog/assets34400653/5707b392-1ee6-4db6-95cb-9d6c902747d2.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/5c3898ab-23d7-44c2-bbd9-b255e25e400c": "/blog/assets34400653/5c3898ab-23d7-44c2-bbd9-b255e25e400c.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/663335d0-fb37-4882-9c7f-ebbd53275644": "/blog/assets34400653/663335d0-fb37-4882-9c7f-ebbd53275644.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/6942287e-fbb1-4a10-a1ce-caaa6663da1e": "/blog/assets34400653/6942287e-fbb1-4a10-a1ce-caaa6663da1e.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/6d6f2bc5-1407-471d-95a8-fb03193edbdb": "/blog/assets34400653/6d6f2bc5-1407-471d-95a8-fb03193edbdb.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/703f170b-c03b-4c71-b57d-c2357596bdfb": "/blog/assets34400653/703f170b-c03b-4c71-b57d-c2357596bdfb.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/72f165f4-d529-4f01-a3ac-163c66e5ea73": "/blog/assets34400653/72f165f4-d529-4f01-a3ac-163c66e5ea73.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/7468594b-3355-4cb9-85bc-c9dace137653": "/blog/assets34400653/7468594b-3355-4cb9-85bc-c9dace137653.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/74768b36-28ca-4ec3-a42d-b32abe2c7057": "/blog/assets34400653/74768b36-28ca-4ec3-a42d-b32abe2c7057.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/750b5cd1-f16a-4330-b899-c27b28b1e837": "/blog/assets34400653/750b5cd1-f16a-4330-b899-c27b28b1e837.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/77b5feee-3f46-486d-9a36-31ff60efa5e9": "/blog/assets34400653/77b5feee-3f46-486d-9a36-31ff60efa5e9.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/7a012a11-87bd-4366-a567-0ebf6d12ae10": "/blog/assets34400653/7a012a11-87bd-4366-a567-0ebf6d12ae10.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/88948a3a-6681-4a8d-9734-a464e09e4957": "/blog/assets34400653/88948a3a-6681-4a8d-9734-a464e09e4957.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/8bf73498-4649-4c4d-a95b-b68447599781": "/blog/assets34400653/8bf73498-4649-4c4d-a95b-b68447599781.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/94836b32-7fc5-45ca-8556-7a23f53b15f9": "/blog/assets34400653/94836b32-7fc5-45ca-8556-7a23f53b15f9.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/95717e2b-1a55-4fca-a96b-b1c186ed4563": "/blog/assets34400653/95717e2b-1a55-4fca-a96b-b1c186ed4563.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/9a8dc1d4-152b-415f-a7cd-8f0c8fbb9913": "/blog/assets34400653/9a8dc1d4-152b-415f-a7cd-8f0c8fbb9913.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/a024af40-e1d9-4df0-b998-0e6e87cebe5b": "/blog/assets34400653/a024af40-e1d9-4df0-b998-0e6e87cebe5b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/ac2ed716-d270-43f6-856b-3ff81265f4e6": "/blog/assets34400653/ac2ed716-d270-43f6-856b-3ff81265f4e6.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/b4d12904-9d5d-46de-bd66-901eeb9c8e52": "/blog/assets34400653/b4d12904-9d5d-46de-bd66-901eeb9c8e52.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/b695f26a-5bcd-477c-af08-bf03adb717c2": "/blog/assets34400653/b695f26a-5bcd-477c-af08-bf03adb717c2.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/b839e04e-0cef-46a3-bb84-0484a3f51c69": "/blog/assets34400653/b839e04e-0cef-46a3-bb84-0484a3f51c69.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/b83da559-73d1-4734-87d5-5e22955a9da2": "/blog/assets34400653/b83da559-73d1-4734-87d5-5e22955a9da2.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/ba3595e3-d9cb-4d0d-b414-8306b16df186": "/blog/assets34400653/ba3595e3-d9cb-4d0d-b414-8306b16df186.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/c1d1d816-6339-41a6-9bc9-e2c3b2762291": "/blog/assets34400653/c1d1d816-6339-41a6-9bc9-e2c3b2762291.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/c2e6a58b-95eb-4f40-8add-83f4316a719b": "/blog/assets34400653/c2e6a58b-95eb-4f40-8add-83f4316a719b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/cbc23ca9-1188-4b85-8ef0-e75ac7d74b92": "/blog/assets34400653/cbc23ca9-1188-4b85-8ef0-e75ac7d74b92.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/cec2e032-54e1-49b1-a212-4d9736927156": "/blog/assets34400653/cec2e032-54e1-49b1-a212-4d9736927156.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/e6058456-8f9d-40c1-9ae5-1e9d5eeb9476": "/blog/assets34400653/e6058456-8f9d-40c1-9ae5-1e9d5eeb9476.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/eb57ca57-4f45-4409-91ce-9fa9c7c626d6": "/blog/assets34400653/eb57ca57-4f45-4409-91ce-9fa9c7c626d6.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/eee046cb-189b-4635-ac94-19d50b17a18a": "/blog/assets34400653/eee046cb-189b-4635-ac94-19d50b17a18a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/ef9ed1b8-6828-4dd6-b86b-bb0b4fa40619": "/blog/assets34400653/ef9ed1b8-6828-4dd6-b86b-bb0b4fa40619.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f4a23c2a-503e-4731-bc4d-922bce0b6039": "/blog/assets34400653/f4a23c2a-503e-4731-bc4d-922bce0b6039.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f539d104-6d64-4cc7-8781-3b36b00d32d0": "/blog/assets34400653/f539d104-6d64-4cc7-8781-3b36b00d32d0.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f6e46f1c-0ac9-42ae-8e83-ddb0cc6c5bf8": "/blog/assets34400653/f6e46f1c-0ac9-42ae-8e83-ddb0cc6c5bf8.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f7d59c7a-abd0-4ebd-8c72-ca10c47a0f1a": "/blog/assets34400653/f7d59c7a-abd0-4ebd-8c72-ca10c47a0f1a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f892fe64-c734-4944-91ff-9916a41bd1c9": "/blog/assets34400653/f892fe64-c734-4944-91ff-9916a41bd1c9.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/f9a5a394-c8f8-4567-9d51-cf84811418ca": "/blog/assets34400653/f9a5a394-c8f8-4567-9d51-cf84811418ca.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/fa725e49-4c17-4055-82bc-98a31e73fa54": "/blog/assets34400653/fa725e49-4c17-4055-82bc-98a31e73fa54.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/fb0f7574-c2f5-40d6-8613-3749e85ce881": "/blog/assets34400653/fb0f7574-c2f5-40d6-8613-3749e85ce881.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/fcdcda9b-8668-4eac-b5cb-04803a888e92": "/blog/assets34400653/fcdcda9b-8668-4eac-b5cb-04803a888e92.webp",
+ "https://github.com/lobehub/lobe-chat/assets/34400653/fd06c0aa-4bd3-4f4e-bf2b-38374dfe775d": "/blog/assets34400653/fd06c0aa-4bd3-4f4e-bf2b-38374dfe775d.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/23131ca1-9e84-4a89-a840-ef79c4bc0251": "/blog/assets64475363/23131ca1-9e84-4a89-a840-ef79c4bc0251.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/2f919f99-2aaa-4fa7-9938-169d3ed09db7": "/blog/assets64475363/2f919f99-2aaa-4fa7-9938-169d3ed09db7.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/358bca8d-3d82-4e76-9a5e-90d16a39efde": "/blog/assets64475363/358bca8d-3d82-4e76-9a5e-90d16a39efde.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/6d69bdca-7d18-4cbc-b3e0-220d8815cd29": "/blog/assets64475363/6d69bdca-7d18-4cbc-b3e0-220d8815cd29.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/995780cb-9096-4a36-ab17-d422703ab970": "/blog/assets64475363/995780cb-9096-4a36-ab17-d422703ab970.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/c6108133-a918-48b0-ab1a-e3fa607572a4": "/blog/assets64475363/c6108133-a918-48b0-ab1a-e3fa607572a4.webp",
+ "https://github.com/lobehub/lobe-chat/assets/64475363/d7ef5ad1-b1a3-435e-b1bc-4436d2b6fecd": "/blog/assets64475363/d7ef5ad1-b1a3-435e-b1bc-4436d2b6fecd.webp",
+ "https://github.com/lobehub/lobe-chat/assets/67304509/4244634e-5f68-48d5-aac0-e5f4b06d1c4b": "/blog/assets67304509/4244634e-5f68-48d5-aac0-e5f4b06d1c4b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/12451b47-8dcd-40a9-b18d-2806b07efecc": "/blog/assets8692892/12451b47-8dcd-40a9-b18d-2806b07efecc.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/14ecaa12-74a1-4e2f-b171-9d9ac09d3d63": "/blog/assets8692892/14ecaa12-74a1-4e2f-b171-9d9ac09d3d63.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/1699bf46-0c8d-4238-9eb5-34282bfe529a": "/blog/assets8692892/1699bf46-0c8d-4238-9eb5-34282bfe529a.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/20a257b5-d086-46f3-b5c2-f76394b11f55": "/blog/assets8692892/20a257b5-d086-46f3-b5c2-f76394b11f55.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/3564110d-bef9-47f3-b775-e5f28b4275b2": "/blog/assets8692892/3564110d-bef9-47f3-b775-e5f28b4275b2.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/375b3d73-6796-465c-9063-f2762093f763": "/blog/assets8692892/375b3d73-6796-465c-9063-f2762093f763.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/407b4eed-7f21-4aa6-b68f-9bae2faf09d0": "/blog/assets8692892/407b4eed-7f21-4aa6-b68f-9bae2faf09d0.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/40bb6b4c-18e0-4ae5-abae-ae0cf202cf08": "/blog/assets8692892/40bb6b4c-18e0-4ae5-abae-ae0cf202cf08.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/4ff3c3a0-9ca0-45ff-8f3a-219f4445098b": "/blog/assets8692892/4ff3c3a0-9ca0-45ff-8f3a-219f4445098b.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/5a0e6c58-9e6f-4ffb-8af2-32e48cfb45b0": "/blog/assets8692892/5a0e6c58-9e6f-4ffb-8af2-32e48cfb45b0.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/79c55d44-8dcb-429c-a072-d3eb014bbceb": "/blog/assets8692892/79c55d44-8dcb-429c-a072-d3eb014bbceb.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/9d5cb651-ad10-47c7-8c8b-2256163c5521": "/blog/assets8692892/9d5cb651-ad10-47c7-8c8b-2256163c5521.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/b05473ad-04a6-4ebc-9810-116c778d4448": "/blog/assets8692892/b05473ad-04a6-4ebc-9810-116c778d4448.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/bee24764-aa42-47d9-ad43-bcb8e7b35bc3": "/blog/assets8692892/bee24764-aa42-47d9-ad43-bcb8e7b35bc3.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/d12a2661-7b98-484f-8f3d-07e84d42ae08": "/blog/assets8692892/d12a2661-7b98-484f-8f3d-07e84d42ae08.webp",
+ "https://github.com/lobehub/lobe-chat/assets/8692892/d84235b1-45f6-447d-bdd9-58ab9527dc9b": "/blog/assets8692892/d84235b1-45f6-447d-bdd9-58ab9527dc9b.webp",
+ "https://github.com/user-attachments/assets/00c02637-873e-4e7e-9dc3-a95085b16dd7": "/blog/assets05d786345e99f92d11baae5667a04a62.webp",
"https://github.com/user-attachments/assets/03433283-08a5-481a-8f6c-069b2fc6bace": "/blog/assets/8d4c2cc0ce8654fa8ac06cc036a7f941.webp",
+ "https://github.com/user-attachments/assets/08ced88b-4968-46e8-b1da-0c04ddf5b743": "/blog/assets5fd5fb937b9b05d50ce8659cea3210a4.webp",
+ "https://github.com/user-attachments/assets/09be499c-3b04-4dd6-a161-6e8ebe788354": "/blog/assets65003d69fa745e7cac376a79ea4bb742.webp",
+ "https://github.com/user-attachments/assets/09c994cf-78f8-46ea-9fef-a06022c0f6d7": "/blog/assets6b6c251a2d4a77784c08fb07fc51abf9.webp",
+ "https://github.com/user-attachments/assets/0af85438-ac99-4c95-b888-a17e88ede043": "/blog/assetsf1e1ca1adaac36881ec6c3b2ce1a099e.webp",
+ "https://github.com/user-attachments/assets/0c73c453-6ee3-4f90-bc5d-119c52c38fef": "/blog/assets2a74d926ae05faf2ee9f8da858bec3f6.webp",
+ "https://github.com/user-attachments/assets/0e2fdc5d-9623-4a74-a7f6-dcb802d52297": "/blog/assets61324ea13398c8920f798b97ac19d58f.webp",
"https://github.com/user-attachments/assets/0e3a7174-6b66-4432-a319-dff60b033c24": "/blog/assets/39d7890f8cbe21e77db8d3c94f7f22e4.webp",
+ "https://github.com/user-attachments/assets/0f79c266-cce5-4936-aabd-4c8f19196d91": "/blog/assets6b67dabe7b9226cdff1bace5a3b8ab18.webp",
+ "https://github.com/user-attachments/assets/1028aa1a-6c19-4191-b28a-2020e5637155": "/blog/assetsd9e6fe2197270f0f774accd6abcf4019.webp",
+ "https://github.com/user-attachments/assets/1077bee5-b379-4063-b7bd-23b98ec146e2": "/blog/assetsa74aedc4fbbfb2caf6b51f286922b576.webp",
+ "https://github.com/user-attachments/assets/12863a0e-a1ee-406d-8dee-011b20701fd6": "/blog/assets513c9045fef49b4d46549f485e69f505.webp",
+ "https://github.com/user-attachments/assets/12c1957d-f050-4235-95da-d55ddedfa6c9": "/blog/assets74f5de8ee68e57b472d1fc56a2df40d5.webp",
+ "https://github.com/user-attachments/assets/143ff392-97b5-427a-97a7-f2f577915728": "/blog/assetsc52da5833158f3b3143e40bf2a534ac7.webp",
+ "https://github.com/user-attachments/assets/14696698-03f7-4856-b36c-9a53997eb12c": "/blog/assets710bf5a4704e520976b19dd2466fa56a.webp",
+ "https://github.com/user-attachments/assets/15af6d94-af4f-4aa9-bbab-7a46e9f9e837": "/blog/assetsffb84575674e2bc5dfdd07af8f41e794.webp",
+ "https://github.com/user-attachments/assets/15d92756-92f0-45da-8f95-bfe725d13003": "/blog/assets653a83fe7d837e0d225c1de12e60cf92.webp",
+ "https://github.com/user-attachments/assets/162bc64e-0d34-4a4e-815a-028247b73143": "/blog/assets308f9fd45d0e8a140c1c18e6c92a1a57.webp",
+ "https://github.com/user-attachments/assets/16cd9aef-c87b-48a4-95c0-b666082e7515": "/blog/assets0ceb7e446f9a850df283093563ba7803.webp",
+ "https://github.com/user-attachments/assets/199b862a-5de4-4a54-83b2-f4dbf69be902": "/blog/assetsb9d1f02ab6c26f8a2c7873a949b4dd3c.webp",
+ "https://github.com/user-attachments/assets/1a7e9600-cd0f-4c82-9d32-4e61bbb351cc": "/blog/assets5997a6461e20103f5bc9d6b78b872833.webp",
+ "https://github.com/user-attachments/assets/1bf1a5f0-32ad-418c-a8d1-6c54740f50b9": "/blog/assets4d0d191b487c114abf084eb7f2dc381c.webp",
+ "https://github.com/user-attachments/assets/1c6a3e42-8e24-4148-b2c3-0bfe60a8cf77": "/blog/assets8096422e62e10dcd58efe75c616f9e88.webp",
+ "https://github.com/user-attachments/assets/1d77cca4-7363-4a46-9ad5-10604e111d7c": "/blog/assets1049abec5850cebf8ce12cd50199b9c5.webp",
+ "https://github.com/user-attachments/assets/1e33aff2-6186-4e1f-80a8-4a2c855d8cc1": "/blog/assets6f2a84bee4245ca507e98e96247d5c5e.webp",
+ "https://github.com/user-attachments/assets/1fb5df18-5261-483e-a445-96f52f80dd20": "/blog/assets69146738e31a47ac6425070208ebd906.webp",
"https://github.com/user-attachments/assets/2048b4c2-4a56-4029-acf9-71e35ff08652": "/blog/assets/d9cbfcbef130183bc490d515d8a38aa4.webp",
+ "https://github.com/user-attachments/assets/21c52e2a-b2f8-4de8-a5d4-cf3444608db7": "/blog/assets50607dece1bbffe80fdcbe76324ff9b6.webp",
+ "https://github.com/user-attachments/assets/22e1a039-5e6e-4c40-8266-19821677618a": "/blog/assets89b45345c84f8b7c3bf4d554169689ac.webp",
+ "https://github.com/user-attachments/assets/237864d6-cc5d-4fe4-8a2b-c278016855c5": "/blog/assetsf3e7c2e961d1d2886fe231a4ac59e2f1.webp",
"https://github.com/user-attachments/assets/2787824c-a13c-466c-ba6f-820bddfe099f": "/blog/assets/8d6c17a6ea5e784edf4449fb18ca3f76.webp",
+ "https://github.com/user-attachments/assets/28590f7f-bfee-4215-b50b-8feddbf72366": "/blog/assets89a8dadc85902334ce8d2d5b78abf709.webp",
"https://github.com/user-attachments/assets/29508dda-2382-430f-bc81-fb23f02149f8": "/blog/assets/29b13dc042e3b839ad8865354afe2fac.webp",
"https://github.com/user-attachments/assets/2a4116a7-15ad-43e5-b801-cc62d8da2012": "/blog/assets/37d85fdfccff9ed56e9c6827faee01c7.webp",
+ "https://github.com/user-attachments/assets/2bb4c09d-75bb-4c46-bb2f-faf538308305": "/blog/assetsf0ebf396dbe9559eb3478f48f648a6e2.webp",
+ "https://github.com/user-attachments/assets/2dd3cde5-fa0d-4f52-b82b-28d9e89379a0": "/blog/assets66b0dfa56c1f5b3063b5ba740dd3ef8d.webp",
+ "https://github.com/user-attachments/assets/2f7c5c45-ec6a-4393-8fa9-19a4c5f52f7a": "/blog/assets89168f61edcb2ee92d2ad7064da218b2.webp",
+ "https://github.com/user-attachments/assets/3050839a-cb16-485d-8bae-1bc2f9ade632": "/blog/assetsf117203c39294f45930785d85773c83e.webp",
+ "https://github.com/user-attachments/assets/30c33426-412d-4dec-b096-317fe5880e79": "/blog/assets66829206b15b6c36fa3344835659c041.webp",
+ "https://github.com/user-attachments/assets/328e9755-8da9-4849-8569-e099924822fe": "/blog/assetsf78c85b0a0183a3ae3f2e916d59c0a67.webp",
+ "https://github.com/user-attachments/assets/35164b25-c964-42ce-9cb0-32f6ebe1d07c": "/blog/assetsb6af626eeb0e1e638d80dc9ff7a6eba9.webp",
+ "https://github.com/user-attachments/assets/37251adf-949b-4aec-bc49-bf4647e119da": "/blog/assetscd53b161a6d02424d03f8c5dcadc3dd5.webp",
+ "https://github.com/user-attachments/assets/378df8df-8ec4-436e-8451-fbc52705faee": "/blog/assetsba0243e75b0421b6dd7dadad02e4b0d6.webp",
"https://github.com/user-attachments/assets/385eaca6-daea-484a-9bea-ba7270b4753d": "/blog/assets/d6129350de510a62fe87b2d2f0fb9477.webp",
+ "https://github.com/user-attachments/assets/3ad2655e-dd20-4534-bf6d-080b3677df86": "/blog/assets48b5c19e20fb870c7bdd34bd3aefbb21.webp",
+ "https://github.com/user-attachments/assets/3c1a492d-a3d4-4570-9e74-785c2942ca41": "/blog/assets9880145be3e52b8f9dcd8343cd34a6ca.webp",
"https://github.com/user-attachments/assets/3d80e0f5-d32a-4412-85b2-e709731460a0": "/blog/assets2d409f43b58953ad5396c6beab8a0719.webp",
+ "https://github.com/user-attachments/assets/3da4c8c4-88c6-40a9-8005-6a0a44aa3b1f": "/blog/assetse717764a3618df4e56212e447a6c20cd.webp",
+ "https://github.com/user-attachments/assets/3ed3226c-3d4c-49ef-b2c0-8953dac8a92e": "/blog/assets7dbd100dac8e2614ef6b297885b3c9e2.webp",
+ "https://github.com/user-attachments/assets/411e2002-61f0-4010-9841-18e88ca895ec": "/blog/assets7c3eab218c0823fa353b1cd23afe21c3.webp",
+ "https://github.com/user-attachments/assets/420379cd-d8a4-4ab3-9a46-75dcc3d56920": "/blog/assets0ca3e3989fb3884658765ee0ef2587a0.webp",
+ "https://github.com/user-attachments/assets/4257e123-9018-4562-ac66-0f39278906f5": "/blog/assetsadbc0db573a0f581b22c30ecf243f721.webp",
+ "https://github.com/user-attachments/assets/433fdce4-0af5-417f-b80d-163c2d4f02f6": "/blog/assets4aaf8d5d092608b649230e0e6fc92df6.webp",
+ "https://github.com/user-attachments/assets/452d0b48-5ff7-4f42-a46e-68a62b87632b": "/blog/assets78232916d13ddc942ab3d0b62b639509.webp",
+ "https://github.com/user-attachments/assets/467bb431-ca0d-4bb4-ac17-e5e2b764a770": "/blog/assetsff480f9009cf873852a43c252ac36828.webp",
"https://github.com/user-attachments/assets/484f28f4-017c-4ed7-948b-4a8d51f0b63a": "/blog/assets/5bbb4b421d6df63780b3c7a05f5a102d.webp",
+ "https://github.com/user-attachments/assets/4c792f62-5203-4f13-8f23-df228f70d67f": "/blog/assets94f55c97a24a08c7a5923c23ee2d7eef.webp",
+ "https://github.com/user-attachments/assets/4cbbbcce-36be-48ff-bb0b-31607a0bba5c": "/blog/assetsb33085e7553d2b7194005b102184553e.webp",
+ "https://github.com/user-attachments/assets/4d671a7c-5d94-4c4b-b4fd-71a5a0e9d227": "/blog/assetsc74cf5c8daee1515c37a85bce087f0d6.webp",
+ "https://github.com/user-attachments/assets/4e04928d-0171-48d1-afff-e22fc2faaf4e": "/blog/assetsb26b68a4875a6510ddc202dd4b40d010.webp",
+ "https://github.com/user-attachments/assets/530c7c96-bac3-456d-a429-f60e7d2ade66": "/blog/assets6541bab7e0047f9c5dbad98dc272d64d.webp",
+ "https://github.com/user-attachments/assets/5321f987-2c64-4211-8549-bd30ca9b59b9": "/blog/assetsaf57d31364a41634b10c243ed9b1f8f8.webp",
"https://github.com/user-attachments/assets/533f7a5e-8a93-4a57-a62f-8233897d72b5": "/blog/assets/9498087e85f27e692716a63cb3b58d79.webp",
+ "https://github.com/user-attachments/assets/539349dd-2c16-4f42-b525-cca74e113541": "/blog/assetsd8927338578c426b833e5cb57e0b57ec.webp",
+ "https://github.com/user-attachments/assets/55028fe5-44db-49e2-93c5-5dabbd664f10": "/blog/assets8f3458d794828c38220e88b66e994e2a.webp",
+ "https://github.com/user-attachments/assets/55230f32-b8dd-47db-a2ba-b3fe7e533dc8": "/blog/assetsc1d4290f05fe474dba74577409cad6e9.webp",
+ "https://github.com/user-attachments/assets/5abcf21d-5a6c-4fc8-8de6-bc47d4d2fa98": "/blog/assetsbd39adddc9a1cdb85ce4a0e37fa595c1.webp",
+ "https://github.com/user-attachments/assets/5b816379-c07b-40ea-bde4-df16e2e4e523": "/blog/assets2d41542b390020209bbd5814009abcdf.webp",
+ "https://github.com/user-attachments/assets/5d672e8b-566f-4f82-bdce-947168726bc0": "/blog/assetsd89998edb6b2dc8311d8d86664d5cd4d.webp",
+ "https://github.com/user-attachments/assets/5ea37821-4ea8-437c-a15e-3b182d10f19e": "/blog/assets273112caa08528852992b81dd8f3b75d.webp",
+ "https://github.com/user-attachments/assets/5f344314-ecbc-41e6-9120-520a2d5352ff": "/blog/assets01dbe69cc092163f7ac782afb7d314c5.webp",
+ "https://github.com/user-attachments/assets/5fe4c373-ebd0-42a9-bdca-0ab7e0a2e747": "/blog/assets18168d5fe64ea34905a7e52fd82d0e9d.webp",
"https://github.com/user-attachments/assets/6069332b-8e15-4d3c-8a77-479e8bc09c23": "/blog/assets/603fefbb944bc6761ebdab5956fc0084.webp",
+ "https://github.com/user-attachments/assets/6234428d-5633-4b2f-be22-1a1772a69a55": "/blog/assets67a17a6c66592ad85dbcf190e73b182d.webp",
+ "https://github.com/user-attachments/assets/629adf4e-e9e1-40dc-b9e5-d7b908878170": "/blog/assets257e1a3a0d99ee043a4c6cb90e160a2b.webp",
"https://github.com/user-attachments/assets/635f1c74-6327-48a8-a8d9-68d7376c7749": "/blog/assets/f6d047a345e47a52592cff916c9a64ce.webp",
+ "https://github.com/user-attachments/assets/638dcd7c-2bff-4adb-bade-da2aaef872bf": "/blog/assets95e6fe7c19ebfb9ead1c5a267aaf2a4e.webp",
"https://github.com/user-attachments/assets/639ed70b-abc5-476f-9eb0-10c739e5a115": "/blog/assets/b2845057b23bccfec3bfea90e43ac381.webp",
+ "https://github.com/user-attachments/assets/63e5ced7-1d23-44e1-b933-cc3b5df47eab": "/blog/assets5f1a6cb003752055b9ed131c1715154c.webp",
+ "https://github.com/user-attachments/assets/659b5ac1-82f1-43bd-9d4b-a98491e05794": "/blog/assets856bd407c8a1510f616a4bdb1e02a883.webp",
+ "https://github.com/user-attachments/assets/669c68bf-3f85-4a6f-bb08-d0d7fb7f7417": "/blog/assets02dce7325584974cdba327fe2f996b9e.webp",
+ "https://github.com/user-attachments/assets/692e7c67-f173-45da-86ef-5c69e17988e4": "/blog/assets6b01801b405c366fa4ebe683a77f289d.webp",
"https://github.com/user-attachments/assets/6935e155-4a1d-4ab7-a61a-2b813d65bb7b": "/blog/assets/6ee2609d79281b6b915e317461013f31.webp",
+ "https://github.com/user-attachments/assets/6d068fe0-8100-4b43-b0c3-7934f54e688f": "/blog/assets87c281587b15f05b6b4e1afcd5bb47e8.webp",
+ "https://github.com/user-attachments/assets/6dbf4560-3f62-4b33-9f41-96e12b5087b1": "/blog/assets03f3f52817a626339071e6329b445cb3.webp",
+ "https://github.com/user-attachments/assets/6e383b75-09e3-42d1-8a6c-5fb7cf558f00": "/blog/assets15ecc1bbe365f3e02702631e28c7b764.webp",
+ "https://github.com/user-attachments/assets/6f9f400a-72e0-49de-94cb-5069fddf1163": "/blog/assets9db16311eb6772ea74eb63dd2d397bc0.webp",
+ "https://github.com/user-attachments/assets/702c191f-8250-4462-aed7-accb18b18dea": "/blog/assetsd56d1af67bb2be60b0c580be0a6c7110.webp",
+ "https://github.com/user-attachments/assets/71035610-0706-434e-9488-ab5819b55330": "/blog/assets18bb134dbc5792d6a624199cca8bf7d3.webp",
+ "https://github.com/user-attachments/assets/7239d611-1989-414b-a51c-444e47096d75": "/blog/assets8669131e67e5276fe0744754ba4b1645.webp",
+ "https://github.com/user-attachments/assets/7257eb0e-4e2c-4db2-981d-354598e2c60f": "/blog/assets2ad69e4e124f49710fcedf8e9827f2f3.webp",
+ "https://github.com/user-attachments/assets/72da7af1-e180-4759-84a5-a6f6ca28392e": "/blog/assets688e6e10904ad46cf7f44bba6359f90c.webp",
+ "https://github.com/user-attachments/assets/72f02ce5-9991-425b-9864-9113ee1ed6bf": "/blog/assetsfa2c650be15522ac2fd71a3e434a1b2e.webp",
+ "https://github.com/user-attachments/assets/7350f211-61ce-488e-b0e2-f0fcac25caeb": "/blog/assetsf9ed064fe764cbeff2f46910e7099a91.webp",
+ "https://github.com/user-attachments/assets/76ad163e-ee19-4f95-a712-85bea764d3ec": "/blog/assets5205b6dd0f80b8ba02c297fcdfc1aecb.webp",
+ "https://github.com/user-attachments/assets/796c94af-9bad-4e3c-b1c7-dbb17c215c56": "/blog/assetsbd8c97ef67055e3ff93c56e46c33fa8d.webp",
+ "https://github.com/user-attachments/assets/798ddb18-50c7-462a-a083-0c6841351d26": "/blog/assets11a8089b511aaa61e8982dea0a3665c5.webp",
+ "https://github.com/user-attachments/assets/7cb3019b-78c1-48e0-a64c-a6a4836affd9": "/blog/assets3ca963d92475f34b0789cfa50071bc52.webp",
+ "https://github.com/user-attachments/assets/808f8849-5738-4a60-8ccf-01e300b0dc88": "/blog/assets0f893c504377ba45a9f5cdbb5ccb1612.webp",
+ "https://github.com/user-attachments/assets/81d0349a-44fe-4dfc-bbc4-8e9a1e09567d": "/blog/assets29de82efbe7657a8b9ba7daf0904585d.webp",
+ "https://github.com/user-attachments/assets/82a7ebe0-69ad-43b6-8767-1316b443fa03": "/blog/assets5374759bfe39ca7fc864e72ddfce98d0.webp",
"https://github.com/user-attachments/assets/82bfc467-e0c6-4d99-9b1f-18e4aea24285": "/blog/assets/eb477e62217f4d1b644eff975c7ac168.webp",
+ "https://github.com/user-attachments/assets/840442b1-bf56-4a5f-9700-b3608b16a8a5": "/blog/assetsc6ff27b7134f280727e1fd7ff83ed2fa.webp",
+ "https://github.com/user-attachments/assets/84a5c971-1262-4639-b79f-c8b138530803": "/blog/assetsb09a1f1dc99b86343ae196fcfdcc3fe1.webp",
+ "https://github.com/user-attachments/assets/8570db14-dac6-4279-ab71-04a072c15490": "/blog/assetsc376d2e9e97f9ea9d788589f0a9e23d6.webp",
+ "https://github.com/user-attachments/assets/868df2eb-0c44-4419-a76a-e173094e1e17": "/blog/assetsf3ccd42bf36b1c75f06f925ffe049f0c.webp",
+ "https://github.com/user-attachments/assets/872756dc-305e-4e63-9fb7-60550280fc12": "/blog/assets56e5331e7fae3754820790c824cdc480.webp",
+ "https://github.com/user-attachments/assets/8787716c-833e-44ab-b506-922ddb6121de": "/blog/assets217222e643d99ab3ba01fe92906f3314.webp",
+ "https://github.com/user-attachments/assets/88e14294-20a6-47c6-981e-fb65453b57cd": "/blog/assets6ef9f3f3627633bb5282fe9df1d31a4a.webp",
+ "https://github.com/user-attachments/assets/8910186f-4609-4798-a588-2780dcf8db60": "/blog/assets4175fc55c2093d635f15a3287e89e977.webp",
+ "https://github.com/user-attachments/assets/899a4393-db41-45a6-97ec-9813e1f9879d": "/blog/assets88248c034ef28ca9b909219d2e7ef32a.webp",
+ "https://github.com/user-attachments/assets/8a0225e0-16ed-40ce-9cd5-553dda561679": "/blog/assets74fbd94a0dc865d2178954662dc964ae.webp",
+ "https://github.com/user-attachments/assets/8ce79bd6-f1a3-48bb-b3d0-5271c84801c2": "/blog/assets5f8cc99da9c3c1eaca284411833c99e3.webp",
+ "https://github.com/user-attachments/assets/8d90ae64-cf8e-4d90-8a31-c18ab484740b": "/blog/assets04ab03ac7920031925f7ee27846b3f7d.webp",
+ "https://github.com/user-attachments/assets/8ec7656e-1e3d-41e0-95a0-f6883135c2fc": "/blog/assets71b5cfd165bc907f437bf807048a3e67.webp",
+ "https://github.com/user-attachments/assets/91fe32a8-e5f0-47ff-b8ae-d036c8a7bff1": "/blog/assets1837dd567f75fcc083553a1078c0f088.webp",
+ "https://github.com/user-attachments/assets/9336d6c5-2a83-4aa9-854e-75e245b665cb": "/blog/assetsc16177645281b332883403e7f193f6e3.webp",
+ "https://github.com/user-attachments/assets/97899819-278f-42fd-804a-144d521d4b4f": "/blog/assets7006b60baaf62aa0d95cd40456e24afe.webp",
+ "https://github.com/user-attachments/assets/9a78bbb9-7c96-4f32-9b66-e57f92660410": "/blog/assets0e9c7125960a2d00b8c3c3d15d88f0a7.webp",
+ "https://github.com/user-attachments/assets/9b70b292-6c52-4715-b844-ff5df78d16b9": "/blog/assetsbfe7d519c29884b6699e89866e1db7e2.webp",
+ "https://github.com/user-attachments/assets/9baacac6-5af4-460b-862d-682b76c18459": "/blog/assets195200c7bc42360675e78a6bfa9fe320.webp",
+ "https://github.com/user-attachments/assets/9cb27b68-f2ac-4ff9-8f97-d96314b1af03": "/blog/assetsd3fefc9a525701b9d0f25116cea2ff00.webp",
+ "https://github.com/user-attachments/assets/9f989104-bb8e-4acd-9721-6b1db1017d2b": "/blog/assets5d3551635c580d8781e31256e1fb0f2e.webp",
+ "https://github.com/user-attachments/assets/a00f06cc-da7c-41e8-a4d5-d4b675a22673": "/blog/assetse0d53ba2bfb6ba5bf33f2b8a547f4e41.webp",
+ "https://github.com/user-attachments/assets/a1af5778-f47a-4fdc-baf5-ca2a1e66f48e": "/blog/assets97ac48dab1a35e45e034fefe0a1a1006.webp",
+ "https://github.com/user-attachments/assets/a1ba8ec0-e259-4da4-8980-0cf82ca5f52b": "/blog/assetsbd69842ebb37848ecd50c242aad835b0.webp",
+ "https://github.com/user-attachments/assets/a42ba52b-491e-4993-8e2f-217aa1776e0f": "/blog/assets0f847842a5dedf7bef1f534278aec584.webp",
+ "https://github.com/user-attachments/assets/a53deb11-2c14-441a-8a5c-a0f3a74e2a63": "/blog/assets65c86d6e63ddd5dd9896a6a67c054c0d.webp",
+ "https://github.com/user-attachments/assets/a9de7780-d0cb-47d5-ad9c-fcbbec14b940": "/blog/assets79e8fff075490d2a4535590a02333316.webp",
+ "https://github.com/user-attachments/assets/aa91ca54-65fc-4e33-8c76-999f0a5d2bee": "/blog/assetsf625540e8340bafe69ccbb89ad75707a.webp",
+ "https://github.com/user-attachments/assets/aaa3e2c5-7f16-4cfb-86b6-2814a1aafe3a": "/blog/assets93da89c4892a80e2e5a6caa49d80af5f.webp",
+ "https://github.com/user-attachments/assets/ab87120c-15ff-4bc7-bb28-4b0b43cfe91a": "/blog/assetsec0f694c9f6140620217bde441440170.webp",
+ "https://github.com/user-attachments/assets/ae03eab5-a319-4d2a-a5f6-1683ab7739ee": "/blog/assetsa25c48c9faa225bf6f72658e5bd58d64.webp",
+ "https://github.com/user-attachments/assets/aea782b1-27bd-4d9c-b521-c172c2095fe6": "/blog/assets52c8de6425a785409464561c09f8c98d.webp",
+ "https://github.com/user-attachments/assets/aead3c6c-891e-47c3-9f34-bdc33875e0c2": "/blog/assetsb6959f725c38f86053e4b07c9188d825.webp",
"https://github.com/user-attachments/assets/aee846d5-b5ee-46cb-9dd0-d952ea708b67": "/blog/assets/8a8d361b4c0cce6da350cc0de65c0ad6.webp",
+ "https://github.com/user-attachments/assets/b2b36128-6a43-4a1f-9c08-99fe73fb565f": "/blog/assets85af5a2a51b851fe125055d374cc8263.webp",
+ "https://github.com/user-attachments/assets/b3ab6e35-4fbc-468d-af10-e3e0c687350f": "/blog/assets4cd6d49afb0ab1354156961d396195a1.webp",
+ "https://github.com/user-attachments/assets/b49ed0c1-d6bf-4f46-b9df-5f7c730afaa3": "/blog/assets74000cc1bc59ee4a15e8f0304afbf866.webp",
+ "https://github.com/user-attachments/assets/b4e89dd4-877b-43fe-aa42-4680de17ba8e": "/blog/assets1b9283f9cc5fc5073ff9cffc24880e96.webp",
+ "https://github.com/user-attachments/assets/b6e6a3eb-13c6-46f0-9c7c-69a20deae30f": "/blog/assets768ec6fd300785186b202437985857c4.webp",
+ "https://github.com/user-attachments/assets/b824b741-f2d8-42c8-8cb9-1266862affa7": "/blog/assets89d0dcbf5ffccd21086845cea3a514cc.webp",
+ "https://github.com/user-attachments/assets/b9da065e-f964-44f2-8260-59e182be2729": "/blog/assets80a8b9627374fc345f4bf8e3adf11074.webp",
+ "https://github.com/user-attachments/assets/bd399cef-283c-4706-bdc8-de9de662de41": "/blog/assets4224bf4978bea84e82b3b3aec77656f0.webp",
"https://github.com/user-attachments/assets/bd6d0c82-8f14-4167-ad09-2a841f1e34e4": "/blog/assets/d7e57f8e69f97b76b3c2414f3441b6e4.webp",
+ "https://github.com/user-attachments/assets/be06e348-8d4c-440c-b59f-b71120f21335": "/blog/assetsd9f99f2adff9051313ca44205b022d8c.webp",
+ "https://github.com/user-attachments/assets/be7dcd49-0165-4f7b-bf90-0739cc9dd212": "/blog/assetsf069368b9162f58247318dde850c0807.webp",
+ "https://github.com/user-attachments/assets/bfda556a-d3fc-409f-8647-e718788f2fb8": "/blog/assets2cfe64ead120815f7ba7100bc3dcfd48.webp",
+ "https://github.com/user-attachments/assets/c44b6894-70cb-4876-b792-2e76e75ac542": "/blog/assets94499977be2f01c795b9876e4fe60709.webp",
+ "https://github.com/user-attachments/assets/c4fe4430-7860-4339-b014-4d8d264a12c0": "/blog/assets87010372bdf39890a7478a7a8cd4a9f0.webp",
+ "https://github.com/user-attachments/assets/c6319e83-c4e7-48cf-9625-2edfc4aa77b3": "/blog/assetsfae60ba54155478a1c363f0065ce76a6.webp",
"https://github.com/user-attachments/assets/c68e88e4-cf2e-4122-82bc-89ba193b1eb4": "/blog/assets/1f6c4f1c5e6211735ca4924c7807aca1.webp",
+ "https://github.com/user-attachments/assets/c75eb19e-e0f5-4135-91e4-55be8be8a996": "/blog/assets0f97d1dfccd5ba07172aff71ff9acd7b.webp",
+ "https://github.com/user-attachments/assets/c77fcf70-9039-49ff-86e4-f8eaa267bbf6": "/blog/assets5a2f360c19fcf9a037b2d1609479b713.webp",
+ "https://github.com/user-attachments/assets/cb4ba5fe-c223-4b9f-a662-de93e4a536d1": "/blog/assets45d90e73abffd7ae7d85808f81827bb9.webp",
+ "https://github.com/user-attachments/assets/cc1f6146-8063-4a4d-947a-7fd6b9133c0c": "/blog/assets28749075f0c4d62c1642694a4ed9ec08.webp",
+ "https://github.com/user-attachments/assets/cf3bfd44-9c13-4026-95cd-67f54f40ce6c": "/blog/assetsc557d9ee77afeb958d198abf5ca79761.webp",
+ "https://github.com/user-attachments/assets/d0a5e152-160a-4862-8393-546f4e2e5387": "/blog/assets06d4e543cbaca9a2762923a23b2cae67.webp",
+ "https://github.com/user-attachments/assets/d3626294-74ba-4944-9a63-052e6cf719ab": "/blog/assets0f244d5fe648127774636a54ae9ffafc.webp",
+ "https://github.com/user-attachments/assets/d524c20d-306a-45bc-971b-96920b87fab4": "/blog/assetsbeefe4dbe3e6f141e09c62064c6dc397.webp",
+ "https://github.com/user-attachments/assets/d643af6d-ca0f-4abd-9dd2-977dacecb25d": "/blog/assets34424062ad6ab98df7f56c9e61341be5.webp",
+ "https://github.com/user-attachments/assets/d693be02-e08c-43ae-8bde-1294f180aaf6": "/blog/assets4169b5d9f7534f9f89c8426445e9a080.webp",
+ "https://github.com/user-attachments/assets/d6ace96f-0398-4847-83e1-75c3004a0e8b": "/blog/assetsf7007eebef93bc1d8a29aaf9080ab404.webp",
+ "https://github.com/user-attachments/assets/d7666e2a-0202-4b45-8338-9806ddffa44e": "/blog/assets8f95f09ce51ad5917107d84db1e980ab.webp",
+ "https://github.com/user-attachments/assets/d7d65e32-679d-4e50-a933-28cf5dde1330": "/blog/assetsc51018f1581b769727ad1bb3bb641567.webp",
+ "https://github.com/user-attachments/assets/d902b5df-edb1-48d6-b659-daf948a97aed": "/blog/assets1e640c898e897bfb4ce4b66d5377010b.webp",
+ "https://github.com/user-attachments/assets/d961f2af-47b0-4806-8288-b1e8f7ee8a47": "/blog/assets9c1839eb146b89e9e2d262ca95d24323.webp",
+ "https://github.com/user-attachments/assets/db59a5e7-32ed-49d7-a791-8f8ee6618c01": "/blog/assetsf601ee6fa15bed25e17d6b6879691f0f.webp",
+ "https://github.com/user-attachments/assets/dba58ea6-7df8-4971-b6d4-b24d5f486ba7": "/blog/assetsbbe90aa719d182d3d2f327e4182732c5.webp",
+ "https://github.com/user-attachments/assets/dd6bc4a4-3c20-4162-87fd-5cac57e5d7e7": "/blog/assetseebf66254337ce88357629c34e78c08d.webp",
"https://github.com/user-attachments/assets/dde2c9c5-cdda-4a65-8f32-b6f4da907df2": "/blog/assets/d47654360d626f80144cdedb979a3526.webp",
+ "https://github.com/user-attachments/assets/dec6665a-b3ec-4c50-a57f-7c7eb3160e7b": "/blog/assets8d4fbb776e2209a1ec58c6b3516351a1.webp",
+ "https://github.com/user-attachments/assets/dfc45807-2ed6-43eb-af4c-47df66dfff7d": "/blog/assetscad58c557fda04b9379000cbbaa4c493.webp",
+ "https://github.com/user-attachments/assets/e269bd27-d323-43ba-811b-c0f5e4137903": "/blog/assetse12925fba0dda232168e695e6a5e4384.webp",
+ "https://github.com/user-attachments/assets/e3f44bc8-2fa5-441d-8934-943481472450": "/blog/assets3c54d6f2d55fae843fbbfdc0bd7ffec7.webp",
+ "https://github.com/user-attachments/assets/e43dacf6-313e-499c-8888-f1065c53e424": "/blog/assets89b0698da3476c6df24ba1f0a07e438e.webp",
+ "https://github.com/user-attachments/assets/e617def1-ce50-4acc-974b-12f5ed592a0e": "/blog/assets3386e7adc46d19be5cc6dae46533d9bd.webp",
"https://github.com/user-attachments/assets/e70c2db6-05c9-43ea-b111-6f6f99e0ae88": "/blog/assets/944c671604833cd2457445b211ebba33.webp",
+ "https://github.com/user-attachments/assets/e887fa04-c553-45f1-917f-5c123ac9c68b": "/blog/assets73ba166f1e6d54e8c860b91f61c23355.webp",
+ "https://github.com/user-attachments/assets/e89d2a56-4bf0-4bff-ac39-0d44789fa858": "/blog/assets9f6d4113be26efbcab41d83ed39dcb14.webp",
+ "https://github.com/user-attachments/assets/eaa2a1fb-41ad-473d-ac10-a39c05886425": "/blog/assetsf5a62c963127764ebdf1cd226fac3dac.webp",
"https://github.com/user-attachments/assets/eaed3762-136f-4297-b161-ca92a27c4982": "/blog/assets/50b38eac1769ae6f13aef72f3d725eec.webp",
+ "https://github.com/user-attachments/assets/eb027093-5ceb-4a9d-8850-b791fbf69a71": "/blog/assetsd0c4369f894abb5ad6e514059b8f378e.webp",
"https://github.com/user-attachments/assets/eb3f3d8a-79ce-40aa-a206-2c846206c0c0": "/blog/assets/f10a4b98782e36797c38071eed785c6f.webp",
- "https://github.com/user-attachments/assets/fa8fab19-ace2-4f85-8428-a3a0e28845bb": "/blog/assets/2d678631c55369ba7d753c3ffcb73782.webp"
+ "https://github.com/user-attachments/assets/eb41f77f-ccdd-4a48-a8a2-7badac868c03": "/blog/assets0a81d34f707bd87cee3852f26a3d14f0.webp",
+ "https://github.com/user-attachments/assets/ebdbc01a-a6b5-4bbc-b7ff-240d6015fbfc": "/blog/assets13656829368732a95940edeff9ddfca6.webp",
+ "https://github.com/user-attachments/assets/ed6965c8-6884-4adf-a457-573a96755f55": "/blog/assets2f83a9f03f13e73b7393641078627cf1.webp",
+ "https://github.com/user-attachments/assets/f0b2e72d-9eee-46a8-b094-4834b78764df": "/blog/assets8d6bb40d21d74cfa0312bdec347a11d0.webp",
+ "https://github.com/user-attachments/assets/f3068287-8ade-4eca-9841-ea67d8ff1226": "/blog/assetsa343af49a2d7da73a3fa51f2086afdd4.webp",
+ "https://github.com/user-attachments/assets/f3177ce2-281c-4ed4-a061-239547b466c6": "/blog/assets86924c724c66931cf61417dbdcc04ee8.webp",
+ "https://github.com/user-attachments/assets/f4dbbadb-7461-4370-a836-09c487fdd206": "/blog/assets94397c91265c37b9f313dc439b90125f.webp",
+ "https://github.com/user-attachments/assets/f54c912d-3ee9-4f85-b8bf-619790e51b49": "/blog/assets620c308554394e72034d27ea743f8bff.webp",
+ "https://github.com/user-attachments/assets/f67180c2-47ba-4b04-9f12-d274c7821085": "/blog/assetscbda3a61a2d158eeb6046e1d1bf9972f.webp",
+ "https://github.com/user-attachments/assets/f878355f-710b-452e-8606-0c75c47f29d2": "/blog/assets3e2af0090f02059c687b6add6b73a90b.webp",
+ "https://github.com/user-attachments/assets/f9ccce84-4fd4-48ca-9450-40660112d0d7": "/blog/assetsd94f3e0cf32639bea46dbf92e0862f89.webp",
+ "https://github.com/user-attachments/assets/f9f7ed26-e506-4c52-a118-e0bb5e0918db": "/blog/assetse5dff9a2e16a134d85e891e4eb98fe55.webp",
+ "https://github.com/user-attachments/assets/fa8fab19-ace2-4f85-8428-a3a0e28845bb": "/blog/assets/2d678631c55369ba7d753c3ffcb73782.webp",
+ "https://github.com/user-attachments/assets/facdc83c-e789-4649-8060-7f7a10a1b1dd": "/blog/assets05b20e40c03ced0ec8707fed2e8e0f25.webp",
+ "https://github.com/user-attachments/assets/fcdfb9c5-819a-488f-b28d-0857fe861219": "/blog/assets8477415ecec1f37e38ab38ff1217d0a7.webp"
}
diff --git a/docs/changelog/2023-09-09-plugin-system.mdx b/docs/changelog/2023-09-09-plugin-system.mdx
index 9ef28b4b00..dfe217de57 100644
--- a/docs/changelog/2023-09-09-plugin-system.mdx
+++ b/docs/changelog/2023-09-09-plugin-system.mdx
@@ -1,11 +1,11 @@
---
-title: LobeChat Plugin Ecosystem - Functionality Extensions and Development Resources
+title: LobeHub Plugin Ecosystem - Functionality Extensions and Development Resources
description: >-
- Discover how the LobeChat plugin ecosystem enhances the utility and
- flexibility of the LobeChat assistant, along with the development resources
- and plugin development guidelines provided.
+ Discover how the LobeHub plugin ecosystem enhances the utility and flexibility
+ of the LobeHub assistant, along with the development resources and plugin
+ development guidelines provided.
tags:
- - LobeChat
+ - LobeHub
- Plugins
- Real-time Information
- Voice Options
@@ -13,11 +13,11 @@ tags:
# Supported Plugin System
-The LobeChat plugin ecosystem is a significant extension of its core functionalities, greatly enhancing the utility and flexibility of the LobeChat assistant.
+The LobeHub plugin ecosystem is a significant extension of its core functionalities, greatly enhancing the utility and flexibility of the LobeHub assistant.
-
+
-By leveraging plugins, the LobeChat assistants are capable of accessing and processing real-time information, such as searching online for data and providing users with timely and relevant insights.
+By leveraging plugins, the LobeHub assistants are capable of accessing and processing real-time information, such as searching online for data and providing users with timely and relevant insights.
Moreover, these plugins are not solely limited to news aggregation; they can also extend to other practical functionalities, such as quickly retrieving documents, generating images, obtaining data from various platforms such as Bilibili and Steam, and interacting with an array of third-party services.
diff --git a/docs/changelog/2023-09-09-plugin-system.zh-CN.mdx b/docs/changelog/2023-09-09-plugin-system.zh-CN.mdx
index 2e43186c8a..e1ed877a0d 100644
--- a/docs/changelog/2023-09-09-plugin-system.zh-CN.mdx
+++ b/docs/changelog/2023-09-09-plugin-system.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 插件生态系统 - 功能扩展与开发资源
-description: 了解 LobeChat 插件生态系统如何增强 LobeChat 助手的实用性和灵活性,以及提供的开发资源和插件开发指南。
+title: LobeHub 插件生态系统 - 功能扩展与开发资源
+description: 了解 LobeHub 插件生态系统如何增强 LobeHub 助手的实用性和灵活性,以及提供的开发资源和插件开发指南。
tags:
- - LobeChat
+ - LobeHub
- 插件系统
- 实时信息
- 第三方服务
@@ -10,11 +10,11 @@ tags:
# 支持插件系统
-LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地增强了 LobeChat 助手的实用性和灵活性。
+LobeHub 的插件生态系统是其核心功能的重要扩展,它极大地增强了 LobeHub 助手的实用性和灵活性。
-
+
-通过利用插件,LobeChat 的助手们能够实现实时信息的获取和处理,例如搜索网络信息,为用户提供即时且相关的资讯。
+通过利用插件,LobeHub 的助手们能够实现实时信息的获取和处理,例如搜索网络信息,为用户提供即时且相关的资讯。
此外,这些插件不仅局限于新闻聚合,还可以扩展到其他实用的功能,如快速检索文档、生成图片、获取 Bilibili 、Steam 等各种平台数据,以及与其他各式各样的第三方服务交互。
diff --git a/docs/changelog/2023-11-14-gpt4-vision.mdx b/docs/changelog/2023-11-14-gpt4-vision.mdx
index e62c5941a5..60eb6c011a 100644
--- a/docs/changelog/2023-11-14-gpt4-vision.mdx
+++ b/docs/changelog/2023-11-14-gpt4-vision.mdx
@@ -1,15 +1,15 @@
---
title: >-
- LobeChat Supports Multimodal Interaction: Visual Recognition Enhances
+ LobeHub Supports Multimodal Interaction: Visual Recognition Enhances
Intelligent Dialogue
description: >-
- LobeChat supports various large language models with visual recognition
+ LobeHub supports various large language models with visual recognition
capabilities, allowing users to upload or drag and drop images. The assistant
will recognize the content and engage in intelligent dialogue, creating a more
intelligent and diverse chat environment.
tags:
- Visual Recognition
- - LobeChat
+ - LobeHub
- GPT-4 Vision
- Google Gemini Pro
- Multimodal Interaction
@@ -17,6 +17,6 @@ tags:
# Supported Models for Visual Recognition
-LobeChat now supports several large language models with visual recognition capabilities, including OpenAI's [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision), Google Gemini Pro vision, and Zhiyuan GLM-4 Vision. This empowers LobeChat with multimodal interaction capabilities. Users can effortlessly upload images or drag and drop them into the chat window, where the assistant can recognize the image content and engage in intelligent dialogue, building a smarter and more diverse chat experience.
+LobeHub now supports several large language models with visual recognition capabilities, including OpenAI's [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision), Google Gemini Pro vision, and Zhiyuan GLM-4 Vision. This empowers LobeHub with multimodal interaction capabilities. Users can effortlessly upload images or drag and drop them into the chat window, where the assistant can recognize the image content and engage in intelligent dialogue, building a smarter and more diverse chat experience.
This feature opens up new avenues for interaction, allowing communication that extends beyond text to include rich visual elements. Whether sharing images during everyday use or interpreting graphics in specific industries, the assistant delivers an exceptional conversational experience. Additionally, we have carefully selected a range of high-quality voice options (OpenAI Audio, Microsoft Edge Speech) to cater to users from different regions and cultural backgrounds. Users can choose a suitable voice based on personal preferences or specific contexts, thus receiving a more personalized communication experience.
diff --git a/docs/changelog/2023-11-14-gpt4-vision.zh-CN.mdx b/docs/changelog/2023-11-14-gpt4-vision.zh-CN.mdx
index 8aeca20562..f4d0b0a8d2 100644
--- a/docs/changelog/2023-11-14-gpt4-vision.zh-CN.mdx
+++ b/docs/changelog/2023-11-14-gpt4-vision.zh-CN.mdx
@@ -1,16 +1,16 @@
---
-title: LobeChat 支持多模态交互:视觉识别助力智能对话
-description: LobeChat 支持多种具有视觉识别能力的大语言模型,用户可上传或拖拽图片,助手将识别内容并展开智能对话,打造更智能、多元化的聊天场景。
+title: LobeHub 支持多模态交互:视觉识别助力智能对话
+description: LobeHub 支持多种具有视觉识别能力的大语言模型,用户可上传或拖拽图片,助手将识别内容并展开智能对话,打造更智能、多元化的聊天场景。
tags:
- 视觉识别
- 多模态交互
- - LobeChat
+ - LobeHub
- GPT-4
- Google Gemini Pro
---
# 支持模型视觉识别
-LobeChat 已经支持 OpenAI 的 [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) 、Google Gemini Pro vision、智谱 GLM-4 Vision 等具有视觉识别能力的大语言模型,这使得 LobeChat 具备了多模态交互的能力。用户可以轻松上传图片或者拖拽图片到对话框中,助手将能够识别图片内容,并在此基础上进行智能对话,构建更智能、更多元化的聊天场景。
+LobeHub 已经支持 OpenAI 的 [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) 、Google Gemini Pro vision、智谱 GLM-4 Vision 等具有视觉识别能力的大语言模型,这使得 LobeHub 具备了多模态交互的能力。用户可以轻松上传图片或者拖拽图片到对话框中,助手将能够识别图片内容,并在此基础上进行智能对话,构建更智能、更多元化的聊天场景。
这一特性打开了新的互动方式,使得交流不再局限于文字,而是可以涵盖丰富的视觉元素。无论是日常使用中的图片分享,还是在特定行业内的图像解读,助手都能提供出色的对话体验。,我们精心挑选了一系列高品质的声音选项 (OpenAI Audio, Microsoft Edge Speech),以满足不同地域和文化背景用户的需求。用户可以根据个人喜好或者特定场景来选择合适的语音,从而获得个性化的交流体验。
diff --git a/docs/changelog/2023-11-19-tts-stt.mdx b/docs/changelog/2023-11-19-tts-stt.mdx
index 8d3124f559..c2cff00a54 100644
--- a/docs/changelog/2023-11-19-tts-stt.mdx
+++ b/docs/changelog/2023-11-19-tts-stt.mdx
@@ -1,19 +1,19 @@
---
-title: LobeChat Text-to-Image Generation Technology
+title: LobeHub Text-to-Image Generation Technology
description: >-
- LobeChat supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies,
+ LobeHub supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies,
offering high-quality voice options for a personalized communication
experience. Learn more about Lobe TTS Toolkit.
tags:
- TTS
- STT
- Voice Conversations
- - LobeChat
+ - LobeHub
- Audio Technology
---
# Supporting TTS & STT Voice Conversations
-LobeChat supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies, allowing our application to transform textual information into clear voice output. Users can interact with our conversational agents as if they were talking to a real person. There are various voice options for users to choose from, providing the right audio source for their assistant. Additionally, for those who prefer auditory learning or seek to gain information while on the go, TTS offers an excellent solution.
+LobeHub supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies, allowing our application to transform textual information into clear voice output. Users can interact with our conversational agents as if they were talking to a real person. There are various voice options for users to choose from, providing the right audio source for their assistant. Additionally, for those who prefer auditory learning or seek to gain information while on the go, TTS offers an excellent solution.
-In LobeChat, we have carefully curated a selection of high-quality voice options (OpenAI Audio, Microsoft Edge Speech) to cater to users from different regions and cultural backgrounds. Users can select suitable voices based on personal preferences or specific scenarios, thus achieving a personalized communication experience.
+In LobeHub, we have carefully curated a selection of high-quality voice options (OpenAI Audio, Microsoft Edge Speech) to cater to users from different regions and cultural backgrounds. Users can select suitable voices based on personal preferences or specific scenarios, thus achieving a personalized communication experience.
diff --git a/docs/changelog/2023-11-19-tts-stt.zh-CN.mdx b/docs/changelog/2023-11-19-tts-stt.zh-CN.mdx
index 9a7c789c24..d8b60e7a0c 100644
--- a/docs/changelog/2023-11-19-tts-stt.zh-CN.mdx
+++ b/docs/changelog/2023-11-19-tts-stt.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: LobeChat 文生图:文本转图片生成技术
-description: LobeChat 支持文字转语音(TTS)和语音转文字(STT)技术,提供高品质声音选项,个性化交流体验。了解更多关于 Lobe TTS 工具包。
+title: LobeHub 文生图:文本转图片生成技术
+description: LobeHub 支持文字转语音(TTS)和语音转文字(STT)技术,提供高品质声音选项,个性化交流体验。了解更多关于 Lobe TTS 工具包。
tags:
- TTS
- STT
- 语音会话
- - LobeChat
+ - LobeHub
- 文字转语音
- 语音转文字
---
# 支持 TTS & STT 语音会话
-LobeChat 支持文字转语音(Text-to-Speech,TTS)和语音转文字(Speech-to-Text,STT)技术,我们的应用能够将文本信息转化为清晰的语音输出,用户可以像与真人交谈一样与我们的对话代理进行交流。用户可以从多种声音中选择,给助手搭配合适的音源。 同时,对于那些倾向于听觉学习或者想要在忙碌中获取信息的用户来说,TTS 提供了一个极佳的解决方案。
+LobeHub 支持文字转语音(Text-to-Speech,TTS)和语音转文字(Speech-to-Text,STT)技术,我们的应用能够将文本信息转化为清晰的语音输出,用户可以像与真人交谈一样与我们的对话代理进行交流。用户可以从多种声音中选择,给助手搭配合适的音源。 同时,对于那些倾向于听觉学习或者想要在忙碌中获取信息的用户来说,TTS 提供了一个极佳的解决方案。
-在 LobeChat 中,我们精心挑选了一系列高品质的声音选项 (OpenAI Audio, Microsoft Edge Speech),以满足不同地域和文化背景用户的需求。用户可以根据个人喜好或者特定场景来选择合适的语音,从而获得个性化的交流体验。
+在 LobeHub 中,我们精心挑选了一系列高品质的声音选项 (OpenAI Audio, Microsoft Edge Speech),以满足不同地域和文化背景用户的需求。用户可以根据个人喜好或者特定场景来选择合适的语音,从而获得个性化的交流体验。
diff --git a/docs/changelog/2023-12-22-dalle-3.mdx b/docs/changelog/2023-12-22-dalle-3.mdx
index fc1d742b5c..5738425c38 100644
--- a/docs/changelog/2023-12-22-dalle-3.mdx
+++ b/docs/changelog/2023-12-22-dalle-3.mdx
@@ -1,14 +1,14 @@
---
-title: 'LobeChat Text-to-Image: Text-to-Image Generation Technology'
+title: 'LobeHub Text-to-Image: Text-to-Image Generation Technology'
description: >-
- LobeChat now supports the latest text-to-image generation technology, allowing
+ LobeHub now supports the latest text-to-image generation technology, allowing
users to directly invoke the text-to-image tool during conversations with the
assistant for creative purposes. By utilizing AI tools such as DALL-E 3,
MidJourney, and Pollinations, assistants can turn your ideas into images,
making the creative process more intimate and immersive.
tags:
- Text-to-Image
- - LobeChat
+ - LobeHub
- AI Tools
- DALL-E 3
- MidJourney
@@ -16,4 +16,4 @@ tags:
# Support for Text-to-Image Generation
-The latest text-to-image generation technology is now supported, enabling LobeChat users to directly use the text-to-image tool during conversations with their assistant. By harnessing the capabilities of AI tools like [`DALL-E 3`](https://openai.com/dall-e-3), [`MidJourney`](https://www.midjourney.com/), and [`Pollinations`](https://pollinations.ai/), assistants can now transform your ideas into images. This allows for a more intimate and immersive creative process.
+The latest text-to-image generation technology is now supported, enabling LobeHub users to directly use the text-to-image tool during conversations with their assistant. By harnessing the capabilities of AI tools like [`DALL-E 3`](https://openai.com/dall-e-3), [`MidJourney`](https://www.midjourney.com/), and [`Pollinations`](https://pollinations.ai/), assistants can now transform your ideas into images. This allows for a more intimate and immersive creative process.
diff --git a/docs/changelog/2023-12-22-dalle-3.zh-CN.mdx b/docs/changelog/2023-12-22-dalle-3.zh-CN.mdx
index c14d1d9887..166a2065ad 100644
--- a/docs/changelog/2023-12-22-dalle-3.zh-CN.mdx
+++ b/docs/changelog/2023-12-22-dalle-3.zh-CN.mdx
@@ -1,7 +1,7 @@
---
-title: LobeChat 文生图:文本转图片生成技术
+title: LobeHub 文生图:文本转图片生成技术
description: >-
- LobeChat 现在支持最新的文本到图片生成技术,让用户可以在与助手对话中直接调用文生图工具进行创作。利用 DALL-E 3、MidJourney 和
+ LobeHub 现在支持最新的文本到图片生成技术,让用户可以在与助手对话中直接调用文生图工具进行创作。利用 DALL-E 3、MidJourney 和
Pollinations 等 AI 工具,助手们可以将你的想法转化为图像,让创作过程更私密和沉浸式。
tags:
- Text to Image
@@ -11,4 +11,4 @@ tags:
# 支持 Text to Image 文生图
-现已支持最新的文本到图片生成技术,LobeChat 现在能够让用户在与助手对话中直接调用文成图工具进行创作。通过利用 [`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/) 和 [`Pollinations`](https://pollinations.ai/) 等 AI 工具的能力, 助手们现在可以将你的想法转化为图像。同时可以更私密和沉浸式的完成你的创造过程。
+现已支持最新的文本到图片生成技术,LobeHub 现在能够让用户在与助手对话中直接调用文成图工具进行创作。通过利用 [`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/) 和 [`Pollinations`](https://pollinations.ai/) 等 AI 工具的能力, 助手们现在可以将你的想法转化为图像。同时可以更私密和沉浸式的完成你的创造过程。
diff --git a/docs/changelog/2024-02-08-sso-oauth.mdx b/docs/changelog/2024-02-08-sso-oauth.mdx
index 1b4768efa4..28617624be 100644
--- a/docs/changelog/2024-02-08-sso-oauth.mdx
+++ b/docs/changelog/2024-02-08-sso-oauth.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat Supports Multi-User Management with Clerk and Next-Auth
+title: LobeHub Supports Multi-User Management with Clerk and Next-Auth
description: >-
- LobeChat offers various user authentication and management solutions,
- including Clerk and Next-Auth, to meet the diverse needs of different users.
+ LobeHub offers various user authentication and management solutions, including
+ Clerk and Next-Auth, to meet the diverse needs of different users.
tags:
- User Management
- Next-Auth
@@ -13,11 +13,11 @@ tags:
# Support for Multi-User Management with Clerk and Next-Auth
-In modern applications, user management and authentication are crucial features. To cater to the diverse needs of users, LobeChat provides two primary user authentication and management solutions: `next-auth` and `Clerk`. Whether you're looking for simple user registration and login or need more advanced multi-factor authentication and user management, LobeChat can flexibly accommodate your requirements.
+In modern applications, user management and authentication are crucial features. To cater to the diverse needs of users, LobeHub provides two primary user authentication and management solutions: `next-auth` and `Clerk`. Whether you're looking for simple user registration and login or need more advanced multi-factor authentication and user management, LobeHub can flexibly accommodate your requirements.
## Next-Auth: A Flexible and Powerful Authentication Library
-LobeChat integrates `next-auth`, a flexible and powerful authentication library that supports various authentication methods, including OAuth, email login, and credential-based login. With `next-auth`, you can easily implement the following features:
+LobeHub integrates `next-auth`, a flexible and powerful authentication library that supports various authentication methods, including OAuth, email login, and credential-based login. With `next-auth`, you can easily implement the following features:
- **User Registration and Login**: Supports multiple authentication methods to meet different user needs.
- **Session Management**: Efficiently manage user sessions to ensure security.
@@ -26,7 +26,7 @@ LobeChat integrates `next-auth`, a flexible and powerful authentication library
## Clerk: A Modern User Management Platform
-For users who require more advanced user management capabilities, LobeChat also supports [Clerk](https://clerk.com), a modern user management platform. Clerk offers a richer set of features, helping you achieve enhanced security and flexibility:
+For users who require more advanced user management capabilities, LobeHub also supports [Clerk](https://clerk.com), a modern user management platform. Clerk offers a richer set of features, helping you achieve enhanced security and flexibility:
- **Multi-Factor Authentication (MFA)**: Provides an additional layer of security.
- **User Profile Management**: Easily manage user information and settings.
diff --git a/docs/changelog/2024-02-08-sso-oauth.zh-CN.mdx b/docs/changelog/2024-02-08-sso-oauth.zh-CN.mdx
index 55e74a7912..6256c94bf8 100644
--- a/docs/changelog/2024-02-08-sso-oauth.zh-CN.mdx
+++ b/docs/changelog/2024-02-08-sso-oauth.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: LobeChat 支持 Clerk 与 Next-Auth 多用户管理支持
-description: LobeChat 提供 Clerk 和 Next-Auth 等多种用户认证和管理方案,以满足不同用户的需求。
+title: LobeHub 支持 Clerk 与 Next-Auth 多用户管理支持
+description: LobeHub 提供 Clerk 和 Next-Auth 等多种用户认证和管理方案,以满足不同用户的需求。
tags:
- 用户管理
- 身份验证
@@ -11,11 +11,11 @@ tags:
# 支持 Clerk 与 Next-Auth 多用户管理支持
-在现代应用中,用户管理和身份验证是至关重要的功能。为满足不同用户的多样化需求,LobeChat 提供了两种主要的用户认证和管理方案:`next-auth` 和 `Clerk`。无论您是追求简便的用户注册登录,还是需要更高级的多因素认证和用户管理,LobeChat 都可以灵活实现。
+在现代应用中,用户管理和身份验证是至关重要的功能。为满足不同用户的多样化需求,LobeHub 提供了两种主要的用户认证和管理方案:`next-auth` 和 `Clerk`。无论您是追求简便的用户注册登录,还是需要更高级的多因素认证和用户管理,LobeHub 都可以灵活实现。
## next-auth:灵活且强大的身份验证库
-LobeChat 集成了 `next-auth`,一个灵活且强大的身份验证库,支持多种身份验证方式,包括 OAuth、邮件登录、凭证登录等。通过 `next-auth`,您可以轻松实现以下功能:
+LobeHub 集成了 `next-auth`,一个灵活且强大的身份验证库,支持多种身份验证方式,包括 OAuth、邮件登录、凭证登录等。通过 `next-auth`,您可以轻松实现以下功能:
- **用户注册和登录**:支持多种认证方式,满足不同用户的需求。
- **会话管理**:高效管理用户会话,确保安全性。
@@ -24,7 +24,7 @@ LobeChat 集成了 `next-auth`,一个灵活且强大的身份验证库,支
## Clerk:现代化用户管理平台
-对于需要更高级用户管理功能的用户,LobeChat 还支持 [Clerk](https://clerk.com) ,一个现代化的用户管理平台。Clerk 提供了更丰富的功能,帮助您实现更高的安全性和灵活性:
+对于需要更高级用户管理功能的用户,LobeHub 还支持 [Clerk](https://clerk.com) ,一个现代化的用户管理平台。Clerk 提供了更丰富的功能,帮助您实现更高的安全性和灵活性:
- **多因素认证 (MFA)**:提供更高的安全保障。
- **用户配置文件管理**:便捷管理用户信息和配置。
diff --git a/docs/changelog/2024-02-14-ollama.mdx b/docs/changelog/2024-02-14-ollama.mdx
index 906cc8efdd..a296d993db 100644
--- a/docs/changelog/2024-02-14-ollama.mdx
+++ b/docs/changelog/2024-02-14-ollama.mdx
@@ -1,9 +1,9 @@
---
-title: LobeChat Supports Ollama for Local Large Language Model (LLM) Calls
-description: LobeChat v0.127.0 supports using Ollama to call local large language models.
+title: LobeHub Supports Ollama for Local Large Language Model (LLM) Calls
+description: LobeHub v0.127.0 supports using Ollama to call local large language models.
tags:
- Ollama AI
- - LobeChat
+ - LobeHub
- Local LLMs
- AI Conversations
- GPT-4
@@ -11,15 +11,15 @@ tags:
# Support for Ollama Calls to Local Large Language Models 🦙
-With the release of LobeChat v0.127.0, we're excited to introduce a fantastic new feature—Ollama AI support! 🤯 Thanks to the robust infrastructure provided by [Ollama AI](https://ollama.ai/) and the [efforts of the community](https://github.com/lobehub/lobe-chat/pull/1265), you can now interact with local LLMs (Large Language Models) within LobeChat! 🤩
+With the release of LobeHub v0.127.0, we're excited to introduce a fantastic new feature—Ollama AI support! 🤯 Thanks to the robust infrastructure provided by [Ollama AI](https://ollama.ai/) and the [efforts of the community](https://github.com/lobehub/lobe-chat/pull/1265), you can now interact with local LLMs (Large Language Models) within LobeHub! 🤩
-We are thrilled to unveil this revolutionary feature to all LobeChat users at this special moment. The integration of Ollama AI not only represents a significant leap in our technology but also reaffirms our commitment to continuously seek more efficient and intelligent ways of communication with our users.
+We are thrilled to unveil this revolutionary feature to all LobeHub users at this special moment. The integration of Ollama AI not only represents a significant leap in our technology but also reaffirms our commitment to continuously seek more efficient and intelligent ways of communication with our users.
## 💡 How to Start a Conversation with Local LLMs?
-If you're facing challenges with private deployments, we strongly recommend trying out the LobeChat Cloud service. We offer comprehensive model support to help you easily embark on your AI conversation journey.
+If you're facing challenges with private deployments, we strongly recommend trying out the LobeHub Cloud service. We offer comprehensive model support to help you easily embark on your AI conversation journey.
-Experience the newly upgraded LobeChat v1.6 and feel the powerful conversational capabilities brought by GPT-4!
+Experience the newly upgraded LobeHub v1.6 and feel the powerful conversational capabilities brought by GPT-4!
```bash
docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434/v1 lobehub/lobe-chat
diff --git a/docs/changelog/2024-02-14-ollama.zh-CN.mdx b/docs/changelog/2024-02-14-ollama.zh-CN.mdx
index 565c2a7e92..37f336ab11 100644
--- a/docs/changelog/2024-02-14-ollama.zh-CN.mdx
+++ b/docs/changelog/2024-02-14-ollama.zh-CN.mdx
@@ -1,24 +1,24 @@
---
-title: LobeChat 支持 Ollama 调用本地大语言模型(LLM)
-description: LobeChat vLobeChat v0.127.0 支持 Ollama 调用本地大语言模型。
+title: LobeHub 支持 Ollama 调用本地大语言模型(LLM)
+description: LobeHub v0.127.0 支持 Ollama 调用本地大语言模型。
tags:
- Ollama AI
- - LobeChat
+ - LobeHub
- 大语言模型
- AI 对话
---
# 支持 Ollama 调用本地大语言模型 🦙
-随着 LobeChat v0.127.0 的发布,我们迎来了一个激动人心的特性 —— Ollama AI 支持!🤯 在 [Ollama AI](https://ollama.ai/) 强大的基础设施和 [社区的共同努力](https://github.com/lobehub/lobe-chat/pull/1265) 下,现在您可以在 LobeChat 中与本地 LLM (Large Language Model) 进行交流了!🤩
+随着 LobeHub v0.127.0 的发布,我们迎来了一个激动人心的特性 —— Ollama AI 支持!🤯 在 [Ollama AI](https://ollama.ai/) 强大的基础设施和 [社区的共同努力](https://github.com/lobehub/lobe-chat/pull/1265) 下,现在您可以在 LobeHub 中与本地 LLM (Large Language Model) 进行交流了!🤩
-我们非常高兴能在这个特别的时刻,向所有 LobeChat 用户介绍这项革命性的特性。Ollama AI 的集成不仅标志着我们技术上的一个巨大飞跃,更是向用户承诺,我们将不断追求更高效、更智能的沟通方式。
+我们非常高兴能在这个特别的时刻,向所有 LobeHub 用户介绍这项革命性的特性。Ollama AI 的集成不仅标志着我们技术上的一个巨大飞跃,更是向用户承诺,我们将不断追求更高效、更智能的沟通方式。
## 💡 如何启动与本地 LLM 的对话?
-如果您在私有化部署方面遇到困难,强烈推荐尝试 LobeChat Cloud 服务。我们提供全方位的模型支持,让您轻松开启 AI 对话之旅。
+如果您在私有化部署方面遇到困难,强烈推荐尝试 LobeHub Cloud 服务。我们提供全方位的模型支持,让您轻松开启 AI 对话之旅。
-赶快来体验全新升级的 LobeChat v1.6,感受 GPT-4 带来的强大对话能力!
+赶快来体验全新升级的 LobeHub v1.6,感受 GPT-4 带来的强大对话能力!
```bash
docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434/v1 lobehub/lobe-chat
diff --git a/docs/changelog/2024-06-19-lobe-chat-v1.mdx b/docs/changelog/2024-06-19-lobe-chat-v1.mdx
index ac59cfdb75..8a30b03f51 100644
--- a/docs/changelog/2024-06-19-lobe-chat-v1.mdx
+++ b/docs/changelog/2024-06-19-lobe-chat-v1.mdx
@@ -1,29 +1,29 @@
---
-title: 'LobeChat 1.0: New Architecture and New Possibilities'
+title: 'LobeHub 1.0: New Architecture and New Possibilities'
description: >-
- LobeChat 1.0 brings a brand-new architecture and features for server-side
+ LobeHub 1.0 brings a brand-new architecture and features for server-side
databases and user authentication management, opening up new possibilities. On
- this basis, LobeChat Cloud has entered beta testing.
+ this basis, LobeHub Cloud has entered beta testing.
tags:
- - LobeChat
+ - LobeHub
- Version 1.0
- Server-Side Database
- User Authentication
- Cloud Beta Testing
---
-# LobeChat 1.0: New Architecture and New Possibilities
+# LobeHub 1.0: New Architecture and New Possibilities
-Since announcing our move towards version 1.0 in March, we’ve been busy upgrading every aspect of our platform. After two months of intensive development, we are excited to announce the official release of LobeChat 1.0! Let’s take a look at our new features.
+Since announcing our move towards version 1.0 in March, we’ve been busy upgrading every aspect of our platform. After two months of intensive development, we are excited to announce the official release of LobeHub 1.0! Let’s take a look at our new features.
## Server-Side Database Support
-The most significant feature of LobeChat 1.0 is the support for server-side databases. In the 0.x era, the lack of persistent storage on the server side made it challenging, if not impossible, to implement many features that users urgently needed, such as knowledge bases, cross-device synchronization, and private assistant markets.
+The most significant feature of LobeHub 1.0 is the support for server-side databases. In the 0.x era, the lack of persistent storage on the server side made it challenging, if not impossible, to implement many features that users urgently needed, such as knowledge bases, cross-device synchronization, and private assistant markets.
## User Authentication Management
In the 0.x era, the most requested feature to be paired with server-side databases was user authentication management. Previously, we had integrated next-auth and Clerk as our authentication solutions. In response to demands for multi-user management, we have restructured the settings interface into a user panel, consolidating relevant user information within the new user interface.
-## LobeChat Cloud Beta Testing
+## LobeHub Cloud Beta Testing
-LobeChat Cloud is our commercial version based on the open-source LobeChat, and all the features from version 1.0 are now live in LobeChat Cloud, which has entered beta testing. If you’re interested, you can join our waitlist here. During the beta testing period, a limited number of access slots will be released daily for testing opportunities.
+LobeHub Cloud is our commercial version based on the open-source LobeHub, and all the features from version 1.0 are now live in LobeHub Cloud, which has entered beta testing. If you’re interested, you can join our waitlist here. During the beta testing period, a limited number of access slots will be released daily for testing opportunities.
diff --git a/docs/changelog/2024-06-19-lobe-chat-v1.zh-CN.mdx b/docs/changelog/2024-06-19-lobe-chat-v1.zh-CN.mdx
index af60976954..96d677fc03 100644
--- a/docs/changelog/2024-06-19-lobe-chat-v1.zh-CN.mdx
+++ b/docs/changelog/2024-06-19-lobe-chat-v1.zh-CN.mdx
@@ -1,27 +1,27 @@
---
-title: LobeChat 1.0:新的架构与新的可能
+title: LobeHub 1.0:新的架构与新的可能
description: >-
- LobeChat 1.0 带来了服务端数据库、用户鉴权管理的全新架构与特性,开启了新的可能 。在此基础上, LobeChat Cloud 开启 Beta
+ LobeHub 1.0 带来了服务端数据库、用户鉴权管理的全新架构与特性,开启了新的可能 。在此基础上, LobeHub Cloud 开启 Beta
版测试。
tags:
- - LobeChat
+ - LobeHub
- 服务端数据库
- 用户鉴权
- Beta 测试
---
-# LobeChat 1.0:新的架构与新的可能
+# LobeHub 1.0:新的架构与新的可能
-自从 3 月份宣布迈向 1.0 ,我们就开始着手全方面的升级。经过 2 个月的密集研发,我们很高兴地宣布 LobeChat 1.0 正式发布了!一起来看看我们的全新样貌吧~
+自从 3 月份宣布迈向 1.0 ,我们就开始着手全方面的升级。经过 2 个月的密集研发,我们很高兴地宣布 LobeHub 1.0 正式发布了!一起来看看我们的全新样貌吧~
## 服务端数据库支持
-在 LobeChat 1.0 中,最大的特性是支持了服务端数据库。在 0.x 时代,由于缺乏服务端持久化存储,许多用户迫切需要的功能实现困难,或完全无法实现,例如知识库、跨端同步、私有助手市场等等。
+在 LobeHub 1.0 中,最大的特性是支持了服务端数据库。在 0.x 时代,由于缺乏服务端持久化存储,许多用户迫切需要的功能实现困难,或完全无法实现,例如知识库、跨端同步、私有助手市场等等。
## 用户鉴权管理
在 0.x 时代,和服务端数据库搭配的呼声最高的特性就是用户鉴权管理。在此之前,我们已经接入了 next-auth 和 clerk 作为鉴权解决方案。并针对多用户管理的诉求,将设置界面重构为了用户面板,在新的用户面板中整合了相关的用户信息。
-## LobeChat Cloud 开启 Beta 测试
+## LobeHub Cloud 开启 Beta 测试
-LobeChat Cloud 是我们基于 LobeChat 开源版的商业化版本,上述 1.0 的功能在 LobeChat Cloud 中均已上线,目前已开启 Beta 测试。如果你感兴趣,可以在这里加入我们的 waitlist , Beta 测试期间每天都会发放体验名额。
+LobeHub Cloud 是我们基于 LobeHub 开源版的商业化版本,上述 1.0 的功能在 LobeHub Cloud 中均已上线,目前已开启 Beta 测试。如果你感兴趣,可以在这里加入我们的 waitlist , Beta 测试期间每天都会发放体验名额。
diff --git a/docs/changelog/2024-07-19-gpt-4o-mini.mdx b/docs/changelog/2024-07-19-gpt-4o-mini.mdx
index 1a2b6e62fb..4d6698a9a8 100644
--- a/docs/changelog/2024-07-19-gpt-4o-mini.mdx
+++ b/docs/changelog/2024-07-19-gpt-4o-mini.mdx
@@ -1,11 +1,11 @@
---
-title: 'LobeChat Fully Enters the GPT-4 Era: GPT-4o Mini Officially Launched'
+title: 'LobeHub Fully Enters the GPT-4 Era: GPT-4o Mini Officially Launched'
description: >-
- LobeChat v1.6 has been released with support for GPT-4o mini, while LobeChat
+ LobeHub v1.6 has been released with support for GPT-4o mini, while LobeHub
Cloud services have been fully upgraded to provide users with a more powerful
AI conversation experience.
tags:
- - LobeChat
+ - LobeHub
- GPT-4o Mini
- AI Conversation
- Cloud Service
@@ -13,18 +13,18 @@ tags:
# GPT-4o Mini Makes a Stunning Debut, Ushering in a New GPT-4 Era 🚀
-We are excited to announce that LobeChat v1.6 is now officially released! This update brings thrilling and significant upgrades:
+We are excited to announce that LobeHub v1.6 is now officially released! This update brings thrilling and significant upgrades:
## 🌟 Major Updates
- **GPT-4o Mini Officially Launched**: OpenAI's entire model lineup has been upgraded to GPT-4
-- **LobeChat Cloud Service Upgrade**:
+- **LobeHub Cloud Service Upgrade**:
- GPT-3.5-turbo has been upgraded to GPT-4o Mini as the default model
- Providing users with a superior conversation experience
## 🎯 Cloud Service Highlights
-LobeChat Cloud offers you a convenient one-stop AI conversation service:
+LobeHub Cloud offers you a convenient one-stop AI conversation service:
- 📦 **Ready to Use**: Free registration for immediate experience
- 🤖 **Multi-Model Support**:
@@ -35,6 +35,6 @@ LobeChat Cloud offers you a convenient one-stop AI conversation service:
## 💡 Usage Recommendations
-If you encounter difficulties with private deployment, we highly recommend trying the LobeChat Cloud service. We provide comprehensive model support to help you easily embark on your AI conversation journey.
+If you encounter difficulties with private deployment, we highly recommend trying the LobeHub Cloud service. We provide comprehensive model support to help you easily embark on your AI conversation journey.
-Come and experience the newly upgraded LobeChat v1.6, and feel the powerful conversational capabilities brought by GPT-4!
+Come and experience the newly upgraded LobeHub v1.6, and feel the powerful conversational capabilities brought by GPT-4!
diff --git a/docs/changelog/2024-07-19-gpt-4o-mini.zh-CN.mdx b/docs/changelog/2024-07-19-gpt-4o-mini.zh-CN.mdx
index fde0f598f2..78ebb12a76 100644
--- a/docs/changelog/2024-07-19-gpt-4o-mini.zh-CN.mdx
+++ b/docs/changelog/2024-07-19-gpt-4o-mini.zh-CN.mdx
@@ -1,28 +1,28 @@
---
-title: LobeChat 全面进入 GPT-4 时代:GPT-4o mini 正式上线
+title: LobeHub 全面进入 GPT-4 时代:GPT-4o mini 正式上线
description: >-
- LobeChat v1.6 重磅发布 GPT-4o mini 支持,同时 LobeChat Cloud 服务全面升级默认模型,为用户带来更强大的 AI
+ LobeHub v1.6 重磅发布 GPT-4o mini 支持,同时 LobeHub Cloud 服务全面升级默认模型,为用户带来更强大的 AI
对话体验。
tags:
- - LobeChat
+ - LobeHub
- GPT-4o mini
- AI 对话服务
---
# GPT-4o mini 震撼登场,开启全新 GPT-4 时代 🚀
-我们很高兴地宣布,LobeChat v1.6 现已正式发布!这次更新带来了激动人心的重大升级:
+我们很高兴地宣布,LobeHub v1.6 现已正式发布!这次更新带来了激动人心的重大升级:
## 🌟 主要更新
- **GPT-4o mini 正式上线**:OpenAI 全系列模型实现 GPT-4 升级
-- **LobeChat Cloud 服务升级**:
+- **LobeHub Cloud 服务升级**:
- GPT-3.5-turbo 升级为 GPT-4o mini 作为默认模型
- 为用户带来更优质的对话体验
## 🎯 Cloud 服务亮点
-LobeChat Cloud 为您提供便捷的一站式 AI 对话服务:
+LobeHub Cloud 为您提供便捷的一站式 AI 对话服务:
- 📦 **开箱即用**:免费注册,即刻体验
- 🤖 **多模型支持**:
@@ -33,6 +33,6 @@ LobeChat Cloud 为您提供便捷的一站式 AI 对话服务:
## 💡 使用建议
-如果您在私有化部署方面遇到困难,强烈推荐尝试 LobeChat Cloud 服务。我们提供全方位的模型支持,让您轻松开启 AI 对话之旅。
+如果您在私有化部署方面遇到困难,强烈推荐尝试 LobeHub Cloud 服务。我们提供全方位的模型支持,让您轻松开启 AI 对话之旅。
-赶快来体验全新升级的 LobeChat v1.6,感受 GPT-4 带来的强大对话能力!
+赶快来体验全新升级的 LobeHub v1.6,感受 GPT-4 带来的强大对话能力!
diff --git a/docs/changelog/2024-08-02-lobe-chat-database-docker.mdx b/docs/changelog/2024-08-02-lobe-chat-database-docker.mdx
index baae9f8d39..3a0ab0fa5b 100644
--- a/docs/changelog/2024-08-02-lobe-chat-database-docker.mdx
+++ b/docs/changelog/2024-08-02-lobe-chat-database-docker.mdx
@@ -1,20 +1,20 @@
---
-title: LobeChat Database Docker Image Official Release
+title: LobeHub Database Docker Image Official Release
description: >-
- LobeChat v1.8.0 launches the official database Docker image, supporting cloud
+ LobeHub v1.8.0 launches the official database Docker image, supporting cloud
data synchronization and user management, along with comprehensive
self-deployment documentation.
tags:
- - LobeChat
+ - LobeHub
- Docker Image
- Cloud Deployment
- Database
- Postgres
---
-# LobeChat Database Docker Image: The Final Piece of the Cloud Deployment Puzzle
+# LobeHub Database Docker Image: The Final Piece of the Cloud Deployment Puzzle
-We are excited to announce the official release of the long-awaited database Docker image for LobeChat v1.8.0! This marks a significant milestone in our server database offerings, providing users with a complete cloud deployment solution.
+We are excited to announce the official release of the long-awaited database Docker image for LobeHub v1.8.0! This marks a significant milestone in our server database offerings, providing users with a complete cloud deployment solution.
## 🚀 Core Features
@@ -31,10 +31,10 @@ To ensure users can complete the deployment smoothly, we have optimized the stru
- Detailed deployment case studies
- Comprehensive self-deployment operation guide
-You can start deploying your own LobeChat service by visiting the [official documentation](https://lobehub.com/en/docs/self-hosting/server-database).
+You can start deploying your own LobeHub service by visiting the [official documentation](https://lobehub.com/en/docs/self-hosting/server-database).
## 🔮 Future Outlook
Our knowledge base feature is also in development, so stay tuned for more exciting updates!
-This update marks a significant breakthrough for LobeChat in cloud deployment solutions, making private deployment easier than ever. We appreciate the community's patience, and we will continue to strive to provide users with a better experience.
+This update marks a significant breakthrough for LobeHub in cloud deployment solutions, making private deployment easier than ever. We appreciate the community's patience, and we will continue to strive to provide users with a better experience.
diff --git a/docs/changelog/2024-08-02-lobe-chat-database-docker.zh-CN.mdx b/docs/changelog/2024-08-02-lobe-chat-database-docker.zh-CN.mdx
index 3148b25084..9bc35d5da1 100644
--- a/docs/changelog/2024-08-02-lobe-chat-database-docker.zh-CN.mdx
+++ b/docs/changelog/2024-08-02-lobe-chat-database-docker.zh-CN.mdx
@@ -1,16 +1,16 @@
---
-title: LobeChat Database Docker 镜像正式发布
-description: LobeChat v1.8.0 推出官方数据库 Docker 镜像,支持云端数据同步与用户管理,并提供完整的自部署文档指南。
+title: LobeHub Database Docker 镜像正式发布
+description: LobeHub v1.8.0 推出官方数据库 Docker 镜像,支持云端数据同步与用户管理,并提供完整的自部署文档指南。
tags:
- - LobeChat
+ - LobeHub
- Docker 镜像
- 云端部署
- 数据库
---
-# LobeChat Database Docker 镜像:云端部署的最后一块拼图
+# LobeHub Database Docker 镜像:云端部署的最后一块拼图
-我们很高兴地宣布,LobeChat v1.8.0 正式发布了期待已久的数据库 Docker 镜像!这是我们在服务端数据库领域的重要里程碑,为用户提供了完整的云端部署解决方案。
+我们很高兴地宣布,LobeHub v1.8.0 正式发布了期待已久的数据库 Docker 镜像!这是我们在服务端数据库领域的重要里程碑,为用户提供了完整的云端部署解决方案。
## 🚀 核心特性
@@ -27,10 +27,10 @@ tags:
- 详细的部署案例指引
- 完整的自部署操作指南
-现在,您可以通过访问 [官方文档](https://lobehub.com/zh/docs/self-hosting/server-database) 开始部署您自己的 LobeChat 服务。
+现在,您可以通过访问 [官方文档](https://lobehub.com/zh/docs/self-hosting/server-database) 开始部署您自己的 LobeHub 服务。
## 🔮 未来展望
我们的知识库功能也正在开发中,敬请期待更多激动人心的更新!
-这次更新标志着 LobeChat 在云端部署方案上的重要突破,让私有部署变得前所未有的简单。感谢社区的耐心等待,我们将继续努力为用户带来更好的体验。
+这次更新标志着 LobeHub 在云端部署方案上的重要突破,让私有部署变得前所未有的简单。感谢社区的耐心等待,我们将继续努力为用户带来更好的体验。
diff --git a/docs/changelog/2024-08-21-file-upload-and-knowledge-base.mdx b/docs/changelog/2024-08-21-file-upload-and-knowledge-base.mdx
index 7535a10c3e..4c3d7f1b0f 100644
--- a/docs/changelog/2024-08-21-file-upload-and-knowledge-base.mdx
+++ b/docs/changelog/2024-08-21-file-upload-and-knowledge-base.mdx
@@ -1,13 +1,13 @@
---
title: >-
- LobeChat Launches Knowledge Base Feature: A New Experience in Intelligent File
+ LobeHub Launches Knowledge Base Feature: A New Experience in Intelligent File
Management and Dialogue
description: >-
- LobeChat introduces a brand new knowledge base feature that supports all types
+ LobeHub introduces a brand new knowledge base feature that supports all types
of file management, intelligent vectorization, and file dialogue, making
knowledge management and information retrieval easier and smarter.
tags:
- - LobeChat
+ - LobeHub
- Knowledge Base
- File Management
- Open Source
@@ -16,7 +16,7 @@ tags:
# Major Release of Knowledge Base Feature: A Revolution in Intelligent File Management and Dialogue
-We are excited to announce that the highly anticipated LobeChat knowledge base feature is now officially launched! 🎉 This feature is now available in both the open-source version and the Cloud version (lobechat.com).
+We are excited to announce that the highly anticipated LobeHub knowledge base feature is now officially launched! 🎉 This feature is now available in both the open-source version and the Cloud version (lobehub.com).
## A Brand New File Management Experience
@@ -38,4 +38,4 @@ We are excited to announce that the highly anticipated LobeChat knowledge base f
- 🎯 **Real-Time Feedback**: An optimized upload experience provides clear progress feedback.
- ☁️ **Two Versions Available**: Offers both an open-source self-hosted version and an official Cloud version to meet different user needs.
-All features are open-sourced on the [GitHub repository](https://github.com/lobehub/lobe-chat). We invite you to visit [LobeChat Cloud](http://lobechat.com) to experience the full functionality.
+All features are open-sourced on the [GitHub repository](https://github.com/lobehub/lobe-chat). We invite you to visit [LobeHub Cloud](http://lobehub.com) to experience the full functionality.
diff --git a/docs/changelog/2024-08-21-file-upload-and-knowledge-base.zh-CN.mdx b/docs/changelog/2024-08-21-file-upload-and-knowledge-base.zh-CN.mdx
index cdb3f23564..69f32ed1e3 100644
--- a/docs/changelog/2024-08-21-file-upload-and-knowledge-base.zh-CN.mdx
+++ b/docs/changelog/2024-08-21-file-upload-and-knowledge-base.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 重磅发布知识库功能:打造智能文件管理与对话新体验
-description: LobeChat 推出全新知识库功能,支持全类型文件管理、智能向量化和文件对话,让知识管理和信息检索更轻松、更智能。
+title: LobeHub 重磅发布知识库功能:打造智能文件管理与对话新体验
+description: LobeHub 推出全新知识库功能,支持全类型文件管理、智能向量化和文件对话,让知识管理和信息检索更轻松、更智能。
tags:
- - LobeChat
+ - LobeHub
- 知识库
- 文件管理
- 智能处理
@@ -10,7 +10,7 @@ tags:
# 知识库功能重磅发布:智能文件管理与对话的革新
-我们很高兴地宣布,备受期待的 LobeChat 知识库功能现已正式发布!🎉 该功能已同步在开源版和 Cloud 版(lobechat.com)中上线。
+我们很高兴地宣布,备受期待的 LobeHub 知识库功能现已正式发布!🎉 该功能已同步在开源版和 Cloud 版(lobehub.com)中上线。
## 全新的文件管理体验
@@ -32,4 +32,4 @@ tags:
- 🎯 **实时反馈**:优化的上传体验,提供清晰的进度反馈
- ☁️ **双版本可选**:提供开源自部署版本和官方 Cloud 版本,满足不同用户需求
-所有功能均已在 [GitHub 仓库](https://github.com/lobehub/lobe-chat) 开源,欢迎访问 [LobeChat Cloud](http://lobechat.com) 体验完整功能。
+所有功能均已在 [GitHub 仓库](https://github.com/lobehub/lobe-chat) 开源,欢迎访问 [LobeHub Cloud](http://lobehub.com) 体验完整功能。
diff --git a/docs/changelog/2024-09-13-openai-o1-models.mdx b/docs/changelog/2024-09-13-openai-o1-models.mdx
index 6cb27260e9..914388b1f2 100644
--- a/docs/changelog/2024-09-13-openai-o1-models.mdx
+++ b/docs/changelog/2024-09-13-openai-o1-models.mdx
@@ -1,19 +1,19 @@
---
-title: LobeChat Perfectly Adapts to OpenAI O1 Series Models
+title: LobeHub Perfectly Adapts to OpenAI O1 Series Models
description: >-
- LobeChat v1.17.0 now supports OpenAI's latest o1-preview and o1-mini models,
+ LobeHub v1.17.0 now supports OpenAI's latest o1-preview and o1-mini models,
bringing users enhanced coding and mathematical capabilities.
tags:
- OpenAI O1
- - LobeChat
+ - LobeHub
- AI Models
- Code Writing
- Mathematical Problem Solving
---
-# OpenAI O1 Series Models Now Available on LobeChat
+# OpenAI O1 Series Models Now Available on LobeHub
-We are excited to announce that LobeChat v1.17.0 fully supports OpenAI's newly launched O1 series models. Whether you are a community edition user or a [Cloud version](https://lobechat.com) subscriber, you can experience this significant update.
+We are excited to announce that LobeHub v1.17.0 fully supports OpenAI's newly launched O1 series models. Whether you are a community edition user or a [Cloud version](https://lobehub.com) subscriber, you can experience this significant update.
## New Model Support
@@ -34,4 +34,4 @@ The O1 series models excel in the following areas:
- 🌐 Cloud version subscribers can start using it immediately
- 🔧 Self-hosted users can begin experiencing it by updating to v1.17.0
-This update marks an important step for LobeChat in supporting the latest AI models. We look forward to seeing how the O1 series models can help users unlock new possibilities!
+This update marks an important step for LobeHub in supporting the latest AI models. We look forward to seeing how the O1 series models can help users unlock new possibilities!
diff --git a/docs/changelog/2024-09-13-openai-o1-models.zh-CN.mdx b/docs/changelog/2024-09-13-openai-o1-models.zh-CN.mdx
index 9b1e5e5654..2bef0dc647 100644
--- a/docs/changelog/2024-09-13-openai-o1-models.zh-CN.mdx
+++ b/docs/changelog/2024-09-13-openai-o1-models.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: LobeChat 完美适配 OpenAI O1 系列模型
-description: LobeChat v1.17.0 现已支持 OpenAI 最新发布的 o1-preview 和 o1-mini 模型,为用户带来更强大的代码和数学能力。
+title: LobeHub 完美适配 OpenAI O1 系列模型
+description: LobeHub v1.17.0 现已支持 OpenAI 最新发布的 o1-preview 和 o1-mini 模型,为用户带来更强大的代码和数学能力。
tags:
- OpenAI O1
- - LobeChat
+ - LobeHub
- AI 模型
- 代码编写
- 数学问题
---
-# OpenAI O1 系列模型现已登陆 LobeChat
+# OpenAI O1 系列模型现已登陆 LobeHub
-我们很高兴地宣布,LobeChat v1.17.0 已完整支持 OpenAI 最新推出的 O1 系列模型。无论是社区版还是 [Cloud 版本](https://lobechat.com)用户,都可以体验到这一重大更新。
+我们很高兴地宣布,LobeHub v1.17.0 已完整支持 OpenAI 最新推出的 O1 系列模型。无论是社区版还是 [Cloud 版本](https://lobehub.com)用户,都可以体验到这一重大更新。
## 新增模型支持
@@ -29,7 +29,7 @@ O1 系列模型在以下方面表现出色:
## 立即体验
-- 🌐 [Cloud 版本](https://lobechat.com) 订阅用户现已可以直接使用
+- 🌐 [Cloud 版本](https://lobehub.com) 订阅用户现已可以直接使用
- 🔧 自部署用户可通过更新至 v1.17.0 开始体验
-这次更新让 LobeChat 在支持最新 AI 模型方面又迈出了重要一步。我们期待 O1 系列模型能够帮助用户实现更多可能!
+这次更新让 LobeHub 在支持最新 AI 模型方面又迈出了重要一步。我们期待 O1 系列模型能够帮助用户实现更多可能!
diff --git a/docs/changelog/2024-09-20-artifacts.mdx b/docs/changelog/2024-09-20-artifacts.mdx
index 3e571c4b26..0124ec2911 100644
--- a/docs/changelog/2024-09-20-artifacts.mdx
+++ b/docs/changelog/2024-09-20-artifacts.mdx
@@ -1,24 +1,24 @@
---
-title: 'Major Update: LobeChat Enters the Era of Artifacts'
+title: 'Major Update: LobeHub Enters the Era of Artifacts'
description: >-
- LobeChat v1.19 brings significant updates, including full feature support for
+ LobeHub v1.19 brings significant updates, including full feature support for
Claude Artifacts, a brand new discovery page design, and support for GitHub
Models providers, greatly enhancing the capabilities of the AI assistant.
tags:
- - LobeChat
+ - LobeHub
- AI Assistant
- Artifacts
- GitHub Models
- Interactive Experience
---
-# Major Update: LobeChat Enters the Era of Artifacts
+# Major Update: LobeHub Enters the Era of Artifacts
-We are excited to announce the official release of LobeChat v1.19! This update introduces several important features that elevate the interactive experience of the AI assistant.
+We are excited to announce the official release of LobeHub v1.19! This update introduces several important features that elevate the interactive experience of the AI assistant.
## 🎨 Artifacts Support: Unlocking New Creative Dimensions
-In this version, we have nearly fully replicated the core features of Claude Artifacts. Now, you can experience the following in LobeChat:
+In this version, we have nearly fully replicated the core features of Claude Artifacts. Now, you can experience the following in LobeHub:
- SVG graphic generation and display
- HTML page generation and real-time rendering
@@ -26,7 +26,7 @@ In this version, we have nearly fully replicated the core features of Claude Art
It is worth mentioning that the Python code execution feature has also been developed and will be available in future versions. At that time, users will be able to utilize both Claude Artifacts and OpenAI Code Interpreter, significantly enhancing the practicality of the AI assistant.
-
+
## 🔍 New Discovery Page: Explore More Possibilities
@@ -45,7 +45,7 @@ This redesign not only increases the information density of the page but also op
## 🚀 GitHub Models Support: More Model Choices
-Thanks to community member [@CloudPassenger](https://github.com/CloudPassenger) for their contributions, LobeChat now supports GitHub Models providers. Users simply need to:
+Thanks to community member [@CloudPassenger](https://github.com/CloudPassenger) for their contributions, LobeHub now supports GitHub Models providers. Users simply need to:
1. Prepare a GitHub Personal Access Token (PAT)
2. Configure provider information in the settings
@@ -55,10 +55,10 @@ The addition of this feature greatly expands the range of models available to us
## 🔜 Future Outlook
-We will continue to focus on enhancing the functionality and user experience of LobeChat. In upcoming versions, we plan to:
+We will continue to focus on enhancing the functionality and user experience of LobeHub. In upcoming versions, we plan to:
- Improve the Python code execution feature
- Add support for more types of Artifacts
- Expand the content dimensions of the discovery page
-Thank you to every user for your support and feedback. Let’s look forward to more surprises from LobeChat together!
+Thank you to every user for your support and feedback. Let’s look forward to more surprises from LobeHub together!
diff --git a/docs/changelog/2024-09-20-artifacts.zh-CN.mdx b/docs/changelog/2024-09-20-artifacts.zh-CN.mdx
index 801466498a..fb6244fb60 100644
--- a/docs/changelog/2024-09-20-artifacts.zh-CN.mdx
+++ b/docs/changelog/2024-09-20-artifacts.zh-CN.mdx
@@ -1,23 +1,23 @@
---
-title: 重磅更新:LobeChat 迎来 Artifacts 时代
+title: 重磅更新:LobeHub 迎来 Artifacts 时代
description: >-
- LobeChat v1.19 带来了重大更新,包括 Claude Artifacts 完整特性支持、全新的发现页面设计,以及 GitHub Models
+ LobeHub v1.19 带来了重大更新,包括 Claude Artifacts 完整特性支持、全新的发现页面设计,以及 GitHub Models
服务商支持,让 AI 助手的能力得到显著提升。
tags:
- - LobeChat
+ - LobeHub
- Artifacts
- AI 助手
- 更新
- GitHub Models
---
-# 重磅更新:LobeChat 迎来 Artifacts 时代
+# 重磅更新:LobeHub 迎来 Artifacts 时代
-我们很高兴地宣布 LobeChat v1.19 版本正式发布!这次更新带来了多项重要功能,让 AI 助手的交互体验更上一层楼。
+我们很高兴地宣布 LobeHub v1.19 版本正式发布!这次更新带来了多项重要功能,让 AI 助手的交互体验更上一层楼。
## 🎨 Artifacts 支持:解锁全新创作维度
-在这个版本中,我们几乎完整还原了 Claude Artifacts 的核心特性。现在,您可以在 LobeChat 中体验到:
+在这个版本中,我们几乎完整还原了 Claude Artifacts 的核心特性。现在,您可以在 LobeHub 中体验到:
- SVG 图形生成与展示
- HTML 页面生成与实时渲染
@@ -25,7 +25,7 @@ tags:
值得一提的是,Python 代码执行功能也已完成开发,将在后续版本中与大家见面。届时,用户将能够同时运用 Claude Artifacts 和 OpenAI Code Interpreter 这两大强大工具,极大提升 AI 助手的实用性。
-
+
## 🔍 全新发现页面:探索更多可能
@@ -44,7 +44,7 @@ tags:
## 🚀 GitHub Models 支持:更多模型选择
-感谢社区成员 [@CloudPassenger](https://github.com/CloudPassenger) 的贡献,现在 LobeChat 已经支持 GitHub Models 服务商。用户只需:
+感谢社区成员 [@CloudPassenger](https://github.com/CloudPassenger) 的贡献,现在 LobeHub 已经支持 GitHub Models 服务商。用户只需:
1. 准备 GitHub Personal Access Token (PAT)
2. 在设置中配置服务商信息
@@ -54,10 +54,10 @@ tags:
## 🔜 未来展望
-我们将持续致力于提升 LobeChat 的功能和用户体验。接下来的版本中,我们计划:
+我们将持续致力于提升 LobeHub 的功能和用户体验。接下来的版本中,我们计划:
- 完善 Python 代码执行功能
- 增加更多 Artifacts 类型支持
- 扩展发现页面的内容维度
-感谢每一位用户的支持与反馈,让我们一起期待 LobeChat 带来更多惊喜!
+感谢每一位用户的支持与反馈,让我们一起期待 LobeHub 带来更多惊喜!
diff --git a/docs/changelog/2024-10-27-pin-assistant.mdx b/docs/changelog/2024-10-27-pin-assistant.mdx
index d65f66dff3..2583d133ae 100644
--- a/docs/changelog/2024-10-27-pin-assistant.mdx
+++ b/docs/changelog/2024-10-27-pin-assistant.mdx
@@ -1,7 +1,7 @@
---
-title: LobeChat Introduces Persistent Assistant Sidebar Feature
+title: LobeHub Introduces Persistent Assistant Sidebar Feature
description: >-
- LobeChat v1.26.0 launches the persistent assistant sidebar feature, supporting
+ LobeHub v1.26.0 launches the persistent assistant sidebar feature, supporting
quick key switching for easy access to frequently used assistants,
significantly enhancing efficiency.
tags:
@@ -21,9 +21,9 @@ In version v1.26.0, we are excited to introduce a long-awaited new feature — t
- **Space Optimization**: Activating the sidebar automatically hides the conversation list, providing you with a larger conversation area.
- **Intelligent Display**: Automatically syncs pinned assistants to the sidebar, ensuring that important assistants are always within view.
-
+
-
+
## How to Use
diff --git a/docs/changelog/2024-10-27-pin-assistant.zh-CN.mdx b/docs/changelog/2024-10-27-pin-assistant.zh-CN.mdx
index 2971ad514b..12880a7db1 100644
--- a/docs/changelog/2024-10-27-pin-assistant.zh-CN.mdx
+++ b/docs/changelog/2024-10-27-pin-assistant.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: LobeChat 新增助手常驻侧边栏功能
-description: LobeChat v1.26.0 推出助手常驻侧边栏功能,支持快捷键切换,让高频使用的助手触手可及,大幅提升使用效率。
+title: LobeHub 新增助手常驻侧边栏功能
+description: LobeHub v1.26.0 推出助手常驻侧边栏功能,支持快捷键切换,让高频使用的助手触手可及,大幅提升使用效率。
tags:
- 助手常驻侧边栏
- 对话体验
@@ -17,9 +17,9 @@ tags:
- **空间优化**:激活侧边栏时会自动隐藏会话列表,为您腾出更大的对话空间
- **智能显示**:将置顶助手自动同步到侧边栏,让重要助手始终在视线范围内
-
+
-
+
## 如何使用
diff --git a/docs/changelog/2024-11-06-share-text-json.mdx b/docs/changelog/2024-11-06-share-text-json.mdx
index e8f1948d26..fbf9f5f2b0 100644
--- a/docs/changelog/2024-11-06-share-text-json.mdx
+++ b/docs/changelog/2024-11-06-share-text-json.mdx
@@ -1,10 +1,10 @@
---
-title: LobeChat Supports Sharing Conversations in Text Format (Markdown/JSON)
+title: LobeHub Supports Sharing Conversations in Text Format (Markdown/JSON)
description: >-
- LobeChat v1.28.0 introduces support for exporting conversations in Markdown
- and OpenAI format JSON, making it easy to convert conversation content into
- note materials, development debugging data, and training corpora,
- significantly enhancing the reusability of conversation content.
+ LobeHub v1.28.0 introduces support for exporting conversations in Markdown and
+ OpenAI format JSON, making it easy to convert conversation content into note
+ materials, development debugging data, and training corpora, significantly
+ enhancing the reusability of conversation content.
tags:
- Text Format Export
- Markdown Export
@@ -17,11 +17,11 @@ In the latest version v1.28.0, we have launched the text format export feature f
The Markdown export feature meets users' needs for directly using conversation content in note-taking and document writing. You can easily save valuable conversation content and manage it across various note-taking applications for reuse.
-
+
Additionally, we support exporting conversations in JSON format that complies with OpenAI messages specifications. This format can be used directly for API debugging and serves as high-quality training data for models.
-
+
It is particularly noteworthy that we retain the original data of Tools Calling within the conversation, which is crucial for enhancing the model's tool invocation capabilities.
diff --git a/docs/changelog/2024-11-06-share-text-json.zh-CN.mdx b/docs/changelog/2024-11-06-share-text-json.zh-CN.mdx
index 90f78831a6..8ed09deb5c 100644
--- a/docs/changelog/2024-11-06-share-text-json.zh-CN.mdx
+++ b/docs/changelog/2024-11-06-share-text-json.zh-CN.mdx
@@ -1,7 +1,7 @@
---
-title: LobeChat 支持分享对话为文本格式(Markdown/JSON)
+title: LobeHub 支持分享对话为文本格式(Markdown/JSON)
description: >-
- LobeChat v1.28.0 新增 Markdown 和 OpenAI 格式 JSON
+ LobeHub v1.28.0 新增 Markdown 和 OpenAI 格式 JSON
导出支持,让对话内容能轻松转化为笔记素材、开发调试数据和训练语料,显著提升对话内容的复用价值。
tags:
- 对话内容
@@ -15,11 +15,11 @@ tags:
Markdown 格式导出功能满足了用户将对话内容直接用于笔记和文档撰写的需求。您可以轻松地将有价值的对话内容保存下来,并在各类笔记软件中进行管理和复用。
-
+
同时,我们还支持将对话导出为符合 OpenAI messages 规范的 JSON 格式。这种格式不仅可以直接用于 API 调试,还能作为高质量的模型训练语料。
-
+
特别值得一提的是,我们会完整保留对话中的 Tools Calling 原始数据,这对提升模型的工具调用能力具有重要价值。
diff --git a/docs/changelog/2024-11-25-november-providers.mdx b/docs/changelog/2024-11-25-november-providers.mdx
index 013272dc4f..49bde5c0c3 100644
--- a/docs/changelog/2024-11-25-november-providers.mdx
+++ b/docs/changelog/2024-11-25-november-providers.mdx
@@ -1,10 +1,10 @@
---
-title: New Model Providers Added to LobeChat in November
+title: New Model Providers Added to LobeHub in November
description: >-
- LobeChat model providers now support Gitee AI, InternLM (ShuSheng PuYu), xAI,
+ LobeHub model providers now support Gitee AI, InternLM (ShuSheng PuYu), xAI,
and Cloudflare WorkersAI
tags:
- - LobeChat
+ - LobeHub
- AI Model Providers
- Gitee AI
- InternLM
@@ -12,9 +12,9 @@ tags:
- Cloudflare Workers AI
---
-# New Model Providers Added to LobeChat in November 🎉
+# New Model Providers Added to LobeHub in November 🎉
-We're excited to announce that LobeChat has expanded its AI model support with the following providers:
+We're excited to announce that LobeHub has expanded its AI model support with the following providers:
- **Gitee AI**: [https://ai.gitee.com](https://ai.gitee.com)
- **InternLM**: [https://internlm.intern-ai.org.cn](https://internlm.intern-ai.org.cn)
diff --git a/docs/changelog/2024-11-25-november-providers.zh-CN.mdx b/docs/changelog/2024-11-25-november-providers.zh-CN.mdx
index 04b311ddea..f92133659b 100644
--- a/docs/changelog/2024-11-25-november-providers.zh-CN.mdx
+++ b/docs/changelog/2024-11-25-november-providers.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 11 月新增模型服务
-description: 'LobeChat 模型服务新增支持 Gitee AI, InternLM (书生浦语), xAI, Cloudflare WorkersAI'
+title: LobeHub 11 月新增模型服务
+description: 'LobeHub 模型服务新增支持 Gitee AI, InternLM (书生浦语), xAI, Cloudflare WorkersAI'
tags:
- - LobeChat
+ - LobeHub
- AI模型服务
- Gitee AI
- InternLM
@@ -10,9 +10,9 @@ tags:
- Cloudflare Workers AI
---
-# LobeChat 11 月新增模型服务支持 🎉
+# LobeHub 11 月新增模型服务支持 🎉
-我们很高兴地宣布,LobeChat 在 11 月份新增了以下 AI 模型服务的支持:
+我们很高兴地宣布,LobeHub 在 11 月份新增了以下 AI 模型服务的支持:
- **Gitee AI**: [https://ai.gitee.com](https://ai.gitee.com)
- **InternLM (书生浦语)**: [https://internlm.intern-ai.org.cn](https://internlm.intern-ai.org.cn)
diff --git a/docs/changelog/2024-11-27-forkable-chat.mdx b/docs/changelog/2024-11-27-forkable-chat.mdx
index 2fe7a4658a..57a26ecc55 100644
--- a/docs/changelog/2024-11-27-forkable-chat.mdx
+++ b/docs/changelog/2024-11-27-forkable-chat.mdx
@@ -1,17 +1,17 @@
---
-title: LobeChat Supports Branching Conversations
+title: LobeHub Supports Branching Conversations
description: >-
- LobeChat now allows you to create new conversation branches from any message,
+ LobeHub now allows you to create new conversation branches from any message,
freeing your thoughts.
tags:
- Branching Conversations
- - LobeChat
+ - LobeHub
- Chat Features
---
# Exciting Launch of Branching Conversations Feature 🎉
-We are thrilled to announce that LobeChat has introduced a brand new branching conversations feature, making your conversation experience smoother and more natural:
+We are thrilled to announce that LobeHub has introduced a brand new branching conversations feature, making your conversation experience smoother and more natural:
## Key Features
diff --git a/docs/changelog/2024-11-27-forkable-chat.zh-CN.mdx b/docs/changelog/2024-11-27-forkable-chat.zh-CN.mdx
index 7033bd4a91..4c7230e953 100644
--- a/docs/changelog/2024-11-27-forkable-chat.zh-CN.mdx
+++ b/docs/changelog/2024-11-27-forkable-chat.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 支持分支对话
-description: LobeChat 现已支持从任意消息创建新的对话分支,让您的思维不再受限
+title: LobeHub 支持分支对话
+description: LobeHub 现已支持从任意消息创建新的对话分支,让您的思维不再受限
tags:
- - LobeChat
+ - LobeHub
- 分支对话
- 对话功能
- 用户体验
@@ -10,7 +10,7 @@ tags:
# 重磅推出分支对话功能 🎉
-我们很高兴地宣布,LobeChat 推出了全新的分支对话功能,让您的对话体验更加流畅自然:
+我们很高兴地宣布,LobeHub 推出了全新的分支对话功能,让您的对话体验更加流畅自然:
## 核心特性
diff --git a/docs/changelog/2025-01-03-user-profile.mdx b/docs/changelog/2025-01-03-user-profile.mdx
index 38d2364386..8b1d795abb 100644
--- a/docs/changelog/2025-01-03-user-profile.mdx
+++ b/docs/changelog/2025-01-03-user-profile.mdx
@@ -1,10 +1,10 @@
---
-title: LobeChat Supports User Data Statistics and Activity Sharing
+title: LobeHub Supports User Data Statistics and Activity Sharing
description: >-
- LobeChat now supports multi-dimensional user data statistics and activity
+ LobeHub now supports multi-dimensional user data statistics and activity
sharing
tags:
- - LobeChat
+ - LobeHub
- User Statistics
- Activity Sharing
- AI Data
@@ -12,9 +12,9 @@ tags:
# User Data Statistics and Activity Sharing 💯
-Want to know about your activity performance on LobeChat?
+Want to know about your activity performance on LobeHub?
-Now, you can comprehensively understand your AI data through the statistics feature, and even generate personal activity sharing images to share your LobeChat activity with friends.
+Now, you can comprehensively understand your AI data through the statistics feature, and even generate personal activity sharing images to share your LobeHub activity with friends.
## 📊 Data Statistics
diff --git a/docs/changelog/2025-01-03-user-profile.zh-CN.mdx b/docs/changelog/2025-01-03-user-profile.zh-CN.mdx
index 65df51b3f5..0e52eb2904 100644
--- a/docs/changelog/2025-01-03-user-profile.zh-CN.mdx
+++ b/docs/changelog/2025-01-03-user-profile.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: LobeChat 支持用户数据统计与活跃度分享
-description: LobeChat 现已支持多维度用户数据统计与活跃度分享
+title: LobeHub 支持用户数据统计与活跃度分享
+description: LobeHub 现已支持多维度用户数据统计与活跃度分享
tags:
- 用户数据统计
- 活跃度分享
- - LobeChat
+ - LobeHub
---
# 用户数据统计与活跃度分享 💯
-想要了解自己在 LobeChat 上的活跃度表现吗?
+想要了解自己在 LobeHub 上的活跃度表现吗?
-现在,您可以通过数据统计功能,全方位了解自己的 AI 数据,还可以生成个人活跃度分享图片,与好友分享您在 LobeChat 上的活跃度。
+现在,您可以通过数据统计功能,全方位了解自己的 AI 数据,还可以生成个人活跃度分享图片,与好友分享您在 LobeHub 上的活跃度。
## 📊 数据统计
diff --git a/docs/changelog/2025-01-22-new-ai-provider.mdx b/docs/changelog/2025-01-22-new-ai-provider.mdx
index 8caa6e5f74..111db93ce3 100644
--- a/docs/changelog/2025-01-22-new-ai-provider.mdx
+++ b/docs/changelog/2025-01-22-new-ai-provider.mdx
@@ -1,10 +1,10 @@
---
-title: LobeChat Launches New AI Provider Management System
+title: LobeHub Launches New AI Provider Management System
description: >-
- LobeChat has revamped its AI Provider Management System, now supporting custom
+ LobeHub has revamped its AI Provider Management System, now supporting custom
AI providers and models.
tags:
- - LobeChat
+ - LobeHub
- AI Provider
- Provider Management
- Multimodal
@@ -12,7 +12,7 @@ tags:
# New AI Provider Management System 🎉
-We are excited to announce that LobeChat has launched a brand new AI Provider Management System, now available in both the open-source version and the Cloud version ([lobechat.com](https://lobechat.com)):
+We are excited to announce that LobeHub has launched a brand new AI Provider Management System, now available in both the open-source version and the Cloud version ([lobehub.com](https://lobehub.com)):
## 🚀 Key Updates
diff --git a/docs/changelog/2025-01-22-new-ai-provider.zh-CN.mdx b/docs/changelog/2025-01-22-new-ai-provider.zh-CN.mdx
index 154b5ef7b2..ed12b658c3 100644
--- a/docs/changelog/2025-01-22-new-ai-provider.zh-CN.mdx
+++ b/docs/changelog/2025-01-22-new-ai-provider.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 推出全新 AI Provider 管理系统
-description: LobeChat 焕新全新 AI Provider 管理系统,已支持自定义 AI 服务商与自定义模型
+title: LobeHub 推出全新 AI Provider 管理系统
+description: LobeHub 焕新全新 AI Provider 管理系统,已支持自定义 AI 服务商与自定义模型
tags:
- - LobeChat
+ - LobeHub
- AI Provider
- 服务商管理
- 多模态
@@ -10,7 +10,7 @@ tags:
# 全新 AI Provider 管理系统 🎉
-我们很高兴地宣布,LobeChat 推出了全新的 AI Provider 管理系统,已经在开源版与 Cloud 版([lobechat.com](https://lobechat.com))中可用:
+我们很高兴地宣布,LobeHub 推出了全新的 AI Provider 管理系统,已经在开源版与 Cloud 版([lobehub.com](https://lobehub.com))中可用:
## 🚀 主要更新
diff --git a/docs/changelog/2025-02-02-deepseek-r1.mdx b/docs/changelog/2025-02-02-deepseek-r1.mdx
index 28f1853ca3..d5a4f3dbc4 100644
--- a/docs/changelog/2025-02-02-deepseek-r1.mdx
+++ b/docs/changelog/2025-02-02-deepseek-r1.mdx
@@ -1,33 +1,33 @@
---
title: >-
- LobeChat Integrates DeepSeek R1, Bringing a Revolutionary Chain of Thought
+ LobeHub Integrates DeepSeek R1, Bringing a Revolutionary Chain of Thought
Experience
description: >-
- LobeChat v1.49.12 fully supports the DeepSeek R1 model, providing users with
- an unprecedented interactive experience in the chain of thought.
+ LobeHub v1.49.12 fully supports the DeepSeek R1 model, providing users with an
+ unprecedented interactive experience in the chain of thought.
tags:
- - LobeChat
+ - LobeHub
- DeepSeek
- Chain of Thought
---
# Perfect Integration of DeepSeek R1 and its Deep Thinking Experience 🎉
-After nearly 10 days of meticulous refinement, LobeChat has fully integrated the DeepSeek R1 model in version v1.49.12, offering users a revolutionary interactive experience in the chain of thought!
+After nearly 10 days of meticulous refinement, LobeHub has fully integrated the DeepSeek R1 model in version v1.49.12, offering users a revolutionary interactive experience in the chain of thought!
## 🚀 Major Updates
-- 🤯 **Comprehensive Support for DeepSeek R1**: Now fully integrated in both the Community and Cloud versions ([lobechat.com](https://lobechat.com)).
+- 🤯 **Comprehensive Support for DeepSeek R1**: Now fully integrated in both the Community and Cloud versions ([lobehub.com](https://lobehub.com)).
- 🧠 **Real-Time Chain of Thought Display**: Transparently presents the AI's reasoning process, making the resolution of complex issues clear and visible.
- ⚡️ **Deep Thinking Experience**: Utilizing Chain of Thought technology, it provides more insightful AI conversations.
- 💫 **Intuitive Problem Analysis**: Makes the analysis of complex issues clear and easy to understand.
## 🌟 How to Use
-1. Upgrade to LobeChat v1.49.12 or visit [lobechat.com](https://lobechat.com).
+1. Upgrade to LobeHub v1.49.12 or visit [lobehub.com](https://lobehub.com).
2. Select the DeepSeek R1 model in the settings.
3. Experience a whole new level of intelligent conversation!
## 📢 Feedback and Support
-If you encounter any issues while using the application or have suggestions for new features, feel free to engage with us through GitHub Discussions. Let's work together to create a better LobeChat!
+If you encounter any issues while using the application or have suggestions for new features, feel free to engage with us through GitHub Discussions. Let's work together to create a better LobeHub!
diff --git a/docs/changelog/2025-02-02-deepseek-r1.zh-CN.mdx b/docs/changelog/2025-02-02-deepseek-r1.zh-CN.mdx
index c6ca15e187..05226954fe 100644
--- a/docs/changelog/2025-02-02-deepseek-r1.zh-CN.mdx
+++ b/docs/changelog/2025-02-02-deepseek-r1.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: LobeChat 重磅集成 DeepSeek R1,带来革命性思维链体验
-description: LobeChat v1.49.12 已完整支持 DeepSeek R1 模型,为用户带来前所未有的思维链交互体验
+title: LobeHub 重磅集成 DeepSeek R1,带来革命性思维链体验
+description: LobeHub v1.49.12 已完整支持 DeepSeek R1 模型,为用户带来前所未有的思维链交互体验
tags:
- DeepSeek R1
- CoT
@@ -9,21 +9,21 @@ tags:
# 完美集成 DeepSeek R1 ,开启思维链新体验
-经过近 10 天的精心打磨,LobeChat 已在 v1.49.12 版本中完整集成了 DeepSeek R1 模型,为用户带来革命性的思维链交互体验!
+经过近 10 天的精心打磨,LobeHub 已在 v1.49.12 版本中完整集成了 DeepSeek R1 模型,为用户带来革命性的思维链交互体验!
## 🚀 重大更新
-- 🤯 **DeepSeek R1 全面支持**: 现已在社区版与 Cloud 版([lobechat.com](https://lobechat.com))中完整接入
+- 🤯 **DeepSeek R1 全面支持**: 现已在社区版与 Cloud 版([lobehub.com](https://lobehub.com))中完整接入
- 🧠 **实时思维链展示**: 透明呈现 AI 的推理过程,让复杂问题的解决过程清晰可见
- ⚡️ **深度思考体验**: 通过 Chain of Thought 技术,带来更具洞察力的 AI 对话
- 💫 **直观的问题解析**: 让复杂问题的分析过程变得清晰易懂
## 🌟 使用方式
-1. 升级到 LobeChat v1.49.12 或访问 [lobechat.com](https://lobechat.com)
+1. 升级到 LobeHub v1.49.12 或访问 [lobehub.com](https://lobehub.com)
2. 在设置中选择 DeepSeek R1 模型
3. 开启全新的智能对话体验!
## 📢 反馈与支持
-如果您在使用过程中遇到任何问题,或对新功能有任何建议,欢迎通过 GitHub Discussions 与我们交流。让我们一起打造更好的 LobeChat!
+如果您在使用过程中遇到任何问题,或对新功能有任何建议,欢迎通过 GitHub Discussions 与我们交流。让我们一起打造更好的 LobeHub!
diff --git a/docs/changelog/2025-03-02-new-models.zh-CN.mdx b/docs/changelog/2025-03-02-new-models.zh-CN.mdx
new file mode 100644
index 0000000000..faec4eda8f
--- /dev/null
+++ b/docs/changelog/2025-03-02-new-models.zh-CN.mdx
@@ -0,0 +1,28 @@
+---
+title: 全面升级 AI 生态,50+ 模型与 10+ 服务商加入 🚀
+description: LobeHub 二月完成大规模 AI 生态扩展:新增 10+ AI 服务商与 50+ 模型配置,推理模型与在线搜索能力全面升级
+tags:
+  - AI 服务商
+  - 模型更新
+  - 在线搜索
+---
+
+# 全面升级 AI 生态,50+ 模型与 10+ 服务商加入 🚀
+
+LobeHub 在二月完成了史上最大规模的 AI 生态扩展,带来更强大、更灵活的 AI 对话体验。
+
+## 🌟 重大更新
+
+- 🔮 AI 服务商矩阵全面扩充:新增 10+ 个主流 AI 提供商,覆盖全球与国内主流平台
+- 🧠 推理模型全面接入:支持 Claude 3.7、OpenAI o3-mini 等新一代推理模型的思维链实时展示,优化 DeepSeek R1 多平台解析
+- 🌐 在线搜索能力革新:集成 SearchXNG、Perplexity 搜索,支持网页深度爬取,Gemini 2.0、Qwen 系列支持原生搜索
+
+## 📊 模型库大更新
+
+更新 50+ 个模型配置,包括:
+
+- OpenAI gpt-4.5-preview
+- Claude 3.7 Sonnet & Haiku 3.5
+- Gemini 2.0 系列优化
+- 月之暗面、通义千问、MiniMax 等国内平台最新模型
+- Perplexity、Cloudflare、硅基流动等平台模型刷新
diff --git a/docs/changelog/2025-04-06-exports.zh-CN.mdx b/docs/changelog/2025-04-06-exports.zh-CN.mdx
new file mode 100644
index 0000000000..e3ea0dfa99
--- /dev/null
+++ b/docs/changelog/2025-04-06-exports.zh-CN.mdx
@@ -0,0 +1,32 @@
+---
+title: 快捷键设置、数据导出与多项功能优化 ⚡
+description: LobeHub 三月更新:新增快捷键自定义与数据导出功能,扩展 AI 服务商生态,并优化推理模型与网页爬取能力
+tags:
+  - 快捷键
+  - 数据导出
+  - AI 服务商
+---
+
+# 快捷键设置、数据导出与多项功能优化 ⚡
+
+LobeHub 在三月持续优化用户体验,新增快捷键自定义、数据导出等实用功能,并扩展 AI 服务商生态。
+
+## 🌟 重要更新
+
+- ⚡ 快捷键自定义:支持自定义键盘快捷键,打造个性化操作体验
+- 💾 数据导出功能:支持 PGlite 和 PostgreSQL 数据导出,数据安全更有保障
+- 🔮 AI 服务商扩展:新增 Xinference、Cohere、Search1API、Infini-AI、PPIO 等服务商
+- 🧠 推理模型优化:支持推理内容选择器,优化 Claude 3.7、DeepSeek R1 等模型的思维链展示
+- 🌐 网页爬取增强:特别支持 YouTube、Reddit、微信公众号链接,优化短内容爬取,新增 Search1API 爬虫实现
+
+## 📊 模型库更新
+
+新增了多个主流 AI 模型,包括 Google 的 Gemini 2.5 Pro Experimental 和 Gemini 2.0 Flash 系列变体,Anthropic 支持上下文缓存功能,OpenAI 的 gpt-4o-mini-tts 语音模型, DeepSeek-V3-0324 和 Hunyuan-T1-Latest,以及 QwQ、QVQ-Max、文心 ernie-x1-32k-preview 等模型。
+
+## 💫 体验优化
+
+- 界面改进:重构 Drawer 样式,优化编辑滚动体验,支持截图分享到剪贴板
+- 搜索增强:支持非函数调用模型(如 DeepSeek R1)使用在线搜索,Wenxin、Hunyuan 支持内置网络搜索
+- 文件处理:新增 EPUB 文件分块支持,优化 PDF 处理
+- 性能提升:重构 Agent Runtime 实现,优化数据库核心代码,修复 LiteLLM 流式使用统计
+- 稳定性修复:解决主题闪烁、知识库、微信登录等多个问题
diff --git a/docs/changelog/2025-05-08-desktop-app.zh-CN.mdx b/docs/changelog/2025-05-08-desktop-app.zh-CN.mdx
new file mode 100644
index 0000000000..9f9a0d4b13
--- /dev/null
+++ b/docs/changelog/2025-05-08-desktop-app.zh-CN.mdx
@@ -0,0 +1,35 @@
+---
+title: 全新设计风格与桌面端发布 ✨
+description: LobeChat 正式发布桌面端应用,带来更现代、更流畅的使用体验
+tags:
+ - 桌面端
+ - LobeHub
+ - Lobe UI
+---
+
+# 全新设计风格与桌面端发布 ✨
+
+LobeChat 在四月完成重大视觉升级,推出全新 Lobe UI v2 设计系统,并正式发布桌面端应用,带来更现代、更流畅的使用体验。
+
+## 🌟 重大更新
+
+- 🎨 全新设计系统:升级至 Lobe UI v2,带来更现代化的界面设计与交互体验
+- 💻 桌面端正式发布:支持 Windows、macOS 系统托盘、窗口控制等原生功能,提供更便捷的桌面使用体验
+- 🔌 MCP 协议增强:支持 Streamable HTTP MCP 服务器,优化 stdio MCP 服务器安装体验,新增环境变量参数支持
+- 🔍 搜索功能扩展:新增 Search1API 搜索服务商支持,优化 SearXNG 分类与时间范围选择
+- 🔑 SSO 认证扩展:新增 Keycloak 单点登录支持,改进 OIDC OAuth 工作流
+
+## 📊 模型库更新
+
+- OpenAI: GPT-4.1 系列、o3/o4-mini
+- Google: Gemini 2.5 Pro Experimental、推理 Token 统计支持
+- xAI: Grok 3 系列模型
+- Anthropic、Mistral、Qwen、Ollama 等平台最新模型
+
+## 💫 体验优化
+
+- 界面改进:支持系统角色折叠、优化移动端样式、整齐排列模型标签、错误时允许复制 / 编辑
+- 性能统计:显示 Token 生成性能、阿里云百炼 Token 使用追踪、Google Gemini 推理 Token 统计
+- 快捷键增强:新增清除聊天消息、删除消息等快捷键,支持自定义快捷键设置
+- 工具调用优化:支持更新工具调用参数并重新触发、本地文件插件新增写入文件功能
+- 网页爬取:新增小红书爬虫规则支持
diff --git a/docs/changelog/2025-06-08-claude-4.zh-CN.mdx b/docs/changelog/2025-06-08-claude-4.zh-CN.mdx
new file mode 100644
index 0000000000..66805bf7c2
--- /dev/null
+++ b/docs/changelog/2025-06-08-claude-4.zh-CN.mdx
@@ -0,0 +1,20 @@
+---
+title: 提示词变量与 Claude 4 推理模型支持 🚀
+description: 支持 Claude 4 推理模型,并扩展多个 AI 服务商的搜索与推理能力
+tags:
+ - Claude 4
+ - 提示词变量
+ - Claude Sonnet 4
+---
+
+# 提示词变量与 Claude 4 推理模型支持 🚀
+
+LobeChat 在五月至六月持续优化核心功能,新增提示词变量系统、支持 Claude 4 推理模型,并扩展多个 AI 服务商的搜索与推理能力。
+
+## 🌟 主要更新
+
+- 💬 提示词变量系统:支持在提示词和输入框中使用占位符变量,实现动态内容替换
+- 🧠 Claude 4 系列支持:完整接入 Anthropic Claude 4 推理模型,支持 Web Search 工具与 Beta Header
+- 🔍 搜索能力扩展:新增 ModelScope 服务商,支持更多平台的搜索与爬虫功能
+- 📄 文件上传优化:支持直接将文件上传至聊天上下文,改进 PDF、XLSX 文件内容解析
+- 🔐 页面保护功能:支持页面访问保护,优化 Clerk 中间件路由保护
diff --git a/docs/changelog/2025-07-08-mcp-market.zh-CN.mdx b/docs/changelog/2025-07-08-mcp-market.zh-CN.mdx
new file mode 100644
index 0000000000..1c877859ab
--- /dev/null
+++ b/docs/changelog/2025-07-08-mcp-market.zh-CN.mdx
@@ -0,0 +1,28 @@
+---
+title: MCP 市场与搜索服务商扩展 🔍
+description: 新增多个搜索服务商支持,并集成 Amazon Cognito 与 Google SSO 认证,持续优化用户体验与开发者生态
+tags:
+ - MCP 市场
+ - Best MCP
+ - 搜索服务商
+---
+
+# MCP 市场与搜索服务商扩展 🔍
+
+LobeChat 在六月至七月推出 MCP 插件市场,新增多个搜索服务商支持,并集成 Amazon Cognito 与 Google SSO 认证,持续优化用户体验与开发者生态。
+
+## 🌟 重大更新
+
+- 🛒 MCP 市场上线:桌面端支持 MCP 插件一键安装,提供丰富的插件生态与便捷的安装体验
+- 🔍 搜索服务商扩展:新增 Brave、Google PSE、Kagi 等内置搜索服务商,支持 Vertex AI Google Search Grounding
+- 🔐 认证系统增强:集成 Amazon Cognito 与 Google SSO 作为认证提供商,支持页面访问保护
+- 🤖 v0 (Vercel) 支持:新增 Vercel v0 服务商支持
+- 📊 数据分析框架:实现数据分析事件追踪框架,优化用户行为分析
+
+## 💫 体验优化
+
+- 界面改进:优化移动端模型选择布局、改进文本溢出处理、修复加载动画切换问题
+- 推理配置:优化 Gemini thinkingBudget 配置、正确处理 reasoning_effort 参数
+- 搜索优化:支持 Browserless blockAds 与 stealth 参数、修复 Firefox Mermaid 显示错误
+- 桌面端增强:改进多显示器窗口打开体验、修复主题问题、优化分块加载
+- 响应动画:改进响应动画合并逻辑、支持过渡动画开关
diff --git a/docs/changelog/2025-08-08-image-generation.zh-CN.mdx b/docs/changelog/2025-08-08-image-generation.zh-CN.mdx
new file mode 100644
index 0000000000..f79f888702
--- /dev/null
+++ b/docs/changelog/2025-08-08-image-generation.zh-CN.mdx
@@ -0,0 +1,28 @@
+---
+title: AI 图像生成与桌面端增强 🎨
+description: LobeHub 推出 AI 图像生成功能,新增多个服务商支持,并持续优化桌面端体验与认证系统
+tags:
+ - AI 图像生成
+ - 桌面端
+ - API Key
+---
+
+# AI 图像生成与桌面端增强 🎨
+
+LobeChat 在七月至八月推出 AI 图像生成功能,新增多个服务商支持,并持续优化桌面端体验与认证系统。
+
+## 🌟 重大更新
+
+- 🎨 AI 图像生成:支持通过 Google Imagen、Qwen、Zhipu CogView4、MiniMax 等服务商生成图像
+- 🔐 认证系统扩展:新增 Amazon Cognito、Google SSO、Okta 认证支持
+- 🖥️ 桌面端优化:支持网络代理配置、自定义快捷键、OAuth 重构与远程聊天支持
+- 🔌 MCP 认证增强:支持 Streamable HTTP MCP Server 认证
+- 🔑 API Key 管理:实现完整的 API Key 管理功能
+
+## 📊 模型库更新
+
+新增 Claude Opus 4.1、Grok-4、Kimi K2、Ollama gpt-oss 支持,更新 Gemini 2.5 Flash-Lite GA、Hunyuan A13B thinking、Doubao 思维模型等
+
+## 💫 体验优化
+
+桌面端新增通知功能、优化设置窗口布局、改进多显示器体验;优化 MCP 插件调用与显示、修复 Gemini Artifacts 换行问题
diff --git a/docs/changelog/2025-09-08-gemini.zh-CN.mdx b/docs/changelog/2025-09-08-gemini.zh-CN.mdx
new file mode 100644
index 0000000000..4da4ecf326
--- /dev/null
+++ b/docs/changelog/2025-09-08-gemini.zh-CN.mdx
@@ -0,0 +1,24 @@
+---
+title: Gemini 图像生成与非流式模式支持 🎨
+description: LobeHub 新增 Gemini 2.5 Flash Image 图像生成能力,支持非流式响应模式,并扩展多个 AI 服务商与模型支持
+tags:
+ - Gemini
+ - Nano banana
+ - AI 生图
+---
+
+# Gemini 图像生成与非流式模式支持 🎨
+
+LobeChat 在八月至九月新增 Gemini 2.5 Flash Image 图像生成能力,支持非流式响应模式,并扩展多个 AI 服务商与模型支持。
+
+## 🌟 重大更新
+
+- 🎨 Gemini 图像生成:支持 Gemini 2.5 Flash Image(Nano Banana)、Imagen 4 GA 等图像生成模型
+- 🔄 非流式模式:新增非流式响应模式支持,适配更多使用场景
+- 🌐 服务商扩展:新增 Nebius、AkashChat、BFL 等图像生成服务商支持
+- 🖼️ Azure OpenAI 图像生成:支持通过 Azure OpenAI 生成图像
+- 🔧 HTML 预览:支持 HTML 内容预览功能
+
+## 📊 模型库更新
+
+新增 GPT-5 系列、Claude Opus 4.1、Grok Code Fast 1、DeepSeek V3.1、Gemini URL Context Tool 支持
+
+## 💫 体验优化
+
+优化思维滚动遮罩效果、支持会话切换快捷键、改进移动端控件表单显示、优化 Gemini 错误提示
diff --git a/docs/changelog/2025-10-08-python.zh-CN.mdx b/docs/changelog/2025-10-08-python.zh-CN.mdx
new file mode 100644
index 0000000000..83eba3c68c
--- /dev/null
+++ b/docs/changelog/2025-10-08-python.zh-CN.mdx
@@ -0,0 +1,29 @@
+---
+title: Claude Sonnet 4.5 与内置 Python 插件 🐍
+description: LobeHub 新增 Claude Sonnet 4.5 模型支持,推出内置 Python 插件,并优化聊天列表导航与富文本编辑体验
+tags:
+ - Claude Sonnet 4.5
+ - Python 插件
+ - Vercel AI Gateway
+---
+
+# Claude Sonnet 4.5 与内置 Python 插件 🐍
+
+LobeChat 在九月至十月新增 Claude Sonnet 4.5 模型支持,推出内置 Python 插件,并优化聊天列表导航与富文本编辑体验。
+
+## 🌟 主要更新
+
+- 🐍 内置 Python 插件:支持直接在聊天中执行 Python 代码
+- 🤖 Claude Sonnet 4.5:接入 Anthropic 最新推理模型
+- 🗺️ 聊天列表小地图:新增快速导航功能,提升长对话浏览效率
+- 📝 富文本编辑器:支持数学公式、任务列表、并行发送等功能
+- 🎨 Qwen 图像编辑:支持通过 Qwen 模型进行图像编辑
+- 🌐 Vercel AI Gateway:新增 Vercel AI Gateway 服务商支持
+
+## 📊 模型库更新
+
+新增 Seedream 4.0、CometAPI、NewAPI 等服务商,更新 Gemini 2.5 视频理解能力
+
+## 💫 体验优化
+
+优化聊天输入框支持调整大小、改进移动端标题显示、支持 Base64 图像语法、优化 .doc 文件解析
diff --git a/docs/changelog/2025-11-08-comfy-ui.zh-CN.mdx b/docs/changelog/2025-11-08-comfy-ui.zh-CN.mdx
new file mode 100644
index 0000000000..849479e2f1
--- /dev/null
+++ b/docs/changelog/2025-11-08-comfy-ui.zh-CN.mdx
@@ -0,0 +1,28 @@
+---
+title: ComfyUI 集成与知识库优化 ⭐
+description: 集成 ComfyUI 工作流,新增多个 AI 服务商与模型支持,并持续优化知识库与用户体验
+tags:
+ - AI 知识库
+ - 工作流
+ - ComfyUI
+---
+
+# ComfyUI 集成与知识库优化 ⭐
+
+LobeChat 在十月至十一月集成 ComfyUI 工作流,新增多个 AI 服务商与模型支持,并持续优化知识库与用户体验。
+
+## 🌟 重要更新
+
+- 🎨 ComfyUI 集成:支持 ComfyUI 工作流集成
+- 🤖 新增服务商:Cerebras、CometAPI 等服务商支持
+- 📄 PDF 导出:支持将对话导出为 PDF 格式
+- 🗂️ 知识库优化:新增瀑布流布局、支持上传时自动解压文件
+- 🖼️ 图像生成扩展:支持硅基流动、混元 Text-to-Image 3 等图像生成服务
+
+## 📊 模型库更新
+
+新增 Claude Haiku 4.5、GPT-5 Pro、MiniMax-M2、Imagen 4 for Vertex AI 等模型
+
+## 💫 体验优化
+
+优化富文本链接显示、改进搜索体验、支持禁用富文本编辑、新增删除与重新生成快捷键、改进更新通知
diff --git a/docs/changelog/2025-12-20-mcp.zh-CN.mdx b/docs/changelog/2025-12-20-mcp.zh-CN.mdx
new file mode 100644
index 0000000000..0afc68faf3
--- /dev/null
+++ b/docs/changelog/2025-12-20-mcp.zh-CN.mdx
@@ -0,0 +1,26 @@
+---
+title: MCP 云端点与模型库扩展 🔌
+description: 新增多个 AI 服务商,并改进知识库功能。
+tags:
+ - MCP
+ - LobeHub
+ - 知识库
+ - 模型库
+---
+
+# MCP 云端点与模型库扩展 🔌
+
+LobeChat 在十一月持续优化模型支持与用户体验,新增多个 AI 服务商,并改进知识库功能。
+
+## 🌟 重要更新
+
+- 🔌 MCP 云端点:支持市场云端点 MCP 集成,扩展工具生态
+- 🤖 新增服务商:支持 ZenMux、Nano Banana Pro、七牛云等多个 AI 服务商
+- 📚 知识库增强:支持创建页面、优化文件管理,改进 RAG 搜索体验
+- 🎨 图像生成:新增多个图像模型支持,优化图像生成配置
+- 🔐 认证优化:改进 OIDC 认证流程,优化桌面端登录体验
+- 💬 对话优化:支持话题超链接、改进消息编辑与删除功能
+
+## 💫 体验优化
+
+优化话题列表交互、改进工具调用显示、完善富文本编辑、优化 Token 使用统计动画、改进模型选择器排序
diff --git a/docs/changelog/index.json b/docs/changelog/index.json
index 6a5f5236e7..f76924d02a 100644
--- a/docs/changelog/index.json
+++ b/docs/changelog/index.json
@@ -2,6 +2,66 @@
"$schema": "https://github.com/lobehub/lobe-chat/blob/main/docs/changelog/schema.json",
"cloud": [],
"community": [
+ {
+ "image": "https://file.rene.wang/clipboard-1769155711708-710967bee57bc.png",
+ "id": "2025-12-20-mcp",
+ "date": "2025-12-20",
+ "versionRange": ["1.142.8", "1.143"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155737647-1b4fc6558f029.png",
+ "id": "2025-11-08-comfy-ui",
+ "date": "2025-11-08",
+ "versionRange": ["1.133.5", "1.142.8"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155791342-7f43b72cc6b42.png",
+ "id": "2025-10-08-python",
+ "date": "2025-10-08",
+ "versionRange": ["1.120.7", "1.133.5"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155818070-7eb403550b6c7.png",
+ "id": "2025-09-08-gemini",
+ "date": "2025-09-08",
+ "versionRange": ["1.109.1", "1.120.7"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155880302-272fbd2c5290b.png",
+ "id": "2025-08-08-image-generation",
+ "date": "2025-08-08",
+ "versionRange": ["1.97.10", "1.109.1"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155935435-93dab92dd0f44.png",
+ "id": "2025-07-08-mcp-market",
+ "date": "2025-07-08",
+ "versionRange": ["1.93.3", "1.97.10"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769155973881-ff1ee142d5b8f.png",
+ "id": "2025-06-08-claude-4",
+ "date": "2025-06-08",
+ "versionRange": ["1.84.27", "1.93.3"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769156005535-c2e79e11f4b56.png",
+ "id": "2025-05-08-desktop-app",
+ "date": "2025-05-08",
+ "versionRange": ["1.77.17", "1.84.27"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769156036607-2b4fe37c4b56c.png",
+ "id": "2025-04-06-exports",
+ "date": "2025-04-06",
+ "versionRange": ["1.67.2", "1.77.17"]
+ },
+ {
+ "image": "https://file.rene.wang/clipboard-1769156050787-ecf4f48474ae2.png",
+ "id": "2025-03-02-new-models",
+ "date": "2025-03-02",
+ "versionRange": ["1.49.13", "1.67.2"]
+ },
{
"image": "https://github.com/user-attachments/assets/5fe4c373-ebd0-42a9-bdca-0ab7e0a2e747",
"id": "2025-02-02-deepseek-r1",
diff --git a/docs/development/basic/add-new-authentication-providers.mdx b/docs/development/basic/add-new-authentication-providers.mdx
index 2f059403c9..4119e3f161 100644
--- a/docs/development/basic/add-new-authentication-providers.mdx
+++ b/docs/development/basic/add-new-authentication-providers.mdx
@@ -1,14 +1,21 @@
---
title: New Authentication Provider Guide
+description: Learn how to implement a new authentication provider using Auth.js in LobeHub.
+tags:
+ - Authentication
+ - Auth.js
+ - LobeHub
+ - Okta
+ - OAuth
---
# New Authentication Provider Guide
-LobeChat uses [Better Auth](https://www.better-auth.com) as its authentication service. This document explains how to add new SSO authentication providers.
+LobeHub uses [Auth.js v5](https://authjs.dev/) as the external authentication service. Auth.js is an open-source authentication library that provides a simple way to implement authentication and authorization features. This document will introduce how to use Auth.js to implement a new authentication provider.
## Architecture Overview
-Better Auth SSO providers fall into two categories:
+To add a new authentication provider in LobeHub (for example, adding Okta), you need to follow the steps below:
| Type | Description | Examples |
| --------- | ------------------------------------------- | -------------------------------- |
@@ -176,20 +183,4 @@ AUTH_OKTA_SECRET=your-client-secret
AUTH_OKTA_ISSUER=https://your-domain.okta.com
```
-## Debugging Tips
-
-1. **Environment variable check fails**: Ensure all required environment variables are set
-2. **Callback URL errors**: Verify the callback URL configured in your OAuth application
-3. **User profile mapping**: Use `mapProfileToUser` to customize the mapping from OAuth profile to user info
-
-## Related Files
-
-| File | Description |
-| ----------------------------------------- | -------------------------------- |
-| `src/libs/better-auth/sso/providers/*.ts` | Provider definitions |
-| `src/libs/better-auth/sso/index.ts` | Provider registration |
-| `src/libs/better-auth/sso/types.ts` | Type definitions |
-| `src/libs/better-auth/sso/helpers.ts` | Helper functions |
-| `src/libs/better-auth/constants.ts` | Built-in provider constants |
-| `src/envs/auth.ts` | Environment variable definitions |
-| `src/libs/better-auth/define-config.ts` | Better Auth configuration |
+Now, you can use Okta as your provider to implement the authentication feature in LobeHub.
diff --git a/docs/development/basic/add-new-authentication-providers.zh-CN.mdx b/docs/development/basic/add-new-authentication-providers.zh-CN.mdx
index c006516757..d765b6fe7e 100644
--- a/docs/development/basic/add-new-authentication-providers.zh-CN.mdx
+++ b/docs/development/basic/add-new-authentication-providers.zh-CN.mdx
@@ -1,12 +1,20 @@
---
-title: 新身份验证提供商开发指南
+title: 新身份验证方式开发指南
+description: 学习如何在 LobeHub 中使用 Auth.js v5 添加新的身份验证提供者。
+tags:
+ - 身份验证
+ - Auth.js
+ - Okta
+ - 开发指南
---
-# 新身份验证提供商开发指南
+# 新身份验证方式开发指南
+
+LobeHub 使用 [Auth.js v5](https://authjs.dev/) 作为外部身份验证服务。Auth.js 是一个开源的身份验证库,它提供了一种简单的方式来实现身份验证和授权功能。本文档将介绍如何使用 Auth.js 来实现新的身份验证方式。
LobeChat 使用 [Better Auth](https://www.better-auth.com) 作为身份验证服务。本文档介绍如何添加新的 SSO 身份验证提供商。
-## 架构概述
+为了在 LobeHub 中添加新的身份验证提供者(例如添加 Okta),你需要完成以下步骤:
Better Auth SSO 提供商分为两类:
diff --git a/docs/development/basic/add-new-image-model.mdx b/docs/development/basic/add-new-image-model.mdx
index fa74480521..b18105b347 100644
--- a/docs/development/basic/add-new-image-model.mdx
+++ b/docs/development/basic/add-new-image-model.mdx
@@ -1,5 +1,12 @@
---
title: Adding New Image Models
+description: >-
+ Explore how to add new image models for AI generation with standard
+ parameters.
+tags:
+ - AI Image Generation
+ - Image Models
+ - OpenAI Compatibility
---
# Adding New Image Models
diff --git a/docs/development/basic/add-new-image-model.zh-CN.mdx b/docs/development/basic/add-new-image-model.zh-CN.mdx
index f65db8fb86..95daa34711 100644
--- a/docs/development/basic/add-new-image-model.zh-CN.mdx
+++ b/docs/development/basic/add-new-image-model.zh-CN.mdx
@@ -1,5 +1,10 @@
---
title: 添加新的图像模型
+description: 了解如何添加新的图像模型并兼容 OpenAI 请求格式。
+tags:
+ - 图像模型
+ - AI 绘画
+ - OpenAI 兼容
---
# 添加新的图像模型
diff --git a/docs/development/basic/architecture.mdx b/docs/development/basic/architecture.mdx
index 86a1ac08f1..fb7079e573 100644
--- a/docs/development/basic/architecture.mdx
+++ b/docs/development/basic/architecture.mdx
@@ -1,41 +1,50 @@
---
title: Architecture Design
+description: >-
+ Explore the architecture of LobeHub, an AI chat app built on Next.js,
+ featuring frontend, APIs, and markets.
+tags:
+ - LobeHub
+ - AI Chat Application
+ - Next.js
+ - Architecture Design
+ - Frontend Development
---
# Architecture Design
-LobeChat is an AI chat application built on the Next.js framework, aiming to provide an AI productivity platform that enables users to interact with AI through natural language. The following is an overview of the architecture design of LobeChat:
+LobeHub is an AI chat application built on the Next.js framework, aiming to provide an AI productivity platform that enables users to interact with AI through natural language. The following is an overview of the architecture design of LobeHub:
## Application Architecture Overview
-The overall architecture of LobeChat consists of the frontend, EdgeRuntime API, Agents Market, Plugin Market, and independent plugins. These components collaborate to provide a complete AI experience.
+The overall architecture of LobeHub consists of the frontend, EdgeRuntime API, Agents Market, Plugin Market, and independent plugins. These components collaborate to provide a complete AI experience.
## Frontend Architecture
-The frontend of LobeChat adopts the Next.js framework, leveraging its powerful server-side rendering (SSR) capability and routing functionality. The frontend utilizes a stack of technologies, including the antd component library, lobe-ui AIGC component library, zustand state management, swr request library, i18next internationalization library, and more. These technologies collectively support the functionality and features of LobeChat.
+The frontend of LobeHub adopts the Next.js framework, leveraging its powerful server-side rendering (SSR) capability and routing functionality. The frontend utilizes a stack of technologies, including the antd component library, lobe-ui AIGC component library, zustand state management, swr request library, i18next internationalization library, and more. These technologies collectively support the functionality and features of LobeHub.
The components in the frontend architecture include app, components, config, const, features, helpers, hooks, layout, locales, migrations, prompts, services, store, styles, types, and utils. Each component has specific responsibilities and collaborates with others to achieve different functionalities.
## Edge Runtime API
-The Edge Runtime API is one of the core components of LobeChat, responsible for handling the core logic of AI conversations. It provides interaction interfaces with the AI engine, including natural language processing, intent recognition, and response generation. The EdgeRuntime API communicates with the frontend, receiving user input and returning corresponding responses.
+The Edge Runtime API is one of the core components of LobeHub, responsible for handling the core logic of AI conversations. It provides interaction interfaces with the AI engine, including natural language processing, intent recognition, and response generation. The EdgeRuntime API communicates with the frontend, receiving user input and returning corresponding responses.
## Agents Market
-The Agents Market is a crucial part of LobeChat, providing various AI agents for different scenarios to handle specific tasks and domains. The Agents Market also offers functionality for discovering and uploading agents, allowing users to find agents created by others and easily share their own agents in the market.
+The Agents Market is a crucial part of LobeHub, providing various AI agents for different scenarios to handle specific tasks and domains. The Agents Market also offers functionality for discovering and uploading agents, allowing users to find agents created by others and easily share their own agents in the market.
## Plugin Market
-The Plugin Market is another key component of LobeChat, offering various plugins to extend the functionality and features of LobeChat. Plugins can be independent functional modules or integrated with agents from the Agents Market. During conversations, the assistant automatically identifies user input, recognizes suitable plugins, and passes them to the corresponding plugins for processing and returns the results.
+The Plugin Market is another key component of LobeHub, offering various plugins to extend the functionality and features of LobeHub. Plugins can be independent functional modules or integrated with agents from the Agents Market. During conversations, the assistant automatically identifies user input, recognizes suitable plugins, and passes them to the corresponding plugins for processing and returns the results.
## Security and Performance Optimization
-LobeChat's security strategy includes authentication and permission management. Users need to authenticate before using LobeChat, and operations are restricted based on the user's permissions.
+LobeHub's security strategy includes authentication and permission management. Users need to authenticate before using LobeHub, and operations are restricted based on the user's permissions.
-To optimize performance, LobeChat utilizes Next.js SSR functionality to achieve fast page loading and response times. Additionally, a series of performance optimization measures are implemented, including code splitting, caching, and resource compression.
+To optimize performance, LobeHub utilizes Next.js SSR functionality to achieve fast page loading and response times. Additionally, a series of performance optimization measures are implemented, including code splitting, caching, and resource compression.
## Development and Deployment Process
-LobeChat's development process includes version control, testing, continuous integration, and continuous deployment. The development team uses version control systems for code management and conducts unit and integration testing to ensure code quality. Continuous integration and deployment processes ensure rapid delivery and deployment of code.
+LobeHub's development process includes version control, testing, continuous integration, and continuous deployment. The development team uses version control systems for code management and conducts unit and integration testing to ensure code quality. Continuous integration and deployment processes ensure rapid delivery and deployment of code.
-The above is a brief introduction to the architecture design of LobeChat, detailing the responsibilities and collaboration of each component, as well as the impact of design decisions on application functionality and performance.
+The above is a brief introduction to the architecture design of LobeHub, detailing the responsibilities and collaboration of each component, as well as the impact of design decisions on application functionality and performance.
diff --git a/docs/development/basic/architecture.zh-CN.mdx b/docs/development/basic/architecture.zh-CN.mdx
index c1cbb7ad3b..c1ecdd9f4f 100644
--- a/docs/development/basic/architecture.zh-CN.mdx
+++ b/docs/development/basic/architecture.zh-CN.mdx
@@ -1,41 +1,48 @@
---
title: 架构设计
+description: 深入了解 LobeHub 的架构设计,包括前端、API 和市场组件。
+tags:
+ - LobeHub
+ - 架构设计
+ - AI 聊天应用
+ - Next.js
+ - Edge Runtime API
---
# 架构设计
-LobeChat 是一个基于 Next.js 框架构建的 AI 聊天应用,旨在提供一个 AI 生产力平台,使用户能够与 AI 进行自然语言交互。以下是 LobeChat 的架构设计介稿:
+LobeHub 是一个基于 Next.js 框架构建的 AI 聊天应用,旨在提供一个 AI 生产力平台,使用户能够与 AI 进行自然语言交互。以下是 LobeHub 的架构设计简介:
## 应用架构概览
-LobeChat 的整体架构由前端、EdgeRuntime API、Agents 市场、插件市场和独立插件组成。这些组件相互协作,以提供完整的 AI 体验。
+LobeHub 的整体架构由前端、EdgeRuntime API、Agents 市场、插件市场和独立插件组成。这些组件相互协作,以提供完整的 AI 体验。
## 前端架构
-LobeChat 的前端采用 Next.js 框架,利用其强大的 SSR(服务器端渲染)能力和路由功能。前端使用了一系列技术栈,包括 antd 组件库和 lobe-ui AIGC 组件库、zustand 状态管理、swr 请求库、i18next 国际化库等。这些技术栈共同支持了 LobeChat 的功能和特性。
+LobeHub 的前端采用 Next.js 框架,利用其强大的 SSR(服务器端渲染)能力和路由功能。前端使用了一系列技术栈,包括 antd 组件库和 lobe-ui AIGC 组件库、zustand 状态管理、swr 请求库、i18next 国际化库等。这些技术栈共同支持了 LobeHub 的功能和特性。
前端架构中的组件包括 app、components、config、const、features、helpers、hooks、layout、locales、migrations、prompts、services、store、styles、types 和 utils。每个组件都有特定的职责,并与其他组件协同工作,以实现不同的功能。
## Edge Runtime API
-Edge Runtime API 是 LobeChat 的核心组件之一,负责处理 AI 会话的核心逻辑。它提供了与 AI 引擎的交互接口,包括自然语言处理、意图识别和回复生成等。EdgeRuntime API 与前端进行通信,接收用户的输入并返回相应的回复。
+Edge Runtime API 是 LobeHub 的核心组件之一,负责处理 AI 会话的核心逻辑。它提供了与 AI 引擎的交互接口,包括自然语言处理、意图识别和回复生成等。EdgeRuntime API 与前端进行通信,接收用户的输入并返回相应的回复。
## Agents 市场
-Agents 市场是 LobeChat 的一个重要组成部分,它提供了各种不同场景的 AI Agent,用于处理特定的任务和领域。Agents 市场还提供了使用和上传 Agent 的功能,使用户能够发现其他人制作的 Agent ,也可以一键分享自己的 Agent 到市场上。
+Agents 市场是 LobeHub 的一个重要组成部分,它提供了各种不同场景的 AI Agent,用于处理特定的任务和领域。Agents 市场还提供了使用和上传 Agent 的功能,使用户能够发现其他人制作的 Agent ,也可以一键分享自己的 Agent 到市场上。
## 插件市场
-插件市场是 LobeChat 的另一个关键组件,它提供了各种插件,用于扩展 LobeChat 的功能和特性。插件可以是独立的功能模块,也可以与 Agents 市场的 Agent 进行集成。在会话中,助手将自动识别用户的输入,并识别适合的插件并传递给相应的插件进行处理,并返回处理结果。
+插件市场是 LobeHub 的另一个关键组件,它提供了各种插件,用于扩展 LobeHub 的功能和特性。插件可以是独立的功能模块,也可以与 Agents 市场的 Agent 进行集成。在会话中,助手将自动识别用户的输入,并识别适合的插件并传递给相应的插件进行处理,并返回处理结果。
## 安全性和性能优化
-LobeChat 的安全性策略包括身份验证和权限管理。用户需要进行身份验证后才能使用 LobeChat,同时根据用户的权限进行相应的操作限制。
+LobeHub 的安全性策略包括身份验证和权限管理。用户需要进行身份验证后才能使用 LobeHub,同时根据用户的权限进行相应的操作限制。
-为了优化性能,LobeChat 使用了 Next.js 的 SSR 功能,实现了快速的页面加载和响应时间。此外,还采用了一系列的性能优化措施,包括代码分割、缓存和资源压缩等。
+为了优化性能,LobeHub 使用了 Next.js 的 SSR 功能,实现了快速的页面加载和响应时间。此外,还采用了一系列的性能优化措施,包括代码分割、缓存和资源压缩等。
## 开发和部署流程
-LobeChat 的开发流程包括版本控制、测试、持续集成和持续部署。开发团队使用版本控制系统进行代码管理,并进行单元测试和集成测试以确保代码质量。持续集成和持续部署流程确保了代码的快速交付和部署。
+LobeHub 的开发流程包括版本控制、测试、持续集成和持续部署。开发团队使用版本控制系统进行代码管理,并进行单元测试和集成测试以确保代码质量。持续集成和持续部署流程确保了代码的快速交付和部署。
-以上是 LobeChat 的架构设计介绍简介,详细解释了各个组件的职责和协作方式,以及设计决策对应用功能和性能的影响。
+以上是 LobeHub 的架构设计介绍简介,详细解释了各个组件的职责和协作方式,以及设计决策对应用功能和性能的影响。
diff --git a/docs/development/basic/chat-api.mdx b/docs/development/basic/chat-api.mdx
index 685fad6d4d..fd3789bdd1 100644
--- a/docs/development/basic/chat-api.mdx
+++ b/docs/development/basic/chat-api.mdx
@@ -1,5 +1,13 @@
---
title: Lobe Chat API Client-Server Interaction Logic
+description: >-
+ Explore the client-server interaction logic of Lobe Chat API, including event
+ sequences.
+tags:
+ - Lobe Chat API
+ - Client-Server Interaction
+ - Event Sequences
+ - API Logic
---
# Lobe Chat API Client-Server Interaction Logic
@@ -251,7 +259,7 @@ AgentRuntime is a core abstraction layer in Lobe Chat that encapsulates a unifie
baseURL: OPENROUTER_BASE_URL,
defaultHeaders: {
'HTTP-Referer': 'https://github.com/lobehub/lobe-chat',
- 'X-Title': 'LobeChat',
+ 'X-Title': 'LobeHub',
},
});
this.baseURL = OPENROUTER_BASE_URL;
diff --git a/docs/development/basic/chat-api.zh-CN.mdx b/docs/development/basic/chat-api.zh-CN.mdx
index c3b2de9847..02ff7dca31 100644
--- a/docs/development/basic/chat-api.zh-CN.mdx
+++ b/docs/development/basic/chat-api.zh-CN.mdx
@@ -1,5 +1,11 @@
---
title: Lobe Chat API 前后端交互逻辑
+description: 深入了解 Lobe Chat API 的前后端交互实现逻辑和核心组件。
+tags:
+ - Lobe Chat API
+ - 前后端交互
+ - 事件序列
+ - 核心组件
---
# Lobe Chat API 前后端交互逻辑
@@ -251,7 +257,7 @@ AgentRuntime 是 Lobe Chat 中的一个核心抽象层,它封装了与不同 A
baseURL: OPENROUTER_BASE_URL,
defaultHeaders: {
'HTTP-Referer': 'https://github.com/lobehub/lobe-chat',
- 'X-Title': 'LobeChat',
+ 'X-Title': 'LobeHub',
},
});
this.baseURL = OPENROUTER_BASE_URL;
diff --git a/docs/development/basic/comfyui-development.mdx b/docs/development/basic/comfyui-development.mdx
index ee9f3fcc08..3ea5bf06d5 100644
--- a/docs/development/basic/comfyui-development.mdx
+++ b/docs/development/basic/comfyui-development.mdx
@@ -1,7 +1,7 @@
---
title: ComfyUI Extension Development Guide
description: >-
- Learn how to add new models, workflows, and features to LobeChat's ComfyUI
+ Learn how to add new models, workflows, and features to LobeHub's ComfyUI
integration
tags:
- ComfyUI
@@ -12,11 +12,11 @@ tags:
# ComfyUI Extension Development Guide
-This guide is based on actual code implementation and helps developers extend LobeChat's ComfyUI integration functionality.
+This guide is based on actual code implementation and helps developers extend LobeHub's ComfyUI integration functionality.
## Architecture Overview
-LobeChat ComfyUI integration uses a four-layer service architecture built around the main `LobeComfyUI` class:
+LobeHub ComfyUI integration uses a four-layer service architecture built around the main `LobeComfyUI` class:
```plaintext
packages/model-runtime/src/providers/comfyui/
diff --git a/docs/development/basic/comfyui-development.zh-CN.mdx b/docs/development/basic/comfyui-development.zh-CN.mdx
index d20e30aa5a..ccd4d23530 100644
--- a/docs/development/basic/comfyui-development.zh-CN.mdx
+++ b/docs/development/basic/comfyui-development.zh-CN.mdx
@@ -1,6 +1,6 @@
---
title: ComfyUI 扩展开发指南
-description: 学习如何为 LobeChat ComfyUI 集成添加新模型、工作流和功能扩展
+description: 学习如何为 LobeHub ComfyUI 集成添加新模型、工作流和功能扩展
tags:
- ComfyUI
- 开发指南
@@ -10,11 +10,11 @@ tags:
# ComfyUI 扩展开发指南
-本指南基于实际代码实现,帮助开发者扩展 LobeChat 的 ComfyUI 集成功能。
+本指南基于实际代码实现,帮助开发者扩展 LobeHub 的 ComfyUI 集成功能。
## 架构概览
-LobeChat ComfyUI 集成采用四层服务架构,围绕 `LobeComfyUI` 主类构建:
+LobeHub ComfyUI 集成采用四层服务架构,围绕 `LobeComfyUI` 主类构建:
```plaintext
packages/model-runtime/src/providers/comfyui/
@@ -995,4 +995,4 @@ const optimizedResult = await comfyUI.createImage({
- 添加新模型时,请遵循测试架构指南确保测试完整性
- 在提交代码前务必运行相关测试确保覆盖率达标
-通过遵循这些指南,开发者可以有效地在 LobeChat 中使用和扩展 ComfyUI 功能,为用户提供强大的图像生成和处理能力。
+通过遵循这些指南,开发者可以有效地在 LobeHub 中使用和扩展 ComfyUI 功能,为用户提供强大的图像生成和处理能力。
diff --git a/docs/development/basic/contributing-guidelines.mdx b/docs/development/basic/contributing-guidelines.mdx
index 69d7f327eb..4ab4b6075b 100644
--- a/docs/development/basic/contributing-guidelines.mdx
+++ b/docs/development/basic/contributing-guidelines.mdx
@@ -1,14 +1,23 @@
---
title: Code Style and Contribution Guidelines
+description: >-
+ Learn about LobeHub's code style and contribution process for consistent
+ coding.
+tags:
+ - Code Style
+ - Contribution Guidelines
+ - LobeHub
+ - ESLint
+ - Prettier
---
# Code Style and Contribution Guidelines
-Welcome to the Code Style and Contribution Guidelines for LobeChat. This guide will help you understand our code standards and contribution process, ensuring code consistency and smooth project progression.
+Welcome to the Code Style and Contribution Guidelines for LobeHub. This guide will help you understand our code standards and contribution process, ensuring code consistency and smooth project progression.
## Code Style
-In LobeChat, we use the [@lobehub/lint](https://github.com/lobehub/lobe-lint) package to maintain a unified code style. This package incorporates configurations for `ESLint`, `Prettier`, `remarklint`, and `stylelint` to ensure that our JavaScript, Markdown, and CSS files adhere to the same coding standards.
+In LobeHub, we use the [@lobehub/lint](https://github.com/lobehub/lobe-lint) package to maintain a unified code style. This package incorporates configurations for `ESLint`, `Prettier`, `remarklint`, and `stylelint` to ensure that our JavaScript, Markdown, and CSS files adhere to the same coding standards.
### ESLint
@@ -36,7 +45,7 @@ You don't need to manually run these checks. The project is configured with husk
## Contribution Process
-LobeChat follows the gitmoji and semantic release as our code submission and release process.
+LobeHub follows the gitmoji and semantic release as our code submission and release process.
### Gitmoji
diff --git a/docs/development/basic/contributing-guidelines.zh-CN.mdx b/docs/development/basic/contributing-guidelines.zh-CN.mdx
index 3420b4bdb6..f3347f06ec 100644
--- a/docs/development/basic/contributing-guidelines.zh-CN.mdx
+++ b/docs/development/basic/contributing-guidelines.zh-CN.mdx
@@ -1,14 +1,21 @@
---
title: 代码风格与贡献指南
+description: 了解 LobeHub 的代码规范和贡献流程,确保代码一致性。
+tags:
+ - 代码风格
+ - 贡献指南
+ - LobeHub
+ - ESLint
+ - Prettier
---
# 代码风格与贡献指南
-欢迎来到 LobeChat 的代码风格与贡献指南。本指南将帮助您理解我们的代码规范和贡献流程,确保代码的一致性和项目的顺利进行。
+欢迎来到 LobeHub 的代码风格与贡献指南。本指南将帮助您理解我们的代码规范和贡献流程,确保代码的一致性和项目的顺利进行。
## 代码风格
-在 LobeChat 中,我们使用 [@lobehub/lint](https://github.com/lobehub/lobe-lint) 程序包来统一代码风格。该程序包内置了 `ESLint`、`Prettier`、`remarklint` 和 `stylelint` 的配置,以确保我们的 JavaScript、Markdown 和 CSS 文件遵循相同的编码标准。
+在 LobeHub 中,我们使用 [@lobehub/lint](https://github.com/lobehub/lobe-lint) 程序包来统一代码风格。该程序包内置了 `ESLint`、`Prettier`、`remarklint` 和 `stylelint` 的配置,以确保我们的 JavaScript、Markdown 和 CSS 文件遵循相同的编码标准。
### ESLint
@@ -36,7 +43,7 @@ Prettier 负责代码格式化,以保证代码的一致性。您可以在 `.pr
## 贡献流程
-LobeChat 采用 gitmoji 和 semantic release 作为我们的代码提交和发布流程。
+LobeHub 采用 gitmoji 和 semantic release 作为我们的代码提交和发布流程。
### Gitmoji
diff --git a/docs/development/basic/feature-development-frontend.mdx b/docs/development/basic/feature-development-frontend.mdx
index defac17e90..4cdbf49657 100644
--- a/docs/development/basic/feature-development-frontend.mdx
+++ b/docs/development/basic/feature-development-frontend.mdx
@@ -1,10 +1,19 @@
---
title: How to Develop a New Feature
+description: >-
+ Learn how to implement the Chat Messages feature in LobeHub using Next.js and
+ TypeScript.
+tags:
+ - LobeHub
+ - Next.js
+ - TypeScript
+ - Chat Feature
+ - Zustand
---
# How to Develop a New Feature
-LobeChat is built on the Next.js framework and uses TypeScript as the primary development language. When developing a new feature, we need to follow a certain development process to ensure the quality and stability of the code. The general process can be divided into the following five steps:
+LobeHub is built on the Next.js framework and uses TypeScript as the primary development language. When developing a new feature, we need to follow a certain development process to ensure the quality and stability of the code. The general process can be divided into the following five steps:
1. Routing: Define routes (`src/app`).
2. Data Structure: Define data structures (`src/types`).
@@ -80,7 +89,7 @@ const ChatPage = () => {
renderItem={(message) => (
{message.content}
-
+
)}
/>
@@ -90,7 +99,7 @@ const ChatPage = () => {
export default ChatPage;
```
-> **Note on Feature Organization**: LobeChat uses two patterns for organizing features:
+> **Note on Feature Organization**: LobeHub uses two patterns for organizing features:
>
> - **Global features** (`src/features/`): Shared components like `ChatInput`, `Conversation` used across the app
> - **Page-specific features** (`src/app//features/`): Components used only within a specific page route
@@ -119,12 +128,12 @@ const ChatPage = () => {
renderItem={(message) => (
{message.content}
-
+
)}
/>
-
+
>
);
};
@@ -132,4 +141,4 @@ const ChatPage = () => {
export default ChatPage;
```
-The above is the step to implement the "chat message" feature in LobeChat. Of course, in the actual development of LobeChat, the business requirements and scenarios faced in real situations are far more complex than the above demo. Please develop according to the actual situation.
+The above is the step to implement the "chat message" feature in LobeHub. Of course, in the actual development of LobeHub, the business requirements and scenarios faced in real situations are far more complex than the above demo. Please develop according to the actual situation.
diff --git a/docs/development/basic/feature-development-frontend.zh-CN.mdx b/docs/development/basic/feature-development-frontend.zh-CN.mdx
index ec9dc69a34..09aebe17d7 100644
--- a/docs/development/basic/feature-development-frontend.zh-CN.mdx
+++ b/docs/development/basic/feature-development-frontend.zh-CN.mdx
@@ -1,10 +1,17 @@
---
title: 如何开发一个新功能:前端实现
+description: 学习如何在 LobeHub 中实现会话消息功能,使用 Next.js 和 TypeScript。
+tags:
+ - 前端开发
+ - Next.js
+ - TypeScript
+ - Zustand
+ - 功能实现
---
# 如何开发一个新功能:前端实现
-LobeChat 基于 Next.js 框架构建,使用 TypeScript 作为主要开发语言。在开发新功能时,我们需要遵循一定的开发流程,以确保代码的质量和稳定性。大致的流程分为以下五步:
+LobeHub 基于 Next.js 框架构建,使用 TypeScript 作为主要开发语言。在开发新功能时,我们需要遵循一定的开发流程,以确保代码的质量和稳定性。大致的流程分为以下五步:
1. 路由:定义路由 (`src/app`)
2. 数据结构: 定义数据结构 ( `src/types` )
@@ -80,7 +87,7 @@ const ChatPage = () => {
renderItem={(message) => (
{message.content}
-
+
)}
/>
@@ -90,7 +97,7 @@ const ChatPage = () => {
export default ChatPage;
```
-> **关于功能组件组织方式的说明**:LobeChat 使用两种模式来组织功能组件:
+> **关于功能组件组织方式的说明**:LobeHub 使用两种模式来组织功能组件:
>
> - **全局功能**(`src/features/`):跨应用共享的组件,如 `ChatInput`、`Conversation` 等
> - **页面专属功能**(`src/app//features/`):仅在特定页面路由中使用的组件
@@ -119,12 +126,12 @@ const ChatPage = () => {
renderItem={(message) => (
{message.content}
-
+
)}
/>
-
+
>
);
};
@@ -132,4 +139,4 @@ const ChatPage = () => {
export default ChatPage;
```
-以上就是在 LobeChat 中实现 "会话消息" 功能的步骤。当然,在 LobeChat 的实际开发中,真实场景所面临的业务诉求和场景远比上述 demo 复杂,请根据实际情况进行开发。
+以上就是在 LobeHub 中实现 "会话消息" 功能的步骤。当然,在 LobeHub 的实际开发中,真实场景所面临的业务诉求和场景远比上述 demo 复杂,请根据实际情况进行开发。
diff --git a/docs/development/basic/feature-development.mdx b/docs/development/basic/feature-development.mdx
index dadc2dbe85..74dd0c6983 100644
--- a/docs/development/basic/feature-development.mdx
+++ b/docs/development/basic/feature-development.mdx
@@ -1,10 +1,17 @@
---
-title: LobeChat Feature Development Complete Guide
+title: LobeHub Feature Development Complete Guide
+description: A comprehensive guide for developers on implementing features in LobeHub.
+tags:
+ - LobeHub
+ - Feature Development
+ - Developer Guide
+ - Postgres
+ - Drizzle ORM
---
-# LobeChat Feature Development Complete Guide
+# LobeHub Feature Development Complete Guide
-This document aims to guide developers on how to develop a complete feature in LobeChat.
+This document aims to guide developers on how to develop a complete feature in LobeHub.
We will use [RFC 021 - Custom Assistant Opening Guidance](https://github.com/lobehub/lobe-chat/discussions/891) as an example to illustrate the complete implementation process.
@@ -366,7 +373,7 @@ const OpeningQuestions = memo(() => {
{isRepeat && (
-
+
);
});
@@ -448,7 +455,7 @@ const WelcomeMessage = () => {
{chatItem}
{/* Render guiding questions */}
-
+
) : (
chatItem
@@ -467,4 +474,4 @@ For the current scenario, I recommend running the tests locally to see which tes
## Summary
-The above is the complete implementation process for the LobeChat opening settings feature. Developers can refer to this document for the development and testing of related features.
+The above is the complete implementation process for the LobeHub opening settings feature. Developers can refer to this document for the development and testing of related features.
diff --git a/docs/development/basic/feature-development.zh-CN.mdx b/docs/development/basic/feature-development.zh-CN.mdx
index c4c80c0bd7..9b4f355dea 100644
--- a/docs/development/basic/feature-development.zh-CN.mdx
+++ b/docs/development/basic/feature-development.zh-CN.mdx
@@ -1,10 +1,16 @@
---
-title: LobeChat 功能开发完全指南
+title: LobeHub 功能开发完全指南
+description: 了解如何在 LobeHub 中开发完整的功能需求,提升开发效率。
+tags:
+ - LobeHub
+ - 功能开发
+ - 开发指南
+ - 开场设置
---
-# LobeChat 功能开发完全指南
+# LobeHub 功能开发完全指南
-本文档旨在指导开发者了解如何在 LobeChat 中开发一块完整的功能需求。
+本文档旨在指导开发者了解如何在 LobeHub 中开发一块完整的功能需求。
我们将以 [RFC 021 - 自定义助手开场引导](https://github.com/lobehub/lobe-chat/discussions/891) 为例,阐述完整的实现流程。
@@ -366,7 +372,7 @@ const OpeningQuestions = memo(() => {
{isRepeat && (
-
+
);
});
@@ -448,7 +454,7 @@ const WelcomeMessage = () => {
{chatItem}
{/* 渲染引导性问题 */}
-
+
) : (
chatItem
@@ -467,4 +473,4 @@ export default WelcomeMessage;
## 总结
-以上就是 LobeChat 开场设置功能的完整实现流程。开发者可以参考本文档进行相关功能的开发和测试。
+以上就是 LobeHub 开场设置功能的完整实现流程。开发者可以参考本文档进行相关功能的开发和测试。
diff --git a/docs/development/basic/folder-structure.mdx b/docs/development/basic/folder-structure.mdx
index c5e5ead22c..3bcf13f54c 100644
--- a/docs/development/basic/folder-structure.mdx
+++ b/docs/development/basic/folder-structure.mdx
@@ -1,10 +1,19 @@
---
title: Directory Structure
+description: >-
+ Explore the organized directory structure of LobeHub, including app,
+ components, and services.
+tags:
+ - LobeHub
+ - Directory Structure
+ - Next.js
+ - App Router
+ - API Architecture
---
# Directory Structure
-The directory structure of LobeChat is as follows:
+The directory structure of LobeHub is as follows:
```bash
src
diff --git a/docs/development/basic/folder-structure.zh-CN.mdx b/docs/development/basic/folder-structure.zh-CN.mdx
index 795f9025df..a1f1ad2f7a 100644
--- a/docs/development/basic/folder-structure.zh-CN.mdx
+++ b/docs/development/basic/folder-structure.zh-CN.mdx
@@ -1,10 +1,17 @@
---
title: 目录架构
+description: 深入了解 LobeHub 的文件夹目录架构及其功能模块。
+tags:
+ - LobeHub
+ - 目录架构
+ - Next.js
+ - API路由
+ - 前端开发
---
# 目录架构
-LobeChat 的文件夹目录架构如下:
+LobeHub 的文件夹目录架构如下:
```bash
src
diff --git a/docs/development/basic/resources.mdx b/docs/development/basic/resources.mdx
index e0a8bcf7d4..34173916dd 100644
--- a/docs/development/basic/resources.mdx
+++ b/docs/development/basic/resources.mdx
@@ -1,17 +1,26 @@
---
title: Resources and References
+description: >-
+ Explore key resources and references for LobeHub's design and development
+ process.
+tags:
+ - LobeHub
+ - OpenAI API
+ - AI SDK
+ - LangChain
+ - Next.js
---
# Resources and References
-The design and development of LobeChat would not have been possible without the excellent projects in the community and ecosystem. We have used or referred to some outstanding resources and guides in the design and development process. Here are some key reference resources for developers to refer to during the development and learning process:
+The design and development of LobeHub would not have been possible without the excellent projects in the community and ecosystem. We have used or referred to some outstanding resources and guides in the design and development process. Here are some key reference resources for developers to refer to during the development and learning process:
1. **OpenAI API Guide**: We use OpenAI's API to access and process AI conversation data. You can check out the [OpenAI API Guide](https://platform.openai.com/docs/api-reference/introduction) for more details.
2. **OpenAI SDK**: We use OpenAI's Node.js SDK to interact with OpenAI's API. You can view the source code and documentation on the [OpenAI SDK](https://github.com/openai/openai-node) GitHub repository.
3. **AI SDK**: We use Vercel's AI SDK to access and process AI conversation data. You can refer to the documentation of [AI SDK](https://sdk.vercel.ai/docs) for more details.
4. **LangChain**: Our early conversation feature was implemented based on LangChain. You can visit [LangChain](https://langchain.com) to learn more about it.
-5. **Chat-Next-Web**: Chat Next Web is an excellent project, and some of LobeChat's features and workflows are referenced from its implementation. You can view the source code and documentation on the [Chat-Next-Web](https://github.com/Yidadaa/ChatGPT-Next-Web) GitHub repository.
+5. **Chat-Next-Web**: Chat Next Web is an excellent project, and some of LobeHub's features and workflows are referenced from its implementation. You can view the source code and documentation on the [Chat-Next-Web](https://github.com/Yidadaa/ChatGPT-Next-Web) GitHub repository.
6. **Next.js Documentation**: Our project is built on Next.js, and you can refer to the [Next.js Documentation](https://nextjs.org/docs) for more information about Next.js.
-7. **FlowGPT**: FlowGPT is currently the world's largest Prompt community, and some of the agents in LobeChat come from active authors in FlowGPT. You can visit [FlowGPT](https://flowgpt.com/) to learn more about it.
+7. **FlowGPT**: FlowGPT is currently the world's largest Prompt community, and some of the agents in LobeHub come from active authors in FlowGPT. You can visit [FlowGPT](https://flowgpt.com/) to learn more about it.
We will continue to update and supplement this list to provide developers with more reference resources.
diff --git a/docs/development/basic/resources.zh-CN.mdx b/docs/development/basic/resources.zh-CN.mdx
index 6428bf6674..7543b7b90e 100644
--- a/docs/development/basic/resources.zh-CN.mdx
+++ b/docs/development/basic/resources.zh-CN.mdx
@@ -1,17 +1,24 @@
---
title: 资源与参考
+description: 探索 LobeHub 的设计与开发参考资源,助力开发者学习与成长。
+tags:
+ - LobeHub
+ - 开发资源
+ - OpenAI API
+ - AI SDK
+ - LangChain
---
# 资源与参考
-LobeChat 的设计和开发离不开社区和生态中的优秀项目。我们在设计和开发过程中使用或参考了一些优秀的资源和指南。以下是一些主要的参考资源,供开发者在开发和学习过程中参考:
+LobeHub 的设计和开发离不开社区和生态中的优秀项目。我们在设计和开发过程中使用或参考了一些优秀的资源和指南。以下是一些主要的参考资源,供开发者在开发和学习过程中参考:
1. **OpenAI API 指南**:我们使用 OpenAI 的 API 来获取和处理 AI 的会话数据。你可以查看 [OpenAI API 指南](https://platform.openai.com/docs/api-reference/introduction) 了解更多详情。
2. **OpenAI SDK**:我们使用 OpenAI 的 Node.js SDK 来与 OpenAI 的 API 交互。你可以在 [OpenAI SDK](https://github.com/openai/openai-node) 的 GitHub 仓库中查看源码和文档。
3. **AI SDK**:我们使用 Vercel 的 AI SDK 来获取和处理 AI 的会话数据。你可以查看 [AI SDK](https://sdk.vercel.ai/docs) 的文档来了解更多详情。
4. **LangChain**:我们早期的会话功能是基于 LangChain 实现的。你可以访问 [LangChain](https://langchain.com) 来了解更多关于它的信息。
-5. **Chat-Next-Web**:Chat Next Web 是一个优秀的项目,LobeChat 的部分功能、Workflow 等参考了它的实现。你可以在 [Chat-Next-Web](https://github.com/Yidadaa/ChatGPT-Next-Web) 的 GitHub 仓库中查看源码和文档。
+5. **Chat-Next-Web**:Chat Next Web 是一个优秀的项目,LobeHub 的部分功能、Workflow 等参考了它的实现。你可以在 [Chat-Next-Web](https://github.com/Yidadaa/ChatGPT-Next-Web) 的 GitHub 仓库中查看源码和文档。
6. **Next.js 文档**:我们的项目是基于 Next.js 构建的,你可以查看 [Next.js 文档](https://nextjs.org/docs) 来了解更多关于 Next.js 的信息。
-7. **FlowGPT**:FlowGPT 是目前全球最大的 Prompt 社区,LobeChat 中的一些 Agent 来自 FlowGPT 的活跃作者。你可以访问 [FlowGPT](https://flowgpt.com/) 来了解更多关于它的信息。
+7. **FlowGPT**:FlowGPT 是目前全球最大的 Prompt 社区,LobeHub 中的一些 Agent 来自 FlowGPT 的活跃作者。你可以访问 [FlowGPT](https://flowgpt.com/) 来了解更多关于它的信息。
我们会持续更新和补充这个列表,为开发者提供更多的参考资源。
diff --git a/docs/development/basic/setup-development.mdx b/docs/development/basic/setup-development.mdx
index b9fe8e29ae..209314e864 100644
--- a/docs/development/basic/setup-development.mdx
+++ b/docs/development/basic/setup-development.mdx
@@ -1,10 +1,21 @@
---
title: Environment Setup Guide
+description: >-
+ Step-by-step guide to set up LobeHub development environment locally or
+ online.
+tags:
+ - LobeHub
+ - Development Setup
+ - Node.js
+ - PNPM
+ - Bun
+ - Git
+ - VSCode
---
# Environment Setup Guide
-Welcome to the LobeChat development environment setup guide.
+Welcome to the LobeHub development environment setup guide.
## Online Development
@@ -14,13 +25,13 @@ If you have access to GitHub Codespaces, you can click the button below to enter
## Local Development
-Before starting development on LobeChat, you need to install and configure some necessary software and tools in your local environment. This document will guide you through these steps.
+Before starting development on LobeHub, you need to install and configure some necessary software and tools in your local environment. This document will guide you through these steps.
### Development Environment Requirements
First, you need to install the following software:
-- Node.js: LobeChat is built on Node.js, so you need to install Node.js. We recommend installing the latest stable version.
+- Node.js: LobeHub is built on Node.js, so you need to install Node.js. We recommend installing the latest stable version.
- PNPM: We use PNPM as the preferred package manager. You can download and install it from the [PNPM official website](https://pnpm.io/installation).
- Bun: We use Bun as the npm scripts runner. You can download and install it from the [Bun official website](https://bun.com/docs/installation).
- Git: We use Git for version control. You can download and install it from the Git official website.
@@ -32,9 +43,9 @@ We recommend installing the extensions listed in [.vscode/extensions.json](https
### Project Setup
-After installing the above software, you can start setting up the LobeChat project.
+After installing the above software, you can start setting up the LobeHub project.
-1. **Get the code**: First, you need to clone the LobeChat codebase from GitHub. Run the following command in the terminal:
+1. **Get the code**: First, you need to clone the LobeHub codebase from GitHub. Run the following command in the terminal:
```bash
git clone https://github.com/lobehub/lobe-chat.git
@@ -53,13 +64,13 @@ pnpm i
bun run dev
```
-Now, you can open `http://localhost:3010` in your browser, and you should see the welcome page of LobeChat. This indicates that you have successfully set up the development environment.
+Now, you can open `http://localhost:3010` in your browser, and you should see the welcome page of LobeHub. This indicates that you have successfully set up the development environment.

## Working with Server-Side Features
-The basic setup above uses LobeChat's client-side database mode. If you need to work with server-side features such as:
+The basic setup above uses LobeHub's client-side database mode. If you need to work with server-side features such as:
- Database persistence
- File uploads and storage
@@ -69,7 +80,7 @@ The basic setup above uses LobeChat's client-side database mode. If you need to
Please refer to the [Work with Server-Side Database](/docs/development/basic/work-with-server-side-database) guide for complete setup instructions.
-During the development process, if you encounter any issues with environment setup or have any questions about LobeChat development, feel free to ask us at any time. We look forward to seeing your contributions!
+During the development process, if you encounter any issues with environment setup or have any questions about LobeHub development, feel free to ask us at any time. We look forward to seeing your contributions!
[codespaces-link]: https://codespaces.new/lobehub/lobe-chat
[codespaces-shield]: https://github.com/codespaces/badge.svg
diff --git a/docs/development/basic/setup-development.zh-CN.mdx b/docs/development/basic/setup-development.zh-CN.mdx
index 69b629a170..cdcf4a2b81 100644
--- a/docs/development/basic/setup-development.zh-CN.mdx
+++ b/docs/development/basic/setup-development.zh-CN.mdx
@@ -1,10 +1,17 @@
---
title: 环境设置指南
+description: 详细介绍 LobeHub 的开发环境设置,包括软件安装和项目配置。
+tags:
+ - LobeHub
+ - 开发环境
+ - Node.js
+ - PNPM
+ - Git
---
# 环境设置指南
-欢迎阅读 LobeChat 的开发环境设置指南。
+欢迎阅读 LobeHub 的开发环境设置指南。
## 在线开发
@@ -14,13 +21,13 @@ title: 环境设置指南
## 本地开发
-在开始开发 LobeChat 之前,你需要在本地环境中安装和配置一些必要的软件和工具。本文档将指导你完成这些步骤。
+在开始开发 LobeHub 之前,你需要在本地环境中安装和配置一些必要的软件和工具。本文档将指导你完成这些步骤。
### 开发环境需求
首先,你需要安装以下软件:
-- Node.js:LobeChat 是基于 Node.js 构建的,因此你需要安装 Node.js。我们建议安装最新的稳定版。
+- Node.js:LobeHub 是基于 Node.js 构建的,因此你需要安装 Node.js。我们建议安装最新的稳定版。
- PNPM:我们使用 PNPM 作为管理器。你可以从 [pnpm 的官方网站](https://pnpm.io/installation) 上下载并安装。
- Bun:我们使用 Bun 作为 npm scripts runner, 你可以从 [Bun 的官方网站](https://bun.com/docs/installation) 上下载并安装。
- Git:我们使用 Git 进行版本控制。你可以从 Git 的官方网站上下载并安装。
@@ -32,9 +39,9 @@ title: 环境设置指南
### 项目设置
-完成上述软件的安装后,你可以开始设置 LobeChat 项目了。
+完成上述软件的安装后,你可以开始设置 LobeHub 项目了。
-1. **获取代码**:首先,你需要从 GitHub 上克隆 LobeChat 的代码库。在终端中运行以下命令:
+1. **获取代码**:首先,你需要从 GitHub 上克隆 LobeHub 的代码库。在终端中运行以下命令:
```bash
git clone https://github.com/lobehub/lobe-chat.git
@@ -53,13 +60,13 @@ pnpm i
bun run dev
```
-现在,你可以在浏览器中打开 `http://localhost:3010`,你应该能看到 LobeChat 的欢迎页面。这表明你已经成功地设置了开发环境。
+现在,你可以在浏览器中打开 `http://localhost:3010`,你应该能看到 LobeHub 的欢迎页面。这表明你已经成功地设置了开发环境。

## 使用服务端功能
-上述基础设置使用 LobeChat 的客户端数据库模式。如果你需要开发服务端功能,如:
+上述基础设置使用 LobeHub 的客户端数据库模式。如果你需要开发服务端功能,如:
- 数据库持久化
- 文件上传和存储
@@ -69,7 +76,7 @@ bun run dev
请参考[使用服务端数据库](/docs/development/basic/work-with-server-side-database)指南获得完整的设置说明。
-在开发过程中,如果你在环境设置上遇到任何问题,或者有任何关于 LobeChat 开发的问题,欢迎随时向我们提问。我们期待看到你的贡献!
+在开发过程中,如果你在环境设置上遇到任何问题,或者有任何关于 LobeHub 开发的问题,欢迎随时向我们提问。我们期待看到你的贡献!
[codespaces-link]: https://codespaces.new/lobehub/lobe-chat
[codespaces-shield]: https://github.com/codespaces/badge.svg
diff --git a/docs/development/basic/test.mdx b/docs/development/basic/test.mdx
index cda71c511f..bf170ca509 100644
--- a/docs/development/basic/test.mdx
+++ b/docs/development/basic/test.mdx
@@ -1,10 +1,19 @@
---
title: Testing Guide
+description: >-
+ Explore LobeHub's testing strategy, including unit and end-to-end testing
+ methods.
+tags:
+ - LobeHub
+ - Testing
+ - Unit Testing
+ - End-to-End Testing
+ - vitest
---
# Testing Guide
-LobeChat's testing strategy includes unit testing and end-to-end (E2E) testing. Below are detailed explanations of each type of testing:
+LobeHub's testing strategy includes unit testing and end-to-end (E2E) testing. Below are detailed explanations of each type of testing:
## Unit Testing
@@ -24,13 +33,13 @@ We encourage developers to write corresponding unit tests while writing code to
End-to-end testing is used to test the functionality and performance of the application in a real environment. It simulates real user operations and verifies the application's performance in different scenarios.
-Currently, there is no integrated end-to-end testing in LobeChat. We will gradually introduce end-to-end testing in subsequent iterations.
+Currently, there is no integrated end-to-end testing in LobeHub. We will gradually introduce end-to-end testing in subsequent iterations.
## Development Testing
### 1. Unit Testing
-Unit testing is conducted on the smallest testable units in the application, usually functions, components, or modules. In LobeChat, we use [vitest][vitest-url] for unit testing.
+Unit testing is conducted on the smallest testable units in the application, usually functions, components, or modules. In LobeHub, we use [vitest][vitest-url] for unit testing.
#### Writing Test Cases
diff --git a/docs/development/basic/test.zh-CN.mdx b/docs/development/basic/test.zh-CN.mdx
index c00c1acef7..9899b7c97d 100644
--- a/docs/development/basic/test.zh-CN.mdx
+++ b/docs/development/basic/test.zh-CN.mdx
@@ -1,10 +1,16 @@
---
title: 测试指南
+description: 了解 LobeHub 的单元测试和端到端测试策略,确保代码质量与稳定性。
+tags:
+ - LobeHub
+ - 单元测试
+ - 端到端测试
+ - 测试策略
---
# 测试指南
-LobeChat 的测试策略包括单元测试和端到端 (E2E) 测试。下面是每种测试的详细说明:
+LobeHub 的测试策略包括单元测试和端到端 (E2E) 测试。下面是每种测试的详细说明:
## 单元测试
@@ -24,13 +30,13 @@ npm run test
端到端测试用于测试应用在真实环境中的功能和性能。它模拟用户的真实操作,并验证应用在不同场景下的表现。
-在 LobeChat 中,目前暂时没有集成端到端测试,我们会在后续迭代中逐步引入端到端测试。
+在 LobeHub 中,目前暂时没有集成端到端测试,我们会在后续迭代中逐步引入端到端测试。
## 开发测试
### 1. 单元测试
-单元测试是针对应用中的最小可测试单元进行的测试,通常是针对函数、组件或模块进行的测试。在 LobeChat 中,我们使用 [vitest][vitest-url] 进行单元测试。
+单元测试是针对应用中的最小可测试单元进行的测试,通常是针对函数、组件或模块进行的测试。在 LobeHub 中,我们使用 [vitest][vitest-url] 进行单元测试。
#### 编写测试用例
diff --git a/docs/development/basic/work-with-server-side-database.mdx b/docs/development/basic/work-with-server-side-database.mdx
index d421acc9ca..d6f022d9b7 100644
--- a/docs/development/basic/work-with-server-side-database.mdx
+++ b/docs/development/basic/work-with-server-side-database.mdx
@@ -1,10 +1,17 @@
---
title: Work with Server-Side Database
+description: Learn how to set up a server-side database for LobeHub with Docker.
+tags:
+ - LobeHub
+ - Server-Side Database
+ - Docker
+ - PostgreSQL
+ - MinIO
---
# Work with Server-Side Database
-LobeChat provides a battery-included experience with its client-side database.
+LobeHub provides a battery-included experience with its client-side database.
While some features you really care about is only available at a server-side development.
In order to work with the aspect of server-side database,
@@ -55,7 +62,7 @@ You should see: `✅ database migration pass.`
### Start Development Server
-Launch the LobeChat development server:
+Launch the LobeHub development server:
```bash
pnpm dev
@@ -144,7 +151,7 @@ await fetch(uploadUrl, {
When running with Docker Compose development setup:
-- **PostgreSQL**: `postgres://postgres@localhost:5432/lobechat`
+- **PostgreSQL**: `postgres://postgres@localhost:5432/lobechat`
- **MinIO API**: `http://localhost:9000`
- **MinIO Console**: `http://localhost:9001` (admin/CHANGE\_THIS\_PASSWORD\_IN\_PRODUCTION)
- **Application**: `http://localhost:3010`
diff --git a/docs/development/basic/work-with-server-side-database.zh-CN.mdx b/docs/development/basic/work-with-server-side-database.zh-CN.mdx
index 6b72028059..632feaa580 100644
--- a/docs/development/basic/work-with-server-side-database.zh-CN.mdx
+++ b/docs/development/basic/work-with-server-side-database.zh-CN.mdx
@@ -1,10 +1,17 @@
---
title: 使用服务端数据库
+description: 快速设置 LobeHub 服务端数据库,支持 Docker 和图像生成。
+tags:
+ - 服务端数据库
+ - LobeHub
+ - Docker
+ - 图像生成
+ - PostgreSQL
---
# 使用服务端数据库
-LobeChat 提供了内置的客户端数据库体验。
+LobeHub 提供了内置的客户端数据库体验。
但某些重要功能仅在服务端开发中可用。
为了使用服务端数据库功能,
@@ -55,7 +62,7 @@ pnpm db:migrate
### 启动开发服务器
-启动 LobeChat 开发服务器:
+启动 LobeHub 开发服务器:
```bash
pnpm dev
@@ -144,7 +151,7 @@ await fetch(uploadUrl, {
运行 Docker Compose 开发环境时:
-- **PostgreSQL**:`postgres://postgres@localhost:5432/lobechat`
+- **PostgreSQL**:`postgres://postgres@localhost:5432/lobechat`
- **MinIO API**:`http://localhost:9000`
- **MinIO 控制台**:`http://localhost:9001` (admin/CHANGE\_THIS\_PASSWORD\_IN\_PRODUCTION)
- **应用程序**:`http://localhost:3010`
diff --git a/docs/development/internationalization/add-new-locale.mdx b/docs/development/internationalization/add-new-locale.mdx
index 0fb889f87c..51c57429d9 100644
--- a/docs/development/internationalization/add-new-locale.mdx
+++ b/docs/development/internationalization/add-new-locale.mdx
@@ -1,14 +1,21 @@
---
title: New Locale Guide
+description: Learn how to add Vietnamese language support in LobeHub using lobe-i18n.
+tags:
+ - LobeHub
+ - i18n
+ - language support
+ - Vietnamese
+ - localization
---
# New Locale Guide
-LobeChat uses [lobe-i18n](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n) as the i18n solution, which allows for quick addition of new language support in the application.
+LobeHub uses [lobe-i18n](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n) as the i18n solution, which allows for quick addition of new language support in the application.
## Adding New Language Support
-To add new language internationalization support in LobeChat (for example, adding Vietnamese `vi-VN`), please follow the steps below:
+To add new language internationalization support in LobeHub (for example, adding Vietnamese `vi-VN`), please follow the steps below:
### Step 1: Update the Internationalization Configuration File
@@ -33,7 +40,7 @@ module.exports = {
### Step 2: Automatically Translate Language Files
-LobeChat uses the `lobe-i18n` tool to automatically translate language files, so manual updating of i18n files is not required.
+LobeHub uses the `lobe-i18n` tool to automatically translate language files, so manual updating of i18n files is not required.
Run the following command to automatically translate and generate the Vietnamese language files:
@@ -47,12 +54,12 @@ This will utilize the `lobe-i18n` tool to process the language files.
Once you have completed the above steps, you need to submit your changes and create a Pull Request.
-Ensure that you follow LobeChat's contribution guidelines and provide a necessary description to explain your changes. For example, refer to a similar previous Pull Request [#759](https://github.com/lobehub/lobe-chat/pull/759).
+Ensure that you follow LobeHub's contribution guidelines and provide a necessary description to explain your changes. For example, refer to a similar previous Pull Request [#759](https://github.com/lobehub/lobe-chat/pull/759).
### Additional Information
- After submitting your Pull Request, please patiently wait for the project maintainers to review it.
-- If you encounter any issues, you can reach out to the LobeChat community for assistance.
+- If you encounter any issues, you can reach out to the LobeHub community for assistance.
- For more accurate results, ensure that your Pull Request is based on the latest main branch and stays in sync with the main branch.
-By following the above steps, you can successfully add new language support to LobeChat and ensure that the application provides a localized experience for more users.
+By following the above steps, you can successfully add new language support to LobeHub and ensure that the application provides a localized experience for more users.
diff --git a/docs/development/internationalization/add-new-locale.zh-CN.mdx b/docs/development/internationalization/add-new-locale.zh-CN.mdx
index 33a7fb68e6..ba14bd3bc3 100644
--- a/docs/development/internationalization/add-new-locale.zh-CN.mdx
+++ b/docs/development/internationalization/add-new-locale.zh-CN.mdx
@@ -1,14 +1,21 @@
---
title: 新语种添加指南
+description: 学习如何在 LobeHub 中添加新的语言支持,提升用户体验。
+tags:
+ - LobeHub
+ - 国际化
+ - 语言支持
+ - lobe-i18n
+ - 越南语
---
# 新语种添加指南
-LobeChat 使用 [lobe-i18n](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n) 作为 i18n 解决方案,可以在应用中快速添加新的语言支持。
+LobeHub 使用 [lobe-i18n](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n) 作为 i18n 解决方案,可以在应用中快速添加新的语言支持。
## 添加新的语言支持
-为了在 LobeChat 中添加新的语言国际化支持,(例如添加越南语 `vi-VN`),请按照以下步骤操作:
+为了在 LobeHub 中添加新的语言国际化支持,(例如添加越南语 `vi-VN`),请按照以下步骤操作:
### 步骤 1: 更新国际化配置文件
@@ -33,7 +40,7 @@ module.exports = {
### 步骤 2: 自动翻译语言文件
-LobeChat 使用 `lobe-i18n` 工具来自动翻译语言文件,因此不需要手动更新 i18n 文件。
+LobeHub 使用 `lobe-i18n` 工具来自动翻译语言文件,因此不需要手动更新 i18n 文件。
运行以下命令来自动翻译并生成越南语的语言文件:
@@ -47,12 +54,12 @@ npm run i18n
一旦你完成了上述步骤,你需要提交你的更改并创建一个 Pull Request。
-请确保你遵循了 LobeChat 的贡献指南,并提供必要的描述来说明你的更改。例如,参考之前的类似 Pull Request [#759](https://github.com/lobehub/lobe-chat/pull/759)。
+请确保你遵循了 LobeHub 的贡献指南,并提供必要的描述来说明你的更改。例如,参考之前的类似 Pull Request [#759](https://github.com/lobehub/lobe-chat/pull/759)。
### 附加信息
- 提交你的 Pull Request 后,请耐心等待项目维护人员的审查。
-- 如果遇到任何问题,可以联系 LobeChat 社区寻求帮助。
+- 如果遇到任何问题,可以联系 LobeHub 社区寻求帮助。
- 为了更精确的结果,确保你的 Pull Request 是基于最新的主分支,并且与主分支保持同步。
-通过遵循上述步骤,你可以成功为 LobeChat 添加新的语言支持,并且确保应用能够为更多用户提供本地化的体验。
+通过遵循上述步骤,你可以成功为 LobeHub 添加新的语言支持,并且确保应用能够为更多用户提供本地化的体验。
diff --git a/docs/development/internationalization/internationalization-implementation.mdx b/docs/development/internationalization/internationalization-implementation.mdx
index 5360962b39..63965ba4a6 100644
--- a/docs/development/internationalization/internationalization-implementation.mdx
+++ b/docs/development/internationalization/internationalization-implementation.mdx
@@ -1,18 +1,27 @@
---
title: Internationalization Implementation Guide
+description: >-
+ Learn how to implement internationalization in LobeHub for multilingual
+ support.
+tags:
+ - Internationalization
+ - i18next
+ - LobeHub
+ - Multilingual Support
+ - Localization
---
# Internationalization Implementation Guide
-Welcome to the LobeChat Internationalization Implementation Guide. This document will guide you through understanding the internationalization mechanism of LobeChat, including file structure and how to add new languages. LobeChat uses `i18next` and `lobe-i18n` as the internationalization solution, aiming to provide users with seamless multilingual support.
+Welcome to the LobeHub Internationalization Implementation Guide. This document will guide you through understanding the internationalization mechanism of LobeHub, including file structure and how to add new languages. LobeHub uses `i18next` and `lobe-i18n` as the internationalization solution, aiming to provide users with seamless multilingual support.
## Internationalization Overview
-Internationalization (i18n for short) is the process of enabling an application to adapt to different languages and regions. In LobeChat, we support multiple languages and achieve dynamic language switching and content localization through the `i18next` library. Our goal is to provide a localized experience for global users.
+Internationalization (i18n for short) is the process of enabling an application to adapt to different languages and regions. In LobeHub, we support multiple languages and achieve dynamic language switching and content localization through the `i18next` library. Our goal is to provide a localized experience for global users.
## File Structure
-In the LobeChat project, internationalization-related files are organized as follows:
+In the LobeHub project, internationalization-related files are organized as follows:
- `src/locales/default`: Contains translation files for the default development language (Chinese), which we use as Chinese.
- `locales`: Contains folders for all supported languages, with each language folder containing the respective translation files generated by lobe-i18n.
@@ -58,14 +67,14 @@ locales
## Core Implementation Logic
-The internationalization core implementation logic of LobeChat is as follows:
+The internationalization core implementation logic of LobeHub is as follows:
- Initialize and configure using the `i18next` library.
- Automatically detect the user's language preference using `i18next-browser-languagedetector`.
- Dynamically load translation resources using `i18next-resources-to-backend`.
- Set the direction of the HTML document (LTR or RTL) based on the user's language preference.
-Here is a simplified pseudo code example to illustrate the core implementation logic of internationalization in LobeChat:
+Here is a simplified pseudo code example to illustrate the core implementation logic of internationalization in LobeHub:
```ts
import i18n from 'i18next';
@@ -99,7 +108,7 @@ const createI18nInstance = (lang) => {
};
```
-In this example, we demonstrate how to use `i18next` and related plugins to initialize internationalization settings. We dynamically import translation resources and respond to language change events to adjust the text direction of the page. This process provides LobeChat with flexible multilingual support capabilities.
+In this example, we demonstrate how to use `i18next` and related plugins to initialize internationalization settings. We dynamically import translation resources and respond to language change events to adjust the text direction of the page. This process provides LobeHub with flexible multilingual support capabilities.
## Adding Support for New Languages
@@ -118,4 +127,4 @@ To add support for new languages, please refer to the detailed steps in the [New
- [i18next Official Documentation](https://www.i18next.com/)
- [lobe-i18n Tool Description](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n)
-By following this guide, you can better understand and participate in the internationalization work of LobeChat, providing a seamless multilingual experience for global users.
+By following this guide, you can better understand and participate in the internationalization work of LobeHub, providing a seamless multilingual experience for global users.
diff --git a/docs/development/internationalization/internationalization-implementation.zh-CN.mdx b/docs/development/internationalization/internationalization-implementation.zh-CN.mdx
index a3cc5b60d4..187b56ddba 100644
--- a/docs/development/internationalization/internationalization-implementation.zh-CN.mdx
+++ b/docs/development/internationalization/internationalization-implementation.zh-CN.mdx
@@ -1,18 +1,24 @@
---
title: 国际化实现指南
+description: 了解 LobeHub 的国际化机制,支持多语言体验。
+tags:
+ - 国际化
+ - LobeHub
+ - i18next
+ - 多语言支持
---
# 国际化实现指南
-欢迎阅读 LobeChat 国际化实现指南。本文档将指导你了解 LobeChat 的国际化机制,包括文件结构、如何添加新语种。LobeChat 采用 `i18next` 和 `lobe-i18n` 作为国际化解决方案,旨在为用户提供流畅的多语言支持。
+欢迎阅读 LobeHub 国际化实现指南。本文档将指导你了解 LobeHub 的国际化机制,包括文件结构、如何添加新语种。LobeHub 采用 `i18next` 和 `lobe-i18n` 作为国际化解决方案,旨在为用户提供流畅的多语言支持。
## 国际化概述
-国际化(Internationalization,简称为 i18n)是一个让应用能够适应不同语言和地区的过程。在 LobeChat 中,我们支持多种语言,并通过 `i18next` 库来实现语言的动态切换和内容的本地化。我们的目标是让 LobeChat 能够为全球用户提供本地化的体验。
+国际化(Internationalization,简称为 i18n)是一个让应用能够适应不同语言和地区的过程。在 LobeHub 中,我们支持多种语言,并通过 `i18next` 库来实现语言的动态切换和内容的本地化。我们的目标是让 LobeHub 能够为全球用户提供本地化的体验。
## 文件结构
-在 LobeChat 的项目中,国际化相关的文件被组织如下:
+在 LobeHub 的项目中,国际化相关的文件被组织如下:
- `src/locales/default`: 包含默认开发语言(中文)的翻译文件,我们作为中文。
- `locales`: 包含所有支持的语言文件夹,每个语言文件夹中包含相应语言的翻译文件,这些翻译文件通过 lobe-i18n 自动生成。
@@ -58,14 +64,14 @@ locales
## 核心实现逻辑
-LobeChat 的国际化核心实现逻辑如下:
+LobeHub 的国际化核心实现逻辑如下:
- 使用 `i18next` 库进行初始化和配置。
- 使用 `i18next-browser-languagedetector` 自动检测用户的语言偏好。
- 使用 `i18next-resources-to-backend` 动态加载翻译资源。
- 根据用户的语言偏好,设置 HTML 文档的方向(LTR 或 RTL)。
-以下是一个简化的伪代码示例,用以说明 LobeChat 国际化的核心实现逻辑:
+以下是一个简化的伪代码示例,用以说明 LobeHub 国际化的核心实现逻辑:
```ts
import i18n from 'i18next';
@@ -99,7 +105,7 @@ const createI18nInstance = (lang) => {
};
```
-在这个示例中,我们展示了如何使用 `i18next` 和相关插件来初始化国际化设置。我们动态导入了翻译资源,并响应语言变化事件来调整页面的文本方向。这个过程为 LobeChat 提供了灵活的多语言支持能力。
+在这个示例中,我们展示了如何使用 `i18next` 和相关插件来初始化国际化设置。我们动态导入了翻译资源,并响应语言变化事件来调整页面的文本方向。这个过程为 LobeHub 提供了灵活的多语言支持能力。
## 添加新的语言支持
@@ -118,4 +124,4 @@ const createI18nInstance = (lang) => {
- [i18next 官方文档](https://www.i18next.com/)
- [lobe-i18n 工具说明](https://github.com/lobehub/lobe-cli-toolbox/tree/master/packages/lobe-i18n)
-通过遵循本指南,你可以更好地理解和参与到 LobeChat 的国际化工作中,为全球用户提供无缝的多语言体验。
+通过遵循本指南,你可以更好地理解和参与到 LobeHub 的国际化工作中,为全球用户提供无缝的多语言体验。
diff --git a/docs/development/others/lighthouse.mdx b/docs/development/others/lighthouse.mdx
index 286866cac6..2a71aa0c83 100644
--- a/docs/development/others/lighthouse.mdx
+++ b/docs/development/others/lighthouse.mdx
@@ -1,5 +1,10 @@
---
title: Lighthouse Reports
+description: Explore Lighthouse reports for chat and discover pages on LobeHub.
+tags:
+ - Lighthouse Reports
+ - Chat Page
+ - Discover Page
---
# Lighthouse Reports
@@ -7,7 +12,7 @@ title: Lighthouse Reports
## Chat Page
> **Info**\
-> [https://lobechat.com/chat](https://lobechat.com/chat)
+> [https://lobechat.com/chat](https://lobechat.com/chat)
| Desktop | Mobile |
| :-----------------------------------------: | :----------------------------------------: |
@@ -17,7 +22,7 @@ title: Lighthouse Reports
## Discover Page
> **Info**\
-> [https://lobechat.com/discover](https://lobechat.com/discover)
+> [https://lobechat.com/discover](https://lobechat.com/discover)
| Desktop | Mobile |
| :---------------------------------------------: | :--------------------------------------------: |
@@ -25,10 +30,10 @@ title: Lighthouse Reports
| [⚡️ Lighthouse Report][discover-desktop-report] | [⚡️ Lighthouse Report][discover-mobile-report] |
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
-[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
+[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
-[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
+[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
[discover-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/desktop/pagespeed.svg
-[discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
+[discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
[discover-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/mobile/pagespeed.svg
-[discover-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/mobile/lobechat_com_discover.html
+[discover-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/mobile/lobechat_com_discover.html
diff --git a/docs/development/others/lighthouse.zh-CN.mdx b/docs/development/others/lighthouse.zh-CN.mdx
index c3a6a6bc48..50dd817037 100644
--- a/docs/development/others/lighthouse.zh-CN.mdx
+++ b/docs/development/others/lighthouse.zh-CN.mdx
@@ -1,5 +1,11 @@
---
title: Lighthouse 测试报告
+description: 查看Lighthouse测试报告,了解聊天和发现页面的性能表现。
+tags:
+ - Lighthouse
+ - 测试报告
+ - 聊天页面
+ - 发现页面
---
# Lighthouse 测试报告
@@ -7,7 +13,7 @@ title: Lighthouse 测试报告
## Chat 聊天页面
> **Info**\
-> [https://lobechat.com/chat](https://lobechat.com/chat)
+> [https://lobechat.com/chat](https://lobechat.com/chat)
| Desktop | Mobile |
| :-----------------------------------------: | :----------------------------------------: |
@@ -17,7 +23,7 @@ title: Lighthouse 测试报告
## Discover 发现页面
> **Info**\
-> [https://lobechat.com/discover](https://lobechat.com/discover)
+> [https://lobechat.com/discover](https://lobechat.com/discover)
| Desktop | Mobile |
| :---------------------------------------------: | :--------------------------------------------: |
@@ -25,10 +31,10 @@ title: Lighthouse 测试报告
| [⚡️ Lighthouse Report][discover-desktop-report] | [⚡️ Lighthouse Report][discover-mobile-report] |
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
-[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
+[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
-[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
+[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
[discover-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/desktop/pagespeed.svg
-[discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
+[discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
[discover-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/mobile/pagespeed.svg
-[discover-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/mobile/lobechat_com_discover.html
diff --git a/docs/development/start.mdx b/docs/development/start.mdx
index e185bfa66b..d0da8d0add 100644
--- a/docs/development/start.mdx
+++ b/docs/development/start.mdx
@@ -1,14 +1,23 @@
---
title: Technical Development Getting Started Guide
+description: >-
+ Explore the LobeHub development setup, technology stack, and contribution
+ guidelines.
+tags:
+ - LobeHub
+ - Next.js
+ - Development Guide
+ - Internationalization
+ - Open Source
---
# Technical Development Getting Started Guide
-Welcome to the LobeChat Technical Development Getting Started Guide. LobeChat is an AI conversation application built on the Next.js framework, incorporating a range of technology stacks to achieve diverse functionalities and features. This guide will detail the main technical components of LobeChat and how to configure and use these technologies in your development environment.
+Welcome to the LobeHub Technical Development Getting Started Guide. LobeHub is an AI conversation application built on the Next.js framework, incorporating a range of technology stacks to achieve diverse functionalities and features. This guide will detail the main technical components of LobeHub and how to configure and use these technologies in your development environment.
## Basic Technology Stack
-The core technology stack of LobeChat is as follows:
+The core technology stack of LobeHub is as follows:
- **Framework**: We chose [Next.js](https://nextjs.org/), a powerful React framework that provides key features such as server-side rendering, routing framework, and Router Handler.
- **Component Library**: We use [Ant Design (antd)](https://ant.design/) as the basic component library, along with [lobe-ui](https://github.com/lobehub/lobe-ui) as our business component library.
@@ -21,7 +30,7 @@ The core technology stack of LobeChat is as follows:
## Folder Directory Structure
-The folder directory structure of LobeChat is as follows:
+The folder directory structure of LobeHub is as follows:
```bash
src
@@ -47,7 +56,7 @@ This section outlines setting up the development environment and local developme
We recommend using WebStorm as your integrated development environment (IDE).
-1. **Get the code**: Clone the LobeChat code repository locally:
+1. **Get the code**: Clone the LobeHub code repository locally:
```bash
git clone https://github.com/lobehub/lobe-chat.git
@@ -74,11 +83,11 @@ bun run dev
> \[!IMPORTANT]\
> If you encounter the error "Could not find 'stylelint-config-recommended'" when installing dependencies with `npm`, please reinstall the dependencies using `pnpm` or `bun`.
-Now, you should be able to see the welcome page of LobeChat in your browser. For a detailed environment setup guide, please refer to [Development Environment Setup Guide](/docs/development/basic/setup-development).
+Now, you should be able to see the welcome page of LobeHub in your browser. For a detailed environment setup guide, please refer to [Development Environment Setup Guide](/docs/development/basic/setup-development).
## Code Style and Contribution Guide
-In the LobeChat project, we place great emphasis on the quality and consistency of the code. For this reason, we have established a series of code style standards and contribution processes to ensure that every developer can smoothly participate in the project. Here are the code style and contribution guidelines you need to follow as a developer.
+In the LobeHub project, we place great emphasis on the quality and consistency of the code. For this reason, we have established a series of code style standards and contribution processes to ensure that every developer can smoothly participate in the project. Here are the code style and contribution guidelines you need to follow as a developer.
- **Code Style**: We use `@lobehub/lint` to unify the code style, including ESLint, Prettier, remarklint, and stylelint configurations. Please adhere to our code standards to maintain code consistency and readability.
- **Contribution Process**: We use gitmoji and semantic release for code submission and release processes. Please use gitmoji to annotate your commit messages and ensure compliance with the semantic release standards so that our automation systems can correctly handle version control and releases.
@@ -89,7 +98,7 @@ For detailed code style and contribution guidelines, please refer to [Code Style
## Internationalization Implementation Guide
-LobeChat uses `i18next` and `lobe-i18n` to implement multilingual support, ensuring a global user experience.
+LobeHub uses `i18next` and `lobe-i18n` to implement multilingual support, ensuring a global user experience.
Internationalization files are located in `src/locales`, containing the default language (Chinese). We generate other language JSON files automatically through `lobe-i18n`.
@@ -99,8 +108,8 @@ For a detailed guide on internationalization implementation, please refer to [In
## Appendix: Resources and References
-To support developers in better understanding and using the technology stack of LobeChat, we provide a comprehensive list of resources and references — [LobeChat Resources and References](/docs/development/basic/resources) - Visit our maintained list of resources, including tutorials, articles, and other useful links.
+To support developers in better understanding and using the technology stack of LobeHub, we provide a comprehensive list of resources and references — [LobeHub Resources and References](/docs/development/basic/resources) - Visit our maintained list of resources, including tutorials, articles, and other useful links.
-We encourage developers to utilize these resources to deepen their learning and enhance their skills, join community discussions through [LobeChat GitHub Discussions](https://github.com/lobehub/lobe-chat/discussions) or [Discord](https://discord.com/invite/AYFPHvv2jT), ask questions, or share your experiences.
+We encourage developers to utilize these resources to deepen their learning and enhance their skills, join community discussions through [LobeHub GitHub Discussions](https://github.com/lobehub/lobe-chat/discussions) or [Discord](https://discord.com/invite/AYFPHvv2jT), ask questions, or share your experiences.
If you have any questions or need further assistance, please do not hesitate to contact us through the above channels.
diff --git a/docs/development/start.zh-CN.mdx b/docs/development/start.zh-CN.mdx
index c30ad56396..480fc3fa15 100644
--- a/docs/development/start.zh-CN.mdx
+++ b/docs/development/start.zh-CN.mdx
@@ -1,14 +1,21 @@
---
title: 技术开发上手指南
+description: 了解 LobeHub 的技术栈和开发环境设置,快速上手开发。
+tags:
+ - LobeHub
+ - 技术开发
+ - Next.js
+ - 国际化
+ - 状态管理
---
# 技术开发上手指南
-欢迎来到 LobeChat 技术开发上手指南。LobeChat 是一款基于 Next.js 框架构建的 AI 会话应用,它汇集了一系列的技术栈,以实现多样化的功能和特性。本指南将详细介绍 LobeChat 的主要技术组成,以及如何在你的开发环境中配置和使用这些技术。
+欢迎来到 LobeHub 技术开发上手指南。LobeHub 是一款基于 Next.js 框架构建的 AI 会话应用,它汇集了一系列的技术栈,以实现多样化的功能和特性。本指南将详细介绍 LobeHub 的主要技术组成,以及如何在你的开发环境中配置和使用这些技术。
## 基础技术栈
-LobeChat 的核心技术栈如下:
+LobeHub 的核心技术栈如下:
- **框架**:我们选择了 [Next.js](https://nextjs.org/),这是一款强大的 React 框架,为我们的项目提供了服务端渲染、路由框架、Router Handler 等关键功能。
- **组件库**:我们使用了 [Ant Design (antd)](https://ant.design/) 作为基础组件库,同时引入了 [lobe-ui](https://github.com/lobehub/lobe-ui) 作为我们的业务组件库。
@@ -21,7 +28,7 @@ LobeChat 的核心技术栈如下:
## 文件夹目录架构
-LobeChat 的文件夹目录架构如下:
+LobeHub 的文件夹目录架构如下:
```bash
src
@@ -47,7 +54,7 @@ src
我们推荐使用 WebStorm 作为你的集成开发环境(IDE)。
-1. **获取代码**:克隆 LobeChat 的代码库到本地:
+1. **获取代码**:克隆 LobeHub 的代码库到本地:
```bash
git clone https://github.com/lobehub/lobe-chat.git
@@ -74,11 +81,11 @@ bun run dev
> \[!IMPORTANT]\
> 如果使用`npm`安装依赖出现`Could not find "stylelint-config-recommended"`错误,请使用 `pnpm` 或者 `bun` 重新安装依赖。
-现在,你应该可以在浏览器中看到 LobeChat 的欢迎页面。详细的环境配置指南,请参考 [开发环境设置指南](/zh/docs/development/basic/setup-development)。
+现在,你应该可以在浏览器中看到 LobeHub 的欢迎页面。详细的环境配置指南,请参考 [开发环境设置指南](/zh/docs/development/basic/setup-development)。
## 代码风格与贡献指南
-在 LobeChat 项目中,我们十分重视代码的质量和一致性。为此,我们制定了一系列的代码风格规范和贡献流程,以确保每位开发者都能顺利地参与到项目中。以下是你作为开发者需要遵守的代码风格和贡献准则。
+在 LobeHub 项目中,我们十分重视代码的质量和一致性。为此,我们制定了一系列的代码风格规范和贡献流程,以确保每位开发者都能顺利地参与到项目中。以下是你作为开发者需要遵守的代码风格和贡献准则。
- **代码风格**:我们使用 `@lobehub/lint` 统一代码风格,包括 ESLint、Prettier、remarklint 和 stylelint 配置。请遵守我们的代码规范,以保持代码的一致性和可读性。
- **贡献流程**:我们采用 gitmoji 和 semantic release 作为代码提交和发布流程。请使用 gitmoji 标注您的提交信息,并确保遵循 semantic release 的规范,以便我们的自动化系统能够正确处理版本控制和发布。
@@ -89,7 +96,7 @@ bun run dev
## 国际化实现指南
-LobeChat 采用 `i18next` 和 `lobe-i18n` 实现多语言支持,确保用户全球化体验。
+LobeHub 采用 `i18next` 和 `lobe-i18n` 实现多语言支持,确保用户全球化体验。
国际化文件位于 `src/locales`,包含默认语言(中文)。 我们会通过 `lobe-i18n` 自动生成其他的语言 JSON 文件。
@@ -99,8 +106,8 @@ LobeChat 采用 `i18next` 和 `lobe-i18n` 实现多语言支持,确保用户
## 附录:资源与参考
-为了支持开发者更好地理解和使用 LobeChat 的技术栈,我们提供了一份详尽的资源与参考列表 —— [LobeChat 资源与参考](/zh/docs/development/basic/resources) - 访问我们维护的资源列表,包括教程、文章和其他有用的链接。
+为了支持开发者更好地理解和使用 LobeHub 的技术栈,我们提供了一份详尽的资源与参考列表 —— [LobeHub 资源与参考](/zh/docs/development/basic/resources) - 访问我们维护的资源列表,包括教程、文章和其他有用的链接。
-我们鼓励开发者利用这些资源深入学习和提升技能,通过 [LobeChat GitHub Discussions](https://github.com/lobehub/lobe-chat/discussions) 或者 [Discord](https://discord.com/invite/AYFPHvv2jT) 加入社区讨论,提出问题或分享你的经验。
+我们鼓励开发者利用这些资源深入学习和提升技能,通过 [LobeHub GitHub Discussions](https://github.com/lobehub/lobe-chat/discussions) 或者 [Discord](https://discord.com/invite/AYFPHvv2jT) 加入社区讨论,提出问题或分享你的经验。
如果你有任何疑问,或者需要进一步的帮助,请不要犹豫,请通过上述渠道与我们联系。
diff --git a/docs/development/state-management/state-management-intro.mdx b/docs/development/state-management/state-management-intro.mdx
index 4d932700c5..06388dbb7e 100644
--- a/docs/development/state-management/state-management-intro.mdx
+++ b/docs/development/state-management/state-management-intro.mdx
@@ -1,10 +1,18 @@
---
title: Best Practices for State Management
+description: >-
+ Explore effective state management practices for LobeHub's complex data flow
+ architecture.
+tags:
+ - State Management
+ - LobeHub
+ - Data Flow
+ - Best Practices
---
# Best Practices for State Management
-LobeChat differs from traditional CRUD web applications in that it involves a large amount of rich interactive capabilities. Therefore, it is crucial to design a data flow architecture that is easy to develop and maintain. This document will introduce the best practices for data flow management in LobeChat.
+LobeHub differs from traditional CRUD web applications in that it involves a large amount of rich interactive capabilities. Therefore, it is crucial to design a data flow architecture that is easy to develop and maintain. This document will introduce the best practices for data flow management in LobeHub.
## Key Concepts
@@ -64,10 +72,10 @@ SortableTree/store
- **High Complexity**: Involves over 30 states and 20 actions, requiring modular cohesion using slices. Each slice declares its own initState, actions, reducers, and selectors.
-The directory structure of the previous version of SessionStore for LobeChat, with high complexity, implements a large amount of business logic. However, with the modularization of slices and the fractal architecture, it is easy to find the corresponding modules, making it easy to maintain and iterate on new features.
+The directory structure of the previous version of SessionStore for LobeHub, with high complexity, implements a large amount of business logic. However, with the modularization of slices and the fractal architecture, it is easy to find the corresponding modules, making it easy to maintain and iterate on new features.
```bash
-LobeChat SessionStore
+LobeHub SessionStore
├── index.ts
├── initialState.ts
├── selectors.ts
@@ -107,11 +115,11 @@ LobeChat SessionStore
└── store.ts
```
-Based on the provided directory structure of LobeChat SessionStore, we can update the previous document and convert the examples to the implementation of LobeChat's SessionStore. The following is a portion of the updated document:
+Based on the provided directory structure of LobeHub SessionStore, we can update the previous document and convert the examples to the implementation of LobeHub's SessionStore. The following is a portion of the updated document:
-### Best Practices for LobeChat SessionStore Directory Structure
+### Best Practices for LobeHub SessionStore Directory Structure
-In the LobeChat application, session management is a complex functional module, so we use the Slice pattern to organize the data flow. Below is the directory structure of LobeChat SessionStore, where each directory and file has its specific purpose:
+In the LobeHub application, session management is a complex functional module, so we use the Slice pattern to organize the data flow. Below is the directory structure of LobeHub SessionStore, where each directory and file has its specific purpose:
{/* eslint-disable no-irregular-whitespace */}
@@ -148,7 +156,7 @@ src/store/session
## Implementation of SessionStore
-In LobeChat, the SessionStore is designed as the core module for managing session state and logic. It consists of multiple Slices, with each Slice managing a relevant portion of state and logic. Below is a simplified example of the SessionStore implementation:
+In LobeHub, the SessionStore is designed as the core module for managing session state and logic. It consists of multiple Slices, with each Slice managing a relevant portion of state and logic. Below is a simplified example of the SessionStore implementation:
### store.ts
@@ -177,7 +185,7 @@ export const useSessionStore = createWithEqualityFn()(
persist(
subscribeWithSelector(
devtools(createStore, {
- name: 'LobeChat_Session' + (isDev ? '_DEV' : ''),
+ name: 'LobeHub_Session' + (isDev ? '_DEV' : ''),
}),
),
persistOptions,
@@ -218,4 +226,4 @@ export const createSessionSlice: StateCreator<
In the `action.ts` file, we define a `SessionActions` interface to describe session-related actions and implement a `useFetchSessions` function to create these actions. Then, we merge these actions with the initial state to form the session-related Slice.
-Through this layered and modular approach, we can ensure that LobeChat's SessionStore is clear, maintainable, and easy to extend and test.
+Through this layered and modular approach, we can ensure that LobeHub's SessionStore is clear, maintainable, and easy to extend and test.
diff --git a/docs/development/state-management/state-management-intro.zh-CN.mdx b/docs/development/state-management/state-management-intro.zh-CN.mdx
index f1fbe9dece..770a969df1 100644
--- a/docs/development/state-management/state-management-intro.zh-CN.mdx
+++ b/docs/development/state-management/state-management-intro.zh-CN.mdx
@@ -1,10 +1,16 @@
---
title: 状态管理最佳实践
+description: 探索 LobeHub 中的状态管理最佳实践,提升数据流架构的易用性与维护性。
+tags:
+ - 状态管理
+ - LobeHub
+ - 数据流
+ - 最佳实践
---
# 状态管理最佳实践
-LobeChat 不同于传统 CRUD 的网页,存在大量的富交互能力,如何设计一个易于开发与易于维护的数据流架构非常重要。本篇文档将介绍 LobeChat 中的数据流管理最佳实践。
+LobeHub 不同于传统 CRUD 的网页,存在大量的富交互能力,如何设计一个易于开发与易于维护的数据流架构非常重要。本篇文档将介绍 LobeHub 中的数据流管理最佳实践。
## 概念要素
@@ -69,7 +75,7 @@ SortableTree/store
下述这个数据流的目录结构是之前一版 SessionStore,具有很高的复杂度,实现了大量的业务逻辑。但借助于 slice 的模块化和分形架构的心智,我们可以很容易地找到对应的模块,新增功能与迭代都很易于维护。
```bash
-LobeChat SessionStore
+LobeHub SessionStore
├── index.ts
├── initialState.ts
├── selectors.ts
@@ -109,9 +115,9 @@ LobeChat SessionStore
└── store.ts
```
-### LobeChat SessionStore 目录结构最佳实践
+### LobeHub SessionStore 目录结构最佳实践
-在 LobeChat 应用中,由于会话管理是一个复杂的功能模块,因此我们采用了 [slice 模式](https://github.com/pmndrs/zustand/blob/main/docs/guides/slices-pattern.md) 来组织数据流。下面是 LobeChat SessionStore 的目录结构,其中每个目录和文件都有其特定的用途:
+在 LobeHub 应用中,由于会话管理是一个复杂的功能模块,因此我们采用了 [slice 模式](https://github.com/pmndrs/zustand/blob/main/docs/guides/slices-pattern.md) 来组织数据流。下面是 LobeHub SessionStore 的目录结构,其中每个目录和文件都有其特定的用途:
{/* eslint-disable no-irregular-whitespace */}
@@ -140,7 +146,7 @@ src/store/session
## SessionStore 的实现
-在 LobeChat 中,SessionStore 被设计为管理会话状态和逻辑的核心模块。它由多个 Slices 组成,每个 Slice 管理一部分相关的状态和逻辑。下面是一个简化的 SessionStore 的实现示例:
+在 LobeHub 中,SessionStore 被设计为管理会话状态和逻辑的核心模块。它由多个 Slices 组成,每个 Slice 管理一部分相关的状态和逻辑。下面是一个简化的 SessionStore 的实现示例:
### store.ts
@@ -169,7 +175,7 @@ export const useSessionStore = createWithEqualityFn()(
persist(
subscribeWithSelector(
devtools(createStore, {
- name: 'LobeChat_Session' + (isDev ? '_DEV' : ''),
+ name: 'LobeHub_Session' + (isDev ? '_DEV' : ''),
}),
),
persistOptions,
@@ -210,4 +216,4 @@ export const createSessionSlice: StateCreator<
在 `action.ts` 文件中,我们定义了一个 `SessionActions` 接口来描述会话相关的动作,并且实现了一个 `useFetchSessions` 函数来创建这些动作。然后,我们将这些动作与初始状态合并,以形成会话相关的 Slice。
-通过这种结构分层和模块化的方法,我们可以确保 LobeChat 的 SessionStore 是清晰、可维护的,同时也便于扩展和测试。
+通过这种结构分层和模块化的方法,我们可以确保 LobeHub 的 SessionStore 是清晰、可维护的,同时也便于扩展和测试。
diff --git a/docs/development/state-management/state-management-selectors.mdx b/docs/development/state-management/state-management-selectors.mdx
index db4e9276d9..2d9403ee23 100644
--- a/docs/development/state-management/state-management-selectors.mdx
+++ b/docs/development/state-management/state-management-selectors.mdx
@@ -1,10 +1,19 @@
---
title: Data Store Selector
+description: >-
+ Explore the role of selectors in LobeHub for efficient data retrieval and
+ management.
+tags:
+ - Data Store
+ - Selectors
+ - LobeHub
+ - TypeScript
+ - State Management
---
# Data Store Selector
-Selectors are data retrieval modules under the LobeChat data flow development framework. Their role is to extract data from the store using specific business logic for consumption by components.
+Selectors are data retrieval modules under the LobeHub data flow development framework. Their role is to extract data from the store using specific business logic for consumption by components.
Taking `src/store/plugin/selectors.ts` as an example:
@@ -65,9 +74,9 @@ The benefits of implementing this approach are:
1. **Decoupling and reusability**: By separating selectors from components, we can reuse these selectors across multiple components without rewriting data retrieval logic. This reduces duplicate code, improves development efficiency, and makes the codebase cleaner and easier to maintain.
2. **Performance optimization**: Selectors can be used to compute derived data, avoiding redundant calculations in each component. When the state changes, only the selectors dependent on that part of the state will recalculate, reducing unnecessary rendering and computation.
3. **Ease of testing**: Selectors are pure functions, relying only on the passed parameters. This means they can be tested in an isolated environment without the need to simulate the entire store or component tree.
-4. **Type safety**: As LobeChat uses TypeScript, each selector has explicit input and output type definitions. This provides developers with the advantage of auto-completion and compile-time checks, reducing runtime errors.
+4. **Type safety**: As LobeHub uses TypeScript, each selector has explicit input and output type definitions. This provides developers with the advantage of auto-completion and compile-time checks, reducing runtime errors.
5. **Maintainability**: Selectors centralize the logic for reading state, making it more intuitive to track state changes and management. If the state structure changes, only the relevant selectors need to be updated, rather than searching and replacing in multiple places throughout the codebase.
6. **Composability**: Selectors can be composed with other selectors to create more complex selection logic. This pattern allows developers to build a hierarchy of selectors, making state selection more flexible and powerful.
7. **Simplified component logic**: Components do not need to know the structure of the state or how to retrieve and compute the required data. Components only need to call selectors to obtain the data needed for rendering, simplifying and clarifying component logic.
-With this design, LobeChat developers can focus more on building the user interface and business logic without worrying about the details of data retrieval and processing. This pattern also provides better adaptability and scalability for potential future changes in state structure.
+With this design, LobeHub developers can focus more on building the user interface and business logic without worrying about the details of data retrieval and processing. This pattern also provides better adaptability and scalability for potential future changes in state structure.
diff --git a/docs/development/state-management/state-management-selectors.zh-CN.mdx b/docs/development/state-management/state-management-selectors.zh-CN.mdx
index 438db18091..f2e351c9b8 100644
--- a/docs/development/state-management/state-management-selectors.zh-CN.mdx
+++ b/docs/development/state-management/state-management-selectors.zh-CN.mdx
@@ -1,10 +1,17 @@
---
title: 数据存储取数模块
+description: 了解 LobeHub 数据存储取数模块及其选择器的使用和优势。
+tags:
+ - LobeHub
+ - 数据存储
+ - 选择器
+ - TypeScript
+ - 前端开发
---
# 数据存储取数模块
-selectors 是 LobeChat 数据流研发框架下的取数模块,它的作用是从 store 中以特定特务逻辑取出数据,供组件消费使用。
+selectors 是 LobeHub 数据流研发框架下的取数模块,它的作用是从 store 中以特定业务逻辑取出数据,供组件消费使用。
以 `src/store/tool/slices/plugin/selectors.ts` 为例:
@@ -46,9 +53,9 @@ const Render = () => {
1. **解耦和重用**:通过将选择器独立于组件,我们可以在多个组件之间复用这些选择器而不需要重写取数逻辑。这减少了重复代码,提高了开发效率,并且使得代码库更加干净和易于维护。
2. **性能优化**:选择器可以用来计算派生数据,这样可以避免在每个组件中重复计算相同的数据。当状态发生变化时,只有依赖于这部分状态的选择器才会重新计算,从而减少不必要的渲染和计算。
3. **易于测试**:选择器是纯函数,它们仅依赖于传入的参数。这意味着它们可以在隔离的环境中进行测试,无需模拟整个 store 或组件树。
-4. **类型安全**:由于 LobeChat 使用 TypeScript,每个选择器都有明确的输入和输出类型定义。这为开发者提供了自动完成和编译时检查的优势,减少了运行时错误。
+4. **类型安全**:由于 LobeHub 使用 TypeScript,每个选择器都有明确的输入和输出类型定义。这为开发者提供了自动完成和编译时检查的优势,减少了运行时错误。
5. **可维护性**:选择器集中了状态的读取逻辑,使得跟踪状态的变化和管理更加直观。如果状态结构发生变化,我们只需要更新相应的选择器,而不是搜索和替换整个代码库中的多个位置。
6. **可组合性**:选择器可以组合其他选择器,以创建更复杂的选择逻辑。这种模式允许开发者构建一个选择器层次结构,使得状态选择更加灵活和强大。
7. **简化组件逻辑**:组件不需要知道状态的结构或如何获取和计算需要的数据。组件只需调用选择器即可获取渲染所需的数据,这使得组件逻辑变得更简单和清晰。
-通过这样的设计,LobeChat 的开发者可以更专注于构建用户界面和业务逻辑,而不必担心数据的获取和处理细节。这种模式也为未来可能的状态结构变更提供了更好的适应性和扩展性。
+通过这样的设计,LobeHub 的开发者可以更专注于构建用户界面和业务逻辑,而不必担心数据的获取和处理细节。这种模式也为未来可能的状态结构变更提供了更好的适应性和扩展性。
diff --git a/docs/development/tests/integration-testing.mdx b/docs/development/tests/integration-testing.mdx
new file mode 100644
index 0000000000..7c5edfa1f3
--- /dev/null
+++ b/docs/development/tests/integration-testing.mdx
@@ -0,0 +1,410 @@
+---
+title: Integration Testing Guide
+description: >-
+ Learn how to effectively conduct integration testing to verify module
+ interactions and data integrity.
+tags:
+ - Integration Testing
+ - Software Testing
+ - Test Automation
+---
+
+# Integration Testing Guide
+
+## Overview
+
+Integration testing verifies the correctness of multiple modules working together, ensuring that the complete call chain (Router → Service → Model → Database) functions as expected.
+
+## Why Do We Need Integration Tests?
+
+Even with high unit test coverage (80%+), integration issues can still occur:
+
+### Common Issue Example
+
+```typescript
+// ❌ Issue: Parameter lost in the call chain
+// Router layer
+const messageId = await messageModel.create({
+ content: 'test',
+ sessionId: 'xxx',
+ topicId: 'yyy', // ← topicId is passed in
+});
+
+// Model layer (assume there's a bug)
+async create(data) {
+ return this.db.insert(messages).values({
+ content: data.content,
+ sessionId: data.sessionId,
+ // ❌ Forgot to pass topicId
+ });
+}
+
+// Result: Unit test passes (because Model is mocked), but topicId is lost in actual execution
+```
+
+### Issues Caught by Integration Tests
+
+1. **Missing parameter propagation**: containerId, threadId, topicId, etc., lost in the call chain
+2. **Database constraints**: foreign keys, cascading deletes, etc., cannot be verified with mocks
+3. **Transaction integrity**: atomicity of cross-table operations
+4. **Permission checks**: cross-user access control
+5. **Real-world scenarios**: simulate complete user workflows
+
+## Running Integration Tests
+
+```bash
+# Run all integration tests
+pnpm test:integration
+
+# Run a specific test file
+pnpm vitest tests/integration/routers/message.integration.test.ts
+
+# Watch mode
+pnpm vitest tests/integration --watch
+
+# Generate coverage report
+pnpm test:integration --coverage
+```
+
+## Directory Structure
+
+```
+tests/integration/
+├── README.md # Integration test documentation
+├── setup.ts # Common setup and utility functions
+└── routers/ # Router layer integration tests
+ ├── message.integration.test.ts # Message Router tests
+ ├── session.integration.test.ts # Session Router tests
+ ├── topic.integration.test.ts # Topic Router tests
+ └── chat-flow.integration.test.ts # Full chat flow tests
+```
+
+## Writing Integration Tests
+
+### Basic Template
+
+```typescript
+// @vitest-environment node
+import { eq } from 'drizzle-orm';
+import { afterEach, beforeEach, describe, expect, it } from 'vitest';
+
+import { getTestDB } from '@/database/models/__tests__/_util';
+import { messages, sessions, users } from '@/database/schemas';
+import { LobeHubDatabase } from '@/database/type';
+import { messageRouter } from '@/server/routers/lambda/message';
+
+import { cleanupTestUser, createTestContext, createTestUser } from '../setup';
+
+describe('Your Feature Integration Tests', () => {
+ let serverDB: LobeHubDatabase;
+ let userId: string;
+
+ beforeEach(async () => {
+ // 1. Get test database
+ serverDB = await getTestDB();
+
+ // 2. Create test user
+ userId = await createTestUser(serverDB);
+
+ // 3. Prepare other test data
+ // ...
+ });
+
+ afterEach(async () => {
+ // Clean up test data
+ await cleanupTestUser(serverDB, userId);
+ });
+
+ it('should do something', async () => {
+ // 1. Create tRPC caller
+ const caller = messageRouter.createCaller(createTestContext(userId));
+
+ // 2. Perform operation
+ const result = await caller.someMethod({
+ /* params */
+ });
+
+ // 3. Assert result
+ expect(result).toBeDefined();
+
+ // 4. 🔥 Key: Verify from database
+ const [dbRecord] = await serverDB.select().from(messages).where(eq(messages.id, result));
+
+ expect(dbRecord).toMatchObject({
+ // Verify all critical fields
+ });
+ });
+});
+```
+
+### Best Practices
+
+#### 1. Test the Full Call Chain
+
+```typescript
+it('should create message with correct associations', async () => {
+ const caller = messageRouter.createCaller(createTestContext(userId));
+
+ // Perform operation
+ const messageId = await caller.createMessage({
+ content: 'Test',
+ sessionId: testSessionId,
+ topicId: testTopicId,
+ });
+
+ // ✅ Verify from database, not just return value
+ const [message] = await serverDB.select().from(messages).where(eq(messages.id, messageId));
+
+ expect(message.sessionId).toBe(testSessionId);
+ expect(message.topicId).toBe(testTopicId);
+ expect(message.userId).toBe(userId);
+});
+```
+
+#### 2. Test Cascading Operations
+
+```typescript
+it('should cascade delete messages when session is deleted', async () => {
+ const sessionCaller = sessionRouter.createCaller(createTestContext(userId));
+ const messageCaller = messageRouter.createCaller(createTestContext(userId));
+
+ // Create session and messages
+ const sessionId = await sessionCaller.createSession({
+ /* ... */
+ });
+ await messageCaller.createMessage({ sessionId /* ... */ });
+
+ // Delete session
+ await sessionCaller.removeSession({ id: sessionId });
+
+ // ✅ Verify related messages are also deleted
+ const remainingMessages = await serverDB
+ .select()
+ .from(messages)
+ .where(eq(messages.sessionId, sessionId));
+
+ expect(remainingMessages).toHaveLength(0);
+});
+```
+
+#### 3. Test Cross-Router Collaboration
+
+```typescript
+it('should handle complete chat flow', async () => {
+ const sessionCaller = sessionRouter.createCaller(createTestContext(userId));
+ const topicCaller = topicRouter.createCaller(createTestContext(userId));
+ const messageCaller = messageRouter.createCaller(createTestContext(userId));
+
+ // 1. Create session
+ const sessionId = await sessionCaller.createSession({
+ /* ... */
+ });
+
+ // 2. Create topic
+ const topicId = await topicCaller.createTopic({ sessionId /* ... */ });
+
+ // 3. Create message
+ const messageId = await messageCaller.createMessage({
+ sessionId,
+ topicId,
+ /* ... */
+ });
+
+ // ✅ Verify full associations
+ const [message] = await serverDB.select().from(messages).where(eq(messages.id, messageId));
+
+ expect(message.sessionId).toBe(sessionId);
+ expect(message.topicId).toBe(topicId);
+});
+```
+
+#### 4. Test Error Scenarios
+
+```typescript
+it('should prevent cross-user access', async () => {
+ // User A creates session
+ const sessionId = await sessionRouter.createCaller(createTestContext(userA)).createSession({
+ /* ... */
+ });
+
+ // User B tries to access
+ const callerB = messageRouter.createCaller(createTestContext(userB));
+
+ // ✅ Should throw error
+ await expect(
+ callerB.createMessage({
+ sessionId,
+ content: 'Unauthorized',
+ }),
+ ).rejects.toThrow();
+});
+```
+
+#### 5. Test Concurrency
+
+```typescript
+it('should handle concurrent operations', async () => {
+ const caller = messageRouter.createCaller(createTestContext(userId));
+
+ // Concurrently create multiple messages
+ const promises = Array.from({ length: 10 }, (_, i) =>
+ caller.createMessage({
+ content: `Message ${i}`,
+ sessionId: testSessionId,
+ }),
+ );
+
+ const messageIds = await Promise.all(promises);
+
+ // ✅ Verify all messages created successfully and are unique
+ expect(messageIds).toHaveLength(10);
+ expect(new Set(messageIds).size).toBe(10);
+});
+```
+
+### Data Isolation
+
+Each test case should be independent and not rely on others:
+
+```typescript
+beforeEach(async () => {
+ // Create new data for each test
+ userId = await createTestUser(serverDB);
+ testSessionId = await createTestSession(serverDB, userId);
+});
+
+afterEach(async () => {
+ // Clean up test data
+ await cleanupTestUser(serverDB, userId);
+});
+```
+
+### Test Naming
+
+Use clear names to describe the test's intent:
+
+```typescript
+// ✅ Good naming
+it('should create message with correct sessionId and topicId');
+it('should cascade delete messages when session is deleted');
+it('should prevent cross-user access to messages');
+
+// ❌ Poor naming
+it('test message creation');
+it('test delete');
+```
+
+## Differences from Unit Tests
+
+| Dimension | Unit Test | Integration Test |
+| ---------------- | --------------------------- | --------------------------------- |
+| **Scope** | Single function/class | Multiple modules working together |
+| **Dependencies** | Mocks external dependencies | Uses real dependencies |
+| **Database** | Mocked | Real test database |
+| **Speed** | Fast (ms level) | Slower (seconds) |
+| **Quantity** | Many (60%) | Fewer (30%) |
+| **Purpose** | Verify logic correctness | Verify integration correctness |
+
+## Testing Pyramid
+
+```
+ /\
+ /E2E\ ← 10% (Critical business flows)
+ /------\
+ /Integration\ ← 30% (API integration tests) ⭐ Focus of this guide
+ /------------\
+ / Unit Tests \ ← 60% (Already 80%+ coverage)
+ /----------------\
+```
+
+## Coverage Goals
+
+### Priority P0 (Must Cover)
+
+- ✅ Cross-layer ID propagation (sessionId, topicId, containerId, threadId)
+- ✅ Permission checks (users can only access their own resources)
+- ✅ Cascading deletes (deleting a session also deletes related data)
+- ✅ Foreign key constraints (cannot create associations to non-existent records)
+
+### Priority P1 (Should Cover)
+
+- Concurrency (multiple requests at the same time)
+- Pagination (correct data slicing)
+- Search functionality (keyword search)
+- Batch operations (bulk create/delete)
+
+### Priority P2 (Nice to Have)
+
+- Analytics (counts, rankings)
+- Complex queries (multi-condition filters)
+- Performance testing (large data scenarios)
+
+## Debugging Tips
+
+### 1. Inspect Test Database State
+
+```typescript
+it('debug test', async () => {
+ // Perform operation
+ await caller.createMessage({
+ /* ... */
+ });
+
+ // Print database state
+ const allMessages = await serverDB.select().from(messages);
+ console.log('All messages:', allMessages);
+});
+```
+
+### 2. Use Drizzle Studio
+
+```bash
+# Launch Drizzle Studio to inspect test database
+pnpm db:studio
+```
+
+### 3. Retain Test Data
+
+```typescript
+afterEach(async () => {
+ // Temporarily comment out cleanup to retain data for debugging
+ // await cleanupTestUser(serverDB, userId);
+});
+```
+
+## FAQ
+
+### Q: Integration tests are slow. What can I do?
+
+A:
+
+1. Focus on critical paths, avoid over-testing
+2. Use `test.concurrent` to run independent tests in parallel
+3. Optimize test data setup to avoid redundant creation
+
+### Q: Tests interfere with each other. How to fix?
+
+A:
+
+1. Ensure each test uses a unique userId
+2. Thoroughly clean up data in `afterEach`
+3. Use transaction isolation (if supported by the database)
+
+### Q: How to test APIs that require authentication?
+
+A: Use `createTestContext(userId)` to create an authenticated context:
+
+```typescript
+const caller = messageRouter.createCaller(createTestContext(userId));
+```
+
+## References
+
+- [Vitest Documentation](https://vitest.dev/)
+- [Drizzle ORM Documentation](https://orm.drizzle.team/)
+- [tRPC Testing Guide](https://trpc.io/docs/server/testing)
+- [Test Pyramid by Martin Fowler](https://martinfowler.com/articles/practical-test-pyramid.html)
+
+## Contributing
+
+You're welcome to contribute more integration test cases! Please follow the style of existing test files.
diff --git a/docs/development/tests/integration-testing.zh-CN.mdx b/docs/development/tests/integration-testing.zh-CN.mdx
index 1d49be2f51..6f96e13e0a 100644
--- a/docs/development/tests/integration-testing.zh-CN.mdx
+++ b/docs/development/tests/integration-testing.zh-CN.mdx
@@ -1,5 +1,11 @@
---
title: 集成测试指南
+description: 了解集成测试的重要性及最佳实践,确保系统模块协同工作。
+tags:
+ - 集成测试
+ - 测试指南
+ - 软件测试
+ - 模块协作
---
# 集成测试指南
@@ -83,13 +89,13 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { getTestDB } from '@/database/models/__tests__/_util';
import { messages, sessions, users } from '@/database/schemas';
-import { LobeChatDatabase } from '@/database/type';
+import { LobeHubDatabase } from '@/database/type';
import { messageRouter } from '@/server/routers/lambda/message';
import { cleanupTestUser, createTestContext, createTestUser } from '../setup';
describe('Your Feature Integration Tests', () => {
- let serverDB: LobeChatDatabase;
+ let serverDB: LobeHubDatabase;
let userId: string;
beforeEach(async () => {
diff --git a/docs/glossary.md b/docs/glossary.md
index f588b0a46f..9145a1f48b 100644
--- a/docs/glossary.md
+++ b/docs/glossary.md
@@ -2,10 +2,10 @@
以下是一些词汇的固定翻译:
-| develop key | zh-CN(中文) | en-US(English) |
-|-------------| ----------- | -------------- |
-| agent | 助理 | Agent |
-| agentGroup | 群组 | Group |
-| page | 文稿 | Page |
-| topic | 话题 | Topic |
-| thread | 子话题 | Thread |
+| develop key | zh-CN (中文) | en-US(English) |
+| ----------- | ------------ | -------------- |
+| agent | 助理 | Agent |
+| agentGroup | 群组 | Group |
+| page | 文稿 | Page |
+| topic | 话题 | Topic |
+| thread | 子话题 | Thread |
diff --git a/docs/glossary.zh-CN.md b/docs/glossary.zh-CN.md
new file mode 100644
index 0000000000..d1a9347fd3
--- /dev/null
+++ b/docs/glossary.zh-CN.md
@@ -0,0 +1,11 @@
+# 术语表
+
+以下是一些词汇的固定翻译:
+
+| 开发键 | zh-CN(中文) | en-US(英文) |
+| ---------- | ------------- | ------------- |
+| agent | 助理 | Agent |
+| agentGroup | 群组 | Group |
+| page | 文稿 | Page |
+| topic | 话题 | Topic |
+| thread | 子话题 | Thread |
diff --git a/docs/self-hosting/advanced/analytics.mdx b/docs/self-hosting/advanced/analytics.mdx
index 0ccd0e8d66..a5ccbbb75e 100644
--- a/docs/self-hosting/advanced/analytics.mdx
+++ b/docs/self-hosting/advanced/analytics.mdx
@@ -1,10 +1,10 @@
---
-title: Integrating Data Analytics Services in LobeChat for User Usage Analysis
+title: Integrating Data Analytics Services in LobeHub for User Usage Analysis
description: >-
- Learn how to integrate free/open-source data analytics services in LobeChat to
+ Learn how to integrate free/open-source data analytics services in LobeHub to
collect user usage data efficiently.
tags:
- - LobeChat
+ - LobeHub
- data analytics
- user usage analysis
- Vercel Analytics
@@ -13,7 +13,7 @@ tags:
# Data Analysis
-To better help analyze the usage of LobeChat users, we have integrated several free/open-source data analytics services in LobeChat for collecting user usage data, which you can enable as needed.
+To better help analyze the usage of LobeHub users, we have integrated several free/open-source data analytics services in LobeHub for collecting user usage data, which you can enable as needed.
Currently, the integrated data analytics platforms only support deployment and usage on
diff --git a/docs/self-hosting/advanced/analytics.zh-CN.mdx b/docs/self-hosting/advanced/analytics.zh-CN.mdx
index cf23d413b0..fc7b043c6c 100644
--- a/docs/self-hosting/advanced/analytics.zh-CN.mdx
+++ b/docs/self-hosting/advanced/analytics.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 数据分析集成服务介绍
-description: 了解如何在 LobeChat 中集成免费/开源的数据统计服务,帮助分析用户使用情况。包括 Vercel Analytics 的设置和使用教程。
+title: LobeHub 数据分析集成服务介绍
+description: 了解如何在 LobeHub 中集成免费/开源的数据统计服务,帮助分析用户使用情况。包括 Vercel Analytics 的设置和使用教程。
tags:
- - LobeChat
+ - LobeHub
- 数据分析
- Vercel Analytics
- 数据统计服务
@@ -11,7 +11,7 @@ tags:
# 数据分析
-为更好地帮助分析 LobeChat 的用户使用情况,我们在 LobeChat 中集成了若干免费 / 开源的数据统计服务,用于收集用户的使用情况,你可以按需开启。
+为更好地帮助分析 LobeHub 的用户使用情况,我们在 LobeHub 中集成了若干免费 / 开源的数据统计服务,用于收集用户的使用情况,你可以按需开启。
目前集成的数据分析平台,均只支持 Vercel / Zeabur 平台部署使用,不支持 Docker/Docker Compose 部署
diff --git a/docs/self-hosting/advanced/auth.mdx b/docs/self-hosting/advanced/auth.mdx
index d0f603c485..82d5ba1e50 100644
--- a/docs/self-hosting/advanced/auth.mdx
+++ b/docs/self-hosting/advanced/auth.mdx
@@ -1,9 +1,9 @@
---
-title: LobeChat Authentication Service Configuration
+title: LobeHub Authentication Service Configuration
description: >-
- Learn how to configure Better Auth for centralized user authorization
- management. Supported SSO providers include Google, GitHub, Microsoft, and
- more.
+ Learn how to configure external authentication services using Better Auth,
+ Clerk, or Next Auth for centralized user authorization management. Supported
+ authentication services include Auth0, Azure ID, etc.
tags:
- Authentication Service
- Better Auth
@@ -12,13 +12,23 @@ tags:
# Authentication Service
-LobeChat uses [Better Auth](https://www.better-auth.com) as its authentication solution, providing comprehensive, secure, and flexible identity verification for self-hosted deployments.
+LobeHub supports the configuration of external authentication services using Better Auth, Clerk, or Next Auth for internal use within enterprises/organizations to centrally manage user authorization.
Looking for legacy authentication methods? See [Legacy Authentication](/docs/self-hosting/advanced/auth/legacy) for NextAuth and Clerk documentation.
-## Key Features
+Clerk is a comprehensive identity verification solution that has recently gained popularity. It provides a simple yet powerful API and services to handle user authentication and session management. Clerk's design philosophy is to offer a concise and modern authentication solution that enables developers to easily integrate and use it.
+
+LobeHub has deeply integrated with Clerk to provide users with a more secure and convenient login and registration experience. It also relieves developers from the burden of managing authentication logic. Clerk's concise and modern design philosophy aligns perfectly with LobeHub's goals, making user management on the entire platform more efficient and reliable.
+
+By setting the environment variables `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` and `CLERK_SECRET_KEY` in LobeHub's environment, you can enable and use Clerk.
+
+## Better Auth
+
+[Better Auth](https://www.better-auth.com) is a modern, framework-agnostic authentication library designed to provide comprehensive, secure, and flexible authentication solutions. It supports various authentication methods including email/password, magic links, and multiple OAuth/SSO providers.
+
+### Key Features
- **Email/Password Authentication**: Built-in support for traditional email and password login with secure password hashing
- **Email Verification**: Optional email verification flow with customizable email templates
@@ -28,12 +38,14 @@ LobeChat uses [Better Auth](https://www.better-auth.com) as its authentication s
## Getting Started
-To enable Better Auth in LobeChat, set the following environment variables:
+To enable Better Auth in LobeHub, set the following environment variables:
-| Environment Variable | Type | Description |
-| -------------------- | -------- | ------------------------------------------------------------------------------ |
-| `AUTH_SECRET` | Required | Key used to encrypt session tokens. Generate using: `openssl rand -base64 32` |
-| `AUTH_SSO_PROVIDERS` | Optional | Comma-separated list of enabled SSO providers, e.g., `google,github,microsoft` |
+| Environment Variable | Type | Description |
+| -------------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `NEXT_PUBLIC_ENABLE_BETTER_AUTH` | Required | Set to `1` to enable Better Auth service |
+| `AUTH_SECRET` | Required | Key used to encrypt session tokens. Generate using: `openssl rand -base64 32` |
+| `NEXT_PUBLIC_AUTH_URL` | Required | The browser-accessible base URL for Better Auth (e.g., `http://localhost:3010`, `https://lobehub.com`). Optional for Vercel deployments (auto-detected from `VERCEL_URL`) |
+| `AUTH_SSO_PROVIDERS` | Optional | Comma-separated list of enabled SSO providers, e.g., `google,github,microsoft` |
## Supported SSO Providers
@@ -140,9 +152,7 @@ Send emails via SMTP protocol, suitable for users with existing email services.
### Common Configuration
-| Environment Variable | Type | Description | Example |
-| ------------------------- | -------- | --------------------------------------------------------- | ------- |
-| `AUTH_EMAIL_VERIFICATION` | Optional | Set to `1` to require email verification (off by default) | `1` |
+Before using NextAuth, please set the following variables in LobeHub's environment variables:
## Magic Link (Passwordless) Login
@@ -190,4 +200,16 @@ Set the `AUTH_ALLOWED_EMAILS` environment variable with a comma-separated list o
- Allow only `example.com` domain: `AUTH_ALLOWED_EMAILS=example.com`
- Allow multiple domains and specific emails: `AUTH_ALLOWED_EMAILS=example.com,company.org,admin@other.com`
-Leave empty to allow all emails. This restriction applies to both email registration and SSO login.
+## Additional Features
+
+### Webhook Support
+
+Allow LobeHub to receive notifications when user information is updated in the identity provider. Supported providers include Casdoor and Logto. Please refer to the specific provider documentation for configuration details.
+
+### Database Session
+
+Allow the session store in database, see also the [Auth.js Session Documentation](https://authjs.dev/concepts/session-strategies#database-session).
+
+## Other SSO Providers
+
+Please refer to the [Auth.js](https://authjs.dev/getting-started/authentication/oauth) documentation and feel free to submit a Pull Request.
diff --git a/docs/self-hosting/advanced/auth.zh-CN.mdx b/docs/self-hosting/advanced/auth.zh-CN.mdx
index d173f2d9c2..16830241aa 100644
--- a/docs/self-hosting/advanced/auth.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth.zh-CN.mdx
@@ -1,21 +1,34 @@
---
-title: LobeChat 身份验证服务配置
-description: 了解如何配置 Better Auth 以统一管理用户授权。支持的 SSO 提供商包括 Google、GitHub、Microsoft 等。
+title: LobeHub 身份验证服务配置
+description: >-
+ 了解如何使用 Better Auth、Clerk 或 Next Auth 配置外部身份验证服务,以统一管理用户授权。支持的身份验证服务包括 Auth0、
+ Azure ID 等。
tags:
- 身份验证服务
- Better Auth
+ - LobeHub
- SSO
---
# 身份验证服务
-LobeChat 使用 [Better Auth](https://www.better-auth.com) 作为身份验证解决方案,为自托管部署提供全面、安全、灵活的身份验证服务。
+LobeHub 支持使用 Better Auth、Clerk 或者 Next Auth 配置外部身份验证服务,供企业 / 组织内部使用,统一管理用户授权。
需要使用旧版身份验证方案?请参阅 [旧版身份验证](/zh/docs/self-hosting/advanced/auth/legacy) 了解 NextAuth 和 Clerk 的文档。
-## 主要特性
+Clerk 是一个近期流行起来的全面的身份验证解决方案,它提供了简单而强大的 API 和服务来处理用户认证和会话管理。Clerk 的设计哲学是提供一套简洁、现代的认证解决方案,使得开发者可以轻松集成和使用。
+
+LobeHub 与 Clerk 做了深度集成,能够为用户提供一个更加安全、便捷的登录和注册体验,同时也为开发者减轻了管理身份验证逻辑的负担。Clerk 的简洁和现代的设计理念与 LobeHub 的目标非常契合,使得整个平台的用户管理更加高效和可靠。
+
+在 LobeHub 的环境变量中设置 `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` 和 `CLERK_SECRET_KEY`,即可开启和使用 Clerk。
+
+## Better Auth
+
+[Better Auth](https://www.better-auth.com) 是一个现代化、框架无关的身份验证库,旨在提供全面、安全、灵活的身份验证解决方案。它支持多种认证方式,包括邮箱 / 密码登录、魔法链接登录以及多种 OAuth/SSO 提供商。
+
+### 主要特性
- **邮箱 / 密码认证**:内置支持传统的邮箱和密码登录,采用安全的密码哈希算法
- **邮箱验证**:可选的邮箱验证流程,支持自定义邮件模板
@@ -25,12 +38,14 @@ LobeChat 使用 [Better Auth](https://www.better-auth.com) 作为身份验证解
## 快速开始
-要在 LobeChat 中启用 Better Auth,请设置以下环境变量:
+要在 LobeHub 中启用 Better Auth,请设置以下环境变量:
-| 环境变量 | 类型 | 描述 |
-| -------------------- | -- | ------------------------------------------------ |
-| `AUTH_SECRET` | 必选 | 用于加密会话令牌的密钥。使用以下命令生成:`openssl rand -base64 32` |
-| `AUTH_SSO_PROVIDERS` | 可选 | 启用的 SSO 提供商列表,以逗号分隔,例如 `google,github,microsoft` |
+| 环境变量 | 类型 | 描述 |
+| -------------------------------- | -- | --------------------------------------------------------------------------------------------------------------- |
+| `NEXT_PUBLIC_ENABLE_BETTER_AUTH` | 必选 | 设置为 `1` 以启用 Better Auth 服务 |
+| `AUTH_SECRET` | 必选 | 用于加密会话令牌的密钥。使用以下命令生成:`openssl rand -base64 32` |
+| `NEXT_PUBLIC_AUTH_URL` | 必选 | 浏览器可访问的 Better Auth 基础 URL(例如 `http://localhost:3010`、`https://lobehub.com`)。Vercel 部署时可选(会自动从 `VERCEL_URL` 获取) |
+| `AUTH_SSO_PROVIDERS` | 可选 | 启用的 SSO 提供商列表,以逗号分隔,例如 `google,github,microsoft` |
## 支持的 SSO 提供商
@@ -137,9 +152,7 @@ LobeChat 使用 [Better Auth](https://www.better-auth.com) 作为身份验证解
### 通用配置
-| 环境变量 | 类型 | 描述 | 示例 |
-| ------------------------- | -- | --------------------------- | --- |
-| `AUTH_EMAIL_VERIFICATION` | 可选 | 设置为 `1` 以要求用户在登录前验证邮箱(默认关闭) | `1` |
+在使用 NextAuth 之前,请先在 LobeHub 的环境变量中设置以下变量:
## 魔法链接(免密)登录
@@ -188,4 +201,14 @@ Better Auth 支持内置提供商(Google、GitHub、Microsoft、Apple、AWS Co
- 只允许 `example.com` 域名:`AUTH_ALLOWED_EMAILS=example.com`
- 允许多个域名和特定邮箱:`AUTH_ALLOWED_EMAILS=example.com,company.org,admin@other.com`
-留空表示允许所有邮箱注册。此限制对邮箱注册和 SSO 登录均有效。
+### Webhook 支持
+
+允许 LobeHub 在身份提供商中用户信息更新时接收通知。支持的提供商包括 Casdoor 和 Logto。请参考具体提供商文档进行配置。
+
+### 数据库会话
+
+允许会话存储在数据库中,详情请参阅 [Auth.js 会话文档](https://authjs.dev/concepts/session-strategies#database-session)。
+
+## 其他 SSO 提供商
+
+请参考 [Auth.js](https://authjs.dev/getting-started/authentication/oauth) 文档,欢迎提交 Pull Request。
diff --git a/docs/self-hosting/advanced/auth/clerk.mdx b/docs/self-hosting/advanced/auth/clerk.mdx
index 3e9b7be3aa..aebf3a5d8a 100644
--- a/docs/self-hosting/advanced/auth/clerk.mdx
+++ b/docs/self-hosting/advanced/auth/clerk.mdx
@@ -20,7 +20,7 @@ Go to [Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) to register
Add `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` and `CLERK_SECRET_KEY` environment variables. You can click on the "API Keys" in the menu and copy the corresponding values to get these environment variables.
-
+
The environment variables required for this step are as follows:
@@ -36,7 +36,7 @@ Go to [Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) to register
We need to add an endpoint in Clerk's Webhooks to inform Clerk to send notifications to this endpoint when a user's information changes.
-
+
Fill in your project URL in the endpoint, such as `https://your-project.com/api/webhooks/clerk`. Then, subscribe to events by checking the three user events (`user.created`, `user.deleted`, `user.updated`), and click create.
@@ -44,13 +44,13 @@ Go to [Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) to register
The `https://` in the URL is essential to maintain the integrity of the URL.
-
+
### Add Webhook Secret to Environment Variables
After creating, you can find the secret of this Webhook in the bottom right corner:
-
+
The environment variable corresponding to this secret is `CLERK_WEBHOOK_SECRET`:
diff --git a/docs/self-hosting/advanced/auth/clerk.zh-CN.mdx b/docs/self-hosting/advanced/auth/clerk.zh-CN.mdx
index 79f4a1a08e..f323fdc402 100644
--- a/docs/self-hosting/advanced/auth/clerk.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/clerk.zh-CN.mdx
@@ -19,7 +19,7 @@ tags:
添加 `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` 和 `CLERK_SECRET_KEY` 环境变量。你可以在菜单中点击「API Keys」,然后复制对应的值获取该环境变量。
-
+
此步骤所需的环境变量如下:
@@ -34,19 +34,19 @@ tags:
我们需要在 Clerk 的 Webhooks 中添加一个端点(Endpoint),告诉 Clerk 当用户发生变更时,向这个端点发送通知。
-
+
在 endpoint 中填写你的项目 URL,如 `https://your-project.com/api/webhooks/clerk`。然后在订阅事件(Subscribe to events)中,勾选 user 的三个事件(`user.created` 、`user.deleted`、`user.updated`),然后点击创建。
URL 的`https://`不可缺失,须保持 URL 的完整性
-
+
### 将 Webhook 秘钥添加到环境变量
创建完毕后,可以在右下角找到该 Webhook 的秘钥:
-
+
这个秘钥所对应的环境变量名为 `CLERK_WEBHOOK_SECRET`:
diff --git a/docs/self-hosting/advanced/auth/next-auth/auth0.mdx b/docs/self-hosting/advanced/auth/next-auth/auth0.mdx
index fea7d6c04d..7287d3a03b 100644
--- a/docs/self-hosting/advanced/auth/next-auth/auth0.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/auth0.mdx
@@ -1,7 +1,7 @@
---
-title: Configure Auth0 Identity Verification Service for LobeChat
+title: Configure Auth0 Identity Verification Service for LobeHub
description: >-
- Learn how to configure Auth0 Identity Verification Service for LobeChat for
+ Learn how to configure Auth0 Identity Verification Service for LobeHub for
your organization, including creating applications, adding users, and
configuring environment variables.
tags:
@@ -21,15 +21,15 @@ tags:
Register and log in to [Auth0][auth0-client-page], click on the "Applications" in the left navigation bar to switch to the application management interface, and click "Create Application" in the upper right corner to create an application.
-
+
Fill in the application name you want to display to the organization users, choose any application type, and click "Create".
-
+
After successful creation, click on the corresponding application to enter the application details page, switch to the "Settings" tab, and you can see the corresponding configuration information.
-
+
In the application configuration page, you also need to configure Allowed Callback URLs, where you should fill in:
@@ -37,7 +37,7 @@ tags:
http(s)://your-domain/api/auth/callback/auth0
```
-
+
You can fill in or modify Allowed Callback URLs after deployment, but make sure the filled URL is
@@ -46,13 +46,13 @@ tags:
### Add Users
- Click on the "Users Management" in the left navigation bar to enter the user management interface, where you can create users for your organization to log in to LobeChat.
+ Click on the "Users Management" in the left navigation bar to enter the user management interface, where you can create users for your organization to log in to LobeHub.
-
+
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -69,7 +69,7 @@ tags:
- After successful deployment, users will be able to authenticate and use LobeChat using the users
+ After successful deployment, users will be able to authenticate and use LobeHub using the users
configured in Auth0.
@@ -81,17 +81,17 @@ If your enterprise or organization already has a unified identity authentication
Auth0 supports single sign-on services such as Azure Active Directory, Slack, Google Workspace, Office 365, Zoom, and more. For a detailed list of supported services, please refer to [this link][auth0-sso-integrations].
-
+
### Configuring Social Login
If your enterprise or organization needs to support external user logins, you can configure social login services in Authentication -> Social.
-
+
Configuring social login services by default allows anyone to authenticate, which may lead to
- LobeChat being abused by external users.
+ LobeHub being abused by external users.
diff --git a/docs/self-hosting/advanced/auth/next-auth/auth0.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/auth0.zh-CN.mdx
index 725696b43b..37c13003ff 100644
--- a/docs/self-hosting/advanced/auth/next-auth/auth0.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/auth0.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中配置 Auth0 身份验证服务 - 详细步骤和环境变量设置
-description: 学习如何在 LobeChat 中配置 Auth0 身份验证服务,包括创建应用、新增用户、配置环境变量等。了解如何连接现有的单点登录服务和配置社交登录。
+title: 在 LobeHub 中配置 Auth0 身份验证服务 - 详细步骤和环境变量设置
+description: 学习如何在 LobeHub 中配置 Auth0 身份验证服务,包括创建应用、新增用户、配置环境变量等。了解如何连接现有的单点登录服务和配置社交登录。
tags:
- Auth0
- 身份验证
@@ -17,15 +17,15 @@ tags:
注册并登录 [Auth0](https://manage.auth0.com/dashboard),点击左侧导航栏的「Applications」,切换到应用管理界面,点击右上角「Create Application」以创建应用。
-
+
填写你想向组织用户显示的应用名称,可选择任意应用类型,点击「Create」。
-
+
创建成功后,点击相应的应用,进入应用详情页,切换到「Settings」标签页,就可以看到相应的配置信息
-
+
在应用配置页面中,还需要配置 Allowed Callback URLs,在此处填写:
@@ -33,7 +33,7 @@ tags:
http(s)://your-domain/api/auth/callback/auth0
```
-
+
可以在部署后再填写或修改 Allowed Callback URLs,但是务必保证填写的 URL 与部署的 URL 一致
@@ -41,13 +41,13 @@ tags:
### 新增用户
- 点击左侧导航栏的「Users Management」,进入用户管理界面,可以为你的组织新建用户,用以登录 LobeChat
+ 点击左侧导航栏的「Users Management」,进入用户管理界面,可以为你的组织新建用户,用以登录 LobeHub
-
+
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------- |
@@ -64,7 +64,7 @@ tags:
- 部署成功后,用户将可以使用 Auth0 中配置的用户通过身份认证并使用 LobeChat。
+ 部署成功后,用户将可以使用 Auth0 中配置的用户通过身份认证并使用 LobeHub。
## 进阶配置
@@ -75,16 +75,16 @@ tags:
Auth0 支持 Azure Active Directory / Slack / Google Workspace / Office 365 / Zoom 等单点登录服务,详细支持列表可参考 [这里](https://marketplace.auth0.com/features/sso-integrations)
-
+
### 配置社交登录
如果你的企业或组织需要支持外部人员登录,可以在 Authentication -> Social 中,配置社交登录服务。
-
+
- 配置社交登录服务默认会允许所有人通过认证,这可能会导致 LobeChat 被外部人员滥用。
+ 配置社交登录服务默认会允许所有人通过认证,这可能会导致 LobeHub 被外部人员滥用。
diff --git a/docs/self-hosting/advanced/auth/next-auth/authelia.mdx b/docs/self-hosting/advanced/auth/next-auth/authelia.mdx
index 6b2ec0a535..d59ed805de 100644
--- a/docs/self-hosting/advanced/auth/next-auth/authelia.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/authelia.mdx
@@ -1,13 +1,13 @@
---
-title: Configuring Authelia Authentication Service for LobeChat
+title: Configuring Authelia Authentication Service for LobeHub
description: >-
- Learn how to configure Authelia authentication service in LobeChat, including
- creating a provider, configuring environment variables, and deploying
- LobeChat. Detailed steps and necessary environment variable settings.
+ Learn how to configure Authelia authentication service in LobeHub, including
+ creating a provider, configuring environment variables, and deploying LobeHub.
+ Detailed steps and necessary environment variable settings.
tags:
- Authelia Configuration
- Single Sign-On (SSO)
- - LobeChat Authentication
+ - LobeHub Authentication
- Environment Variables
- Deployment Instructions
---
@@ -19,7 +19,7 @@ tags:
### Create an Authelia Identity Provider
- We assume you are already familiar with using Authelia. Let's say your LobeChat instance is deployed at [https://lobe.example.com/](https://lobe.example.com/). Note that currently only localhost supports HTTP access; other domains need to enable TLS, otherwise Authelia will actively interrupt authentication by default.
+ We assume you are already familiar with using Authelia. Let's say your LobeHub instance is deployed at [https://lobe.example.com/](https://lobe.example.com/). Note that currently only localhost supports HTTP access; other domains need to enable TLS, otherwise Authelia will actively interrupt authentication by default.
Now, let's open and edit the configuration file of your Authelia instance:
@@ -32,7 +32,7 @@ tags:
## The other portions of the mandatory OpenID Connect 1.0 configuration go here.
## See: https://www.authelia.com/c/oidc
- id: lobe-chat
- description: LobeChat
+ description: LobeHub
secret: '$pbkdf2-sha512$310000$c8p78n7pUMln0jzvd4aK4Q$JNRBzwAo0ek5qKn50cFzzvE9RXV88h1wJn5KGiHrD0YKtZaR/nCb2CJPOsKaPK0hjf.9yHxzQGZziziccp6Yng' # The digest of 'insecure_secret'.
public: false
authorization_policy: two_factor
@@ -45,13 +45,13 @@ tags:
userinfo_signing_algorithm: none
```
- Make sure to replace secret and `redirect_urls` with your own values. Note! The secret configured in Authelia is ciphertext, i.e., a salted hash value. Its corresponding plaintext needs to be filled in LobeChat later.
+ Make sure to replace secret and `redirect_urls` with your own values. Note! The secret configured in Authelia is ciphertext, i.e., a salted hash value. Its corresponding plaintext needs to be filled in LobeHub later.
Save the configuration file and restart the Authelia service. Now we have completed the Authelia configuration.
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -68,6 +68,6 @@ tags:
- After a successful deployment, users will be able to use LobeChat by authenticating with the users
+ After a successful deployment, users will be able to use LobeHub by authenticating with the users
configured in Authelia.
diff --git a/docs/self-hosting/advanced/auth/next-auth/authelia.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/authelia.zh-CN.mdx
index 437439d973..56391ca589 100644
--- a/docs/self-hosting/advanced/auth/next-auth/authelia.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/authelia.zh-CN.mdx
@@ -1,12 +1,12 @@
---
-title: 在 LobeChat 中配置 Authelia 身份验证服务
-description: 学习如何在 LobeChat 中配置 Authelia 身份验证服务,包括创建提供程序、配置环境变量和部署 LobeChat。详细步骤和必要环境变量设置。
+title: 在 LobeHub 中配置 Authelia 身份验证服务
+description: 学习如何在 LobeHub 中配置 Authelia 身份验证服务,包括创建提供程序、配置环境变量和部署 LobeHub。详细步骤和必要环境变量设置。
tags:
- Authelia
- 身份验证
- 单点登录
- 环境变量
- - LobeChat
+ - LobeHub
---
# 配置 Authelia 身份验证服务
@@ -16,7 +16,7 @@ tags:
### 创建 Authelia 提供应用
- 我们现在默认您已经了解了如何使用 Authelia。假设您的 LobeChat 实例部署在 `https://lobe.example.com/` 中。注意,目前只有 `localhost` 支持 HTTP 访问,其他域名需要启用 TLS,否则 Authelia 默认将主动中断身份认证。
+ 我们现在默认您已经了解了如何使用 Authelia。假设您的 LobeHub 实例部署在 `https://lobe.example.com/` 中。注意,目前只有 `localhost` 支持 HTTP 访问,其他域名需要启用 TLS,否则 Authelia 默认将主动中断身份认证。
现在,我们打开 Authelia 实例的配置文件进行编辑:
@@ -30,7 +30,7 @@ tags:
## The other portions of the mandatory OpenID Connect 1.0 configuration go here.
## See: https://www.authelia.com/c/oidc
- id: lobe-chat
- description: LobeChat
+ description: LobeHub
secret: '$pbkdf2-sha512$310000$c8p78n7pUMln0jzvd4aK4Q$JNRBzwAo0ek5qKn50cFzzvE9RXV88h1wJn5KGiHrD0YKtZaR/nCb2CJPOsKaPK0hjf.9yHxzQGZziziccp6Yng' # The digest of 'insecure_secret'.
public: false
authorization_policy: two_factor
@@ -43,13 +43,13 @@ tags:
userinfo_signing_algorithm: none
```
- 请您确保 `secret` 和 `redirect_urls` 替换成您自己的值。注意!Authelia 中配置 `secret` 是密文,即加盐哈希值。其对应的明文稍后需要填写在 lobeChat 中。
+ 请您确保 `secret` 和 `redirect_urls` 替换成您自己的值。注意!Authelia 中配置 `secret` 是密文,即加盐哈希值。其对应的明文稍后需要填写在 LobeHub 中。
保存配置文件,然后重启 Authelia 服务。现在我们完成了 Authelia 的配置工作。
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------------ |
@@ -66,5 +66,5 @@ tags:
- 部署成功后,用户将可以使用 Authelia 中配置的用户通过身份认证并使用 LobeChat。
+ 部署成功后,用户将可以使用 Authelia 中配置的用户通过身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/authentik.mdx b/docs/self-hosting/advanced/auth/next-auth/authentik.mdx
index ad7af1c253..fd85b55f52 100644
--- a/docs/self-hosting/advanced/auth/next-auth/authentik.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/authentik.mdx
@@ -1,13 +1,13 @@
---
-title: Configuring Authentik Authentication Service for LobeChat
+title: Configuring Authentik Authentication Service for LobeHub
description: >-
- Learn how to configure Authentik for Single Sign-On (SSO) for LobeChat,
+ Learn how to configure Authentik for Single Sign-On (SSO) for LobeHub,
including creating an application provider, setting environment variables, and
deployment instructions.
tags:
- Authentik Configuration
- Single Sign-On (SSO)
- - LobeChat Authentication
+ - LobeHub Authentication
- Environment Variables
- Deployment Instructions
---
@@ -34,7 +34,7 @@ tags:
URL matches the deployed URL. - Replace `your-domain` with your own domain name
-
+
Click **Done**
@@ -46,7 +46,7 @@ tags:
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -63,6 +63,6 @@ tags:
- After a successful deployment, users will be able to use LobeChat by authenticating with the users
+ After a successful deployment, users will be able to use LobeHub by authenticating with the users
configured in Authentik.
diff --git a/docs/self-hosting/advanced/auth/next-auth/authentik.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/authentik.zh-CN.mdx
index aa7ed8b2ed..4f3fce0425 100644
--- a/docs/self-hosting/advanced/auth/next-auth/authentik.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/authentik.zh-CN.mdx
@@ -1,12 +1,12 @@
---
-title: 在 LobeChat 中配置 Authentik 身份验证服务
-description: 学习如何在 LobeChat 中配置 Authentik 身份验证服务,包括创建提供程序、配置环境变量和部署 LobeChat。详细步骤和必要环境变量设置。
+title: 在 LobeHub 中配置 Authentik 身份验证服务
+description: 学习如何在 LobeHub 中配置 Authentik 身份验证服务,包括创建提供程序、配置环境变量和部署 LobeHub。详细步骤和必要环境变量设置。
tags:
- Authentik
- 身份验证
- 单点登录
- 环境变量
- - LobeChat
+ - LobeHub
---
# 配置 Authentik 身份验证服务
@@ -31,7 +31,7 @@ tags:
your-domain 请替换为自己的域名
-
+
点击「完成」
@@ -41,7 +41,7 @@ tags:
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------- |
@@ -58,5 +58,5 @@ tags:
- 部署成功后,用户将可以使用 Authentik 中配置的用户通过身份认证并使用 LobeChat。
+ 部署成功后,用户将可以使用 Authentik 中配置的用户通过身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/casdoor.mdx b/docs/self-hosting/advanced/auth/next-auth/casdoor.mdx
index 3d309b46a9..55b09678db 100644
--- a/docs/self-hosting/advanced/auth/next-auth/casdoor.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/casdoor.mdx
@@ -1,14 +1,14 @@
---
-title: Configuring Casdoor Authentication Service in LobeChat
+title: Configuring Casdoor Authentication Service in LobeHub
description: >-
- Learn how to configure the Casdoor authentication service in LobeChat,
+ Learn how to configure the Casdoor authentication service in LobeHub,
including deployment, creation, permission settings, and environment
variables.
tags:
- Casdoor Authentication
- Environment Variable Configuration
- Single Sign-On
- - LobeChat
+ - LobeHub
---
# Configuring Casdoor Authentication Service
@@ -17,19 +17,19 @@ tags:
If you want to privately deploy Casdoor, we recommend using Docker Compose to deploy it together
- with the LobeChat database version, allowing LobeChat to share the same Postgres instance.
+ with the LobeHub database version, allowing LobeHub to share the same Postgres instance.
## Casdoor Configuration Process
If you are deploying using a local network IP, the following assumptions apply:
-- Your LobeChat database version IP/port is `http://LOBECHAT_IP:3210`.
+- Your LobeHub database version IP/port is `http://LOBEHUB_IP:3210`.
- You privately deploy Casdoor, and its domain is `http://CASDOOR_IP:8000`.
If you are deploying using a public network, the following assumptions apply:
-- Your LobeChat database version domain is `https://lobe.example.com`.
+- Your LobeHub database version domain is `https://lobe.example.com`.
- You privately deploy Casdoor, and its domain is `https://lobe-auth-api.example.com`.
@@ -37,12 +37,12 @@ If you are deploying using a public network, the following assumptions apply:
Access your privately deployed Casdoor WebUI (default is `http://localhost:8000/`) to enter the console. The default account is `admin`, and the password is `123`.
- Go to `Authentication` -> `Applications`, create a `LobeChat` application or directly modify the built-in `built-in` application. You can explore other fields, but you must configure at least the following fields:
+ Go to `Authentication` -> `Applications`, create a `LobeHub` application or directly modify the built-in `built-in` application. You can explore other fields, but you must configure at least the following fields:
- - Name, Display Name: `LobeChat`
+ - Name, Display Name: `LobeHub`
- Redirect URLs:
- Local Development Environment: `http://localhost:3210/api/auth/callback/casdoor`
- - Local Network IP Deployment: `http://LOBECHAT_IP:3210/api/auth/callback/casdoor`
+ - Local Network IP Deployment: `http://LOBEHUB_IP:3210/api/auth/callback/casdoor`
- Public Network Environment: `https://lobe.example.com/api/auth/callback/casdoor`
There are also some optional fields that can enhance user experience:
@@ -97,7 +97,7 @@ If you are deploying using a public network, the following assumptions apply:
### Disable User Registration
- Go to `Identity` -> `Applications`, select the `LobeChat` application, and set `Allow Register` to `false`.
+ Go to `Identity` -> `Applications`, select the `LobeHub` application, and set `Allow Register` to `false`.
Disabling user registration is necessary to prevent users from registering through the Casdoor
@@ -108,7 +108,7 @@ If you are deploying using a public network, the following assumptions apply:
> Available on Casdoor `>=1.843.0`.
- Configure the Casdoor webhook so that LobeChat can receive notifications when user information is updated.
+ Configure the Casdoor webhook so that LobeHub can receive notifications when user information is updated.
Go to `Admin` -> `Webhooks`, add a webhook, and fill in the following fields:
@@ -125,20 +125,20 @@ If you are deploying using a public network, the following assumptions apply:
### Configure Environment Variables
- Set the obtained `Client ID` and `Client Secret` as `AUTH_CASDOOR_ID` and `AUTH_CASDOOR_SECRET` in the LobeChat environment variables.
+ Set the obtained `Client ID` and `Client Secret` as `AUTH_CASDOOR_ID` and `AUTH_CASDOOR_SECRET` in the LobeHub environment variables.
- Configure `AUTH_CASDOOR_ISSUER` in the LobeChat environment variables as follows:
+ Configure `AUTH_CASDOOR_ISSUER` in the LobeHub environment variables as follows:
- `http://localhost:8000/` if you are in a local development environment.
- `http://CASDOOR_IP:8000/` if you are privately deploying Casdoor in a local network.
- `https://lobe-auth-api.example.com/` if you are deploying Casdoor in a public network environment.
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `AUTH_SECRET` | Required | A key for encrypting Auth.js session tokens. You can generate a key using the command: `openssl rand -base64 32`. |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeChat. Fill in `casdoor` for using Casdoor. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeHub. Fill in `casdoor` for using Casdoor. |
| `AUTH_CASDOOR_ID` | Required | The client ID from the Casdoor application details page. |
| `AUTH_CASDOOR_SECRET` | Required | The client secret from the Casdoor application details page. |
| `AUTH_CASDOOR_ISSUER` | Required | The OpenID Connect issuer for the Casdoor provider. |
@@ -152,5 +152,5 @@ If you are deploying using a public network, the following assumptions apply:
- Once deployed successfully, users will be able to authenticate via Casdoor and use LobeChat.
+ Once deployed successfully, users will be able to authenticate via Casdoor and use LobeHub.
diff --git a/docs/self-hosting/advanced/auth/next-auth/casdoor.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/casdoor.zh-CN.mdx
index cdeeeed468..226fc30033 100644
--- a/docs/self-hosting/advanced/auth/next-auth/casdoor.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/casdoor.zh-CN.mdx
@@ -1,11 +1,11 @@
---
-title: 在 LobeChat 中配置 Casdoor 身份验证服务
-description: 学习如何在 LobeChat 中配置 Casdoor 身份验证服务,包括部署、创建、设置权限和环境变量。
+title: 在 LobeHub 中配置 Casdoor 身份验证服务
+description: 学习如何在 LobeHub 中配置 Casdoor 身份验证服务,包括部署、创建、设置权限和环境变量。
tags:
- Casdoor 身份验证
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置 Casdoor 身份验证服务
@@ -13,20 +13,20 @@ tags:
[Casdoor](https://github.com/casdoor/casdoor) 是一个开源的身份验证服务,功能配置丰富且易于上手。
- 若你想要私有部署 Casdoor,我们建议你将之与 LobeChat 数据库版本一同使用 Docker Compose 部署,此时
- LobeChat 可以与之共用同一个 Postgres 实例。
+ 若你想要私有部署 Casdoor,我们建议你将之与 LobeHub 数据库版本一同使用 Docker Compose 部署,此时
+ LobeHub 可以与之共用同一个 Postgres 实例。
## Casdoor 配置流程
若你使用局域网 IP 部署,下文假设:
-- 你的 LobeChat 数据库版本 IP / 端口为 `http://LOBECHAT_IP:3210`。
+- 你的 LobeHub 数据库版本 IP / 端口为 `http://LOBEHUB_IP:3210`。
- 你私有部署 Casdoor,其域名为 `http://CASDOOR_IP:8000`。
若你使用公网部署,下文假设:
-- 你的 LobeChat 数据库版本域名为 `https://lobe.example.com`。
+- 你的 LobeHub 数据库版本域名为 `https://lobe.example.com`。
- 你私有部署 Casdoor,其域名为 `https://lobe-auth-api.example.com`。
@@ -34,12 +34,12 @@ tags:
访问你私有部署的 Casdoor WebUI(默认为 `http://localhost:8000/`) 进入控制台,默认账号为 `admin`,密码为 `123`。
- 前往 `身份认证` -> `应用`,创建一个 `LobeChat` 应用或直接修改内置的 `built-in` 应用,其他字段可以自行探索,但你至少需要配置以下字段:
+ 前往 `身份认证` -> `应用`,创建一个 `LobeHub` 应用或直接修改内置的 `built-in` 应用,其他字段可以自行探索,但你至少需要配置以下字段:
- - 名称、显示名称:`LobeChat`
+ - 名称、显示名称:`LobeHub`
- 重定向 URLs:
- 本地开发环境:`http://localhost:3210/api/auth/callback/casdoor`
- - 局域网 IP 部署:`http://LOBECHAT_IP:3210/api/auth/callback/casdoor`
+ - 局域网 IP 部署:`http://LOBEHUB_IP:3210/api/auth/callback/casdoor`
- 公网环境:`https://lobe.example.com/api/auth/callback/casdoor`
还有一些不必需但是可以提高用户体验的字段:
@@ -96,7 +96,7 @@ tags:
> 在 Casdoor `>=1.843.0` 上可用。
- 配置 Casdoor 的 Webhook 以便在用户信息更新时同步到 LobeChat 。
+ 配置 Casdoor 的 Webhook 以便在用户信息更新时同步到 LobeHub。
前往 `管理工具` -> `Webhooks`,创建一个 Webhook,添加一个 Webhook,填写以下字段:
@@ -123,15 +123,15 @@ tags:
### 配置环境变量
- 将获取到的 `客户端 ID` 和 `客户端`,设为 LobeChat 环境变量中的 `AUTH_CASDOOR_ID` 和 `AUTH_CASDOOR_SECRET`。
+ 将获取到的 `客户端 ID` 和 `客户端密钥`,设为 LobeHub 环境变量中的 `AUTH_CASDOOR_ID` 和 `AUTH_CASDOOR_SECRET`。
- 配置 LobeChat 环境变量中 `AUTH_CASDOOR_ISSUER` 为:
+ 配置 LobeHub 环境变量中 `AUTH_CASDOOR_ISSUER` 为:
- `http://localhost:8000/`,若你是本地开发环境
- `http://CASDOOR_IP:8000/`,若你是局域网私有部署的 Casdoor
- `https://lobe-auth-api.example.com/`,若你是公网环境部署的 Casdoor
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------------ |
@@ -148,4 +148,4 @@ tags:
-部署成功后,用户将可以通过 Casdoor 身份认证并使用 LobeChat。
+部署成功后,用户将可以通过 Casdoor 身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.mdx b/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.mdx
index ee8bb552db..2290273508 100644
--- a/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.mdx
@@ -1,13 +1,13 @@
---
-title: Configuring Cloudflare Zero Trust Authentication Service for LobeChat
+title: Configuring Cloudflare Zero Trust Authentication Service for LobeHub
description: >-
Learn how to configure Cloudflare Zero Trust for Single Sign-On (SSO) for
- LobeChat, including creating an application provider, setting environment
+ LobeHub, including creating an application provider, setting environment
variables, and deployment instructions.
tags:
- Cloudflare Zero Trust
- Single Sign-On (SSO)
- - LobeChat Authentication
+ - LobeHub Authentication
- Environment Variables
- Deployment Instructions
---
@@ -19,33 +19,33 @@ tags:
### Creating an Application in Cloudflare Zero Trust
- We assume you are already familiar with using the Cloudflare Zero Trust platform and that your LobeChat instance is deployed at `https://chat.example.com`.
+ We assume you are already familiar with using the Cloudflare Zero Trust platform and that your LobeHub instance is deployed at `https://chat.example.com`.
First, we need to visit `https://one.dash.cloudflare.com/` and navigate to `Access - Applications`.
- 
+ 
Now, on the current page, click `Add an application` and select `SaaS`.
- 
+ 
- In the `Application` text box, enter the application name, such as `LobeChat SSO`. Then click `Select OIDC`, followed by clicking `Add application`.
+ In the `Application` text box, enter the application name, such as `LobeHub SSO`. Then click `Select OIDC`, followed by clicking `Add application`.
- 
+ 
- At this point, you have successfully created a SaaS application named `LobeChat SSO` in Cloudflare Zero Trust.
+ At this point, you have successfully created a SaaS application named `LobeHub SSO` in Cloudflare Zero Trust.
Next, we need to enter `https://chat.example.com/api/auth/callback/cloudflare-zero-trust` in the `Redirect URLs` field (note that `chat.example.com` should be replaced with your instance's address).
- 
+ 
- Finally, scroll down the page and record the following three values: `Client secret`, `Client ID`, and `Issuer`. You will need these for setting the environment variables when deploying LobeChat.
+ Finally, scroll down the page and record the following three values: `Client secret`, `Client ID`, and `Issuer`. You will need these for setting the environment variables when deploying LobeHub.
- 
+ 
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ----------------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -62,6 +62,6 @@ tags:
- After a successful deployment, users will be able to use LobeChat by authenticating with the users
+ After a successful deployment, users will be able to use LobeHub by authenticating with the users
configured in Cloudflare Zero Trust.
diff --git a/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.zh-CN.mdx
index e58d347385..33dd0b22d0 100644
--- a/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/cloudflare-zero-trust.zh-CN.mdx
@@ -1,14 +1,14 @@
---
-title: 在 LobeChat 中配置 Cloudflare Zero Trust 身份验证服务
+title: 在 LobeHub 中配置 Cloudflare Zero Trust 身份验证服务
description: >-
- 学习如何在 LobeChat 中配置 Cloudflare Zero Trust 身份验证服务,包括创建提供程序、配置环境变量和部署
- LobeChat。详细步骤和必要环境变量设置。
+ 学习如何在 LobeHub 中配置 Cloudflare Zero Trust 身份验证服务,包括创建提供程序、配置环境变量和部署
+ LobeHub。详细步骤和必要环境变量设置。
tags:
- Cloudflare Zero Trust
- 身份验证
- 单点登录
- 环境变量
- - LobeChat
+ - LobeHub
---
# 配置 Cloudflare Zero Trust 身份验证服务
@@ -18,31 +18,31 @@ tags:
### 在 Cloudflare Zero Trust 中创建应用
- 我们现在默认您已经了解了如何使用 Cloudflare Zero Trust 平台且假设您的 LobeChat 实例部署在 `https://chat.example.com` 中。
+ 我们现在默认您已经了解了如何使用 Cloudflare Zero Trust 平台且假设您的 LobeHub 实例部署在 `https://chat.example.com` 中。
首先我们需要访问 `https://one.dash.cloudflare.com/` 并前往 `Access - Applications` 中。
- 
+ 
现在,在所在页面点击 `Add an application` 并选择 `SaaS`。
- 
+ 
- 在 `Application` 文本框内填入应用名称,如:`LobeChat SSO`,然后点击 `Select OIDC` 后点击 `Add applicaiton`
+ 在 `Application` 文本框内填入应用名称,如:`LobeHub SSO`,然后点击 `Select OIDC` 后点击 `Add application`
- 
+ 
- 至此您已成功在 Clouflare Zero Trust 中创建了一个名为 `LobeChat SSO` 的 SaaS 应用。
+ 至此您已成功在 Cloudflare Zero Trust 中创建了一个名为 `LobeHub SSO` 的 SaaS 应用。
- 接下来我们需要在 `Redirect URLs` 中填入 `https://chat.example.com/api/auth/callback/cloudflare-zero-trust`(注意此处的 `chat.example.com` 需要替换为您的实例地址) 
+ 接下来我们需要在 `Redirect URLs` 中填入 `https://chat.example.com/api/auth/callback/cloudflare-zero-trust`(注意此处的 `chat.example.com` 需要替换为您的实例地址) 
- 最后我们将页面往下滚动,您将需要记录以下三个值 `Client secret`, `Client ID` 及 `Issuer` 以备后续部署 LobeChat 环境变量使用。
+ 最后我们将页面往下滚动,您将需要记录以下三个值 `Client secret`, `Client ID` 及 `Issuer` 以备后续部署 LobeHub 环境变量使用。
- 
+ 
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ----------------------------------- | -- | ------------------------------------------------------------------------------------------------------------ |
@@ -59,5 +59,5 @@ tags:
- 部署成功后,用户将可以使用 Cloudflare Zero Trust 中配置的用户通过身份认证并使用 LobeChat。
+ 部署成功后,用户将可以使用 Cloudflare Zero Trust 中配置的用户通过身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/github.mdx b/docs/self-hosting/advanced/auth/next-auth/github.mdx
index 0feb0f42b1..717463fe49 100644
--- a/docs/self-hosting/advanced/auth/next-auth/github.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/github.mdx
@@ -1,12 +1,12 @@
---
-title: Configuring Github Authentication Service for LobeChat
+title: Configuring Github Authentication Service for LobeHub
description: >-
- Learn how to configure Github authentication service for LobeChat, including
+ Learn how to configure Github authentication service for LobeHub, including
creating a Github provider, setting up environment variables, and deploying
- LobeChat.
+ LobeHub.
tags:
- Github authentication
- - LobeChat
+ - LobeHub
- Environment variables
- Single Sign-On
- OAuth authentication
@@ -23,40 +23,40 @@ tags:
Fill in the Github App name, Homepage URL, and Callback URL.
-
+
Set the webhook callback URL according to your needs.
-
+
Set the permission to read email addresses.
-
+
-
+
Set whether it is accessible publicly or only accessible to yourself.
-
+
Click "Create Github App".
After successful creation, click "Generate a new client secret" to create a client secret.
-
+
After successful creation, save the `Client ID` and `Client Secret`.
-
+
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate the key using the command: `openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeChat. Use `github` for Github. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeHub. Use `github` for Github. |
| `AUTH_GITHUB_ID` | Required | Client ID in the Github App details page. |
| `AUTH_GITHUB_SECRET` | Required | Client Secret in the Github App details page. |
| `AUTH_URL` | Required | This URL is used to specify the callback address for Auth.js when performing OAuth authentication. Only set it if the default generated redirect address is incorrect. `https://example.com/api/auth` |
@@ -68,7 +68,7 @@ tags:
- After successful deployment, users will be able to authenticate with Github and use LobeChat.
+ After successful deployment, users will be able to authenticate with Github and use LobeHub.
[github-create-app]: https://github.com/settings/apps/new
diff --git a/docs/self-hosting/advanced/auth/next-auth/github.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/github.zh-CN.mdx
index 7ad231731c..b8d98e8003 100644
--- a/docs/self-hosting/advanced/auth/next-auth/github.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/github.zh-CN.mdx
@@ -1,12 +1,12 @@
---
-title: 在 LobeChat 中配置 Github 身份验证服务
-description: 学习如何在 LobeChat 中配置 Github 身份验证服务,包括创建新的 Github App、设置权限和环境变量。
+title: 在 LobeHub 中配置 Github 身份验证服务
+description: 学习如何在 LobeHub 中配置 Github 身份验证服务,包括创建新的 Github App、设置权限和环境变量。
tags:
- Github 身份验证
- Github App
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置 Github 身份验证服务
@@ -20,35 +20,35 @@ tags:
填写 Github App name、Homepage URL、Callbak URL
-
+
按照自己所需设置 Webhook 回调地址
-
+
设置读取邮件地址权限
-
+
-
+
设置公开访问还是仅自己访问
-
+
点击「Create Github App」
创建成功后,点击「Generate a new client secret」创建客户端 Secret
-
+
创建成功后, 将 `客户端 ID` 和 `客户端 Secret` 保存下来。
-
+
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------- |
@@ -63,4 +63,4 @@ tags:
-部署成功后,用户将可以通过 Github 身份认证并使用 LobeChat。
+部署成功后,用户将可以通过 Github 身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/google.mdx b/docs/self-hosting/advanced/auth/next-auth/google.mdx
index 34ae3870b8..ee23612299 100644
--- a/docs/self-hosting/advanced/auth/next-auth/google.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/google.mdx
@@ -1,17 +1,15 @@
---
-title: Configuration of Google SSO Authentication Service for LobeChat
+title: Configuration of Google SSO Authentication Service for LobeHub
description: >-
- Learn how to configure Google SSO Authentication Service for LobeChat, create
+ Learn how to configure Google SSO Authentication Service for LobeHub, create
OAuth applications, add users, and set up environment variables for seamless
integration.
tags:
- Google SSO
- - Authentication Service
+ - OAuth 2.0
+ - LobeHub
+ - Authentication
- Google Cloud
- - OAuth
- - SSO
- - Environment Variables
- - LobeChat
---
# Configuration of Google SSO Authentication Service
@@ -34,8 +32,8 @@ tags:
```
- \- You can add or modify redirect URIs after registration, but make sure the URL matches your deployed LobeChat instance.
- \- Replace "your-domain" with your actual domain.
+ - You can add or modify redirect URIs after registration, but make sure the URL matches your deployed LobeHub instance.
+ - Replace "your-domain" with your actual domain.
Click **Create**.
@@ -51,12 +49,12 @@ tags:
### Configure Environment Variables
- When deploying LobeChat, configure the following environment variables:
+ When deploying LobeHub, configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | -------------------------------------------------------------------------------------------------------------------- |
| `NEXT_AUTH_SECRET` | Required | Key to encrypt Auth.js session tokens. Generate using: `openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeChat. Use `google` for Google SSO. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeHub. Use `google` for Google SSO. |
| `AUTH_GOOGLE_ID` | Required | Client ID from Google Cloud OAuth. |
| `AUTH_GOOGLE_SECRET` | Required | Client Secret from Google Cloud OAuth. |
| `AUTH_URL` | Required | Specifies the callback address for Auth.js when performing OAuth authentication. E.g. `https://your-domain/api/auth` |
@@ -67,7 +65,7 @@ tags:
- After successful deployment, users can sign in to LobeChat using their Google accounts (those added as Test Users, if not in production).
+ After successful deployment, users can sign in to LobeHub using their Google accounts (those added as Test Users, if not in production).
## Advanced Configuration
diff --git a/docs/self-hosting/advanced/auth/next-auth/google.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/google.zh-CN.mdx
new file mode 100644
index 0000000000..34e40f98d8
--- /dev/null
+++ b/docs/self-hosting/advanced/auth/next-auth/google.zh-CN.mdx
@@ -0,0 +1,77 @@
+---
+title: 为 LobeHub 配置 Google SSO 认证服务
+description: 了解如何为 LobeHub 配置 Google SSO 认证服务,创建 OAuth 应用、添加用户,并设置环境变量以实现无缝集成。
+tags:
+ - Google SSO
+ - OAuth 2.0
+ - 认证服务
+ - LobeHub
+---
+
+# 配置 Google SSO 认证服务
+
+
+ ### 创建 Google Cloud OAuth 2.0 客户端
+
+ 在你的 [Google Cloud 控制台][google-cloud-console] 中,导航至 **API 和服务 > 凭据**。
+
+ 点击 **创建凭据**,选择 **OAuth 客户端 ID**。
+
+ 如果你尚未设置 OAuth 同意屏幕,系统会提示你进行设置。请完成 OAuth 同意屏幕的配置(填写应用名称、支持邮箱,并根据需要添加授权用户)。
+
+ 选择 **Web 应用** 作为应用类型。
+
+ 在 **授权的重定向 URI** 部分,输入:
+
+ ```bash
+ https://your-domain/api/auth/callback/google
+ ```
+
+
+ - 注册后你可以添加或修改重定向 URI,但请确保该 URL 与你部署的 LobeHub 实例一致。
+ - 请将 "your-domain" 替换为你的实际域名。
+
+
+ 点击 **创建**。
+
+ 创建完成后,复制 **客户端 ID(Client ID)** 和 **客户端密钥(Client Secret)**。
+
+
+
+ ### 添加用户(仅限内部测试可选)
+
+ 如果你的应用处于 **测试** 或 **内部** 发布状态,请在 OAuth 同意屏幕的 **测试用户** 部分添加用户邮箱。\
+ 未添加的用户将无法进行身份验证。
+
+ ### 配置环境变量
+
+ 在部署 LobeHub 时,请配置以下环境变量:
+
+ | 环境变量名称 | 类型 | 描述 |
+ | ------------------------- | -- | --------------------------------------------------------------- |
+ | `NEXT_AUTH_SECRET` | 必填 | 用于加密 Auth.js 会话令牌的密钥。可使用命令生成:`openssl rand -base64 32` |
+ | `NEXT_AUTH_SSO_PROVIDERS` | 必填 | 指定 LobeHub 使用的单点登录提供商。使用 `google` 表示启用 Google SSO。 |
+ | `AUTH_GOOGLE_ID` | 必填 | 来自 Google Cloud OAuth 的客户端 ID。 |
+ | `AUTH_GOOGLE_SECRET` | 必填 | 来自 Google Cloud OAuth 的客户端密钥。 |
+ | `AUTH_URL` | 必填 | 指定 Auth.js 在执行 OAuth 认证时的回调地址。例如:`https://your-domain/api/auth` |
+
+
+ 更多关于这些环境变量的说明,请参阅 [📘 环境变量文档](/docs/self-hosting/environment-variable#google)。
+
+
+
+
+ 部署成功后,用户即可使用其 Google 账号登录 LobeHub(如果未处于生产环境,需先添加为测试用户)。
+
+
+## 高级配置
+
+如需了解更多高级选项、权限范围(Scopes)及同意屏幕配置,请参阅 [Google 身份平台文档][google-identity-docs]。
+
+## 相关资源
+
+- [快速开始:配置 Google OAuth 客户端][google-oauth-quickstart]
+
+[google-cloud-console]: https://console.cloud.google.com/apis/credentials
+[google-identity-docs]: https://developers.google.com/identity
+[google-oauth-quickstart]: https://developers.google.com/identity/protocols/oauth2/web-server#creatingcred
diff --git a/docs/self-hosting/advanced/auth/next-auth/keycloak.mdx b/docs/self-hosting/advanced/auth/next-auth/keycloak.mdx
index d9cbb94973..b7820eb228 100644
--- a/docs/self-hosting/advanced/auth/next-auth/keycloak.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/keycloak.mdx
@@ -1,22 +1,22 @@
---
-title: Configuring Keycloak Authentication Service in LobeChat
+title: Configuring Keycloak Authentication Service in LobeHub
description: >-
- Learn how to configure the Keycloak authentication service in LobeChat,
+ Learn how to configure the Keycloak authentication service in LobeHub,
including deployment, creation, permission settings, and environment
variables.
tags:
- Keycloak Authentication
- Environment Variable Configuration
- Single Sign-On
- - LobeChat
+ - LobeHub
---
-# Configuring Keycloak Authentication Service in LobeChat
+# Configuring Keycloak Authentication Service in LobeHub
[Keycloak](https://www.keycloak.org/) is an open-source identity and access management solution that provides single sign-on, identity brokering, and social login features, suitable for modern applications and services.
- If you want to privately deploy Keycloak, we recommend using it together with LobeChat via Docker
+ If you want to privately deploy Keycloak, we recommend using it together with LobeHub via Docker
Compose deployment for easier service management.
@@ -24,12 +24,12 @@ tags:
If you deploy using a local network IP, this guide assumes:
-- Your LobeChat database version IP/port is `http://LOBECHAT_IP:3210`.
+- Your LobeHub database version IP/port is `http://LOBEHUB_IP:3210`.
- Your privately deployed Keycloak domain is `http://KEYCLOAK_IP:8080`.
If you deploy using a public network, this guide assumes:
-- Your LobeChat database version domain is `https://lobe.example.com`.
+- Your LobeHub database version domain is `https://lobe.example.com`.
- Your privately deployed Keycloak domain is `https://lobe-auth-api.example.com`.
@@ -40,13 +40,13 @@ If you deploy using a public network, this guide assumes:
1. Create a new Realm
- Click the dropdown menu in the upper left corner and select "Create Realm"
- - Enter a name, such as "LobeChat", then click "Create"
+ - Enter a name, such as "LobeHub", then click "Create"
2. Create a Client
- Select "Clients" from the left menu, then click "Create client"
- Fill in the following information:
- - Client ID: `lobechat`
+ - Client ID: `lobehub`
- Client type: `OpenID Connect`
- Click "Next"
- On the "Capability config" page:
@@ -56,9 +56,9 @@ If you deploy using a public network, this guide assumes:
- On the "Login settings" page:
- Valid redirect URIs:
- Local development environment: `http://localhost:3210/api/auth/callback/keycloak`
- - Local network IP deployment: `http://LOBECHAT_IP:3210/api/auth/callback/keycloak`
+ - Local network IP deployment: `http://LOBEHUB_IP:3210/api/auth/callback/keycloak`
- Public environment: `https://lobe.example.com/api/auth/callback/keycloak`
- - Web origins: Add your LobeChat domain or IP
+ - Web origins: Add your LobeHub domain or IP
- Click "Save"
3. Get Client Secret
@@ -97,20 +97,20 @@ If you deploy using a public network, this guide assumes:
### Configure Environment Variables
- Set the obtained client ID and client secret as `AUTH_KEYCLOAK_ID` and `AUTH_KEYCLOAK_SECRET` in the LobeChat environment variables.
+ Set the obtained client ID and client secret as `AUTH_KEYCLOAK_ID` and `AUTH_KEYCLOAK_SECRET` in the LobeHub environment variables.
- Configure the LobeChat environment variable `AUTH_KEYCLOAK_ISSUER` as:
+ Configure the LobeHub environment variable `AUTH_KEYCLOAK_ISSUER` as:
- - `http://localhost:8080/realms/LobeChat` for local development environment
- - `http://KEYCLOAK_IP:8080/realms/LobeChat` for privately deployed Keycloak on a local network
- - `https://lobe-auth-api.example.com/realms/LobeChat` for Keycloak deployed in a public environment
+ - `http://localhost:8080/realms/LobeHub` for local development environment
+ - `http://KEYCLOAK_IP:8080/realms/LobeHub` for privately deployed Keycloak on a local network
+ - `https://lobe-auth-api.example.com/realms/LobeHub` for Keycloak deployed in a public environment
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate a key using: `openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeChat. For Keycloak, fill in `keycloak`. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeHub. For Keycloak, fill in `keycloak`. |
| `AUTH_KEYCLOAK_ID` | Required | Keycloak client ID |
| `AUTH_KEYCLOAK_SECRET` | Required | Keycloak client secret |
| `AUTH_KEYCLOAK_ISSUER` | Required | OpenID Connect issuer URL for the Keycloak provider, in the format `{keycloak_url}/realms/{realm_name}` |
@@ -122,5 +122,5 @@ If you deploy using a public network, this guide assumes:
- After successful deployment, users will be able to authenticate through Keycloak and use LobeChat.
+ After successful deployment, users will be able to authenticate through Keycloak and use LobeHub.
diff --git a/docs/self-hosting/advanced/auth/next-auth/keycloak.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/keycloak.zh-CN.mdx
index 84107a933a..aff67fa99f 100644
--- a/docs/self-hosting/advanced/auth/next-auth/keycloak.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/keycloak.zh-CN.mdx
@@ -1,11 +1,11 @@
---
-title: 在 LobeChat 中配置 Keycloak 身份验证服务
-description: 学习如何在 LobeChat 中配置 Keycloak 身份验证服务,包括部署、创建、设置权限和环境变量。
+title: 在 LobeHub 中配置 Keycloak 身份验证服务
+description: 学习如何在 LobeHub 中配置 Keycloak 身份验证服务,包括部署、创建、设置权限和环境变量。
tags:
- Keycloak 身份验证
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置 Keycloak 身份验证服务
@@ -13,7 +13,7 @@ tags:
[Keycloak](https://www.keycloak.org/) 是一个开源的身份和访问管理解决方案,提供单点登录、身份代理和社交登录等功能,适用于现代应用和服务。
- 若你想要私有部署 Keycloak,我们建议你将之与 LobeChat 一同使用 Docker Compose
+ 若你想要私有部署 Keycloak,我们建议你将之与 LobeHub 一同使用 Docker Compose
部署,这样可以更方便地管理服务。
@@ -21,12 +21,12 @@ tags:
若你使用局域网 IP 部署,下文假设:
-- 你的 LobeChat 数据库版本 IP / 端口为 `http://LOBECHAT_IP:3210`。
+- 你的 LobeHub 数据库版本 IP / 端口为 `http://LOBEHUB_IP:3210`。
- 你私有部署 Keycloak,其域名为 `http://KEYCLOAK_IP:8080`。
若你使用公网部署,下文假设:
-- 你的 LobeChat 数据库版本域名为 `https://lobe.example.com`。
+- 你的 LobeHub 数据库版本域名为 `https://lobe.example.com`。
- 你私有部署 Keycloak,其域名为 `https://lobe-auth-api.example.com`。
@@ -37,13 +37,13 @@ tags:
1. 创建新领域(Realm)
- 点击左上角的下拉菜单,选择 "Create Realm"
- - 输入名称,例如 "LobeChat",然后点击 "Create"
+ - 输入名称,例如 "LobeHub",然后点击 "Create"
2. 创建客户端(Client)
- 在左侧菜单中选择 "Clients",然后点击 "Create client"
- 填写以下信息:
- - Client ID: `lobechat`
+ - Client ID: `lobehub`
- Client type: `OpenID Connect`
- 点击 "Next"
- 在 "Capability config" 页面:
@@ -53,9 +53,9 @@ tags:
- 在 "Login settings" 页面:
- Valid redirect URIs:
- 本地开发环境:`http://localhost:3210/api/auth/callback/keycloak`
- - 局域网 IP 部署:`http://LOBECHAT_IP:3210/api/auth/callback/keycloak`
+ - 局域网 IP 部署:`http://LOBEHUB_IP:3210/api/auth/callback/keycloak`
- 公网环境:`https://lobe.example.com/api/auth/callback/keycloak`
- - Web origins: 添加你的 LobeChat 域名或 IP
+ - Web origins: 添加你的 LobeHub 域名或 IP
- 点击 "Save"
3. 获取客户端密钥
@@ -93,15 +93,15 @@ tags:
### 配置环境变量
- 将获取到的客户端 ID 和客户端密钥,设为 LobeChat 环境变量中的 `AUTH_KEYCLOAK_ID` 和 `AUTH_KEYCLOAK_SECRET`。
+ 将获取到的客户端 ID 和客户端密钥,设为 LobeHub 环境变量中的 `AUTH_KEYCLOAK_ID` 和 `AUTH_KEYCLOAK_SECRET`。
- 配置 LobeChat 环境变量中 `AUTH_KEYCLOAK_ISSUER` 为:
+ 配置 LobeHub 环境变量中 `AUTH_KEYCLOAK_ISSUER` 为:
- - `http://localhost:8080/realms/LobeChat`,若你是本地开发环境
- - `http://KEYCLOAK_IP:8080/realms/LobeChat`,若你是局域网私有部署的 Keycloak
- - `https://lobe-auth-api.example.com/realms/LobeChat`,若你是公网环境部署的 Keycloak
+ - `http://localhost:8080/realms/LobeHub`,若你是本地开发环境
+ - `http://KEYCLOAK_IP:8080/realms/LobeHub`,若你是局域网私有部署的 Keycloak
+ - `https://lobe-auth-api.example.com/realms/LobeHub`,若你是公网环境部署的 Keycloak
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------------ |
@@ -117,4 +117,4 @@ tags:
-部署成功后,用户将可以通过 Keycloak 身份认证并使用 LobeChat。
+部署成功后,用户将可以通过 Keycloak 身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/logto.mdx b/docs/self-hosting/advanced/auth/next-auth/logto.mdx
index 0c552fdf6a..ce55e0a80e 100644
--- a/docs/self-hosting/advanced/auth/next-auth/logto.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/logto.mdx
@@ -1,13 +1,13 @@
---
-title: Configuring Logto Authentication Service in LobeChat
+title: Configuring Logto Authentication Service in LobeHub
description: >-
- Learn how to configure Logto authentication service in LobeChat, including
+ Learn how to configure Logto authentication service in LobeHub, including
deployment, creation, setting permissions, and environment variables.
tags:
- Logto Authentication
- Environment Variable Configuration
- Single Sign-On
- - LobeChat
+ - LobeHub
---
# Configuring Logto Authentication Service
@@ -16,13 +16,13 @@ tags:
If you want to deploy Logto privately, we recommend using Docker Compose to deploy it together
- with the LobeChat database version. In this case, LobeChat can share the same Postgres instance
+ with the LobeHub database version. In this case, LobeHub can share the same Postgres instance
with it.
## Logto Configuration Process
-The following assumes your LobeChat database version domain is `https://lobe.example.com`.
+The following assumes your LobeHub database version domain is `https://lobe.example.com`.
If you are using a privately deployed Logto, assume its endpoint domain is `https://lobe-auth-api.example.com`.
@@ -39,40 +39,40 @@ If you are using Logto Cloud, assume its endpoint domain is `https://example.log
Set `CORS allowed origins` to `https://lobe.example.com`.
-
+
After successful creation, save the `Client ID` and `Client Secret`.
### Configure Webhook (Optional)
- Configure the Logto Webhook so that LobeChat can receive notifications when user information is updated.
+ Configure the Logto Webhook so that LobeHub can receive notifications when user information is updated.
Go to `Webhooks`, create a Webhook, and fill in the following fields:
- Endpoint URL: `https://lobe.example.com/api/webhooks/logto`
- Events:
- - `User.Data.Updated`: Allow LobeChat to synchronize user profile information updates from Logto.
- - `User.SuspensionStatus.Updated`: Allow LobeChat to remove the active session from suspended users from logging in, only available when database session strategy is `database`.
+ - `User.Data.Updated`: Allow LobeHub to synchronize user profile information updates from Logto.
+ - `User.SuspensionStatus.Updated`: Allow LobeHub to remove the active session from suspended users from logging in, only available when database session strategy is `database`.
After successful creation, copy the Webhook's `Signing Key` and fill it in the `LOGTO_WEBHOOK_SIGNING_KEY` environment variable.
### Configure Environment Variables
-
+
- Set the obtained `Client ID` and `Client Secret` as `AUTH_LOGTO_ID` and `AUTH_LOGTO_SECRET` in the LobeChat environment variables.
+ Set the obtained `Client ID` and `Client Secret` as `AUTH_LOGTO_ID` and `AUTH_LOGTO_SECRET` in the LobeHub environment variables.
- Configure `AUTH_LOGTO_ISSUER` in the LobeChat environment variables as follows:
+ Configure `AUTH_LOGTO_ISSUER` in the LobeHub environment variables as follows:
- `https://lobe-auth-api.example.com/oidc` if you are using a privately deployed Logto
- `https://example.logto.app/oidc` if you are using Logto Cloud
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| --------------------------- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `AUTH_SECRET` | Required | The key used to encrypt Auth.js session tokens. You can generate a key using the command: `openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeChat. For Logto, enter `logto`. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LobeHub. For Logto, enter `logto`. |
| `AUTH_LOGTO_ID` | Required | The Client ID from the Logto App details page |
| `AUTH_LOGTO_SECRET` | Required | The Client Secret from the Logto App details page |
| `AUTH_LOGTO_ISSUER` | Required | OpenID Connect issuer of the Logto provider |
@@ -101,5 +101,5 @@ If you encounter issues during the Logto deployment process, refer to the follow
- I am using Docker deployment and want a one-click upgrade: Execute the custom command in the container: `sh -c "npm run cli db seed -- --swe --encrypt-base-role" && npx @logto/cli db alteration deploy $version && npm start`
- After successful deployment, users will be able to authenticate via Logto and use LobeChat.
+ After successful deployment, users will be able to authenticate via Logto and use LobeHub.
diff --git a/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
index 2b444545fe..d3beccb193 100644
--- a/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
@@ -1,11 +1,11 @@
---
-title: 在 LobeChat 中配置 Logto 身份验证服务
-description: 学习如何在 LobeChat 中配置 Logto 身份验证服务,包括部署、创建、设置权限和环境变量。
+title: 在 LobeHub 中配置 Logto 身份验证服务
+description: 学习如何在 LobeHub 中配置 Logto 身份验证服务,包括部署、创建、设置权限和环境变量。
tags:
- Logto 身份验证
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置 Logto 身份验证服务
@@ -13,13 +13,13 @@ tags:
[Logto](https://github.com/logto-io/logto) 是一个开源的身份验证服务,界面简洁美观、功能配置丰富且易于上手,你即可以选择使用其官方提供的 Logto Cloud,也可以选择私有部署 Logto。
- 若你想要私有部署 Logto,我们建议你将之与 LobeChat 数据库版本一同使用 Docker Compose 部署,此时
- LobeChat 可以与之共用同一个 Postgres 实例。
+ 若你想要私有部署 Logto,我们建议你将之与 LobeHub 数据库版本一同使用 Docker Compose 部署,此时
+ LobeHub 可以与之共用同一个 Postgres 实例。
## Logto 配置流程
-下文假设你的 LobeChat 数据库版本域名为 `https://lobe.example.com`。
+下文假设你的 LobeHub 数据库版本域名为 `https://lobe.example.com`。
若你是私有部署的 Logto,假设其 endpoint 域名为 `https://lobe-auth-api.example.com`。
@@ -36,35 +36,35 @@ tags:
配置 `CORS allowed origins` 为 `https://lobe.example.com`
-
+
创建成功后, 将 `Client ID` 和 `Client Secret` 保存下来。
### 配置 Webhook (可选)
- 配置 Logto 的 Webhook,以便在用户信息更新时 LobeChat 可以接收到通知。
+ 配置 Logto 的 Webhook,以便在用户信息更新时 LobeHub 可以接收到通知。
前往 `Webhooks` ,创建一个 Webhook,填写以下字段:
- 端点 URL: `https://lobe.example.com/api/webhooks/logto`
- 事件:
- - `User.Data.Updated`: 允许 LobeChat 同步 Logto 中用户资料信息的更新。
- - `User.SuspensionStatus.Updated`: 允许 LobeChat 将被暂停的用户移除登录会话,仅在数据库会话策略为 `database` 时可用。
+ - `User.Data.Updated`: 允许 LobeHub 同步 Logto 中用户资料信息的更新。
+ - `User.SuspensionStatus.Updated`: 允许 LobeHub 将被暂停的用户移除登录会话,仅在数据库会话策略为 `database` 时可用。
创建成功后,复制 Webhook 的 `签名密钥`。填写到环境变量中的 `LOGTO_WEBHOOK_SIGNING_KEY`。
### 配置环境变量
-
+
- 将获取到的 `Client ID` 和 `Client Secret`,设为 LobeChat 环境变量中的 `AUTH_LOGTO_ID` 和 `AUTH_LOGTO_SECRET`。
+ 将获取到的 `Client ID` 和 `Client Secret`,设为 LobeHub 环境变量中的 `AUTH_LOGTO_ID` 和 `AUTH_LOGTO_SECRET`。
- 配置 LobeChat 环境变量中 `AUTH_LOGTO_ISSUER` 为:
+ 配置 LobeHub 环境变量中 `AUTH_LOGTO_ISSUER` 为:
- `https://lobe-auth-api.example.com/oidc`,若你是私有部署的 Logto
- `https://example.logto.app/oidc`,若你是使用的 Logto Cloud
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| --------------------------- | -- | ------------------------------------------------------------------------------------------------ |
@@ -97,4 +97,4 @@ tags:
- 我使用 docker 部署 希望一键升级:在容器中执行自定义命令:`sh -c "npm run cli db seed -- --swe --encrypt-base-role" && npx @logto/cli db alteration deploy $version && npm start`
-部署成功后,用户将可以通过 Logto 身份认证并使用 LobeChat。
+部署成功后,用户将可以通过 Logto 身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.mdx b/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.mdx
index 0ba3b380e3..280c280fc0 100644
--- a/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.mdx
@@ -1,7 +1,7 @@
---
-title: Configuration of Microsoft Entra ID Authentication Service for LobeChat
+title: Configuration of Microsoft Entra ID Authentication Service for LobeHub
description: >-
- Learn how to configure Microsoft Entra ID Authentication Service for LobeChat,
+ Learn how to configure Microsoft Entra ID Authentication Service for LobeHub,
create applications, add users, and set up environment variables for seamless
integration.
tags:
@@ -10,7 +10,7 @@ tags:
- Azure Portal
- SSO
- Environment Variables
- - LobeChat
+ - LobeHub
---
# Configuration of Microsoft Entra ID Authentication Service
@@ -33,17 +33,17 @@ tags:
matches the deployed URL. - Please replace "your-domain" with your own domain.
-
+
Click on "Register".
After successfully creating the application, click on the corresponding application to enter the application details page, and switch to the "Overview" tab to view the corresponding configuration information.
-
+
Go to "Certificates & secrets", select the "Client secrets" tab, click on "New client secret", fill in the description, select the expiration time, and click on "Add" to create a new client secret.
-
+
Please make sure to save your client secret as this is your only chance to view it.
@@ -51,11 +51,11 @@ tags:
### Add Users
- Go back to the "Microsoft Entra ID" interface, enter "Users", click on "New user", fill in the user information, and click on "Create" to create a user for using LobeChat.
+ Go back to the "Microsoft Entra ID" interface, enter "Users", click on "New user", fill in the user information, and click on "Create" to create a user for using LobeHub.
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ----------------------------------- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -74,7 +74,7 @@ tags:
- After successful deployment, users will be able to authenticate and use LobeChat using the users
+ After successful deployment, users will be able to authenticate and use LobeHub using the users
configured in Microsoft Entra ID.
diff --git a/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.zh-CN.mdx
index 6202bba093..2a3774f6eb 100644
--- a/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/microsoft-entra-id.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中配置 Microsoft Entra ID 身份验证服务
-description: 学习如何在 LobeChat 中配置 Microsoft Entra ID 身份验证服务,包括创建应用、新增用户和配置环境变量。详细步骤和相关资料。
+title: 在 LobeHub 中配置 Microsoft Entra ID 身份验证服务
+description: 学习如何在 LobeHub 中配置 Microsoft Entra ID 身份验证服务,包括创建应用、新增用户和配置环境变量。详细步骤和相关资料。
tags:
- Microsoft Entra ID
- Microsoft Azure Portal
@@ -32,27 +32,27 @@ tags:
your-domain 请填写自己的域名
-
+
点击「Register」
创建成功后,点击相应的应用,进入应用详情页,切换到「Overview」标签页,就可以看到相应的配置信息。
-
+
进入「Certificates & secrets」,选择「Client secrets」标签,点击「New client secret」,填写描述,选择过期时间,点击「Add」,创建一个新的客户端密钥。
-
+ 请务必保存好你的客户端密钥,因为这是你唯一的机会查看它。
### 新增用户
- 回到「Microsoft Entra ID」界面,进入「Users」,点击「New user」,填写用户信息,点击「Create」,创建用户以使用 LobeChat。
+ 回到「Microsoft Entra ID」界面,进入「Users」,点击「New user」,填写用户信息,点击「Create」,创建用户以使用 LobeHub。
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ----------------------------------- | -- | ------------------------------------------------------------------------------------------- |
@@ -69,7 +69,7 @@ tags:
- 部署成功后,用户将可以使用 Microsoft Entra ID 中配置的用户通过身份认证并使用 LobeChat。
+ 部署成功后,用户将可以使用 Microsoft Entra ID 中配置的用户通过身份认证并使用 LobeHub。
## 进阶配置
diff --git a/docs/self-hosting/advanced/auth/next-auth/okta.mdx b/docs/self-hosting/advanced/auth/next-auth/okta.mdx
index 125486c2f7..5a8d86601d 100644
--- a/docs/self-hosting/advanced/auth/next-auth/okta.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/okta.mdx
@@ -1,9 +1,9 @@
---
-title: Configure Okta Identity Verification Service for LobeChat
+title: Configure Okta Identity Verification Service for LobeHub
description: >-
- Learn how to configure Okta Identity Verification Service for LobeChat for
- your organization, including creating applications, adding users, and
- configuring environment variables.
+ Learn how to configure Okta Identity Verification Service for LobeHub for your
+ organization, including creating applications, adding users, and configuring
+ environment variables.
tags:
- Okta
- Identity Verification
@@ -27,7 +27,7 @@ tags:
| Setting Name | Description | Sample Information |
| ---------------------- | ------------------------------------------------------------------------------------------------------------ | --------------------------------------------- |
- | App Integration Name | The Application Name your users will see | LobeChat Instance |
+ | App Integration Name | The Application Name your users will see | LobeHub Instance |
| Sign-in redirect URIs | Okta sends the authentication response and ID token for the user's sign-in request to these URIs | (http(s)://your-domain/api/auth/callback/okta |
| Sign-out redirect URIs | After your application contacts Okta to close the user session, Okta redirects the user to one of these URIs | (http(s)://your-domain |
@@ -38,11 +38,11 @@ tags:
### Add Users
- Click on the "Assignments" in the top navigation bar to enter the user management interface, where you can create or assign users in your organization to log in to LobeChat.
+ Click on the "Assignments" in the top navigation bar to enter the user management interface, where you can create or assign users in your organization to log in to LobeHub.
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -59,7 +59,7 @@ tags:
- After successful deployment, users will be able to authenticate and use LobeChat using the users
+ After successful deployment, users will be able to authenticate and use LobeHub using the users
configured in Okta.
diff --git a/docs/self-hosting/advanced/auth/next-auth/okta.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/okta.zh-CN.mdx
index d435f16814..58fe854b99 100644
--- a/docs/self-hosting/advanced/auth/next-auth/okta.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/okta.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中配置 Okta 身份验证服务 - 详细步骤和环境变量设置
-description: 学习如何在 LobeChat 中为您的组织配置 Okta 身份验证服务,包括创建应用程序、添加用户和配置环境变量等。
+title: 在 LobeHub 中配置 Okta 身份验证服务 - 详细步骤和环境变量设置
+description: 学习如何在 LobeHub 中为您的组织配置 Okta 身份验证服务,包括创建应用程序、添加用户和配置环境变量等。
tags:
- Okta
- 身份验证
@@ -24,7 +24,7 @@ tags:
| 设置名称 | 描述 | 示例信息 |
| ---------------------- | ------------------------------------------- | --------------------------------------------- |
- | App Integration Name | 您的用户将看到的应用程序名称 | LobeChat Instance |
+ | App Integration Name | 您的用户将看到的应用程序名称 | LobeHub Instance |
| Sign-in redirect URIs | Okta 将用户登录请求的身份验证响应和 ID 令牌发送到这些 URI | (http(s)://your-domain/api/auth/callback/okta |
| Sign-out redirect URIs | 您的应用程序联系 Okta 关闭用户会话后,Okta 将用户重定向到这些 URI 之一 | (http(s)://your-domain |
@@ -34,11 +34,11 @@ tags:
### 添加用户
- 点击顶部导航栏中的「Assignments」进入用户管理界面,您可以在此创建或分配组织中的用户来登录 LobeChat。
+ 点击顶部导航栏中的「Assignments」进入用户管理界面,您可以在此创建或分配组织中的用户来登录 LobeHub。
### 配置环境变量
- 在部署 LobeChat 时,您需要配置以下环境变量:
+ 在部署 LobeHub 时,您需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------ |
@@ -55,7 +55,7 @@ tags:
- 部署成功后,用户将能够使用在 Okta 中配置的用户进行身份验证并使用 LobeChat。
+ 部署成功后,用户将能够使用在 Okta 中配置的用户进行身份验证并使用 LobeHub。
[okta-client-page]: https://login.okta.com
diff --git a/docs/self-hosting/advanced/auth/next-auth/wechat.mdx b/docs/self-hosting/advanced/auth/next-auth/wechat.mdx
index 34e65f145f..5879ff00f7 100644
--- a/docs/self-hosting/advanced/auth/next-auth/wechat.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/wechat.mdx
@@ -1,14 +1,14 @@
---
-title: Configure Wechat Authentication Service in LobeChat
+title: Configure Wechat Authentication Service in LobeHub
description: >-
- Learn how to configure Wechat authentication service in LobeChat, including
+ Learn how to configure Wechat authentication service in LobeHub, including
creating a new Wechat App, setting permissions, and environment variables.
tags:
- Wechat Authentication
- Wechat App
- Environment Variable Configuration
- Single Sign-On
- - LobeChat
+ - LobeHub
---
# Configure Wechat Authentication Service
@@ -26,12 +26,12 @@ tags:
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate the key using the command: `openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeChat. Use `github` for Github. |
+ | `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeHub. Use `wechat` for WeChat. |
| `WECHAT_CLIENT_ID` | Required | Client ID from the Wechat website application details page |
| `WECHAT_CLIENT_SECRET` | Required | Client Secret from the Wechat website application details page |
| `AUTH_URL` | Required | This URL is used to specify the callback address for Auth.js when performing OAuth authentication. Only set it if the default generated redirect address is incorrect. `https://example.com/api/auth` |
@@ -43,5 +43,5 @@ tags:
After successful deployment, users will be able to authenticate through the WeChat Open Platform
- and use LobeChat.
+ and use LobeHub.
diff --git a/docs/self-hosting/advanced/auth/next-auth/wechat.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/wechat.zh-CN.mdx
index 774dea7bb7..ec3c50898c 100644
--- a/docs/self-hosting/advanced/auth/next-auth/wechat.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/wechat.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: 在 LobeChat 中配置微信身份验证服务
-description: 学习如何在 LobeChat 中配置微信身份验证服务,包括创建新的微信网站应用、设置权限和环境变量。
+title: 在 LobeHub 中配置微信身份验证服务
+description: 学习如何在 LobeHub 中配置微信身份验证服务,包括创建新的微信网站应用、设置权限和环境变量。
tags:
- 微信身份验证
- 微信网站应用
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置微信身份验证服务
-\## 微信配置流程
+## 微信配置流程
### 创建微信网站应用
@@ -24,7 +24,7 @@ tags:
### 配置环境变量
- 在部署 LobeChat 时,你需要配置以下环境变量:
+ 在部署 LobeHub 时,你需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ------------------------------------------------------------------------------------------- |
@@ -39,4 +39,4 @@ tags:
-部署成功后,用户将可以通过微信开放平台身份认证并使用 LobeChat。
+部署成功后,用户将可以通过微信开放平台身份认证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/auth/next-auth/zitadel.mdx b/docs/self-hosting/advanced/auth/next-auth/zitadel.mdx
index ecbbb79e93..ada3a577d7 100644
--- a/docs/self-hosting/advanced/auth/next-auth/zitadel.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/zitadel.mdx
@@ -1,12 +1,12 @@
---
-title: Configure ZITADEL Authentication Service for LobeChat
+title: Configure ZITADEL Authentication Service for LobeHub
description: >-
- Learn how to configure ZITADEL Authentication Service for LobeChat deployment,
+ Learn how to configure ZITADEL Authentication Service for LobeHub deployment,
including creating ZITADEL applications, setting up environment variables, and
enabling single sign-on (SSO).
tags:
- ZITADEL Authentication
- - LobeChat Deployment
+ - LobeHub Deployment
- Single Sign-On (SSO)
- Environment Variables
- ZITADEL Applications
@@ -19,15 +19,15 @@ tags:
Log in to the console of your ZITADEL instance using an account with at least a [`Project Owner` role](https://zitadel.com/docs/guides/manage/console/managers#roles), navigate to (or [create](https://zitadel.com/docs/guides/manage/console/projects#create-a-project)) the project you'd like to host your application in, and click the **New** button to create an application.
-
+
Fill in the name, choose **Web** as the application type, and click **Continue**.
-
+
Choose **Code** as the authentication method.
-
+
In the **Redirect URIs** field, fill in:
@@ -35,34 +35,34 @@ tags:
http(s)://your-domain/api/auth/callback/zitadel
```
-
+
- You can fill in or modify redirect URIs after creating the application, but make sure the filled
URL is consistent with the deployed URL.
- - Replace `http(s)://your-domain` with the actual URL that LobeChat is deployed to.
+ - Replace `http(s)://your-domain` with the actual URL that LobeHub is deployed to.
Confirm the configuration and click **Create**.
-
+
Save the **ClientId** and **ClientSecret** for later use.
-
+
In the application settings page, navigate to the **Token Settings** tab, enable **User Info inside ID Token** option, and click **Save**.
-
+
Navigate to the **URLs** tab, and save the **issuer** URL.
-
+
### Configure Environment Variables
- When deploying LobeChat, you need to configure the following environment variables:
+ When deploying LobeHub, you need to configure the following environment variables:
| Environment Variable | Type | Description |
| ------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -79,6 +79,6 @@ tags:
- After successful deployment, users will be able to authenticate and use LobeChat using existing
+ After successful deployment, users will be able to authenticate and use LobeHub using existing
users configured in ZITADEL.
diff --git a/docs/self-hosting/advanced/auth/next-auth/zitadel.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/zitadel.zh-CN.mdx
index f4c30deafa..3b6a4ede2f 100644
--- a/docs/self-hosting/advanced/auth/next-auth/zitadel.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/zitadel.zh-CN.mdx
@@ -1,12 +1,12 @@
---
-title: 在 LobeChat 中配置 ZITADEL 身份验证服务
-description: 学习如何在 LobeChat 中配置 ZITADEL 身份验证服务,包括创建应用、配置环境变量等步骤。
+title: 在 LobeHub 中配置 ZITADEL 身份验证服务
+description: 学习如何在 LobeHub 中配置 ZITADEL 身份验证服务,包括创建应用、配置环境变量等步骤。
tags:
- ZITADEL
- 身份验证服务
- 环境变量配置
- 单点登录
- - LobeChat
+ - LobeHub
---
# 配置 ZITADEL 身份验证服务
@@ -16,15 +16,15 @@ tags:
使用具有 [`Project Owner` 角色](https://zitadel.com/docs/guides/manage/console/managers#roles)的账户登录到 ZITADEL 实例控制台,进入(或[创建](https://zitadel.com/docs/guides/manage/console/projects#create-a-project))该应用所属的项目,点击「创建」按钮创建应用。
-
+
填写应用名称,应用类型选择「Web」,点击「继续」。
-
+
选择「Code」作为身份验证方式。
-
+
在「重定向 URLs」字段中填写:
@@ -32,38 +32,38 @@ tags:
http(s)://your-domain/api/auth/callback/zitadel
```
-
+
- 可以创建应用后再填写或修改重定向 URL,但请确保填写的 URL 与部署的 URL 一致。
- - 请将 `http(s)://your-domain` 替换为 LobeChat 部署的实际 URL。
+ - 请将 `http(s)://your-domain` 替换为 LobeHub 部署的实际 URL。
确认配置并点击「创建」。
-
+
记录下「ClientId」和「ClientSecret」备用。
-
+
在应用设置页面中,切换到「令牌设置」选项卡,勾选「在 ID Token 中包含用户信息」选项,点击「保存」。
-
+
切换到「URLs」选项卡,记录下「issuer」URL。
-
+
### 配置环境变量
- 部署 LobeChat 时,您需要配置以下环境变量:
+ 部署 LobeHub 时,您需要配置以下环境变量:
| 环境变量 | 类型 | 描述 |
| ------------------------- | -- | ----------------------------------------------------------------------------------- |
| `AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成密钥:`openssl rand -base64 32` |
- | `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 为 LobeChat 选择单点登录提供程序。对于 ZITADEL,请填写 `zitadel`。 |
+ | `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 为 LobeHub 选择单点登录提供程序。对于 ZITADEL,请填写 `zitadel`。 |
| `AUTH_ZITADEL_ID` | 必选 | ZITADEL 应用的 Client ID(`ClientId`)。 |
| `AUTH_ZITADEL_SECRET` | 必选 | ZITADEL 应用的 Client Secret(`ClientSecret`)。 |
| `AUTH_ZITADEL_ISSUER` | 必选 | ZITADEL 应用的 OpenID Connect 颁发者(issuer)URL。 |
@@ -74,4 +74,4 @@ tags:
-部署成功后,用户将能够通过 ZITADEL 中配置的用户进行身份验证并使用 LobeChat。
+部署成功后,用户将能够通过 ZITADEL 中配置的用户进行身份验证并使用 LobeHub。
diff --git a/docs/self-hosting/advanced/desktop.mdx b/docs/self-hosting/advanced/desktop.mdx
index 069f2d9cf2..386dd335f3 100644
--- a/docs/self-hosting/advanced/desktop.mdx
+++ b/docs/self-hosting/advanced/desktop.mdx
@@ -1,20 +1,20 @@
---
-title: Automatic Synchronization with LobeChat Desktop
-description: Configure LobeChat Desktop for a synchronized experience
+title: Automatic Synchronization with LobeHub Desktop
+description: Configure LobeHub Desktop for a synchronized experience
tags:
- - LobeChat
+ - LobeHub
- Desktop Synchronization
- Self-Hosted Instance
- OIDC Configuration
---
-# Automatic Synchronization with LobeChat Desktop
+# Automatic Synchronization with LobeHub Desktop
-LobeChat Desktop provides users with an enhanced experience while also expanding the capabilities of LobeChat, allowing users to utilize LobeChat in offline environments. This document will guide you on how to configure LobeChat Desktop and connect it to your self-hosted instance.
+LobeHub Desktop provides users with an enhanced experience while also expanding the capabilities of LobeHub, allowing users to utilize LobeHub in offline environments. This document will guide you on how to configure LobeHub Desktop and connect it to your self-hosted instance.
## System Requirements
-LobeChat Desktop is built using Electron and supports the following operating systems:
+LobeHub Desktop is built using Electron and supports the following operating systems:
- Windows
- macOS
@@ -22,16 +22,16 @@ LobeChat Desktop is built using Electron and supports the following operating sy
## Desktop Version Information
-The version release of LobeChat Desktop follows this logic:
+The version release of LobeHub Desktop follows this logic:
-- **Beta Version**: All merges to the main branch that are automatically released will be created as Beta versions of LobeChat Desktop.
+- **Beta Version**: All merges to the main branch that are automatically released will be created as Beta versions of LobeHub Desktop.
- **Stable Version**: The development team will periodically release stable versions through manual tagging.
If you wish to access the latest features, you can use the Beta version; if you prioritize stability, please use the Stable version.
## Connecting to a Self-Hosted Instance
-LobeChat Desktop can connect to your self-hosted LobeChat instance, allowing you to use your self-hosted configuration on the desktop.
+LobeHub Desktop can connect to your self-hosted LobeHub instance, allowing you to use your self-hosted configuration on the desktop.
### Preparations
@@ -58,16 +58,16 @@ Add the generated environment variables to your deployment configuration.
If you have already configured `OIDC_JWKS_KEY`, no changes are needed. The system will automatically fall back to `OIDC_JWKS_KEY` for backward compatibility.
-If you are deploying LobeChat using one-click deployment methods (such as Vercel, Railway, etc.), you need to:
+If you are deploying LobeHub using one-click deployment methods (such as Vercel, Railway, etc.), you need to:
1. Add the above environment variables to the environment variable configuration of your deployment platform.
2. For Vercel deployments, ensure that the `APP_URL` environment variable is set to your domain URL, which is crucial for correctly handling authentication callbacks.
### Connection Steps
-1. Open LobeChat Desktop.
+1. Open LobeHub Desktop.
2. Click the Wi-Fi icon in the upper right corner.
-3. Enter the address of your self-hosted instance (e.g., `https://your-lobechat-instance.com`).
+3. Enter the address of your self-hosted instance (e.g., `https://your-lobehub-instance.com`).
4. Click the "Connect" button.
5. You will be redirected to the login page; after completing the authorization, you can use your self-hosted configuration.
diff --git a/docs/self-hosting/advanced/desktop.zh-CN.mdx b/docs/self-hosting/advanced/desktop.zh-CN.mdx
index 3cc877ff0c..49e40ac8d0 100644
--- a/docs/self-hosting/advanced/desktop.zh-CN.mdx
+++ b/docs/self-hosting/advanced/desktop.zh-CN.mdx
@@ -1,21 +1,21 @@
---
-title: 与 LobeChat 桌面端自动同步
-description: 配置 LobeChat 桌面端,获取端同步体验
+title: 与 LobeHub 桌面端自动同步
+description: 配置 LobeHub 桌面端,获取端同步体验
tags:
- - LobeChat
+ - LobeHub
- 桌面端
- 自托管
- OIDC
- 配置指南
---
-# 与 LobeChat 桌面端自动同步
+# 与 LobeHub 桌面端自动同步
-LobeChat 桌面端为用户提供了更好的使用体验,同时也扩展了 LobeChat 的能力边界,允许用户在离线环境下使用 LobeChat。本文档将指导您如何配置 LobeChat 桌面端以及如何将其与自托管实例连接。
+LobeHub 桌面端为用户提供了更好的使用体验,同时也扩展了 LobeHub 的能力边界,允许用户在离线环境下使用 LobeHub。本文档将指导您如何配置 LobeHub 桌面端以及如何将其与自托管实例连接。
## 系统要求
-LobeChat 桌面端使用 Electron 构建,支持以下操作系统:
+LobeHub 桌面端使用 Electron 构建,支持以下操作系统:
- Windows
- macOS
@@ -23,16 +23,16 @@ LobeChat 桌面端使用 Electron 构建,支持以下操作系统:
## 桌面端版本说明
-LobeChat 桌面端的版本发布遵循以下逻辑:
+LobeHub 桌面端的版本发布遵循以下逻辑:
-- **Beta 版本**:所有合并到 main 分支并自动发布版本,都会自动创建为 LobeChat 桌面端的 Beta 版本
+- **Beta 版本**:所有合并到 main 分支并自动发布版本,都会自动创建为 LobeHub 桌面端的 Beta 版本
- **Stable 版本**:开发团队会通过手动打标来定期发布稳定版本
如果您希望获得最新功能,可以使用 Beta 版本;如果您注重稳定性,请使用 Stable 版本。
## 与自托管实例连接
-LobeChat 桌面端可以与您自托管的 LobeChat 实例连接,以便您可以在桌面端使用您的自托管配置。
+LobeHub 桌面端可以与您自托管的 LobeHub 实例连接,以便您可以在桌面端使用您的自托管配置。
### 准备工作
@@ -56,16 +56,16 @@ ENABLE_OIDC=1
如果您之前已配置 `OIDC_JWKS_KEY`,无需修改。系统会自动回退使用 `OIDC_JWKS_KEY`(向后兼容)。
-如果您使用一键部署方式(如 Vercel、Railway 等平台)部署 LobeChat,您需要:
+如果您使用一键部署方式(如 Vercel、Railway 等平台)部署 LobeHub,您需要:
1. 将上述环境变量添加到部署平台的环境变量配置中
2. 对于 Vercel 部署,请确保 `APP_URL` 环境变量设置为你的域名 URL,这对于正确处理认证回调至关重要
### 连接步骤
-1. 打开 LobeChat 桌面端
+1. 打开 LobeHub 桌面端
2. 点击右上角 wifi 图标
-3. 输入您的自托管实例地址(例如:`https://your-lobechat-instance.com`)
+3. 输入您的自托管实例地址(例如:`https://your-lobehub-instance.com`)
4. 点击 "连接" 按钮
5. 您将被重定向到登录页面,完成授权后即可使用您的自托管配置
diff --git a/docs/self-hosting/advanced/feature-flags.mdx b/docs/self-hosting/advanced/feature-flags.mdx
index b5eeeabba0..d09a5305a7 100644
--- a/docs/self-hosting/advanced/feature-flags.mdx
+++ b/docs/self-hosting/advanced/feature-flags.mdx
@@ -1,11 +1,11 @@
---
-title: LobeChat Feature Flags Environment Variables Configuration Guide
+title: LobeHub Feature Flags Environment Variables Configuration Guide
description: >-
- Learn how to use environment variables to customize LobeChat's feature flags,
+ Learn how to use environment variables to customize LobeHub's feature flags,
including controlling whether a feature is enabled or disabled, or enabling or
disabling features for specific user groups or environments as needed.
tags:
- - LobeChat
+ - LobeHub
- Environment Variables
- Configuration Guide
- Feature Flags
@@ -13,12 +13,12 @@ tags:
# Feature Flags
-In addition to basic environment variable configuration, LobeChat also offers feature flags to control whether a feature is enabled globally, or to enable or disable features for specific user groups or environments as needed.
+In addition to basic environment variable configuration, LobeHub also offers feature flags to control whether a feature is enabled globally, or to enable or disable features for specific user groups or environments as needed.
## Feature Flags Environment Variable `FEATURE_FLAGS`
- Type: Optional
-- Description: Used to control LobeChat's feature functionalities. Supports multiple feature flags, using `+` to add a feature and `-` to disable a feature. Separate multiple feature flags with a comma `,` and enclose the entire value in quotes `"` to avoid parsing errors.
+- Description: Used to control LobeHub's feature functionalities. Supports multiple feature flags, using `+` to add a feature and `-` to disable a feature. Separate multiple feature flags with a comma `,` and enclose the entire value in quotes `"` to avoid parsing errors.
- Default: `-`
- Example: `"-welcome_suggest"`
diff --git a/docs/self-hosting/advanced/feature-flags.zh-CN.mdx b/docs/self-hosting/advanced/feature-flags.zh-CN.mdx
index 3966e3b44a..b03041d044 100644
--- a/docs/self-hosting/advanced/feature-flags.zh-CN.mdx
+++ b/docs/self-hosting/advanced/feature-flags.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 特性标志环境变量配置指南
-description: 了解如何使用环境变量自定义 LobeChat 的特性标志,包括控制否启用某个功能、或者根据需要对特定用户群体或环境启用或禁用功能。
+title: LobeHub 特性标志环境变量配置指南
+description: 了解如何使用环境变量自定义 LobeHub 的特性标志,包括控制是否启用某个功能、或者根据需要对特定用户群体或环境启用或禁用功能。
tags:
- - LobeChat
+ - LobeHub
- 环境变量
- 配置指南
- 特征标志
@@ -10,12 +10,12 @@ tags:
# 特性标志
-除了基础的环境变量配置外,LobeChat 还提供了一些特性标志(Feature Flags),用于控制是否全局启用某个功能,或者根据需要对特定用户群体或环境启用或禁用功能。
+除了基础的环境变量配置外,LobeHub 还提供了一些特性标志(Feature Flags),用于控制是否全局启用某个功能,或者根据需要对特定用户群体或环境启用或禁用功能。
## 特性标志环境变量 `FEATURE_FLAGS`
- 类型:可选
-- 描述:用于控制 LobeChat 的特性功能,支持多个功能标志,使用 `+` 增加一个功能,使用 `-` 来关闭一个功能,多个功能标志之间使用英文逗号 `,` 隔开,最外层建议添加引号 `"` 以避免解析错误。
+- 描述:用于控制 LobeHub 的特性功能,支持多个功能标志,使用 `+` 增加一个功能,使用 `-` 来关闭一个功能,多个功能标志之间使用英文逗号 `,` 隔开,最外层建议添加引号 `"` 以避免解析错误。
- 默认值:`-`
- 示例:`"-welcome_suggest"`
diff --git a/docs/self-hosting/advanced/knowledge-base.mdx b/docs/self-hosting/advanced/knowledge-base.mdx
index 2e8509d7e3..21d64cc57b 100644
--- a/docs/self-hosting/advanced/knowledge-base.mdx
+++ b/docs/self-hosting/advanced/knowledge-base.mdx
@@ -1,10 +1,10 @@
---
-title: LobeChat Knowledge Base / File Upload
+title: LobeHub Knowledge Base / File Upload
description: >-
- Explore LobeChat's file upload and knowledge base management features with
- core components.
+ Explore LobeHub's file upload and knowledge base management features with core
+ components.
tags:
- - LobeChat
+ - LobeHub
- File Upload
- Knowledge Base
- PostgreSQL
@@ -13,7 +13,7 @@ tags:
# Knowledge Base / File Upload
-LobeChat supports file upload and knowledge base management. This feature relies on the following core technical components. Understanding these components will help you successfully deploy and maintain the knowledge base system.
+LobeHub supports file upload and knowledge base management. This feature relies on the following core technical components. Understanding these components will help you successfully deploy and maintain the knowledge base system.
## Core Components
@@ -45,7 +45,7 @@ S3 (or S3-compatible storage services) is used for storing uploaded files.
OpenAI's Embedding service is used to convert text into vector representations.
- LobeChat currently uses OpenAI's `text-embedding-3-small` model by default. Ensure your API Key
+ LobeHub currently uses OpenAI's `text-embedding-3-small` model by default. Ensure your API Key
has access to this model.
@@ -62,7 +62,7 @@ Unstructured.io is a powerful document processing tool.
- **Use Case**: Handle non-plain text formats like PDF, Word
- **Note**: Evaluate processing needs based on document complexity
-By correctly configuring and integrating these core components, you can build a powerful and efficient knowledge base system for LobeChat. Each component plays a crucial role in the overall architecture, supporting advanced document management and intelligent retrieval functions.
+By correctly configuring and integrating these core components, you can build a powerful and efficient knowledge base system for LobeHub. Each component plays a crucial role in the overall architecture, supporting advanced document management and intelligent retrieval functions.
### 5. Custom Embedding
diff --git a/docs/self-hosting/advanced/knowledge-base.zh-CN.mdx b/docs/self-hosting/advanced/knowledge-base.zh-CN.mdx
index b8af042c74..25d1ac20e9 100644
--- a/docs/self-hosting/advanced/knowledge-base.zh-CN.mdx
+++ b/docs/self-hosting/advanced/knowledge-base.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 知识库 / 文件上传
-description: 了解 LobeChat 的文件上传和知识库管理核心组件,助力成功部署。
+title: LobeHub 知识库 / 文件上传
+description: 了解 LobeHub 的文件上传和知识库管理核心组件,助力成功部署。
tags:
- - LobeChat
+ - LobeHub
- 文件上传
- 知识库管理
- PostgreSQL
@@ -11,7 +11,7 @@ tags:
# 知识库 / 文件上传
-LobeChat 支持文件上传 / 知识库管理。该功能依赖于以下核心技术组件,了解这些组件将有助于你成功部署和维护知识库系统。
+LobeHub 支持文件上传 / 知识库管理。该功能依赖于以下核心技术组件,了解这些组件将有助于你成功部署和维护知识库系统。
## 核心组件
@@ -43,7 +43,7 @@ S3(或兼容 S3 协议的存储服务)用于存储上传的文件。
OpenAI 的嵌入(Embedding)服务用于将文本转化为向量表示。
- LobeChat 当前默认使用 OpenAI `text-embedding-3-small` 模型,请确保你的 API Key 可以访问该模型。
+ LobeHub 当前默认使用 OpenAI `text-embedding-3-small` 模型,请确保你的 API Key 可以访问该模型。
- **用途**:生成文本的向量表示,用于语义搜索
@@ -59,7 +59,7 @@ Unstructured.io 是一个强大的文档处理工具。
- **应用场景**:处理 PDF、Word 等非纯文本格式的文档
- **注意事项**:评估处理需求,根据文档复杂度决定是否部署
-通过正确配置和集成这些核心组件,您可以为 LobeChat 构建一个强大、高效的知识库系统。每个组件都在整体架构中扮演着关键角色,共同支持高级的文档管理和智能检索功能。
+通过正确配置和集成这些核心组件,您可以为 LobeHub 构建一个强大、高效的知识库系统。每个组件都在整体架构中扮演着关键角色,共同支持高级的文档管理和智能检索功能。
### 5. 自定义 Embedding(可选)
diff --git a/docs/self-hosting/advanced/model-list.mdx b/docs/self-hosting/advanced/model-list.mdx
index f060567595..79551d7bf2 100644
--- a/docs/self-hosting/advanced/model-list.mdx
+++ b/docs/self-hosting/advanced/model-list.mdx
@@ -1,10 +1,10 @@
---
-title: Customizing Provider Model List in LobeChat for Deployment
+title: Customizing Provider Model List in LobeHub for Deployment
description: >-
- Learn how to customize the model list in LobeChat for deployment with the
+ Learn how to customize the model list in LobeHub for deployment with the
syntax and extension capabilities
tags:
- - LobeChat
+ - LobeHub
- model customization
- deployment
- extension capabilities
@@ -12,7 +12,7 @@ tags:
# Model List
-LobeChat supports customizing the model list during deployment. This configuration is done in the environment for each [model provider](/docs/self-hosting/environment-variables/model-provider).
+LobeHub supports customizing the model list during deployment. This configuration is done in the environment for each [model provider](/docs/self-hosting/environment-variables/model-provider).
You can use `+` to add a model, `-` to hide a model, and use `model name->deploymentName=display name` to customize the display name of a model, separated by English commas. The basic syntax is as follows:
diff --git a/docs/self-hosting/advanced/model-list.zh-CN.mdx b/docs/self-hosting/advanced/model-list.zh-CN.mdx
index b8e09802ac..41a757ee8f 100644
--- a/docs/self-hosting/advanced/model-list.zh-CN.mdx
+++ b/docs/self-hosting/advanced/model-list.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 自定义模型服务商模型列表及扩展能力配置
-description: 了解如何在 LobeChat 中自定义模型列表以及扩展能力配置的基本语法和规则。
+title: LobeHub 自定义模型服务商模型列表及扩展能力配置
+description: 了解如何在 LobeHub 中自定义模型列表以及扩展能力配置的基本语法和规则。
tags:
- - LobeChat
+ - LobeHub
- 自定义模型列表
- 扩展能力配置
- 模型展示名
@@ -11,7 +11,7 @@ tags:
# Model List
-LobeChat 支持在部署时自定义模型列表,详情请参考 [模型提供商](/zh/docs/self-hosting/environment-variables/model-provider) 。
+LobeHub 支持在部署时自定义模型列表,详情请参考 [模型提供商](/zh/docs/self-hosting/environment-variables/model-provider) 。
你可以使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名->部署名=展示名<扩展配置>` 来自定义模型的展示名,用英文逗号隔开。通过 `<>` 来添加扩展配置。基本语法如下:
diff --git a/docs/self-hosting/advanced/observability/grafana.mdx b/docs/self-hosting/advanced/observability/grafana.mdx
index 453ff58640..ba30312754 100644
--- a/docs/self-hosting/advanced/observability/grafana.mdx
+++ b/docs/self-hosting/advanced/observability/grafana.mdx
@@ -1,9 +1,9 @@
---
title: 'Observability with Grafana, Prometheus, and Tempo'
description: >-
- Monitor and analyze your LobeChat instance using Grafana dashboards,
- Prometheus metrics, and Tempo traces. This guide covers setup, configuration
- for self-hosted deployments.
+ Monitor and analyze your LobeHub instance using Grafana dashboards, Prometheus
+ metrics, and Tempo traces. This guide covers setup, configuration for
+ self-hosted deployments.
tags:
- Observability
- Grafana
@@ -13,7 +13,7 @@ tags:
# Observability with [Grafana](https://grafana.com/), [Prometheus](https://prometheus.io/), and [Tempo](https://grafana.com/docs/tempo/latest/)
-LobeChat supports advanced observability for self-hosted deployments using open-source tools:
+LobeHub supports advanced observability for self-hosted deployments using open-source tools:
- **Grafana** for dashboards and visualization
- **Prometheus** for metrics collection
@@ -21,7 +21,7 @@ LobeChat supports advanced observability for self-hosted deployments using open-
- **otel-collector** ingesting other OpenTelemetry supported data
We provided Docker Compose (`docker-compose`) file presets to bootstrap the observability stack with advanced self-hosting features.
-This guide will help you set up and use these tools to monitor your LobeChat instance.
+This guide will help you set up and use these tools to monitor your LobeHub instance.
## Prerequisites
@@ -44,7 +44,7 @@ mv .env.example .env
docker compose up -d
```
- This will launch Grafana, Prometheus, Tempo, and the otel-collector alongside LobeChat with Casdoor, Minio, and other advanced services.
+ This will launch Grafana, Prometheus, Tempo, and the otel-collector alongside LobeHub with Casdoor, Minio, and other advanced services.
## 2. Access Grafana Dashboards
@@ -61,7 +61,7 @@ Click on "Explore" in the left sidebar to access the query editor to run ad-hoc
- Ensure all containers are running: `docker compose ps`
- Check logs for any service: `docker compose logs `
-- Verify Prometheus and Tempo are scraping and receiving data from LobeChat and otel-collector.
+- Verify Prometheus and Tempo are scraping and receiving data from LobeHub and otel-collector.
## See Also
diff --git a/docs/self-hosting/advanced/observability/grafana.zh-CN.mdx b/docs/self-hosting/advanced/observability/grafana.zh-CN.mdx
index 6936eaf7bc..1e3d6452c7 100644
--- a/docs/self-hosting/advanced/observability/grafana.zh-CN.mdx
+++ b/docs/self-hosting/advanced/observability/grafana.zh-CN.mdx
@@ -1,7 +1,7 @@
---
title: 使用 Grafana、Prometheus 和 Tempo 进行可观测性监控
description: >-
- 使用 Grafana、Prometheus 指标和 Tempo 链路追踪,监控和分析你的 LobeChat
+ 使用 Grafana、Prometheus 指标和 Tempo 链路追踪,监控和分析你的 LobeHub
实例。本指南涵盖自托管部署的搭建、配置和示例仪表盘。
tags:
- 可观测性
@@ -12,7 +12,7 @@ tags:
# 使用 [Grafana](https://grafana.com/)、[Prometheus](https://prometheus.io/) 和 [Tempo](https://grafana.com/docs/tempo/latest/) 进行可观测性监控
-LobeChat 支持通过开源工具实现自托管部署的高级可观测性:
+LobeHub 支持通过开源工具实现自托管部署的高级可观测性:
- **Grafana**:仪表盘与可视化
- **Prometheus**:指标采集
@@ -42,7 +42,7 @@ mv .env.example .env
docker compose up -d
```
- 这将会启动 Grafana、Prometheus、Tempo、otel-collector 以及 LobeChat、Casdoor、Minio 等高级服务。
+ 这将会启动 Grafana、Prometheus、Tempo、otel-collector 以及 LobeHub、Casdoor、Minio 等高级服务。
## 2. 访问 Grafana 仪表盘
@@ -59,7 +59,7 @@ mv .env.example .env
- 确认所有容器已运行:`docker compose ps`
- 查看服务日志:`docker compose logs <服务名>`
-- 检查 Prometheus 和 Tempo 是否正常采集 LobeChat 及 otel-collector 的数据。
+- 检查 Prometheus 和 Tempo 是否正常采集 LobeHub 及 otel-collector 的数据。
## 相关链接
diff --git a/docs/self-hosting/advanced/observability/langfuse.mdx b/docs/self-hosting/advanced/observability/langfuse.mdx
index 46110892f1..0d0993d6d2 100644
--- a/docs/self-hosting/advanced/observability/langfuse.mdx
+++ b/docs/self-hosting/advanced/observability/langfuse.mdx
@@ -1,7 +1,7 @@
---
-title: Observability and Tracing for LobeChat
+title: Observability and Tracing for LobeHub
description: >-
- Enhance your LobeChat applications with open-source observability and tracing
+ Enhance your LobeHub applications with open-source observability and tracing
using Langfuse. Automatically capture detailed traces and metrics for every
request to optimize and debug your chats.
tags:
@@ -10,11 +10,11 @@ tags:
- Langfuse
---
-# Monitor your LobeChat application with Langfuse
+# Monitor your LobeHub application with Langfuse
## What is Langfuse?
-[Langfuse](https://langfuse.com/) an **open-source LLM Observability platform**. By enabling the Langfuse integration, you can trace your application data to develop, monitor, and improve the use of LobeChat, including:
+[Langfuse](https://langfuse.com/) is an **open-source LLM Observability platform**. By enabling the Langfuse integration, you can trace your application data to develop, monitor, and improve the use of LobeHub, including:
- Application [traces](https://langfuse.com/docs/tracing)
- Usage patterns
@@ -28,13 +28,13 @@ tags:
Get your Langfuse API key by signing up for [Langfuse Cloud](https://cloud.langfuse.com) or [self-hosting](https://langfuse.com/docs/deployment/self-host) Langfuse.
- ### Set up LobeChat
+ ### Set up LobeHub
- There are multiple ways to [self-host LobeChat](https://lobehub.com/docs/self-hosting/start). For this example, we will use the Docker Desktop deployment.
+ There are multiple ways to [self-host LobeHub](https://lobehub.com/docs/self-hosting/start). For this example, we will use the Docker Desktop deployment.
- Before deploying LobeChat, set the following four environment variables with the Langfuse API keys you created in the previous step.
+ Before deploying LobeHub, set the following four environment variables with the Langfuse API keys you created in the previous step.
```sh
ENABLE_LANGFUSE = '1'
@@ -47,25 +47,25 @@ tags:
Before running the Docker container, set the environment variables in the Docker Desktop with the Langfuse API keys you created in the previous step.
-
+
### Activate Analytics in Settings
- Once you have LobeChat running, navigate to the **About** tab in the **Settings** and activate analytics. This is necessary for traces to be sent to Langfuse.
+ Once you have LobeHub running, navigate to the **About** tab in the **Settings** and activate analytics. This is necessary for traces to be sent to Langfuse.
-
+
### See Chat Traces in Langfuse
- After setting your LLM model key, you can start interacting with your LobeChat application.
+ After setting your LLM model key, you can start interacting with your LobeHub application.
-
+
All conversations in the chat are automatically traced and sent to Langfuse. You can view the traces in the [Traces section](https://langfuse.com/docs/tracing) in the Langfuse UI.
-
+
*[Example trace in the Langfuse UI](https://cloud.langfuse.com/project/cloramnkj0002jz088vzn1ja4/traces/63e9246d-3f22-4e45-936d-b0c4ccf55a1e?timestamp=2024-11-26T17%3A00%3A02.028Z\&observation=7ea75a0c-d9d1-425c-9b88-27561c63b413)*
diff --git a/docs/self-hosting/advanced/observability/langfuse.zh-CN.mdx b/docs/self-hosting/advanced/observability/langfuse.zh-CN.mdx
index 1018117d25..15b678e425 100644
--- a/docs/self-hosting/advanced/observability/langfuse.zh-CN.mdx
+++ b/docs/self-hosting/advanced/observability/langfuse.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: LobeChat 的可观测性和追踪
-description: 使用 Langfuse 为你的 LobeChat 应用增强开源可观测性和追踪功能。自动捕获每个请求的详细追踪和指标,以优化和调试你的对话。
+title: LobeHub 的可观测性和追踪
+description: 使用 Langfuse 为你的 LobeHub 应用增强开源可观测性和追踪功能。自动捕获每个请求的详细追踪和指标,以优化和调试你的对话。
tags:
- 可观测性
- 追踪
- Langfuse
---
-# 使用 Langfuse 监控你的 LobeChat 应用
+# 使用 Langfuse 监控你的 LobeHub 应用
## 什么是 Langfuse?
-[Langfuse](https://langfuse.com/) 是一个 **开源的 LLM 可观测性平台**。启用 Langfuse 集成后,你可以追踪应用数据,以开发、监控和优化 LobeChat 的使用,包括:
+[Langfuse](https://langfuse.com/) 是一个 **开源的 LLM 可观测性平台**。启用 Langfuse 集成后,你可以追踪应用数据,以开发、监控和优化 LobeHub 的使用,包括:
- 应用 [追踪](https://langfuse.com/docs/tracing)
- 使用模式
@@ -25,13 +25,13 @@ tags:
通过注册 [Langfuse Cloud](https://cloud.langfuse.com) 或 [自托管](https://langfuse.com/docs/deployment/self-host) Langfuse 来获取你的 Langfuse API 密钥。
- ### 设置 LobeChat
+ ### 设置 LobeHub
- 有多种方式可以 [自托管 LobeChat](https://lobehub.com/docs/self-hosting/start)。在本示例中,我们将使用 Docker Desktop 部署。
+ 有多种方式可以 [自托管 LobeHub](https://lobehub.com/docs/self-hosting/start)。在本示例中,我们将使用 Docker Desktop 部署。
- 在部署 LobeChat 之前,使用你在上一步创建的 Langfuse API 密钥设置以下四个环境变量。
+ 在部署 LobeHub 之前,使用你在上一步创建的 Langfuse API 密钥设置以下四个环境变量。
```sh
ENABLE_LANGFUSE = '1'
@@ -44,25 +44,25 @@ tags:
在运行 Docker 容器之前,在 Docker Desktop 中设置环境变量,并填入你在上一步创建的 Langfuse API 密钥。
-
+
### 在设置中启用分析功能
- 当 LobeChat 运行后,进入 **设置** 中的 **关于** 选项卡,并启用分析功能。这是将追踪数据发送到 Langfuse 所必需的。
+ 当 LobeHub 运行后,进入 **设置** 中的 **关于** 选项卡,并启用分析功能。这是将追踪数据发送到 Langfuse 所必需的。
-
+
### 在 Langfuse 中查看聊天追踪
- 设置好 LLM 模型密钥后,你就可以开始与 LobeChat 进行交互。
+ 设置好 LLM 模型密钥后,你就可以开始与 LobeHub 进行交互。
-
+
所有对话都会被自动追踪并发送到 Langfuse。你可以在 Langfuse UI 的 [追踪部分](https://langfuse.com/docs/tracing) 查看这些数据。
-
+
## 反馈
diff --git a/docs/self-hosting/advanced/online-search.mdx b/docs/self-hosting/advanced/online-search.mdx
index 5388e8120f..53d11b14b3 100644
--- a/docs/self-hosting/advanced/online-search.mdx
+++ b/docs/self-hosting/advanced/online-search.mdx
@@ -3,7 +3,7 @@ title: >-
Configuring Online Search Functionality - Enhancing AI's Ability to Access Web
Information
description: >-
- Learn how to configure the SearXNG online search functionality for LobeChat,
+ Learn how to configure the SearXNG online search functionality for LobeHub,
enabling AI to access the latest web information.
tags:
- Online Search
@@ -14,7 +14,7 @@ tags:
# Configuring Online Search Functionality
-LobeChat supports configuring **web search functionality** for AI, enabling it to retrieve real-time information from the internet to provide more accurate and up-to-date responses. Web search supports multiple search engine providers, including [SearXNG](https://github.com/searxng/searxng), [Search1API](https://www.search1api.com), [Google](https://programmablesearchengine.google.com), and [Brave](https://brave.com/search/api), among others.
+LobeHub supports configuring **web search functionality** for AI, enabling it to retrieve real-time information from the internet to provide more accurate and up-to-date responses. Web search supports multiple search engine providers, including [SearXNG](https://github.com/searxng/searxng), [Search1API](https://www.search1api.com), [Google](https://programmablesearchengine.google.com), and [Brave](https://brave.com/search/api), among others.
Web search allows AI to access time-sensitive content, such as the latest news, technology trends,
@@ -23,7 +23,7 @@ LobeChat supports configuring **web search functionality** for AI, enabling it t
your use case.
-By setting the search service environment variable `SEARCH_PROVIDERS` and the corresponding API Keys, LobeChat will query multiple sources and return the results. You can also configure crawler service environment variables such as `CRAWLER_IMPLS` (e.g., `browserless`, `firecrawl`, `tavily`, etc.) to extract webpage content, enhancing the capability of search + reading.
+By setting the search service environment variable `SEARCH_PROVIDERS` and the corresponding API Keys, LobeHub will query multiple sources and return the results. You can also configure crawler service environment variables such as `CRAWLER_IMPLS` (e.g., `browserless`, `firecrawl`, `tavily`, etc.) to extract webpage content, enhancing the capability of search + reading.
# Core Environment Variables
@@ -189,7 +189,7 @@ SEARXNG_URL=https://searxng-instance.com
This URL should point to a functional SearXNG instance. You can choose to self-host SearXNG or use a publicly available SearXNG instance.
-You can find publicly available SearXNG instances in the [SearXNG instance list](https://searx.space/). Choose an instance that is fast and reliable, and then configure its URL in LobeChat.
+You can find publicly available SearXNG instances in the [SearXNG instance list](https://searx.space/). Choose an instance that is fast and reliable, and then configure its URL in LobeHub.
> Note that the `searxng` you use must have `json` output enabled; otherwise, the `lobe-chat` call will result in an error. If self-hosting, find the `searxng` configuration file and add `json` as shown below.
@@ -206,7 +206,7 @@ formats:
After configuration, you can verify whether the online search functionality is working correctly by following these steps:
-1. Restart the LobeChat service.
+1. Restart the LobeHub service.
2. Start a new chat session, enable smart online search, and then ask AI a question that requires the latest information, such as "What is the current gold price today?" or "What are the latest major news stories?"
3. Observe whether AI can return the latest information based on internet searches.
@@ -214,6 +214,6 @@ If AI can answer these time-sensitive questions, it indicates that the online se
## References
-- [LobeChat Online Search RFC Discussion](https://github.com/lobehub/lobe-chat/discussions/6447)
+- [LobeHub Online Search RFC Discussion](https://github.com/lobehub/lobe-chat/discussions/6447)
- [SearXNG GitHub Repository](https://github.com/searxng/searxng)
- [Discussion on Enabling JSON Output for SearXNG](https://github.com/searxng/searxng/discussions/3542)
diff --git a/docs/self-hosting/advanced/online-search.zh-CN.mdx b/docs/self-hosting/advanced/online-search.zh-CN.mdx
index 434ef3898b..1e48f660dc 100644
--- a/docs/self-hosting/advanced/online-search.zh-CN.mdx
+++ b/docs/self-hosting/advanced/online-search.zh-CN.mdx
@@ -1,6 +1,6 @@
---
title: 配置联网搜索功能 - 增强 AI 的网络信息获取能力
-description: 了解如何为 LobeChat 配置 SearXNG 联网搜索功能,使 AI 能够获取最新的网络信息。
+description: 了解如何为 LobeHub 配置 SearXNG 联网搜索功能,使 AI 能够获取最新的网络信息。
tags:
- 联网搜索
- SearXNG
@@ -10,7 +10,7 @@ tags:
# 配置联网搜索功能
-LobeChat 支持为 AI 配置**联网搜索功能**,使其能够实时获取互联网信息,从而提供更准确、最新的回答。联网搜索支持多个搜索引擎提供商,包括 [SearXNG](https://github.com/searxng/searxng)、[Search1API](https://www.search1api.com)、[Google](https://programmablesearchengine.google.com)、[Brave](https://brave.com/search/api) 等。
+LobeHub 支持为 AI 配置**联网搜索功能**,使其能够实时获取互联网信息,从而提供更准确、最新的回答。联网搜索支持多个搜索引擎提供商,包括 [SearXNG](https://github.com/searxng/searxng)、[Search1API](https://www.search1api.com)、[Google](https://programmablesearchengine.google.com)、[Brave](https://brave.com/search/api) 等。
联网搜索可以让 AI 获取时效性内容,如最新新闻、技术动态或产品信息。你可以使用开源的 SearXNG
@@ -18,7 +18,7 @@ LobeChat 支持为 AI 配置**联网搜索功能**,使其能够实时获取互
等,根据你的使用场景自由组合。
-通过设置搜索服务环境变量 `SEARCH_PROVIDERS` 和对应的 API Key,LobeChat 将在多个搜索源中查询并返回结果。你还可以搭配配置爬虫服务环境变量 `CRAWLER_IMPLS`(如 `browserless`、`firecrawl`、`tavily` 等)以提取网页内容,实现搜索 + 阅读的增强能力。
+通过设置搜索服务环境变量 `SEARCH_PROVIDERS` 和对应的 API Key,LobeHub 将在多个搜索源中查询并返回结果。你还可以搭配配置爬虫服务环境变量 `CRAWLER_IMPLS`(如 `browserless`、`firecrawl`、`tavily` 等)以提取网页内容,实现搜索 + 阅读的增强能力。
# 核心环境变量
@@ -184,7 +184,7 @@ SEARXNG_URL=https://searxng-instance.com
这个 URL 应该指向一个可用的 SearXNG 实例。您可以选择自行部署 SearXNG,或使用公共可用的 SearXNG 实例。
-您可以在 [SearXNG 实例列表](https://searx.space/) 中找到公开可用的 SearXNG 实例。选择一个响应速度快、可靠性高的实例,然后将其 URL 配置到 LobeChat 中。
+您可以在 [SearXNG 实例列表](https://searx.space/) 中找到公开可用的 SearXNG 实例。选择一个响应速度快、可靠性高的实例,然后将其 URL 配置到 LobeHub 中。
> 注意,使用的 `searxng` 必须开启 `json` 输出,否则 `lobe-chat` 调用会报错。如果是自托管,类似下面这样,找到 `searxng` 的配置文件,追加 `json` 即可。
@@ -201,7 +201,7 @@ formats:
配置完成后,您可以通过以下步骤验证联网搜索功能是否正常工作:
-1. 重启 LobeChat 服务
+1. 重启 LobeHub 服务
2. 启动一个新的聊天,启动智能联网,之后向 AI 提问一个需要最新信息的问题,例如:"今天的实时金价是多少?" 或 "最近的重大新闻有哪些?"
3. 观察 AI 是否能够返回基于互联网搜索的最新信息
@@ -209,6 +209,6 @@ formats:
## 参考资料
-- [LobeChat 联网搜索 RFC 讨论](https://github.com/lobehub/lobe-chat/discussions/6447)
+- [LobeHub 联网搜索 RFC 讨论](https://github.com/lobehub/lobe-chat/discussions/6447)
- [SearXNG GitHub 仓库](https://github.com/searxng/searxng)
- [SearXNG 开启 json 输出的讨论](https://github.com/searxng/searxng/discussions/3542)
diff --git a/docs/self-hosting/advanced/s3.mdx b/docs/self-hosting/advanced/s3.mdx
index 9d22d8aa55..12a3735d93 100644
--- a/docs/self-hosting/advanced/s3.mdx
+++ b/docs/self-hosting/advanced/s3.mdx
@@ -1,9 +1,9 @@
---
-title: Configuring S3 Storage Service for AI - LobeChat
-description: Learn how to configure S3 storage for LobeChat's multimodal AI conversations.
+title: Configuring S3 Storage Service for AI - LobeHub
+description: Learn how to configure S3 storage for LobeHub's multimodal AI conversations.
tags:
- S3 Storage
- - LobeChat
+ - LobeHub
- AI Conversations
- Cloud Storage
- Image Upload
@@ -11,7 +11,7 @@ tags:
# Configuring S3 Storage Service
-LobeChat has supported multimodal AI conversations since [a long time ago](https://x.com/lobehub/status/1724289575672291782), which involves the function of uploading images to AI. In the client-side database solution, image files are stored as binary data directly in the browser's IndexedDB database. However, this solution is not feasible in the server-side database. Storing file-like data directly in Postgres would greatly waste database storage space and slow down computational performance.
+LobeHub has supported multimodal AI conversations since [a long time ago](https://x.com/lobehub/status/1724289575672291782), which involves the function of uploading images to AI. In the client-side database solution, image files are stored as binary data directly in the browser's IndexedDB database. However, this solution is not feasible in the server-side database. Storing file-like data directly in Postgres would greatly waste database storage space and slow down computational performance.
The best practice in this area is to use a file storage service (S3) to store image files. S3 is also the storage solution on which the file upload/knowledge base function depends.
diff --git a/docs/self-hosting/advanced/s3.zh-CN.mdx b/docs/self-hosting/advanced/s3.zh-CN.mdx
index e3f3de3ade..49713c2fe5 100644
--- a/docs/self-hosting/advanced/s3.zh-CN.mdx
+++ b/docs/self-hosting/advanced/s3.zh-CN.mdx
@@ -10,7 +10,7 @@ tags:
# 配置 S3 存储服务
-LobeChat 在 [很早以前](https://x.com/lobehub/status/1724289575672291782) 就支持了多模态的 AI 会话,其中涉及到图片上传给大模型的功能。在客户端数据库方案中,图片文件直接以二进制数据存储在浏览器 IndexedDB 数据库,但在服务端数据库中这个方案并不可行。因为在 Postgres 中直接存储文件类二进制数据会大大浪费宝贵的数据库存储空间,并拖慢计算性能。
+LobeHub 在 [很早以前](https://x.com/lobehub/status/1724289575672291782) 就支持了多模态的 AI 会话,其中涉及到图片上传给大模型的功能。在客户端数据库方案中,图片文件直接以二进制数据存储在浏览器 IndexedDB 数据库,但在服务端数据库中这个方案并不可行。因为在 Postgres 中直接存储文件类二进制数据会大大浪费宝贵的数据库存储空间,并拖慢计算性能。
这块最佳实践是使用文件存储服务(S3)来存储图片文件,同时 S3 也是文件上传 / 知识库功能所依赖的大容量静态文件存储方案。
diff --git a/docs/self-hosting/advanced/s3/cloudflare-r2.mdx b/docs/self-hosting/advanced/s3/cloudflare-r2.mdx
index 3517814d31..e5406208e6 100644
--- a/docs/self-hosting/advanced/s3/cloudflare-r2.mdx
+++ b/docs/self-hosting/advanced/s3/cloudflare-r2.mdx
@@ -20,27 +20,27 @@ We need to configure an S3 storage service in the server-side database to store
The interface of Cloudflare R2 is shown below:
-
+
When creating the bucket, specify its name and then click create.
-
+
### Obtain Environment Variables for the Bucket
In the settings of the R2 storage bucket, you can view the bucket configuration information:
-
+
The corresponding environment variables are:
```shell
# Bucket name
- S3_BUCKET=lobechat
+ S3_BUCKET=lobehub
# Request endpoint of the bucket (note that the path in this link includes the bucket name, which must be removed, or use the link provided on the page for applying S3 API token)
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# Access domain of the bucket
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+ S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
```
@@ -49,21 +49,21 @@ We need to configure an S3 storage service in the server-side database to store
### Obtain S3 Key Environment Variables
- You need to obtain the access key for S3 so that the LobeChat server has permission to access the S3 storage service. In R2, you can configure the access key in the account details:
+ You need to obtain the access key for S3 so that the LobeHub server has permission to access the S3 storage service. In R2, you can configure the access key in the account details:
-
+
Click the button in the upper right corner to create an API token and enter the create API Token page.
-
+
Since our server-side database needs to read and write to the S3 storage service, the permission needs to be set to `Object Read and Write`, then click create.
-
+
After creation, you can see the corresponding S3 API token.
-
+
The corresponding environment variables are:
@@ -78,11 +78,11 @@ We need to configure an S3 storage service in the server-side database to store
In R2, you can find the CORS configuration in the settings of the storage bucket:
-
+
Add a CORS rule to allow requests from your domain (in the previous section, it is `https://your-project.vercel.app`):
-
+
If you also plan to use the desktop client, add [http://localhost:3015](http://localhost:3015) to AllowedOrigins so the desktop client (running locally) can access R2.
@@ -115,7 +115,7 @@ S3_ACCESS_KEY_ID=9998d6757e276cf9f1edbd325b7083a6
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# Bucket Name
-S3_BUCKET=lobechat
+S3_BUCKET=lobehub
# Bucket Request Endpoint
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# Public Access Domain for the Bucket
diff --git a/docs/self-hosting/advanced/s3/cloudflare-r2.zh-CN.mdx b/docs/self-hosting/advanced/s3/cloudflare-r2.zh-CN.mdx
index 304c847599..d2bd7bc1d7 100644
--- a/docs/self-hosting/advanced/s3/cloudflare-r2.zh-CN.mdx
+++ b/docs/self-hosting/advanced/s3/cloudflare-r2.zh-CN.mdx
@@ -21,48 +21,48 @@ tags:
下图是 Cloudflare R2 的界面:
-
+
创建存储桶时将指定其名称,然后点击创建。
-
+
### 获取存储桶相关环境变量
在 R2 存储桶的设置中,可以看到桶配置的信息:
-
+
其对应的环境变量为:
```shell
# 存储桶的名称
- S3_BUCKET=lobechat
+ S3_BUCKET=lobehub
# 存储桶的请求端点(注意此处链接的路径带存储桶名称,必须删除该路径,或使用申请 S3 API token 页面所提供的链接)
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# 存储桶对外的访问域名
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+ S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
```
`S3_ENDPOINT`必须删除其路径,否则会无法访问所上传文件
### 获取 S3 密钥环境变量
- 你需要获取 S3 的访问密钥,以便 LobeChat 的服务端有权限访问 S3 存储服务。在 R2 中,你可以在账户详情中配置访问密钥:
+ 你需要获取 S3 的访问密钥,以便 LobeHub 的服务端有权限访问 S3 存储服务。在 R2 中,你可以在账户详情中配置访问密钥:
-
+
点击右上角按钮创建 API token,进入创建 API Token 页面
-
+
鉴于我们的服务端数据库需要读写 S3 存储服务,因此权限需要选择`对象读与写`,然后点击创建。
-
+
创建完成后,就可以看到对应的 S3 API token
-
+
其对应的环境变量为:
@@ -77,11 +77,11 @@ tags:
在 R2 中,你可以在存储桶的设置中找到跨域配置:
-
+
添加跨域规则,允许你的域名(在上文是 `https://your-project.vercel.app`)来源的请求:
-
+
如果你还需要在桌面端使用,请在 AllowedOrigins 中额外添加 [http://localhost:3015](http://localhost:3015),以便桌面端(本地运行)能够访问 R2。
@@ -112,7 +112,7 @@ S3_ACCESS_KEY_ID=9998d6757e276cf9f1edbd325b7083a6
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# 存储桶的名称
-S3_BUCKET=lobechat
+S3_BUCKET=lobehub
# 存储桶的请求端点
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# 存储桶对外的访问域名
diff --git a/docs/self-hosting/advanced/s3/tencent-cloud.mdx b/docs/self-hosting/advanced/s3/tencent-cloud.mdx
index eed8a276d4..a18f069792 100644
--- a/docs/self-hosting/advanced/s3/tencent-cloud.mdx
+++ b/docs/self-hosting/advanced/s3/tencent-cloud.mdx
@@ -21,17 +21,17 @@ We need to configure S3 storage service for file storage in the server-side data
Create a new bucket on [Tencent Cloud COS](https://console.cloud.tencent.com/cos/bucket):
-
+
Name the bucket (e.g., 'lobe'). Choose 'Public Read, Private Write', select any region, and keep other settings default.
-
+
### Get Bucket-related Environment Variables
View bucket information in COS bucket overview:
-
+
Corresponding environment variables:
@@ -52,7 +52,7 @@ We need to configure S3 storage service for file storage in the server-side data
Add the following in 'Security Management - CORS Settings':
-
+
### Get S3 Keys
diff --git a/docs/self-hosting/advanced/s3/tencent-cloud.zh-CN.mdx b/docs/self-hosting/advanced/s3/tencent-cloud.zh-CN.mdx
index ed7a3ae0ce..274baa03bf 100644
--- a/docs/self-hosting/advanced/s3/tencent-cloud.zh-CN.mdx
+++ b/docs/self-hosting/advanced/s3/tencent-cloud.zh-CN.mdx
@@ -19,17 +19,17 @@ tags:
你需要首先前往 [腾讯云 COS](https://console.cloud.tencent.com/cos/bucket) 并创建一个新的存储桶(Bucket):
-
+
创建存储桶时将指定其名称,下文以 `lobe` 为例。选择 `公有读私有写`,地域随意,其余配置一概默认即可,然后点击创建。
-
+
### 获取存储桶相关环境变量
在 COS 存储桶的概览设置中,可以看到桶配置的信息:
-
+
其对应的环境变量为:
@@ -54,7 +54,7 @@ tags:
在左侧 `安全管理 - 跨域访问 CORS 设置` 中,添加以下配置并保存:
-
+
### 获取 S3 秘钥
diff --git a/docs/self-hosting/advanced/settings-url-share.mdx b/docs/self-hosting/advanced/settings-url-share.mdx
index 80e121be3d..263d474e5c 100644
--- a/docs/self-hosting/advanced/settings-url-share.mdx
+++ b/docs/self-hosting/advanced/settings-url-share.mdx
@@ -1,7 +1,7 @@
---
title: Share Settings via URL - Import and Export Configuration Settings
description: >-
- Learn how to import and export configuration settings for LobeChat via URL.
+ Learn how to import and export configuration settings for LobeHub via URL.
Understand the supported settings, URL format, and parameter schema for
keyVaults and languageModel.
tags:
@@ -9,13 +9,13 @@ tags:
- URL Import
- URL Export
- Configuration Settings
- - LobeChat
+ - LobeHub
- Parameter Schema
---
# Share settings via URL
-LobeChat support import settings from external URL to quickly set up LobeChat configuration.
+LobeHub supports importing settings from an external URL to quickly set up the LobeHub configuration.
The currently supported settings are:
@@ -62,7 +62,7 @@ console.log(url);
```
- LobeChat does not verify the correctness of the settings parameters in the URL, nor provide
+ LobeHub does not verify the correctness of the settings parameters in the URL, nor does it provide
encryption or decryption methods. Please use with caution.
diff --git a/docs/self-hosting/advanced/settings-url-share.zh-CN.mdx b/docs/self-hosting/advanced/settings-url-share.zh-CN.mdx
index e372e7117f..a13910ddf8 100644
--- a/docs/self-hosting/advanced/settings-url-share.zh-CN.mdx
+++ b/docs/self-hosting/advanced/settings-url-share.zh-CN.mdx
@@ -1,10 +1,10 @@
---
-title: URL 分享设置参数 - LobeChat 配置快速设置
-description: 了解如何从外部 URL 导入和导出 LobeChat 的设置参数,包括 keyVaults 和 languageModel,以及参数格式和类型。
+title: URL 分享设置参数 - LobeHub 配置快速设置
+description: 了解如何从外部 URL 导入和导出 LobeHub 的设置参数,包括 keyVaults 和 languageModel,以及参数格式和类型。
tags:
- URL 分享
- 设置参数
- - LobeChat
+ - LobeHub
- keyVaults
- languageModel
- JSON 格式
@@ -14,7 +14,7 @@ tags:
# URL 分享设置参数
-LobeChat 支持从外部 URL 导入设置参数,以便于快速设置 LobeChat 的配置。
+LobeHub 支持从外部 URL 导入设置参数,以便于快速设置 LobeHub 的配置。
目前支持的设置项有:
@@ -61,7 +61,7 @@ console.log(url);
```
- LobeChat 不对 URL 中的设置参数进行正确性校验,也不提供 URL 的加密、解密方法,请谨慎使用。
+ LobeHub 不对 URL 中的设置参数进行正确性校验,也不提供 URL 的加密、解密方法,请谨慎使用。
## 参数格式
diff --git a/docs/self-hosting/advanced/upstream-sync.mdx b/docs/self-hosting/advanced/upstream-sync.mdx
index 0525d8e816..be688071fe 100644
--- a/docs/self-hosting/advanced/upstream-sync.mdx
+++ b/docs/self-hosting/advanced/upstream-sync.mdx
@@ -1,7 +1,7 @@
---
-title: LobeChat Upstream Sync and Docker Deployment Guide
+title: LobeHub Upstream Sync and Docker Deployment Guide
description: >-
- Learn how to enable automatic updates for LobeChat Vercel deployments and
+ Learn how to enable automatic updates for LobeHub Vercel deployments and
upgrade Docker deployment versions easily. Follow step-by-step instructions
for a seamless deployment process.
tags:
@@ -36,27 +36,27 @@ After forking the project, due to Github's limitations, you need to manually ena
If you encounter a sync failure, you need to manually click "Update Branch" once.
-
+
-
+
## `B` Docker Deployment
-Upgrading the Docker deployment version is very simple, you just need to redeploy the latest LobeChat image. Here are the commands required to perform these steps:
+Upgrading the Docker deployment version is very simple, you just need to redeploy the latest LobeHub image. Here are the commands required to perform these steps:
- ### Stop and Remove the Current Running LobeChat Container
+ ### Stop and Remove the Current Running LobeHub Container
- Assuming the LobeChat container is named `lobe-chat`, use the following commands to stop and remove the currently running LobeChat container:
+ Assuming the LobeHub container is named `lobe-chat`, use the following commands to stop and remove the currently running LobeHub container:
```fish
docker stop lobe-chat
docker rm lobe-chat
```
- ### Pull the Latest LobeChat Image
+ ### Pull the Latest LobeHub Image
- Use the following command to pull the latest Docker image for LobeChat:
+ Use the following command to pull the latest Docker image for LobeHub:
```fish
docker pull lobehub/lobe-chat
@@ -64,7 +64,7 @@ Upgrading the Docker deployment version is very simple, you just need to redeplo
### Restart the Docker Container
- Redeploy the LobeChat container using the newly pulled image:
+ Redeploy the LobeHub container using the newly pulled image:
```fish
docker run -d -p 3210:3210 \
@@ -80,7 +80,7 @@ Ensure that you have sufficient permissions to stop and remove the container bef
**If I redeploy, will I lose my local chat records?**
- No need to worry, you won't. All of LobeChat's chat records are stored in your local browser. Therefore, when redeploying LobeChat using Docker, your chat records will not be lost.
+ No need to worry, you won't. All of LobeHub's chat records are stored in your local browser. Therefore, when redeploying LobeHub using Docker, your chat records will not be lost.
If you wish to automate the above steps, you can follow the method below and use Crontab scheduling to complete it. The specific steps are as follows.
diff --git a/docs/self-hosting/advanced/upstream-sync.zh-CN.mdx b/docs/self-hosting/advanced/upstream-sync.zh-CN.mdx
index 1ccb01ca24..d2850e6777 100644
--- a/docs/self-hosting/advanced/upstream-sync.zh-CN.mdx
+++ b/docs/self-hosting/advanced/upstream-sync.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: LobeChat 启动自动更新 - Vercel / Zeabur 部署
-description: 按照指南重新部署项目以解决 LobeChat 在 Vercel 中默认创建新项目导致的“有可用更新”提示问题,并启用自动更新。
+title: LobeHub 启动自动更新 - Vercel / Zeabur 部署
+description: 按照指南重新部署项目以解决 LobeHub 在 Vercel 中默认创建新项目导致的“有可用更新”提示问题,并启用自动更新。
tags:
- Vercel
- Zeabur
@@ -31,27 +31,27 @@ tags:
如果你遇到了同步失败的情况,你需要手动重新点一次 「Update Branch」。
-
+
-
+
## `B` Docker 部署
-Docker 部署版本的升级非常简单,只需要重新部署 LobeChat 的最新镜像即可。 以下是执行这些步骤所需的指令:
+Docker 部署版本的升级非常简单,只需要重新部署 LobeHub 的最新镜像即可。 以下是执行这些步骤所需的指令:
- ### 停止并删除当前运行的 LobeChat 容器
+ ### 停止并删除当前运行的 LobeHub 容器
- 假设 LobeChat 容器的名称是 `lobe-chat`,使用以下指令停止并删除当前运行的 LobeChat 容器:
+ 假设 LobeHub 容器的名称是 `lobe-chat`,使用以下指令停止并删除当前运行的 LobeHub 容器:
```fish
docker stop lobe-chat
docker rm lobe-chat
```
- ### 拉取最新的 LobeChat 镜像
+ ### 拉取最新的 LobeHub 镜像
- 使用以下命令拉取 LobeChat 的最新 Docker 镜像:
+ 使用以下命令拉取 LobeHub 的最新 Docker 镜像:
```fish
docker pull lobehub/lobe-chat
@@ -59,7 +59,7 @@ Docker 部署版本的升级非常简单,只需要重新部署 LobeChat 的最
### 重新启动 Docker 容器
- 使用新拉取的镜像重新部署 LobeChat 容器:
+ 使用新拉取的镜像重新部署 LobeHub 容器:
```fish
docker run -d -p 3210:3210 \
@@ -75,7 +75,7 @@ Docker 部署版本的升级非常简单,只需要重新部署 LobeChat 的最
**重新部署的话,我本地的聊天记录会丢失吗?**
- 放心,不会的。LobeChat 的聊天记录全部都存储在你的本地浏览器中。因此使用 Docker 重新部署 LobeChat 时,你的聊天记录并不会丢失。
+ 放心,不会的。LobeHub 的聊天记录全部都存储在你的本地浏览器中。因此使用 Docker 重新部署 LobeHub 时,你的聊天记录并不会丢失。
如果你希望自动化执行以上步骤,你可以参照下面的方法,利用 Crontab 定时来完成。具体步骤如下。
diff --git a/docs/self-hosting/environment-variables.mdx b/docs/self-hosting/environment-variables.mdx
index 758dce29d3..1c34bb418b 100644
--- a/docs/self-hosting/environment-variables.mdx
+++ b/docs/self-hosting/environment-variables.mdx
@@ -1,10 +1,10 @@
---
-title: LobeChat Environment Variables - Customizing Guide
+title: LobeHub Environment Variables - Customizing Guide
description: >-
- Learn how to customize LobeChat configuration using environment variables for
+ Learn how to customize LobeHub configuration using environment variables for
additional features and options.
tags:
- - LobeChat
+ - LobeHub
- Environment Variables
- Configuration
- Customization
@@ -12,7 +12,7 @@ tags:
# Environment Variables
-LobeChat provides some additional configuration options when deployed, which can be customized using environment variables.
+LobeHub provides some additional configuration options when deployed, which can be customized using environment variables.
@@ -28,7 +28,7 @@ LobeChat provides some additional configuration options when deployed, which can
## Building a Custom Image with Overridden `NEXT_PUBLIC` Variables
-If you need to override `NEXT_PUBLIC` environment variables, you can build a custom Docker image using GitHub Actions without forking the entire LobeChat repository. Here's a guide on how to do this:
+If you need to override `NEXT_PUBLIC` environment variables, you can build a custom Docker image using GitHub Actions without forking the entire LobeHub repository. Here's a guide on how to do this:
1. Create a new GitHub repository for your custom build.
@@ -89,6 +89,6 @@ jobs:
5. Set "Read and write" permissions for workflows in Repository settings > Actions > General > Workflow permissions.
-6. To build your custom image, go to the "Actions" tab in your GitHub repository and manually trigger the "Build Custom LobeChat Image" workflow.
+6. To build your custom image, go to the "Actions" tab in your GitHub repository and manually trigger the "Build Custom LobeHub Image" workflow.
-This approach allows you to create a custom build with your desired `NEXT_PUBLIC` variables without maintaining a full fork of the LobeChat repository. You can trigger a new build whenever you need to update your custom image.
+This approach allows you to create a custom build with your desired `NEXT_PUBLIC` variables without maintaining a full fork of the LobeHub repository. You can trigger a new build whenever you need to update your custom image.
diff --git a/docs/self-hosting/environment-variables.zh-CN.mdx b/docs/self-hosting/environment-variables.zh-CN.mdx
index a6f76ec612..d685f2dc18 100644
--- a/docs/self-hosting/environment-variables.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 环境变量 - 自定义设置
-description: 了解如何使用环境变量在部署 LobeChat 时进行自定义设置。包括基础环境变量和模型服务商设置。
+title: LobeHub 环境变量 - 自定义设置
+description: 了解如何使用环境变量在部署 LobeHub 时进行自定义设置。包括基础环境变量和模型服务商设置。
tags:
- - LobeChat
+ - LobeHub
- 环境变量
- 自定义设置
- 模型服务商
@@ -10,7 +10,7 @@ tags:
# 环境变量
-LobeChat 在部署时提供了一些额外的配置项,你可以使用环境变量进行自定义设置。
+LobeHub 在部署时提供了一些额外的配置项,你可以使用环境变量进行自定义设置。
diff --git a/docs/self-hosting/environment-variables/analytics.mdx b/docs/self-hosting/environment-variables/analytics.mdx
index c5bb104c47..d096e3038c 100644
--- a/docs/self-hosting/environment-variables/analytics.mdx
+++ b/docs/self-hosting/environment-variables/analytics.mdx
@@ -1,12 +1,12 @@
---
-title: Integrating Analytics in LobeChat
+title: Integrating Analytics in LobeHub
description: >-
Learn how to configure environment variables for Vercel Analytics, Google
- Analytics, PostHog Analytics, and Umami Analytics in LobeChat for data
+ Analytics, PostHog Analytics, and Umami Analytics in LobeHub for data
collection and analysis.
tags:
- Data Analytics
- - LobeChat
+ - LobeHub
- Analytics Services
- Environment Variables
- Configuration
@@ -14,7 +14,7 @@ tags:
# Data Analytics
-We have integrated several free/open-source data analytics services in LobeChat for collecting user usage data. Here are environment variables that you can use.
+We have integrated several free/open-source data analytics services in LobeHub for collecting user usage data. Here are environment variables that you can use.
## Vercel Analytics
@@ -82,7 +82,7 @@ We have integrated several free/open-source data analytics services in LobeChat
## Langfuse Observability
-[Langfuse](https://langfuse.com/) is an [open-source](https://github.com/langfuse/langfuse) LLM Observability platform. By enabling the Langfuse integration, you can trace your chat data with Langfuse to develop, monitor, and evaluate the use of your LobeChat.
+[Langfuse](https://langfuse.com/) is an [open-source](https://github.com/langfuse/langfuse) LLM Observability platform. By enabling the Langfuse integration, you can trace your chat data with Langfuse to develop, monitor, and evaluate the use of your LobeHub.
### `ENABLE_LANGFUSE`
diff --git a/docs/self-hosting/environment-variables/analytics.zh-CN.mdx b/docs/self-hosting/environment-variables/analytics.zh-CN.mdx
index 9806738299..4b9c1be9eb 100644
--- a/docs/self-hosting/environment-variables/analytics.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables/analytics.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中 配置数据统计服务环境变量指南
-description: 了解如何在 LobeChat 中配置各种数据统计服务的环境变量,包括Vercel Analytics、Google Analytics 等。
+title: 在 LobeHub 中 配置数据统计服务环境变量指南
+description: 了解如何在 LobeHub 中配置各种数据统计服务的环境变量,包括Vercel Analytics、Google Analytics 等。
tags:
- 数据统计
- 环境变量
@@ -12,7 +12,7 @@ tags:
# 数据统计
-我们在 LobeChat 中集成了若干免费 / 开源的数据统计服务,用于了解用户的使用情况,以下是相关环境变量。
+我们在 LobeHub 中集成了若干免费 / 开源的数据统计服务,用于了解用户的使用情况,以下是相关环境变量。
## Vercel Analytics
diff --git a/docs/self-hosting/environment-variables/auth.mdx b/docs/self-hosting/environment-variables/auth.mdx
index acab33f10d..385f81158f 100644
--- a/docs/self-hosting/environment-variables/auth.mdx
+++ b/docs/self-hosting/environment-variables/auth.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat Authentication Service Environment Variables
+title: LobeHub Authentication Service Environment Variables
description: >-
Explore the essential environment variables for configuring authentication
- services in LobeChat, including Better Auth, OAuth SSO, NextAuth settings, and
+ services in LobeHub, including Better Auth, OAuth SSO, NextAuth settings, and
provider-specific details.
tags:
- Authentication Service
@@ -14,7 +14,7 @@ tags:
# Authentication Service
-LobeChat provides a complete authentication service capability when deployed. The following are the relevant environment variables. You can use these environment variables to easily define the identity verification services that need to be enabled in LobeChat.
+LobeHub provides a complete authentication service capability when deployed. The following are the relevant environment variables. You can use these environment variables to easily define the identity verification services that need to be enabled in LobeHub.
## Better Auth
diff --git a/docs/self-hosting/environment-variables/auth.zh-CN.mdx b/docs/self-hosting/environment-variables/auth.zh-CN.mdx
index d8c977d539..fdd50f2823 100644
--- a/docs/self-hosting/environment-variables/auth.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables/auth.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 身份验证服务设置
-description: 了解如何配置 LobeChat 的身份验证服务环境变量,包括 Better Auth、OAuth SSO、NextAuth 设置等。
+title: LobeHub 身份验证服务设置
+description: 了解如何配置 LobeHub 的身份验证服务环境变量,包括 Better Auth、OAuth SSO、NextAuth 设置等。
tags:
- - LobeChat
+ - LobeHub
- 身份验证服务
- Better Auth
- 单点登录
@@ -12,7 +12,7 @@ tags:
# 身份验证服务
-LobeChat 在部署时提供了完善的身份验证服务能力,以下是相关的环境变量,你可以使用这些环境变量轻松定义需要在 LobeChat 中开启的身份验证服务。
+LobeHub 在部署时提供了完善的身份验证服务能力,以下是相关的环境变量,你可以使用这些环境变量轻松定义需要在 LobeHub 中开启的身份验证服务。
## Better Auth
diff --git a/docs/self-hosting/environment-variables/basic.mdx b/docs/self-hosting/environment-variables/basic.mdx
index c206219a9b..7b4a964b79 100644
--- a/docs/self-hosting/environment-variables/basic.mdx
+++ b/docs/self-hosting/environment-variables/basic.mdx
@@ -1,11 +1,11 @@
---
-title: Customize LobeChat Deployment with Environment Variables
+title: Customize LobeHub Deployment with Environment Variables
description: >-
- Learn about the common environment variables used to customize LobeChat
- deployment, including OAuth SSO, base path, and default agent
+ Learn about the common environment variables used to customize LobeHub
+ deployment, including access code, OAuth SSO, base path, and default agent
configuration.
tags:
- - LobeChat
+ - LobeHub
- environment variables
- deployment
- OAuth SSO
@@ -15,14 +15,14 @@ tags:
# Environment Variables
-LobeChat provides some additional configuration options during deployment, which can be customized using environment variables.
+LobeHub provides some additional configuration options during deployment, which can be customized using environment variables.
## Common Variables
### `KEY_VAULTS_SECRET`
-- Type: Required (server database mode)
-- Description: Used to encrypt sensitive information stored by users in the database (such as API Keys, baseURL, etc.), preventing exposure of critical information in case of database breach
+- Type: Optional
+- Description: Add a password to access the LobeHub service. You can set a long password to prevent brute force attacks.
- Default: -
- Example: `Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ=`
@@ -46,11 +46,11 @@ When using the `turn` mode, the API Keys will be retrieved in a polling manner a
### `DEFAULT_AGENT_CONFIG`
- Type: Optional
-- Description: Used to configure the default settings for the LobeChat default agent. It supports various data types and structures, including key-value pairs, nested fields, array values, and more.
+- Description: Used to configure the default settings for the LobeHub default agent. It supports various data types and structures, including key-value pairs, nested fields, array values, and more.
- Default: -
- Example: `'model=gpt-4-1106-preview;params.max_tokens=300;plugins=search-engine,lobe-image-designer'`
-The `DEFAULT_AGENT_CONFIG` is used to configure the default settings for the LobeChat default agent. It supports various data types and structures, including key-value pairs, nested fields, array values, and more. The table below provides detailed information on the configuration options, examples, and corresponding explanations for the `DEFAULT_AGENT_CONFIG` environment variable:
+The `DEFAULT_AGENT_CONFIG` is used to configure the default settings for the LobeHub default agent. It supports various data types and structures, including key-value pairs, nested fields, array values, and more. The table below provides detailed information on the configuration options, examples, and corresponding explanations for the `DEFAULT_AGENT_CONFIG` environment variable:
| Configuration Type | Example | Explanation |
| ----------------------- | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ |
@@ -72,7 +72,7 @@ Further reading:
### `SYSTEM_AGENT`
- Type: Optional
-- Description: Used to configure models and providers for LobeChat system agents (such as topic generation, translation, etc.).
+- Description: Used to configure models and providers for LobeHub system agents (such as topic generation, translation, etc.).
- Default value: `-`
- Example: `default=ollama/deepseek-v3` or `topic=openai/gpt-4,translation=anthropic/claude-1`
@@ -103,7 +103,7 @@ Available system agents and their functions:
### `FEATURE_FLAGS`
- Type: Optional
-- Description: Used to control LobeChat's feature functionalities. Supports multiple feature flags, using `+` to add a feature and `-` to disable a feature. Separate multiple feature flags with a comma `,` and enclose the entire value in quotes `"` to avoid parsing errors.
+- Description: Used to control LobeHub's feature functionalities. Supports multiple feature flags, using `+` to add a feature and `-` to disable a feature. Separate multiple feature flags with a comma `,` and enclose the entire value in quotes `"` to avoid parsing errors.
- Default: `-`
- Example: `"-welcome_suggest"`
@@ -145,9 +145,9 @@ For specific content, please refer to the [Feature Flags](/docs/self-hosting/adv
**Use Cases**:
-LobeChat performs SSRF security checks in the following scenarios:
+LobeHub performs SSRF security checks in the following scenarios:
-1. **Image/Video URL to Base64 Conversion**: When processing media messages (e.g., vision models, multimodal models), LobeChat converts image and video URLs to base64 format. This check prevents malicious users from accessing internal network resources.
+1. **Image/Video URL to Base64 Conversion**: When processing media messages (e.g., vision models, multimodal models), LobeHub converts image and video URLs to base64 format. This check prevents malicious users from accessing internal network resources.
Examples:
@@ -221,7 +221,7 @@ This environment variable allows administrators to customize the default image g
### `PLUGINS_INDEX_URL`
- Type: Optional
-- Description: Index address of the LobeChat plugin market. If you have deployed the plugin market service on your own, you can use this variable to override the default plugin market address.
+- Description: Index address of the LobeHub plugin market. If you have deployed the plugin market service on your own, you can use this variable to override the default plugin market address.
- Default: `https://chat-plugins.lobehub.com`
### `PLUGIN_SETTINGS`
@@ -250,5 +250,5 @@ The above example sets the `SERPAPI_API_KEY` of the `search-engine` plugin to `x
### `AGENTS_INDEX_URL`
- Type: Optional
-- Description: Index address of the LobeChat assistant market. If you have deployed the assistant market service on your own, you can use this variable to override the default market address.
+- Description: Index address of the LobeHub assistant market. If you have deployed the assistant market service on your own, you can use this variable to override the default market address.
- Default: `https://chat-agents.lobehub.com`
diff --git a/docs/self-hosting/environment-variables/basic.zh-CN.mdx b/docs/self-hosting/environment-variables/basic.zh-CN.mdx
index d9697f9f44..ee55bc3f79 100644
--- a/docs/self-hosting/environment-variables/basic.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables/basic.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 环境变量配置指南
-description: 了解如何使用环境变量自定义设置 LobeChat 部署,包括访问密码、单点登录、basePath 设置等。
+title: LobeHub 环境变量配置指南
+description: 了解如何使用环境变量自定义设置 LobeHub 部署,包括访问密码、单点登录、basePath 设置等。
tags:
- - LobeChat
+ - LobeHub
- 环境变量
- 配置指南
- 单点登录
@@ -12,14 +12,14 @@ tags:
# 环境变量
-LobeChat 在部署时提供了一些额外的配置项,你可以使用环境变量进行自定义设置。
+LobeHub 在部署时提供了一些额外的配置项,你可以使用环境变量进行自定义设置。
## 通用变量
### `KEY_VAULTS_SECRET`
-- 类型:必选(服务端数据库模式)
-- 描述:用于加密用户存储在数据库中的敏感信息(如 API Key、baseURL 等),防止数据库泄露时关键信息被暴露
+- 类型:可选
+- 描述:添加访问 LobeHub 服务的密码,你可以设置一个长密码以防被爆破
- 默认值:-
- 示例:`Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ=`
@@ -43,11 +43,11 @@ LobeChat 在部署时提供了一些额外的配置项,你可以使用环境
### `DEFAULT_AGENT_CONFIG`
- 类型:可选
-- 描述:用于配置 LobeChat 默认助理的默认配置。它支持多种数据类型和结构,包括键值对、嵌套字段、数组值等。
+- 描述:用于配置 LobeHub 默认助理的默认配置。它支持多种数据类型和结构,包括键值对、嵌套字段、数组值等。
- 默认值:`-`
- 示例:`'model=gpt-4-1106-preview;params.max_tokens=300;plugins=search-engine,lobe-image-designer'`
-`DEFAULT_AGENT_CONFIG` 用于配置 LobeChat 默认助理的默认配置。它支持多种数据类型和结构,包括键值对、嵌套字段、数组值等。下表详细说明了 `DEFAULT_AGENT_CONFIG` 环境变量的配置项、示例以及相应解释:
+`DEFAULT_AGENT_CONFIG` 用于配置 LobeHub 默认助理的默认配置。它支持多种数据类型和结构,包括键值对、嵌套字段、数组值等。下表详细说明了 `DEFAULT_AGENT_CONFIG` 环境变量的配置项、示例以及相应解释:
| 配置项类型 | 示例 | 解释 |
| ----- | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------- |
@@ -69,7 +69,7 @@ LobeChat 在部署时提供了一些额外的配置项,你可以使用环境
### `SYSTEM_AGENT`
- 类型:可选
-- 描述:用于配置 LobeChat 系统助手(如主题生成、翻译等功能)的模型和供应商。
+- 描述:用于配置 LobeHub 系统助手(如主题生成、翻译等功能)的模型和供应商。
- 默认值:`-`
- 示例:`default=ollama/deepseek-v3` 或 `topic=openai/gpt-4,translation=anthropic/claude-1`
@@ -100,7 +100,7 @@ LobeChat 在部署时提供了一些额外的配置项,你可以使用环境
### `FEATURE_FLAGS`
- 类型:可选
-- 描述:用于控制 LobeChat 的特性功能,支持多个功能标志,使用 `+` 增加一个功能,使用 `-` 来关闭一个功能,多个功能标志之间使用英文逗号 `,` 隔开,最外层建议添加引号 `"` 以避免解析错误。
+- 描述:用于控制 LobeHub 的特性功能,支持多个功能标志,使用 `+` 增加一个功能,使用 `-` 来关闭一个功能,多个功能标志之间使用英文逗号 `,` 隔开,最外层建议添加引号 `"` 以避免解析错误。
- 默认值:`-`
- 示例:`"-welcome_suggest"`
@@ -140,9 +140,9 @@ LobeChat 在部署时提供了一些额外的配置项,你可以使用环境
**应用场景**:
-LobeChat 会在以下场景执行 SSRF 安全检查:
+LobeHub 会在以下场景执行 SSRF 安全检查:
-1. **图片 / 视频 URL 转 Base64**:在处理媒体消息时(例如视觉模型、多模态模型),LobeChat 会将图片和视频 URL 转换为 base64 格式。此检查可防止恶意用户通过媒体 URL 访问内网资源。
+1. **图片 / 视频 URL 转 Base64**:在处理媒体消息时(例如视觉模型、多模态模型),LobeHub 会将图片和视频 URL 转换为 base64 格式。此检查可防止恶意用户通过媒体 URL 访问内网资源。
举例:
@@ -216,7 +216,7 @@ SSRF_ALLOW_IP_ADDRESS_LIST=192.168.1.100,10.0.0.50
### `PLUGINS_INDEX_URL`
- 类型:可选
-- 描述:LobeChat 插件市场的索引地址,如果你自行部署了插件市场的服务,可以使用该变量来覆盖默认的插件市场地址
+- 描述:LobeHub 插件市场的索引地址,如果你自行部署了插件市场的服务,可以使用该变量来覆盖默认的插件市场地址
- 默认值:`https://chat-plugins.lobehub.com`
### `PLUGIN_SETTINGS`
@@ -245,5 +245,5 @@ SSRF_ALLOW_IP_ADDRESS_LIST=192.168.1.100,10.0.0.50
### `AGENTS_INDEX_URL`
- 类型:可选
-- 描述:LobeChat 助手市场的索引地址,如果你自行部署了助手市场的服务,可以使用该变量来覆盖默认的市场地址
+- 描述:LobeHub 助手市场的索引地址,如果你自行部署了助手市场的服务,可以使用该变量来覆盖默认的市场地址
- 默认值:`https://chat-agents.lobehub.com`
diff --git a/docs/self-hosting/environment-variables/model-provider.mdx b/docs/self-hosting/environment-variables/model-provider.mdx
index b4384465fe..3ff932d06e 100644
--- a/docs/self-hosting/environment-variables/model-provider.mdx
+++ b/docs/self-hosting/environment-variables/model-provider.mdx
@@ -1,5 +1,5 @@
---
-title: LobeChat Model Service Providers - Environment Variables and Configuration
+title: LobeHub Model Service Providers - Environment Variables and Configuration
description: >-
Learn about the environment variables and configuration settings for various
model service providers like OpenAI, Google AI, AWS Bedrock, Ollama,
@@ -11,7 +11,7 @@ tags:
# Model Providers
-When deploying LobeChat, a rich set of environment variables related to model service providers is provided, allowing you to easily define the model service providers to be enabled in LobeChat.
+When deploying LobeHub, a rich set of environment variables related to model service providers is provided, allowing you to easily define the model service providers to be enabled in LobeHub.
## OpenAI
diff --git a/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx b/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx
index 60cf064f26..5ff4519911 100644
--- a/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: LobeChat 模型服务商相关环境变量配置指南
-description: 了解如何配置各个模型服务商的环境变量以在 LobeChat 中使用不同的AI模型服务。
+title: LobeHub 模型服务商相关环境变量配置指南
+description: 了解如何配置各个模型服务商的环境变量以在 LobeHub 中使用不同的AI模型服务。
tags:
- - LobeChat
+ - LobeHub
- 模型服务商
- 环境变量
- AI模型
@@ -11,7 +11,7 @@ tags:
# 模型服务商
-LobeChat 在部署时提供了丰富的模型服务商相关的环境变量,你可以使用这些环境变量轻松定义需要在 LobeChat 中开启的模型服务商。
+LobeHub 在部署时提供了丰富的模型服务商相关的环境变量,你可以使用这些环境变量轻松定义需要在 LobeHub 中开启的模型服务商。
## OpenAI
diff --git a/docs/self-hosting/environment-variables/s3.mdx b/docs/self-hosting/environment-variables/s3.mdx
index a068b0dcbb..f39800edc9 100644
--- a/docs/self-hosting/environment-variables/s3.mdx
+++ b/docs/self-hosting/environment-variables/s3.mdx
@@ -12,7 +12,7 @@ tags:
# Configuring S3 Storage Service
-LobeChat supports multimodal AI sessions, including the ability to upload unstructured data such as images and files to large language models. To optimize storage and performance, we use S3-compatible file storage services to store image files and support file upload/knowledge base functionality.
+LobeHub supports multimodal AI sessions, including the ability to upload unstructured data such as images and files to large language models. To optimize storage and performance, we use S3-compatible file storage services to store image files and support file upload/knowledge base functionality.
## Core Environment Variables
@@ -88,7 +88,7 @@ LobeChat supports multimodal AI sessions, including the ability to upload unstru
- Default: undefined
- Example: `1`
-When set to `1`, LobeChat will convert images to base64 encoding before uploading them to the LLM model. When encountering the following error, please consider configuring this environment variable to `1`:
+When set to `1`, LobeHub will convert images to base64 encoding before uploading them to the LLM model. When encountering the following error, please consider configuring this environment variable to `1`:
```log
Route: [xai] ProviderBizError: Fetching images over plain http:// is not supported.
diff --git a/docs/self-hosting/environment-variables/s3.zh-CN.mdx b/docs/self-hosting/environment-variables/s3.zh-CN.mdx
index 99d19f8dbd..2bf68f84e5 100644
--- a/docs/self-hosting/environment-variables/s3.zh-CN.mdx
+++ b/docs/self-hosting/environment-variables/s3.zh-CN.mdx
@@ -10,7 +10,7 @@ tags:
# 配置 S3 存储服务
-LobeChat 支持多模态的 AI 会话,包括将图片、文件等非结构化数据上传给大模型的功能。为了优化存储和性能,我们使用 S3 兼容的文件存储服务来存储图片文件和支持文件上传 / 知识库功能。
+LobeHub 支持多模态的 AI 会话,包括将图片、文件等非结构化数据上传给大模型的功能。为了优化存储和性能,我们使用 S3 兼容的文件存储服务来存储图片文件和支持文件上传 / 知识库功能。
## 核心环境变量
@@ -86,7 +86,7 @@ LobeChat 支持多模态的 AI 会话,包括将图片、文件等非结构化
- 默认值:undefined
- 示例:`1`
-当设置为 `1` 时,LobeChat 会将图片转换为 base64 编码后上传到 LLM 模型中,当遇到如下错误时请考虑配置该环境变量为 1
+当设置为 `1` 时,LobeHub 会将图片转换为 base64 编码后上传到 LLM 模型中,当遇到如下错误时请考虑配置该环境变量为 1
```log
Route: [xai] ProviderBizError: Fetching images over plain http:// is not supported.
diff --git a/docs/self-hosting/examples/azure-openai.mdx b/docs/self-hosting/examples/azure-openai.mdx
index 1a9275059c..7bafa0cfd7 100644
--- a/docs/self-hosting/examples/azure-openai.mdx
+++ b/docs/self-hosting/examples/azure-openai.mdx
@@ -1,11 +1,11 @@
---
-title: Configuring Azure OpenAI for LobeChat
+title: Configuring Azure OpenAI for LobeHub
description: >-
- Learn how to configure Azure OpenAI for LobeChat, including interface
+ Learn how to configure Azure OpenAI for LobeHub, including interface
configuration, and deployment settings.
tags:
- Azure OpenAI
- - LobeChat
+ - LobeHub
- Configuration
- Azure API
- Model Service Provider
@@ -13,7 +13,7 @@ tags:
# Integrating with Azure OpenAI
-LobeChat supports using [Azure OpenAI](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/concepts/models) as the model service provider for OpenAI. This article will explain how to configure Azure OpenAI.
+LobeHub supports using [Azure OpenAI](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/concepts/models) as the model service provider for OpenAI. This article will explain how to configure Azure OpenAI.
## Configuring in the Interface
@@ -40,3 +40,4 @@ If you want the deployed version to be pre-configured with Azure OpenAI for end
| `AZURE_ENDPOINT` | Required | Azure API address, can be found in the "Keys and Endpoints" section when checking resources in the Azure portal | - | `https://docs-test-001.openai.azure.com` |
| `AZURE_API_VERSION` | Required | Azure API version, following the format YYYY-MM-DD | 2023-08-01-preview | `-`, see [latest version](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions) |
| `AZURE_MODEL_LIST` | Required | Used to control the model list, use `+` to add a model, use `-` to hide a model, use `id->deplymentName=displayName` to customize the display name of a model, separated by commas. Definition syntax rules see [Model List](/docs/self-hosting/advanced/model-list) | - | `gpt-35-turbo->my-deploy=GPT 3.5 Turbo` or `gpt-4-turbo->my-gpt4=GPT 4 Turbo<128000:vision:fc>` |
+| `ACCESS_CODE` | Optional | Add a password to access LobeHub. You can set a long password to prevent brute force attacks. When this value is separated by commas, it becomes an array of passwords | - | `awCT74` or `e3@09!` or `code1,code2,code3` |
diff --git a/docs/self-hosting/examples/azure-openai.zh-CN.mdx b/docs/self-hosting/examples/azure-openai.zh-CN.mdx
index dfa2456c8e..3993422725 100644
--- a/docs/self-hosting/examples/azure-openai.zh-CN.mdx
+++ b/docs/self-hosting/examples/azure-openai.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中集成 Azure OpenAI
-description: 了解如何在 LobeChat 中配置 Azure OpenAI。一步步指导从界面配置到部署时的环境变量设置。
+title: 在 LobeHub 中集成 Azure OpenAI
+description: 了解如何在 LobeHub 中配置 Azure OpenAI。一步步指导从界面配置到部署时的环境变量设置。
tags:
- Azure OpenAI
- 配置指南
@@ -13,7 +13,7 @@ tags:
# 与 Azure OpenAI 集成使用
-LobeChat 支持使用 [Azure OpenAI](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/concepts/models) 作为 OpenAI 的模型服务商,本文将介绍如何配置 Azure OpenAI。
+LobeHub 支持使用 [Azure OpenAI](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/concepts/models) 作为 OpenAI 的模型服务商,本文将介绍如何配置 Azure OpenAI。
## 在界面中配置
@@ -42,3 +42,4 @@ LobeChat 支持使用 [Azure OpenAI](https://learn.microsoft.com/zh-cn/azure/ai-
| `AZURE_ENDPOINT` | 必选 | Azure API 地址,从 Azure 门户检查资源时,可在 “密钥和终结点” 部分中找到此值 | - | `https://docs-test-001.openai.azure.com` |
| `AZURE_API_VERSION` | 必选 | Azure 的 API 版本,遵循 YYYY-MM-DD 格式 | 2023-08-01-preview | `-`,查阅[最新版本](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/reference#chat-completions) |
| `AZURE_MODEL_LIST` | 必选 | 用来控制模型列表,使用 `模型名->部署名=展示名` 来自定义模型的展示名,用英文逗号隔开。支持扩展能力,其余语法规则详见 [模型列表](/zh/docs/self-hosting/advanced/model-list) | - | `gpt-35-turbo->my-deploy=GPT 3.5 Turbo` 或 `gpt-4-turbo->my-gpt4=GPT 4 Turbo<128000:vision:fc>` |
+| `ACCESS_CODE` | 可选 | 添加访问 LobeHub 的密码,你可以设置一个长密码以防被爆破,该值用逗号分隔时为密码数组 | - | `awCT74` 或 `e3@09!` 或 `code1,code2,code3` |
diff --git a/docs/self-hosting/examples/ollama.mdx b/docs/self-hosting/examples/ollama.mdx
index bb5f60121a..8c4259e2c5 100644
--- a/docs/self-hosting/examples/ollama.mdx
+++ b/docs/self-hosting/examples/ollama.mdx
@@ -1,12 +1,12 @@
---
-title: Integrating LobeChat with Ollama for Enhanced Language Models
+title: Integrating LobeHub with Ollama for Enhanced Language Models
description: >-
- Learn how to configure and deploy LobeChat to leverage Ollama's powerful
- language models locally. Follow the guide to run Ollama and LobeChat on your
+ Learn how to configure and deploy LobeHub to leverage Ollama's powerful
+ language models locally. Follow the guide to run Ollama and LobeHub on your
system.
tags:
- Ollama integration
- - LobeChat configuration
+ - LobeHub configuration
- Local deployment
- Language models
- Ollama usage
@@ -14,25 +14,25 @@ tags:
# Integrating with Ollama
-Ollama is a powerful framework for running large language models (LLMs) locally, supporting various language models including Llama 2, Mistral, and more. Now, LobeChat supports integration with Ollama, meaning you can easily use the language models provided by Ollama to enhance your application within LobeChat.
+Ollama is a powerful framework for running large language models (LLMs) locally, supporting various language models including Llama 2, Mistral, and more. Now, LobeHub supports integration with Ollama, meaning you can easily use the language models provided by Ollama to enhance your application within LobeHub.
-This document will guide you on how to configure and deploy LobeChat to use Ollama:
+This document will guide you on how to configure and deploy LobeHub to use Ollama:
## Running Ollama Locally
First, you need to install Ollama. For detailed steps on installing and configuring Ollama, please refer to the [Ollama Website](https://ollama.com).
-## Running LobeChat Locally
+## Running LobeHub Locally
-Assuming you have already started the Ollama service locally on port `11434`. Run the following Docker command to start LobeChat locally:
+Assuming you have already started the Ollama service locally on port `11434`. Run the following Docker command to start LobeHub locally:
```bash
docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434 lobehub/lobe-chat
```
-Now, you can use LobeChat to converse with the local LLM.
+Now, you can use LobeHub to converse with the local LLM.
-For more information on using Ollama in LobeChat, please refer to [Ollama Usage](/docs/usage/providers/ollama).
+For more information on using Ollama in LobeHub, please refer to [Ollama Usage](/docs/usage/providers/ollama).
## Accessing Ollama from Non-Local Locations
diff --git a/docs/self-hosting/examples/ollama.zh-CN.mdx b/docs/self-hosting/examples/ollama.zh-CN.mdx
index 2ce219204b..30d550f66e 100644
--- a/docs/self-hosting/examples/ollama.zh-CN.mdx
+++ b/docs/self-hosting/examples/ollama.zh-CN.mdx
@@ -1,9 +1,9 @@
---
-title: 在 LobeChat 中集成 Ollama
-description: 学习如何在 LobeChat 中配置与部署 Ollama 语言模型框架,提升本地大模型使用体验。
+title: 在 LobeHub 中集成 Ollama
+description: 学习如何在 LobeHub 中配置与部署 Ollama 语言模型框架,提升本地大模型使用体验。
tags:
- Ollama
- - LobeChat
+ - LobeHub
- 语言模型
- 集成
- 配置
@@ -12,25 +12,25 @@ tags:
# 与 Ollama 集成
-Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支持多种语言模型,包括 Llama 2, Mistral 等。现在,LobeChat 已经支持与 Ollama 的集成,这意味着你可以在 LobeChat 中轻松使用 Ollama 提供的语言模型来增强你的应用。
+Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支持多种语言模型,包括 Llama 2, Mistral 等。现在,LobeHub 已经支持与 Ollama 的集成,这意味着你可以在 LobeHub 中轻松使用 Ollama 提供的语言模型来增强你的应用。
-本文档将指导你如何配置与部署 LobeChat 来使用 Ollama:
+本文档将指导你如何配置与部署 LobeHub 来使用 Ollama:
## 本地启动 Ollama
首先,你需要安装 Ollama,安装与配置 Ollama 的详细步骤可以参考 [Ollama 官方站点](https://ollama.com)。
-## 本地运行 LobeChat
+## 本地运行 LobeHub
-假设你已经在本地 `11434` 端口启动了 Ollama 服务。运行以下 Docker 命令行,在本地启动 LobeChat:
+假设你已经在本地 `11434` 端口启动了 Ollama 服务。运行以下 Docker 命令行,在本地启动 LobeHub:
```bash
docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434 lobehub/lobe-chat
```
-接下来,你就可以使用 LobeChat 与本地 LLM 对话了。
+接下来,你就可以使用 LobeHub 与本地 LLM 对话了。
-关于在 LobeChat 中使用 Ollama 的更多信息,请查阅 [Ollama 使用](/zh/docs/usage/providers/ollama)。
+关于在 LobeHub 中使用 Ollama 的更多信息,请查阅 [Ollama 使用](/zh/docs/usage/providers/ollama)。
## 非本地访问 Ollama
diff --git a/docs/self-hosting/faq/no-v1-suffix.mdx b/docs/self-hosting/faq/no-v1-suffix.mdx
index 95bb2755af..9b490b6d82 100644
--- a/docs/self-hosting/faq/no-v1-suffix.mdx
+++ b/docs/self-hosting/faq/no-v1-suffix.mdx
@@ -1,10 +1,8 @@
---
-title: >-
- Troubleshooting `OPENAI_PROXY_URL` in LobeChat Configuration for Empty
- Response
+title: Troubleshooting `OPENAI_PROXY_URL` in LobeHub Configuration for Empty Response
description: >-
Learn how to troubleshoot and resolve the issue of receiving an empty response
- after configuring the LobeChat `OPENAI_PROXY_URL` environment variable.
+ after configuring the LobeHub `OPENAI_PROXY_URL` environment variable.
tags:
- OPENAI_PROXY_URL
- environment variable
diff --git a/docs/self-hosting/faq/no-v1-suffix.zh-CN.mdx b/docs/self-hosting/faq/no-v1-suffix.zh-CN.mdx
index 2812c46393..6bfeed57f1 100644
--- a/docs/self-hosting/faq/no-v1-suffix.zh-CN.mdx
+++ b/docs/self-hosting/faq/no-v1-suffix.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中配置 `OPENAI_PROXY_URL` 环境变量但返回值为空
-description: 重新检查并确认 LobeChat 环境变量 `OPENAI_PROXY_URL` 是否设置正确,包括是否正确地添加了 `/v1` 后缀(如果需要)。
+title: 在 LobeHub 中配置 `OPENAI_PROXY_URL` 环境变量但返回值为空
+description: 重新检查并确认 LobeHub 环境变量 `OPENAI_PROXY_URL` 是否设置正确,包括是否正确地添加了 `/v1` 后缀(如果需要)。
tags:
- OPENAI_PROXY_URL
- 环境变量配置
diff --git a/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.mdx b/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.mdx
index 82530888ab..44e7fdca94 100644
--- a/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.mdx
+++ b/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.mdx
@@ -1,8 +1,8 @@
---
-title: Resolving `UNABLE_TO_VERIFY_LEAF_SIGNATURE` Error in LobeChat When Using Proxy
+title: Resolving `UNABLE_TO_VERIFY_LEAF_SIGNATURE` Error in LobeHub When Using Proxy
description: >-
Learn how to bypass Node.js certificate validation to resolve certificate
- verification errors in LobeChat when using a proxy with self-signed or
+ verification errors in LobeHub when using a proxy with self-signed or
untrusted certificates.
tags:
- Node.js
diff --git a/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.zh-CN.mdx b/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.zh-CN.mdx
index 90e648ffa0..789eee086f 100644
--- a/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.zh-CN.mdx
+++ b/docs/self-hosting/faq/proxy-with-unable-to-verify-leaf-signature.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 解决 LobeChat 使用代理时遇到证书验证错误的问题
-description: 在 LobeChat 私有化部署时,使用代理进行网络请求可能会遇到证书验证错误。了解如何通过跳过 Node.js 的证书验证来解决这一问题。
+title: 解决 LobeHub 使用代理时遇到证书验证错误的问题
+description: 在 LobeHub 私有化部署时,使用代理进行网络请求可能会遇到证书验证错误。了解如何通过跳过 Node.js 的证书验证来解决这一问题。
tags:
- 证书验证错误
- 代理服务器
diff --git a/docs/self-hosting/platform/alibaba-cloud.mdx b/docs/self-hosting/platform/alibaba-cloud.mdx
index 73d9a161bb..b09bb62fd4 100644
--- a/docs/self-hosting/platform/alibaba-cloud.mdx
+++ b/docs/self-hosting/platform/alibaba-cloud.mdx
@@ -1,17 +1,18 @@
---
-title: Deploy LobeChat on Alibaba Cloud
+title: Deploy LobeHub on Alibaba Cloud
description: >-
- Learn how to deploy the LobeChat application on Alibaba Cloud, including
- clicking the deploy button and other operations.
+ Learn how to deploy the LobeHub application on Alibaba Cloud, including
+ preparing the large model API Key, clicking the deploy button, and other
+ operations.
tags:
- Alibaba Cloud
- - LobeChat
+ - LobeHub
- Alibaba Cloud Compute Nest
---
-# Deploy LobeChat with Alibaba Cloud
+# Deploy LobeHub with Alibaba Cloud
-If you want to deploy LobeChat on Alibaba Cloud, you can follow the steps below:
+If you want to deploy LobeHub on Alibaba Cloud, you can follow the steps below:
## Alibaba Cloud Deployment Process
@@ -24,4 +25,4 @@ If you want to deploy LobeChat on Alibaba Cloud, you can follow the steps below:
[deploy-button-image]: https://service-info-public.oss-cn-hangzhou.aliyuncs.com/computenest-en.svg
-[deploy-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeChat%E7%A4%BE%E5%8C%BA%E7%89%88
+[deploy-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeHub%E7%A4%BE%E5%8C%BA%E7%89%88
diff --git a/docs/self-hosting/platform/alibaba-cloud.zh-CN.mdx b/docs/self-hosting/platform/alibaba-cloud.zh-CN.mdx
index 9732e94401..1a5a868757 100644
--- a/docs/self-hosting/platform/alibaba-cloud.zh-CN.mdx
+++ b/docs/self-hosting/platform/alibaba-cloud.zh-CN.mdx
@@ -1,15 +1,15 @@
---
-title: 在 阿里云 上部署 LobeChat
-description: 学习如何在阿里云上部署 LobeChat 应用,包括点击部署按钮等操作。
+title: 在 阿里云 上部署 LobeHub
+description: 学习如何在阿里云上部署LobeHub应用,包括准备大模型 API Key、点击部署按钮等操作。
tags:
- 阿里云
- - LobeChat
+ - LobeHub
- 部署流程
---
# 使用 阿里云计算巢 部署
-如果想在 阿里云 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 阿里云 上部署 LobeHub,可以按照以下步骤进行操作:
## 阿里云 部署流程
@@ -22,4 +22,4 @@ tags:
[deploy-button-image]: https://service-info-public.oss-cn-hangzhou.aliyuncs.com/computenest-en.svg
-[deploy-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeChat%E7%A4%BE%E5%8C%BA%E7%89%88
+[deploy-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeHub%E7%A4%BE%E5%8C%BA%E7%89%88
diff --git a/docs/self-hosting/platform/btpanel.mdx b/docs/self-hosting/platform/btpanel.mdx
index 90347591ab..09048917ff 100644
--- a/docs/self-hosting/platform/btpanel.mdx
+++ b/docs/self-hosting/platform/btpanel.mdx
@@ -1,13 +1,13 @@
---
-title: Deploy LobeChat using aaPanel
+title: Deploy LobeHub using aaPanel
description: >-
- Learn how to deploy the LobeChat service using aaPanel-Docker, including
+ Learn how to deploy the LobeHub service using aaPanel-Docker, including
installing the Docker container environment and using the command to start the
service with one click. Detailed instructions on how to configure environment
variables and use proxy addresses.
tags:
- Docker
- - LobeChat
+ - LobeHub
- Deployment guidelines
---
@@ -17,18 +17,18 @@ To install aaPanel, go to the [aaPanel](https://www.aapanel.com/new/download.htm
## Deployment
-1. Log in to aaPanel and click `Docker` in the menu bar 
+1. Log in to aaPanel and click `Docker` in the menu bar 
-2. The first time you will be prompted to install the `Docker` and `Docker Compose` services, click Install Now. If it is already installed, please ignore it. 
+2. The first time you will be prompted to install the `Docker` and `Docker Compose` services, click Install Now. If it is already installed, please ignore it. 
-3. After the installation is complete, find `LobeChat` in `One-Click Install` and click `install`\
- 
+3. After the installation is complete, find `LobeHub` in `One-Click Install` and click `install`\
+ 
-4. configure basic information such as the domain name, OpenAI API key, and port to complete the installation Note: The domain name is optional, if the domain name is filled, it can be managed through \[Website]--> \[Proxy Project], and you do not need to check \[Allow external access] after filling in the domain name, otherwise you need to check it before you can access it through the port 
+4. configure basic information such as the domain name, OpenAI API key, and port to complete the installation Note: The domain name is optional, if the domain name is filled, it can be managed through \[Website]--> \[Proxy Project], and you do not need to check \[Allow external access] after filling in the domain name, otherwise you need to check it before you can access it through the port 
5. After installation, enter the domain name or IP+ port set in the previous step in the browser to access.
-- Name: application name, default `LobeChat-random characters`
+- Name: application name, default `LobeHub-random characters`
- Version selection: default `latest`
- Domain name: If you need to access directly through the domain name, please configure the domain name here and resolve the domain name to the server
- Allow external access: If you need direct access through `IP+Port`, please check. If you have set up a domain name, please do not check here.
@@ -42,7 +42,7 @@ To install aaPanel, go to the [aaPanel](https://www.aapanel.com/new/download.htm
[https://github.com/lobehub/lobe-chat/discussions/5986](https://github.com/lobehub/lobe-chat/discussions/5986)
-## Visit LobeChat
+## Visit LobeHub
-- If you have set a domain name, please directly enter the domain name in the browser address bar, such as `http://demo.lobechat`, to access the `LobeChat` console.
-- If you choose to access through `IP+Port`, please enter the domain name in the browser address bar to access `http://:3210` to access the `HertzBeat` console. 
+- If you have set a domain name, please directly enter the domain name in the browser address bar, such as `http://demo.LobeHub`, to access the `LobeHub` console.
+- If you choose to access through `IP+Port`, please enter `http://<server-IP>:3210` in the browser address bar to access the `LobeHub` console. 
diff --git a/docs/self-hosting/platform/btpanel.zh-CN.mdx b/docs/self-hosting/platform/btpanel.zh-CN.mdx
index bfebf84680..906bd7d196 100644
--- a/docs/self-hosting/platform/btpanel.zh-CN.mdx
+++ b/docs/self-hosting/platform/btpanel.zh-CN.mdx
@@ -1,11 +1,11 @@
---
-title: 通过 宝塔面板Docker应用商店 部署 LobeChat
+title: 通过 宝塔面板Docker应用商店 部署 LobeHub
description: >-
- 学习如何使用 宝塔面板Docker应用 部署 LobeChat 服务,包括安装 Docker
+ 学习如何使用 宝塔面板Docker应用 部署 LobeHub 服务,包括安装 Docker
容器环境和使用指令一键启动服务。详细说明如何配置环境变量和使用代理地址。
tags:
- Docker
- - LobeChat
+ - LobeHub
- 部署指引
---
@@ -18,15 +18,15 @@ tags:
## 部署
-1. 登录宝塔面板,在左侧菜单栏中点击 `Docker` 
+1. 登录宝塔面板,在左侧菜单栏中点击 `Docker` 
-2. 首次会提示安装`Docker`和`Docker Compose`服务,点击立即安装,若已安装请忽略。 
+2. 首次会提示安装`Docker`和`Docker Compose`服务,点击立即安装,若已安装请忽略。 
-3. 安装完成后在`Docker-应用商店-AI/大模型`中找到 `LobeChat`,点击`安装` 
+3. 安装完成后在`Docker-应用商店-AI/大模型`中找到 `LobeHub`,点击`安装` 
-4. 设置域名等基本信息,点击`确定` 
+4. 设置域名等基本信息,点击`确定` 
-- 名称:应用名称,默认`lobechat_随机字符`
+- 名称:应用名称,默认`LobeHub_随机字符`
- 版本选择:默认`latest`
- 域名:如您需要通过域名访问,请在此处填写您的域名
- 允许外部访问:如您需通过`IP+Port`直接访问,请勾选,如您已经设置了域名,请不要勾选此处
@@ -45,7 +45,7 @@ tags:
[https://github.com/lobehub/lobe-chat/discussions/5986](https://github.com/lobehub/lobe-chat/discussions/5986)
-## 访问 LobeChat
+## 访问 LobeHub
-- 如果您填写域名,请在浏览器输入您的域名访问,如`http://demo.lobechat`,即可访问 `LobeChat` 页面。
-- 请在浏览器地址栏中输入域名访问 `http://<宝塔面板IP>:3210`,即可访问 `LobeChat` 页面。 
+- 如果您填写域名,请在浏览器输入您的域名访问,如`http://demo.LobeHub`,即可访问 `LobeHub` 页面。
+- 请在浏览器地址栏中输入域名访问 `http://<宝塔面板IP>:3210`,即可访问 `LobeHub` 页面。 
diff --git a/docs/self-hosting/platform/docker-compose.mdx b/docs/self-hosting/platform/docker-compose.mdx
index a0c3b4c01f..5a8df490d7 100644
--- a/docs/self-hosting/platform/docker-compose.mdx
+++ b/docs/self-hosting/platform/docker-compose.mdx
@@ -1,12 +1,12 @@
---
-title: Deploy LobeChat with Docker Compose
+title: Deploy LobeHub with Docker Compose
description: >-
- Learn how to deploy the LobeChat service using Docker Compose. Follow
+ Learn how to deploy the LobeHub service using Docker Compose. Follow
step-by-step instructions to install Docker, run the deployment command, and
set up automatic updates.
tags:
- Docker Compose
- - LobeChat Service
+ - LobeHub Service
- Docker Deployment
- Automatic Updates
- Crontab Script
@@ -22,7 +22,7 @@ tags:
[![][docker-pulls-shield]][docker-pulls-link]
-We provide a [Docker image][docker-release-link] for deploying the LobeChat service on your private device.
+We provide a [Docker image][docker-release-link] for deploying the LobeHub service on your private device.
### Install Docker Container Environment
diff --git a/docs/self-hosting/platform/docker-compose.zh-CN.mdx b/docs/self-hosting/platform/docker-compose.zh-CN.mdx
index b6aa7b9be7..4eda35cdcd 100644
--- a/docs/self-hosting/platform/docker-compose.zh-CN.mdx
+++ b/docs/self-hosting/platform/docker-compose.zh-CN.mdx
@@ -1,9 +1,9 @@
---
-title: 通过 Docker Compose 部署 LobeChat
-description: 学习如何使用 Docker Compose 部署 LobeChat 服务,包括安装 Docker 容器环境和自动更新脚本设置。
+title: 通过 Docker Compose 部署 LobeHub
+description: 学习如何使用 Docker Compose 部署 LobeHub 服务,包括安装 Docker 容器环境和自动更新脚本设置。
tags:
- Docker Compose
- - LobeChat
+ - LobeHub
- Docker 容器
- 自动更新脚本
- 部署指引
@@ -19,7 +19,7 @@ tags:
[![][docker-pulls-shield]][docker-pulls-link]
-我们提供了 [Docker 镜像](https://hub.docker.com/r/lobehub/lobe-chat) ,供你在自己的私有设备上部署 LobeChat 服务。
+我们提供了 [Docker 镜像](https://hub.docker.com/r/lobehub/lobe-chat) ,供你在自己的私有设备上部署 LobeHub 服务。
### 安装 Docker 容器环境
diff --git a/docs/self-hosting/platform/docker.mdx b/docs/self-hosting/platform/docker.mdx
index 8793dc3453..de6956bfe4 100644
--- a/docs/self-hosting/platform/docker.mdx
+++ b/docs/self-hosting/platform/docker.mdx
@@ -1,11 +1,11 @@
---
-title: Deploy LobeChat with Docker
+title: Deploy LobeHub with Docker
description: >-
- Learn how to deploy the LobeChat service using Docker, including installation
+ Learn how to deploy the LobeHub service using Docker, including installation
steps, command deployment, proxy configuration, and automatic update scripts.
tags:
- Docker Deployment
- - LobeChat Service
+ - LobeHub Service
- Docker Command
- Proxy Configuration
- Automatic Update Script
@@ -21,7 +21,7 @@ tags:
[![][docker-pulls-shield]][docker-pulls-link]
-We provide a [Docker image][docker-release-link] for you to deploy the LobeChat service on your private device.
+We provide a [Docker image][docker-release-link] for you to deploy the LobeHub service on your private device.
### Install Docker Container Environment
@@ -44,7 +44,7 @@ We provide a [Docker image][docker-release-link] for you to deploy the LobeChat
### Docker Command Deployment
- Use the following command to start the LobeChat service with one click:
+ Use the following command to start the LobeHub service with one click:
```fish
$ docker run -d -p 3210:3210 \
@@ -59,7 +59,7 @@ We provide a [Docker image][docker-release-link] for you to deploy the LobeChat
- Replace `sk-xxxx` in the above command with your OpenAI API Key.
- - For the complete list of environment variables supported by LobeChat, please refer to the [Environment Variables](/docs/self-hosting/environment-variables) section.
+ - For the complete list of environment variables supported by LobeHub, please refer to the [Environment Variables](/docs/self-hosting/environment-variables) section.
Since the official Docker image build takes about half an hour, if you see the "update available"
diff --git a/docs/self-hosting/platform/docker.zh-CN.mdx b/docs/self-hosting/platform/docker.zh-CN.mdx
index d23c1365b1..5b2b0d7af6 100644
--- a/docs/self-hosting/platform/docker.zh-CN.mdx
+++ b/docs/self-hosting/platform/docker.zh-CN.mdx
@@ -1,9 +1,9 @@
---
-title: 通过 Docker 部署 LobeChat
-description: 学习如何使用 Docker 部署 LobeChat 服务,包括安装 Docker 容器环境和使用指令一键启动服务。详细说明如何配置环境变量和使用代理地址。
+title: 通过 Docker 部署 LobeHub
+description: 学习如何使用 Docker 部署 LobeHub 服务,包括安装 Docker 容器环境和使用指令一键启动服务。详细说明如何配置环境变量和使用代理地址。
tags:
- Docker
- - LobeChat
+ - LobeHub
- 部署指引
- 环境变量
- 代理地址
@@ -20,7 +20,7 @@ tags:
[![][docker-pulls-shield]][docker-pulls-link]
-我们提供了 [Docker 镜像][docker-release-link],供你在自己的私有设备上部署 LobeChat 服务。
+我们提供了 [Docker 镜像][docker-release-link],供你在自己的私有设备上部署 LobeHub 服务。
## 部署指南
@@ -45,7 +45,7 @@ tags:
### Docker 指令部署
- 使用以下命令即可使用一键启动 LobeChat 服务:
+ 使用以下命令即可使用一键启动 LobeHub 服务:
```fish
$ docker run -d -p 3210:3210 \
@@ -60,7 +60,7 @@ tags:
- 使用你的 OpenAI API Key 替换上述命令中的 `sk-xxxx` ,获取 API Key 的方式详见最后一节。
- LobeChat 支持的完整环境变量列表请参考 [📘 环境变量](/zh/docs/self-hosting/environment-variables)
+ LobeHub 支持的完整环境变量列表请参考 [📘 环境变量](/zh/docs/self-hosting/environment-variables)
部分
@@ -151,7 +151,7 @@ tags:
## 获取 OpenAI API Key
-API Key 是使用 LobeChat 进行大语言模型会话的必要信息,本节以 OpenAI 模型服务商为例,简要介绍获取 API Key 的方式。
+API Key 是使用 LobeHub 进行大语言模型会话的必要信息,本节以 OpenAI 模型服务商为例,简要介绍获取 API Key 的方式。
### `A` 通过 OpenAI 官方渠道
@@ -172,7 +172,7 @@ API Key 是使用 LobeChat 进行大语言模型会话的必要信息,本节
-将此 API Key 填写到 LobeChat 的 API Key 配置中,即可开始使用。
+将此 API Key 填写到 LobeHub 的 API Key 配置中,即可开始使用。
账户注册后,一般有 5 美元的免费额度,但有效期只有三个月。如果你希望长期使用你的 API
diff --git a/docs/self-hosting/platform/netlify.mdx b/docs/self-hosting/platform/netlify.mdx
index 9833d81e49..4ec97ca97e 100644
--- a/docs/self-hosting/platform/netlify.mdx
+++ b/docs/self-hosting/platform/netlify.mdx
@@ -1,26 +1,27 @@
---
-title: Deploy LobeChat with Netlify - Step-by-Step Guide
+title: Deploy LobeHub with Netlify - Step-by-Step Guide
description: >-
- Learn how to deploy LobeChat on Netlify with detailed instructions on forking
- the repository, importing to Netlify workspace, configuring site name and
- environment variables, and monitoring deployment progress.
+ Learn how to deploy LobeHub on Netlify with detailed instructions on forking
+ the repository, preparing your OpenAI API Key, importing to Netlify workspace,
+ configuring site name and environment variables, and monitoring deployment
+ progress.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Netlify Deployment
- Environment Variables
- Custom Domain Setup
---
-# Deploy LobeChat with Netlify
+# Deploy LobeHub with Netlify
-If you want to deploy LobeChat on Netlify, you can follow these steps:
+If you want to deploy LobeHub on Netlify, you can follow these steps:
-## Deploy LobeChat with Netlify
+## Deploy LobeHub with Netlify
- ### Fork the LobeChat Repository
+ ### Fork the LobeHub Repository
- Click the Fork button to fork the LobeChat repository to your GitHub account.
+ Click the Fork button to fork the LobeHub repository to your GitHub account.
### Import to Netlify Workspace
@@ -31,58 +32,58 @@ If you want to deploy LobeChat on Netlify, you can follow these steps:
Click "Import from git"
-
+
Then click "Deploy with Github" and authorize Netlify to access your GitHub account.
-
+
- Next, select the LobeChat project:
+ Next, select the LobeHub project:
-
+
### Configure Site Name and Environment Variables
In this step, you need to configure your site, including the site name, build command, and publish directory. Fill in your site name in the "Site Name" field. If there are no special requirements, you do not need to modify the remaining configurations as we have already set the default configurations.
-
+
Click the "Add environment variables" button to add site environment variables if needed:
-
+
- For a complete list of environment variables supported by LobeChat, please refer to the [📘
+ For a complete list of environment variables supported by LobeHub, please refer to the [📘
Environment Variables](/docs/self-hosting/environment-variables)
Finally click "Deploy lobe-chat" to enter the deployment phase
-
+
### Wait for Deployment to Complete
After clicking deploy, you will enter the site details page, where you can click the "Deploying your site" in blue or the "Building" in yellow to view the deployment progress.
-
+
- Upon entering the deployment details, you will see the following interface, indicating that your LobeChat is currently being deployed. Simply wait for the deployment to complete.
+ Upon entering the deployment details, you will see the following interface, indicating that your LobeHub is currently being deployed. Simply wait for the deployment to complete.
-
+
During the deployment and build process:
-
+
### Deployment Successful, Start Using
- If your Deploy Log in the interface looks like the following, it means your LobeChat has been successfully deployed.
+ If your Deploy Log in the interface looks like the following, it means your LobeHub has been successfully deployed.
-
+
- At this point, you can click on "Open production deploy" to access your LobeChat site.
+ At this point, you can click on "Open production deploy" to access your LobeHub site.
-
+
## Set up Custom Domain (Optional)
diff --git a/docs/self-hosting/platform/netlify.zh-CN.mdx b/docs/self-hosting/platform/netlify.zh-CN.mdx
index b8f6c9985f..2d66efe927 100644
--- a/docs/self-hosting/platform/netlify.zh-CN.mdx
+++ b/docs/self-hosting/platform/netlify.zh-CN.mdx
@@ -1,24 +1,25 @@
---
-title: 在 Netlify 上部署 LobeChat
+title: 在 Netlify 上部署 LobeHub
description: >-
- 学习如何在 Netlify 上部署 LobeChat,包括 Fork 仓库、导入到 Netlify 工作台、配置站点名称与环境变量等步骤。
+ 学习如何在 Netlify 上部署 LobeHub,包括 Fork 仓库、准备 OpenAI API Key、导入到 Netlify
+ 工作台、配置站点名称与环境变量等步骤。
tags:
- Netlify
- - LobeChat
+ - LobeHub
- 部署教程
- 环境配置
---
# 使用 Netlify 部署
-如果想在 Netlify 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 Netlify 上部署 LobeHub,可以按照以下步骤进行操作:
-## Netlify 部署 LobeChat
+## Netlify 部署 LobeHub
- ### Fork LobeChat 仓库
+ ### Fork LobeHub 仓库
- 点击 Fork 按钮,将 LobeChat 仓库 Fork 到你的 GitHub 账号下。
+ 点击 Fork 按钮,将 LobeHub 仓库 Fork 到你的 GitHub 账号下。
### 在 Netlify 工作台导入
@@ -26,58 +27,58 @@ tags:
点击 「Import from git」
-
+
然后点击 「Deploy with Github」,并授权 Netlify 访问你的 GitHub 账号
-
+
- 然后选择 LobeChat 项目:
+ 然后选择 LobeHub 项目:
-
+
### 配置站点名称与环境变量
在这一步,你需要配置你的站点,包括站点名称、构建命令、发布目录等。在「Site Name」字段填写上你的站点名称。其余配置如果没有特殊要求,无需修改,我们已经设定好了默认配置。
-
+
如需要,点击 「Add environment variables」按钮添加站点环境变量:
-
+
- LobeChat 支持的完整环境变量列表请参考 [📘 环境变量](/zh/docs/self-hosting/environment-variables)
+ LobeHub 支持的完整环境变量列表请参考 [📘 环境变量](/zh/docs/self-hosting/environment-variables)
部分
最后点击「Deploy lobe-chat」进入部署阶段。
-
+
### 等待部署完成
点击部署后,会进入站点详情页面,你可以点击青色字样的「Deploying your site」或者 「Building」 黄色标签查看部署进度。
-
+
- 进入部署详情,你会看到下述界面,这意味着你的 LobeChat 正在部署中,只需等待部署完成即可。
+ 进入部署详情,你会看到下述界面,这意味着你的 LobeHub 正在部署中,只需等待部署完成即可。
-
+
部署构建过程中:
-
+
### 部署成功,开始使用
- 如果你的界面中的 Deploy Log 如下所示,意味着你的 LobeChat 部署成功了。
+ 如果你的界面中的 Deploy Log 如下所示,意味着你的 LobeHub 部署成功了。
-
+
- 此时,你可以点击「Open production deploy」,即可访问你的 LobeChat 站点
+ 此时,你可以点击「Open production deploy」,即可访问你的 LobeHub 站点
-
+
## 绑定自定义域名(可选)
diff --git a/docs/self-hosting/platform/railway.mdx b/docs/self-hosting/platform/railway.mdx
index bc022487c0..73acff77d6 100644
--- a/docs/self-hosting/platform/railway.mdx
+++ b/docs/self-hosting/platform/railway.mdx
@@ -1,18 +1,18 @@
---
-title: Deploy LobeChat with Railway
+title: Deploy LobeHub with Railway
description: >-
- Learn how to deploy LobeChat on Railway and follow the step-by-step process.
- Deploy with a click and start using it. Optionally, bind a custom domain for
- your deployment.
+ Learn how to deploy LobeHub on Railway and follow the step-by-step process.
+ Get your OpenAI API Key, deploy with a click, and start using it. Optionally,
+ bind a custom domain for your deployment.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Railway Deployment
- Custom Domain Binding
---
-# Deploy LobeChat with Railway
+# Deploy LobeHub with Railway
-If you want to deploy LobeChat on Railway, you can follow the steps below:
+If you want to deploy LobeHub on Railway, you can follow the steps below:
## Railway Deployment Process
diff --git a/docs/self-hosting/platform/railway.zh-CN.mdx b/docs/self-hosting/platform/railway.zh-CN.mdx
index 77e54aa046..0bfdc1eb09 100644
--- a/docs/self-hosting/platform/railway.zh-CN.mdx
+++ b/docs/self-hosting/platform/railway.zh-CN.mdx
@@ -1,16 +1,18 @@
---
-title: 在 Railway 上部署 LobeChat
-description: 学习如何在 Railway 上部署 LobeChat 应用,包括点击按钮进行部署、绑定自定义域名等步骤。
+title: 在 Railway 上部署 LobeHub
+description: 学习如何在 Railway 上部署 LobeHub 应用,包括准备 OpenAI API Key、点击按钮进行部署、绑定自定义域名等步骤。
tags:
- Railway
- 部署
- - LobeChat
+ - LobeHub
+ - OpenAI
+ - API Key
- 自定义域名
---
# 使用 Railway 部署
-如果想在 Railway 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 Railway 上部署 LobeHub,可以按照以下步骤进行操作:
## Railway 部署流程
diff --git a/docs/self-hosting/platform/repocloud.mdx b/docs/self-hosting/platform/repocloud.mdx
index e0a7a00fae..1d321d96d3 100644
--- a/docs/self-hosting/platform/repocloud.mdx
+++ b/docs/self-hosting/platform/repocloud.mdx
@@ -1,18 +1,18 @@
---
-title: Deploy LobeChat on RepoCloud
+title: Deploy LobeHub on RepoCloud
description: >-
- Learn how to deploy LobeChat on RepoCloud with ease. Follow these steps to
- deploy the application and start using it. Optional: Bind a custom domain for
- a personalized touch.
+ Learn how to deploy LobeHub on RepoCloud with ease. Follow these steps to
+ prepare your OpenAI API Key, deploy the application, and start using it.
+ Optional: Bind a custom domain for a personalized touch.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- RepoCloud Deployment
- Custom Domain Binding
---
-# Deploy LobeChat with RepoCloud
+# Deploy LobeHub with RepoCloud
-If you want to deploy LobeChat on RepoCloud, you can follow the steps below:
+If you want to deploy LobeHub on RepoCloud, you can follow the steps below:
## RepoCloud Deployment Process
diff --git a/docs/self-hosting/platform/repocloud.zh-CN.mdx b/docs/self-hosting/platform/repocloud.zh-CN.mdx
index 9e3bb81ad3..2129eb894b 100644
--- a/docs/self-hosting/platform/repocloud.zh-CN.mdx
+++ b/docs/self-hosting/platform/repocloud.zh-CN.mdx
@@ -1,16 +1,16 @@
---
-title: 在 RepoCloud 上部署 LobeChat
-description: 学习如何在 RepoCloud 上部署 LobeChat 应用,包括点击部署按钮、绑定自定义域名等操作。
+title: 在 RepoCloud 上部署 LobeHub
+description: 学习如何在 RepoCloud 上部署 LobeHub 应用,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
tags:
- RepoCloud
- - LobeChat
+ - LobeHub
- 部署流程
- 自定义域名
---
# 使用 RepoCloud 部署
-如果想在 RepoCloud 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 RepoCloud 上部署 LobeHub,可以按照以下步骤进行操作:
## RepoCloud 部署流程
diff --git a/docs/self-hosting/platform/sealos.mdx b/docs/self-hosting/platform/sealos.mdx
index 562b8ada20..f79af5a234 100644
--- a/docs/self-hosting/platform/sealos.mdx
+++ b/docs/self-hosting/platform/sealos.mdx
@@ -1,17 +1,17 @@
---
-title: Deploy LobeChat on Sealos
+title: Deploy LobeHub on Sealos
description: >-
- Learn how to deploy LobeChat on Sealos with ease. Follow the provided steps to
- set up LobeChat and start using it efficiently.
+ Learn how to deploy LobeHub on Sealos with ease. Follow the provided steps to
+ set up LobeHub and start using it efficiently.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Sealos Deployment
- Custom Domain Binding
---
-# Deploy LobeChat with Sealos
+# Deploy LobeHub with Sealos
-If you want to deploy LobeChat on Sealos, you can follow the steps below:
+If you want to deploy LobeHub on Sealos, you can follow the steps below:
## Sealos Deployment Process
diff --git a/docs/self-hosting/platform/sealos.zh-CN.mdx b/docs/self-hosting/platform/sealos.zh-CN.mdx
index 42d4fc1572..ee02eafa1d 100644
--- a/docs/self-hosting/platform/sealos.zh-CN.mdx
+++ b/docs/self-hosting/platform/sealos.zh-CN.mdx
@@ -1,16 +1,17 @@
---
-title: 在 Sealos 上部署 LobeChat
-description: 学习如何在 Sealos 上部署 LobeChat,包括点击部署按钮、绑定自定义域名等操作。
+title: 在 Sealos 上部署 LobeHub
+description: 学习如何在 Sealos 上部署 LobeHub,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
tags:
- Sealos
- - LobeChat
+ - LobeHub
+ - OpenAI API Key
- 部署流程
- 自定义域名
---
# 使用 Sealos 部署
-如果想在 Sealos 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 Sealos 上部署 LobeHub,可以按照以下步骤进行操作:
## Sealos 部署流程
diff --git a/docs/self-hosting/platform/tencentcloud-lighthouse.mdx b/docs/self-hosting/platform/tencentcloud-lighthouse.mdx
index 238b3e2151..caf9a1505b 100644
--- a/docs/self-hosting/platform/tencentcloud-lighthouse.mdx
+++ b/docs/self-hosting/platform/tencentcloud-lighthouse.mdx
@@ -1,17 +1,19 @@
---
-title: Deploy LobeChat on TencentCloud Lighthouse
+title: Deploy LobeHub on TencentCloud Lighthouse
description: >-
- Learn how to deploy the LobeChat application on TencentCloud Lighthouse,
- including clicking the deploy button and other operations.
+ Learn how to deploy the LobeHub application on TencentCloud Lighthouse,
+ including preparing the large model API Key, clicking the deploy button, and
+ other operations.
tags:
- TencentCloud Lighthouse
- TencentCloud
- - LobeChat
+ - LobeHub
+ - API Key
---
-# Deploy LobeChat with TencentCloud Lighthouse
+# Deploy LobeHub with TencentCloud Lighthouse
-If you want to deploy LobeChat on TencentCloud Lighthouse, you can follow the steps below:
+If you want to deploy LobeHub on TencentCloud Lighthouse, you can follow the steps below:
## Tencent Cloud Deployment Process
@@ -24,4 +26,4 @@ If you want to deploy LobeChat on TencentCloud Lighthouse, you can follow the st
[deploy-button-image]: https://cloudcache.tencent-cloud.com/qcloud/ui/static/static_source_business/d65fb782-4fb0-4348-ad85-f2943d6bee8f.svg
-[deploy-link]: https://buy.tencentcloud.com/lighthouse?blueprintType=APP_OS&blueprintOfficialId=lhbp-6u0ti132®ionId=9&zone=ap-singapore-3&bundleId=bundle_starter_nmc_lin_med2_01&loginSet=AUTO&rule=true&from=lobechat
+[deploy-link]: https://buy.tencentcloud.com/lighthouse?blueprintType=APP_OS&blueprintOfficialId=lhbp-6u0ti132®ionId=9&zone=ap-singapore-3&bundleId=bundle_starter_nmc_lin_med2_01&loginSet=AUTO&rule=true&from=LobeHub
diff --git a/docs/self-hosting/platform/tencentcloud-lighthouse.zh-CN.mdx b/docs/self-hosting/platform/tencentcloud-lighthouse.zh-CN.mdx
index ffb76467f2..f0a2aec9fc 100644
--- a/docs/self-hosting/platform/tencentcloud-lighthouse.zh-CN.mdx
+++ b/docs/self-hosting/platform/tencentcloud-lighthouse.zh-CN.mdx
@@ -1,16 +1,16 @@
---
-title: 在 腾讯轻量云 上部署 LobeChat
-description: 学习如何快速在腾讯轻量云上部署 LobeChat 应用,包括点击部署按钮等操作。
+title: 在 腾讯轻量云 上部署 LobeHub
+description: 学习如何快速在腾讯轻量云上部署 LobeHub 应用,包括准备大模型 API Key、点击部署按钮等操作。
tags:
- 腾讯云
- 腾讯轻量云
- - LobeChat
+ - LobeHub
- 部署流程
---
# 使用 腾讯轻量云 部署
-如果想在 腾讯云 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 腾讯云 上部署 LobeHub,可以按照以下步骤进行操作:
## 腾讯轻量云 部署流程
@@ -23,4 +23,4 @@ tags:
[deploy-button-image]: https://cloudcache.tencent-cloud.com/qcloud/ui/static/static_source_business/d65fb782-4fb0-4348-ad85-f2943d6bee8f.svg
-[deploy-link]: https://buy.cloud.tencent.com/lighthouse?blueprintType=APP_OS&blueprintOfficialId=lhbp-6u0ti132®ionId=8&zone=ap-beijing-3&bundleId=bundle_starter_mc_med2_01&loginSet=AUTO&rule=true&from=lobechat
+[deploy-link]: https://buy.cloud.tencent.com/lighthouse?blueprintType=APP_OS&blueprintOfficialId=lhbp-6u0ti132®ionId=8&zone=ap-beijing-3&bundleId=bundle_starter_mc_med2_01&loginSet=AUTO&rule=true&from=LobeHub
diff --git a/docs/self-hosting/platform/vercel.mdx b/docs/self-hosting/platform/vercel.mdx
index c59af76897..8210352496 100644
--- a/docs/self-hosting/platform/vercel.mdx
+++ b/docs/self-hosting/platform/vercel.mdx
@@ -1,17 +1,18 @@
---
-title: Deploy LobeChat with Vercel
+title: Deploy LobeHub with Vercel
description: >-
- Learn how to deploy LobeChat on Vercel with ease. Follow the provided steps to
- deploy the project and start using it efficiently.
+ Learn how to deploy LobeHub on Vercel with ease. Follow the provided steps to
+ prepare your OpenAI API Key, deploy the project, and start using it
+ efficiently.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Vercel Deployment
- Custom Domain Binding
---
-# Deploy LobeChat with Vercel
+# Deploy LobeHub with Vercel
-If you want to deploy LobeChat on Vercel, you can follow the steps below:
+If you want to deploy LobeHub on Vercel, you can follow the steps below:
## Vercel Deployment Process
diff --git a/docs/self-hosting/platform/vercel.zh-CN.mdx b/docs/self-hosting/platform/vercel.zh-CN.mdx
index 952ac1b7e9..6842d70cd9 100644
--- a/docs/self-hosting/platform/vercel.zh-CN.mdx
+++ b/docs/self-hosting/platform/vercel.zh-CN.mdx
@@ -1,17 +1,18 @@
---
-title: 在 Vercel 上部署 LobeChat
-description: 学习如何在 Vercel 上一键部署 LobeChat,点击按钮进行部署,绑定自定义域名,自动同步更新等。
+title: 在 Vercel 上部署 LobeHub
+description: 学习如何在 Vercel 上一键部署 LobeHub,准备 OpenAI API Key,点击按钮进行部署,绑定自定义域名,自动同步更新等。
tags:
- Vercel
- 部署指引
- - LobeChat
+ - LobeHub
+ - OpenAI API Key
- 自定义域名
- 自动同步更新
---
# Vercel 部署指引
-如果想在 Vercel 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 Vercel 上部署 LobeHub,可以按照以下步骤进行操作:
## Vercel 部署流程
@@ -34,7 +35,7 @@ tags:
如果你根据上述中的一键部署步骤部署了自己的项目,你可能会发现总是被提示 “有可用更新”。这是因为 Vercel 默认为你创建新项目而非 fork 本项目,这将导致无法准确检测更新。
- 我们建议按照 [📘 LobeChat 自部署保持更新](/zh/docs/self-hosting/advanced/upstream-sync)
+ 我们建议按照 [📘 LobeHub 自部署保持更新](/zh/docs/self-hosting/advanced/upstream-sync)
步骤重新部署。
diff --git a/docs/self-hosting/platform/zeabur.mdx b/docs/self-hosting/platform/zeabur.mdx
index f18bfe67ff..6d1bce4ceb 100644
--- a/docs/self-hosting/platform/zeabur.mdx
+++ b/docs/self-hosting/platform/zeabur.mdx
@@ -1,17 +1,17 @@
---
-title: Deploy LobeChat on Zeabur
+title: Deploy LobeHub on Zeabur
description: >-
- Learn how to deploy LobeChat on Zeabur with ease. Follow the provided steps to
+ Learn how to deploy LobeHub on Zeabur with ease. Follow the provided steps to
set up your chat application seamlessly.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Zeabur Deployment
- Custom Domain Binding
---
-# Deploy LobeChat with Zeabur
+# Deploy LobeHub with Zeabur
-If you want to deploy LobeChat on Zeabur, you can follow the steps below:
+If you want to deploy LobeHub on Zeabur, you can follow the steps below:
## Zeabur Deployment Process
@@ -27,16 +27,16 @@ If you want to deploy LobeChat on Zeabur, you can follow the steps below:
You can use the subdomain provided by Zeabur, or choose to bind a custom domain. Currently, the domains provided by Zeabur have not been contaminated, and most regions can connect directly.
-# Deploy LobeChat with Zeabur as serverless function
+# Deploy LobeHub with Zeabur as serverless function
> Note: There are still issues with [middlewares and rewrites of next.js on Zeabur](https://github.com/lobehub/lobe-chat/pull/2775?notification_referrer_id=NT_kwDOAdi2DrQxMDkyODQ4MDc2NTozMDk3OTU5OA#issuecomment-2146713899), use at your own risk!
-Since Zeabur does NOT officially support FREE users deploy containerized service, you may wish to deploy LobeChat as a serverless function service. To deploy LobeChat as a serverless function service on Zeabur, you can follow the steps below:
+Since Zeabur does NOT officially support FREE users deploy containerized service, you may wish to deploy LobeHub as a serverless function service. To deploy LobeHub as a serverless function service on Zeabur, you can follow the steps below:
## Zeabur Deployment Process
- ### Fork LobeChat
+ ### Fork LobeHub
### Add Zeabur pack config file
@@ -57,7 +57,7 @@ Since Zeabur does NOT officially support FREE users deploy containerized service
Create a project, then create a service under this project.
- ### Link your fork of LobeChat to the just created Zeabur service.
+ ### Link your fork of LobeHub to the just created Zeabur service.
When adding service, choose github. This may triger a oAuth depend on varies factors like how you login to Zeabur and if you have already authorized Zeabur to access all your repos
diff --git a/docs/self-hosting/platform/zeabur.zh-CN.mdx b/docs/self-hosting/platform/zeabur.zh-CN.mdx
index 810a019674..f70b5ad6c8 100644
--- a/docs/self-hosting/platform/zeabur.zh-CN.mdx
+++ b/docs/self-hosting/platform/zeabur.zh-CN.mdx
@@ -1,16 +1,17 @@
---
-title: 在 Zeabur 上部署 LobeChat
-description: 点击按钮进行部署。在部署完成后,即可开始使用 LobeChat 并选择是否绑定自定义域名。
+title: 在 Zeabur 上部署 LobeHub
+description: 按照指南准备 OpenAI API Key 并点击按钮进行部署。在部署完成后,即可开始使用 LobeHub 并选择是否绑定自定义域名。
tags:
- Zeabur
- - LobeChat
+ - LobeHub
+ - OpenAI API Key
- 部署流程
- 自定义域名
---
# 使用 Zeabur 部署
-如果想在 Zeabur 上部署 LobeChat,可以按照以下步骤进行操作:
+如果想在 Zeabur 上部署 LobeHub,可以按照以下步骤进行操作:
## Zeabur 部署流程
@@ -26,16 +27,16 @@ tags:
你可以使用 Zeabur 提供的子域名,也可以选择绑定自定义域名。目前 Zeabur 提供的域名还未被污染,大多数地区都可以直连。
-# 使用 Zeabur 将 LobeChat 部署为无服务器函数
+# 使用 Zeabur 将 LobeHub 部署为无服务器函数
> **注意:** 仍然存在关于 [Zeabur 上 next.js 的中间件和重写问题](https://github.com/lobehub/lobe-chat/pull/2775?notification_referrer_id=NT_kwDOAdi2DrQxMDkyODQ4MDc2NTozMDk3OTU5OA#issuecomment-2146713899),请自担风险!
-由于 Zeabur 并未官方支持免费用户部署容器化服务,您可能希望将 LobeChat 部署为无服务器函数服务。要在 Zeabur 上将 LobeChat 部署为无服务器函数服务,您可以按照以下步骤操作:
+由于 Zeabur 并未官方支持免费用户部署容器化服务,您可能希望将 LobeHub 部署为无服务器函数服务。要在 Zeabur 上将 LobeHub 部署为无服务器函数服务,您可以按照以下步骤操作:
## Zeabur 部署流程
- ### Fork LobeChat
+ ### Fork LobeHub
### 添加 Zeabur 打包配置文件
@@ -56,7 +57,7 @@ tags:
创建一个项目,并再这个项目下新建一个服务。
- ### 将您的 LobeChat 分支链接到刚创建的 Zeabur 服务。
+ ### 将您的 LobeHub 分支链接到刚创建的 Zeabur 服务。
在添加服务时,选择 github。这可能会触发一个 oAuth,取决于诸如您如何登录到 Zeabur 以及您是否已经授权 Zeabur 访问所有您的存储库等各种因素。
diff --git a/docs/self-hosting/server-database.mdx b/docs/self-hosting/server-database.mdx
index 01cb380016..8150729367 100644
--- a/docs/self-hosting/server-database.mdx
+++ b/docs/self-hosting/server-database.mdx
@@ -1,8 +1,8 @@
---
-title: Deploying Server-Side Database for LobeChat
-description: Learn how to deploy LobeChat's server-side database using Postgres.
+title: Deploying Server-Side Database for LobeHub
+description: Learn how to deploy LobeHub's server-side database using Postgres.
tags:
- - LobeChat
+ - LobeHub
- Server-Side Database
- Postgres
- Deployment Guide
@@ -10,7 +10,7 @@ tags:
# Deploying Server-Side Database
-LobeChat defaults to using a client-side database (IndexedDB) but also supports deploying a server-side database. LobeChat uses Postgres as the backend storage database.
+LobeHub defaults to using a client-side database (IndexedDB) but also supports deploying a server-side database. LobeHub uses Postgres as the backend storage database.
PostgreSQL is a powerful open-source relational database management system with high scalability
@@ -19,7 +19,7 @@ LobeChat defaults to using a client-side database (IndexedDB) but also supports
management.
-This guide will introduce the process and principles of deploying the server-side database version of LobeChat on any platform from a framework perspective, so you can understand both the what and the why, and then deploy according to your specific needs.
+This guide will introduce the process and principles of deploying the server-side database version of LobeHub on any platform from a framework perspective, so you can understand both the what and the why, and then deploy according to your specific needs.
If you are already familiar with the complete principles, you can quickly get started by checking the deployment guides for each platform:
@@ -27,7 +27,7 @@ If you are already familiar with the complete principles, you can quickly get st
---
-For the server-side database version of LobeChat, a normal deployment process typically involves configuring three modules:
+For the server-side database version of LobeHub, a normal deployment process typically involves configuring three modules:
1. Database configuration;
2. Authentication service configuration;
@@ -44,9 +44,27 @@ Before deployment, make sure you have a Postgres database instance ready. You ca
There is a slight difference in the way they are configured in terms of environment variables.
-Since we support file-based conversations/knowledge base conversations, we need to install the `pgvector` plugin for Postgres. This plugin provides vector search capabilities and is a key component for LobeChat to implement RAG.
+Since we support file-based conversations/knowledge base conversations, we need to install the `pgvector` plugin for Postgres. This plugin provides vector search capabilities and is a key component for LobeHub to implement RAG.
+ ### `NEXT_PUBLIC_SERVICE_MODE`
+
+ LobeHub supports both client-side and server-side databases, so we provide an environment variable for switching modes, which is `NEXT_PUBLIC_SERVICE_MODE`, with a default value of `client`.
+
+ For server-side database deployment scenarios, you need to set `NEXT_PUBLIC_SERVICE_MODE` to `server`.
+
+
+ In the official `lobe-chat-database` Docker image, this environment variable is already set to
+ `server` by default. Therefore, if you deploy using the Docker image, you do not need to configure
+ this environment variable again.
+
+
+
+ Since environment variables starting with `NEXT_PUBLIC` take effect in the front-end code, they cannot be modified through container runtime injection. (Refer to the `next.js` documentation [Configuring: Environment Variables | Next.js (nextjs.org)](https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables)). This is why we chose to create a separate DB version image.
+
+ If you need to modify variables with the `NEXT_PUBLIC` prefix in a Docker deployment, you must build the image yourself and inject your own `NEXT_PUBLIC` prefixed environment variables during the build.
+
+
### `DATABASE_URL`
The core of configuring the database is to add the `DATABASE_URL` environment variable and fill in the Postgres database connection URL you have prepared. The typical format of the database connection URL is `postgres://username:password@host:port/database`.
@@ -77,7 +95,7 @@ Since we support file-based conversations/knowledge base conversations, we need
You can generate a random 32-character string as the value of `KEY_VAULTS_SECRET` using `openssl
- rand -base64 32`.
+ rand -base64 32`.
@@ -87,7 +105,7 @@ In the server-side database mode, we need an authentication service to distingui
### Clerk
-[Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) is an authentication SaaS service that provides out-of-the-box authentication capabilities with high productization, low integration costs, and a great user experience. For those who offer SaaS products, Clerk is a good choice. Our official [LobeChat Cloud](https://lobechat.com) uses Clerk as the authentication service.
+[Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) is an authentication SaaS service that provides out-of-the-box authentication capabilities with high productization, low integration costs, and a great user experience. For those who offer SaaS products, Clerk is a good choice. Our official [LobeHub Cloud](https://lobehub.com) uses Clerk as the authentication service.
The integration of Clerk is relatively simple, requiring only the configuration of these environment variables:
@@ -120,7 +138,7 @@ For information on configuring NextAuth, you can refer to the [Authentication](/
## Configuring S3 Storage Service
-LobeChat has supported multimodal AI conversations since [a long time ago](https://x.com/lobehub/status/1724289575672291782), involving the function of uploading images to large models. In the client-side database solution, image files are stored as binary data directly in the browser's IndexedDB database. However, this solution is not feasible in the server-side database. Storing file-like data directly in Postgres will greatly waste valuable database storage space and slow down computational performance.
+LobeHub has supported multimodal AI conversations since [a long time ago](https://x.com/lobehub/status/1724289575672291782), involving the function of uploading images to large models. In the client-side database solution, image files are stored as binary data directly in the browser's IndexedDB database. However, this solution is not feasible in the server-side database. Storing file-like data directly in Postgres will greatly waste valuable database storage space and slow down computational performance.
The best practice in this area is to use a file storage service (S3) to store image files, which is also the storage solution relied upon for subsequent file uploads/knowledge base functions.
@@ -134,6 +152,6 @@ For detailed configuration guidelines on S3, please refer to [S3 Object Storage]
## Getting Started with Deployment
-The above is a detailed explanation of configuring LobeChat with a server-side database. You can configure it according to your actual situation and then choose a deployment platform that suits you to start deployment:
+The above is a detailed explanation of configuring LobeHub with a server-side database. You can configure it according to your actual situation and then choose a deployment platform that suits you to start deployment:
diff --git a/docs/self-hosting/server-database.zh-CN.mdx b/docs/self-hosting/server-database.zh-CN.mdx
index 4f40d16ecb..3c11d54e32 100644
--- a/docs/self-hosting/server-database.zh-CN.mdx
+++ b/docs/self-hosting/server-database.zh-CN.mdx
@@ -1,6 +1,6 @@
---
title: 使用服务端数据库部署 - 配置数据库、身份验证服务和 S3 存储服务
-description: 本文将介绍服务端数据库版 LobeChat 的部署思路,解释如何配置数据库、身份验证服务和 S3 存储服务。
+description: 本文将介绍服务端数据库版 LobeHub 的部署思路,解释如何配置数据库、身份验证服务和 S3 存储服务。
tags:
- 服务端数据库
- Postgres
@@ -12,14 +12,14 @@ tags:
# 使用服务端数据库部署
-LobeChat 默认使用客户端数据库(IndexedDB),同时也支持使用服务端数据库(下简称 DB 版)。LobeChat 采用了 Postgres 作为后端存储数据库。
+LobeHub 默认使用客户端数据库(IndexedDB),同时也支持使用服务端数据库(下简称 DB 版)。LobeHub 采用了 Postgres 作为后端存储数据库。
PostgreSQL 是一种强大的开源关系型数据库管理系统,具备高度扩展性和标准 SQL
支持。它提供了丰富的数据类型、并发处理、数据完整性、安全性及可编程性,适用于复杂应用和大规模数据管理。
-本文将从框架角度介绍在任何一个平台中部署 DB 版 LobeChat 的流程和原理,让你知其然也知其所以然,最后可以根据自己的实际情况进行部署。
+本文将从框架角度介绍在任何一个平台中部署 DB 版 LobeHub 的流程和原理,让你知其然也知其所以然,最后可以根据自己的实际情况进行部署。
如你已经熟悉完整原理,可以查看各个平台的部署指南快速开始:
@@ -27,7 +27,7 @@ LobeChat 默认使用客户端数据库(IndexedDB),同时也支持使用
---
-对于 LobeChat 的 DB 版,正常的部署流程都需要包含三个模块的配置:
+对于 LobeHub 的 DB 版,正常的部署流程都需要包含三个模块的配置:
1. 数据库配置;
2. 身份验证服务配置;
@@ -42,9 +42,26 @@ LobeChat 默认使用客户端数据库(IndexedDB),同时也支持使用
两者的配置方式在环境变量的取值上会略有一点区别,其他方面是一样的。
-同时,由于我们支持了文件对话 / 知识库对话的能力,因此我们需要为 Postgres 安装 `pgvector` 插件,该插件提供了向量搜索的能力,是 LobeChat 实现 RAG 的重要构件之一。
+同时,由于我们支持了文件对话 / 知识库对话的能力,因此我们需要为 Postgres 安装 `pgvector` 插件,该插件提供了向量搜索的能力,是 LobeHub 实现 RAG 的重要构件之一。
+ ### `NEXT_PUBLIC_SERVICE_MODE`
+
+ LobeHub 同时支持了客户端数据库和服务端数据库,因此我们提供了一个环境变量用于切换模式,这个变量为 `NEXT_PUBLIC_SERVICE_MODE`,该值默认为 `client`。
+
+ 针对服务端数据库部署场景,你需要将 `NEXT_PUBLIC_SERVICE_MODE` 设置为 `server`。
+
+
+ 在官方的 `lobe-chat-database` Docker 镜像中,已经默认将该环境变量设为 `server`,因此如果你使用
+ Docker 镜像部署,则无需再配置该环境变量。
+
+
+
+ 由于 `NEXT_PUBLIC` 开头的环境变量是在前端代码中生效的,因此无法通过容器运行时注入进行修改。 (`next.js`的参考文档 [Configuring: Environment Variables | Next.js (nextjs.org)](https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables) ) 这也是为什么我们选择再打一个 DB 版镜像的原因。
+
+ 如果你需要在 Docker 部署中修改 `NEXT_PUBLIC` 前缀的变量,你必须自行构建镜像,在 build 时就把自己的 `NEXT_PUBLIC` 开头的环境变量打进去。
+
+
### `DATABASE_URL`
配置数据库,核心是添加 `DATABASE_URL` 环境变量,将你准备好的 Postgres 数据库连接 URL 填入其中。数据库连接 URL 的通常格式为 `postgres://username:password@host:port/database`。
@@ -83,7 +100,7 @@ LobeChat 默认使用客户端数据库(IndexedDB),同时也支持使用
### Clerk
-[Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) 是一个身份验证 SaaS 服务,提供了开箱即用的身份验证能力,产品化程度很高,集成成本较低,体验很好。对于提供 SaaS 化产品的诉求来说,Clerk 是一个不错的选择。我们官方提供的 [LobeChat Cloud](https://lobechat.com),就是使用了 Clerk 作为身份验证服务。
+[Clerk](https://clerk.com?utm_source=lobehub\&utm_medium=docs) 是一个身份验证 SaaS 服务,提供了开箱即用的身份验证能力,产品化程度很高,集成成本较低,体验很好。对于提供 SaaS 化产品的诉求来说,Clerk 是一个不错的选择。我们官方提供的 [LobeHub Cloud](https://lobehub.com),就是使用了 Clerk 作为身份验证服务。
Clerk 的集成也相对简单,只需要配置 `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` 、 `CLERK_SECRET_KEY` 和 `CLERK_WEBHOOK_SECRET` 环境变量即可,这三个环境变量可以在 Clerk 控制台中获取。
@@ -111,7 +128,7 @@ NextAuth 是一个开源的身份验证库,支持多种身份验证提供商
## 配置 S3 存储服务
-LobeChat 在 [很早以前](https://x.com/lobehub/status/1724289575672291782) 就支持了多模态的 AI 会话,其中涉及到图片上传给大模型的功能。在客户端数据库方案中,图片文件直接以二进制数据存储在浏览器 IndexedDB 数据库,但在服务端数据库中这个方案并不可行。因为在 Postgres 中直接存储文件类二进制数据会大大浪费宝贵的数据库存储空间,并拖慢计算性能。
+LobeHub 在 [很早以前](https://x.com/lobehub/status/1724289575672291782) 就支持了多模态的 AI 会话,其中涉及到图片上传给大模型的功能。在客户端数据库方案中,图片文件直接以二进制数据存储在浏览器 IndexedDB 数据库,但在服务端数据库中这个方案并不可行。因为在 Postgres 中直接存储文件类二进制数据会大大浪费宝贵的数据库存储空间,并拖慢计算性能。
这块最佳实践是使用文件存储服务(S3)来存储图片文件,同时 S3 也是文件上传 / 知识库功能所依赖的大容量静态文件存储方案。
@@ -124,6 +141,6 @@ LobeChat 在 [很早以前](https://x.com/lobehub/status/1724289575672291782)
## 开始部署
-以上就是关于服务端数据库版 LobeChat 的配置详解,你可以根据自己的实际情况进行配置,然后选择适合自己的部署平台开始部署:
+以上就是关于服务端数据库版 LobeHub 的配置详解,你可以根据自己的实际情况进行配置,然后选择适合自己的部署平台开始部署:
diff --git a/docs/self-hosting/server-database/docker-compose.mdx b/docs/self-hosting/server-database/docker-compose.mdx
index 7bd7ee3cf5..0fc58f926c 100644
--- a/docs/self-hosting/server-database/docker-compose.mdx
+++ b/docs/self-hosting/server-database/docker-compose.mdx
@@ -1,16 +1,16 @@
---
-title: Deploying LobeChat with Docker Compose
+title: Deploying LobeHub with Docker Compose
description: >-
- Learn how to deploy the LobeChat service using Docker Compose, including
+ Learn how to deploy the LobeHub service using Docker Compose, including
configuration tutorials for various services.
tags:
- Docker Compose
- - LobeChat
+ - LobeHub
- Docker Container
- Deployment Guide
---
-# Deploying LobeChat Server Database Version with Docker Compose
+# Deploying LobeHub Server Database Version with Docker Compose
[![][docker-release-shield]][docker-release-link]
@@ -70,13 +70,13 @@ The script supports the following deployment modes; please choose the appropriat
After the script finishes running, you need to check the configuration generation report, which includes the accounts and initial login passwords for the Casdoor administrator and user.
- Please log in to LobeChat using the user account; the administrator account is only for managing
+ Please log in to LobeHub using the user account; the administrator account is only for managing
Casdoor.
```log
The results of the secure key generation are as follows:
- LobeChat:
+ LobeHub:
- URL: http://localhost:3210
- Username: user
- Password: c66f8c
@@ -118,7 +118,7 @@ The script supports the following deployment modes; please choose the appropriat
### Access Application
- Visit your LobeChat service at [http://localhost:3210](http://localhost:3210). The account credentials for the application can be found in the report from step `2`.
+ Visit your LobeHub service at [http://localhost:3210](http://localhost:3210). The account credentials for the application can be found in the report from step `2`.
### Port Mode
@@ -136,13 +136,13 @@ The script supports the following deployment modes; please choose the appropriat
After the script finishes running, please check the configuration generation report for the Casdoor administrator account, user account, and their initial login passwords.
- Please log in to LobeChat using the user account; the administrator account is only for managing
+ Please log in to LobeHub using the user account; the administrator account is only for managing
Casdoor.
```log
The results of the secure key generation are as follows:
- LobeChat:
+ LobeHub:
- URL: http://your_server_ip:3210
- Username: user
- Password: 837e26
@@ -183,7 +183,7 @@ The script supports the following deployment modes; please choose the appropriat
### Access Application
- You can access your LobeChat service at `http://your_server_ip:3210`. The account credentials for the application can be found in the report from step `2`.
+ You can access your LobeHub service at `http://your_server_ip:3210`. The account credentials for the application can be found in the report from step `2`.
If your service can accessed via the public network,
@@ -231,7 +231,7 @@ The script supports the following deployment modes; please choose the appropriat
In domain mode, you need to complete the following configurations based on script prompts:
- - Domain setup for the LobeChat service: `lobe.example.com`
+ - Domain setup for the LobeHub service: `lobe.example.com`
- Domain setup for the Minio service: `minio.example.com`
- Domain setup for the Casdoor service: `auth.example.com`
- Choose the access protocol: `http` or `https`
@@ -252,13 +252,13 @@ The script supports the following deployment modes; please choose the appropriat
After the script finishes running, you need to check the configuration generation report, which includes the initial login password for the Casdoor administrator.
- Please log in to LobeChat using the user account; the administrator account is only for managing
+ Please log in to LobeHub using the user account; the administrator account is only for managing
Casdoor.
```log
The results of the secure key generation are as follows:
- LobeChat:
+ LobeHub:
- URL: https://lobe.example.com
- Username: user
- Password: 837e26
@@ -299,7 +299,7 @@ The script supports the following deployment modes; please choose the appropriat
### Access Application
- You can access your LobeChat service via `https://lobe.example.com`. The account credentials for the application can be found in the report from step `3`.
+ You can access your LobeHub service via `https://lobe.example.com`. The account credentials for the application can be found in the report from step `3`.
If your service can accessed via the public network,
@@ -311,7 +311,7 @@ The script supports the following deployment modes; please choose the appropriat
## Custom Deployment
-This section mainly introduces the configurations that need to be modified to customize the deployment of the LobeChat service in different network environments. Before starting, you can download the [Docker Compose configuration file](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml) and the [environment variable configuration file](https://raw.githubusercontent.com/lobehub/lobe-chat/refs/heads/main/docker-compose/local/.env.example).
+This section mainly introduces the configurations that need to be modified to customize the deployment of the LobeHub service in different network environments. Before starting, you can download the [Docker Compose configuration file](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml) and the [environment variable configuration file](https://raw.githubusercontent.com/lobehub/lobe-chat/refs/heads/main/docker-compose/local/.env.example).
```sh
curl -O https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml
@@ -326,14 +326,14 @@ mv .env.example .env
### Prerequisites
-Generally, to fully run the LobeChat database version, you will need at least the following four services:
+Generally, to fully run the LobeHub database version, you will need at least the following four services:
-- The LobeChat database version itself
+- The LobeHub database version itself
- PostgreSQL database with PGVector plugin
- Object storage service that supports S3 protocol
-- An SSO authentication service supported by LobeChat
+- An SSO authentication service supported by LobeHub
-These services can be combined through self-hosting or online cloud services to meet various deployment needs. In this article, we provide a Docker Compose configuration entirely based on open-source self-hosted services, which can be used directly to start the LobeChat database version or modified to suit your requirements.
+These services can be combined through self-hosting or online cloud services to meet various deployment needs. In this article, we provide a Docker Compose configuration entirely based on open-source self-hosted services, which can be used directly to start the LobeHub database version or modified to suit your requirements.
We use [MinIO](https://github.com/minio/minio) as the local S3 object storage service and [Casdoor](https://github.com/casdoor/casdoor) as the local authentication service by default.
@@ -348,15 +348,15 @@ Now, we will introduce the necessary configurations for running these services:
1. Casdoor
-- LobeChat requires communication with Casdoor, so you need to configure Casdoor's Issuer.
+- LobeHub requires communication with Casdoor, so you need to configure Casdoor's Issuer.
```env
AUTH_CASDOOR_ISSUER=https://auth.example.com
```
-This configuration will affect LobeChat's login authentication service, and you need to ensure that the URL of the Casdoor service is correct. You can find common manifestations and solutions for errors in this configuration in the [FAQ](#faq).
+This configuration will affect LobeHub's login authentication service, and you need to ensure that the URL of the Casdoor service is correct. You can find common manifestations and solutions for errors in this configuration in the [FAQ](#faq).
-- Additionally, you need to allow the callback URL in Casdoor to point to the LobeChat address:
+- Additionally, you need to allow the callback URL in Casdoor to point to the LobeHub address:
Please add a line in the `Authentication -> Application` -> `` -> `Redirect URI` in Casdoor's web panel:
@@ -372,7 +372,7 @@ origin=https://auth.example.com
2. MinIO
-- LobeChat needs to provide a public access URL for object files for the LLM service provider, hence you need to configure MinIO's Endpoint.
+- LobeHub needs to provide a public access URL for object files for the LLM service provider, hence you need to configure MinIO's Endpoint.
```env
S3_PUBLIC_DOMAIN=https://minio.example.com
@@ -423,13 +423,13 @@ Solutions:
lobe-chat | [auth][error] TypeError: fetch failed
```
-Cause: LobeChat cannot access the authentication service.
+Cause: LobeHub cannot access the authentication service.
Solutions:
-- Check whether your authentication service is running properly and whether LobeChat's network can reach the authentication service.
+- Check whether your authentication service is running properly and whether LobeHub's network can reach the authentication service.
-- A straightforward troubleshooting method is to use the `curl` command in the LobeChat container terminal to access your authentication service at `https://auth.example.com/.well-known/openid-configuration`. If JSON format data is returned, it indicates your authentication service is functioning correctly.
+- A straightforward troubleshooting method is to use the `curl` command in the LobeHub container terminal to access your authentication service at `https://auth.example.com/.well-known/openid-configuration`. If JSON format data is returned, it indicates your authentication service is functioning correctly.
#### OAuth Token Exchange Failures with Reverse Proxy
@@ -445,7 +445,7 @@ docker compose up -d
````markdown
## Extended Configuration
-To enhance your LobeChat service, you can perform the following extended configurations according to your needs.
+To enhance your LobeHub service, you can perform the following extended configurations according to your needs.
### Use MinIO to Store Casdoor Avatars
@@ -495,7 +495,7 @@ Allow users to change their avatars in Casdoor.
3. In Casdoor's `Authentication -> Providers`, associate the MinIO S3 service. Below is an example configuration:
- 
+ 
Here, the client ID and client secret correspond to the `Access Key` and `Secret Key` from the previous step; replace `192.168.31.251` with `your_server_ip`.
@@ -520,7 +520,7 @@ In the following, it is assumed that in addition to the above services, you are
The domain and corresponding service port descriptions are as follows:
-- `lobe.example.com`: This is your LobeChat service domain, which needs to reverse proxy to the LobeChat service port, default is `3210`.
+- `lobe.example.com`: This is your LobeHub service domain, which needs to reverse proxy to the LobeHub service port, default is `3210`.
- `auth.example.com`: This is your Logto UI domain, which needs to reverse proxy to the Logto WebUI service port, default is `8000`.
- `minio.example.com`: This is your MinIO API domain, which needs to reverse proxy to the MinIO API service port, default is `9000`.
- `minio-ui.example.com`: Optional, this is your MinIO UI domain, which needs to reverse proxy to the MinIO WebUI service port, default is `9001`.
@@ -633,7 +633,7 @@ After logging in, perform the following actions:
1. In `User Management -> Organizations`, add a new organization with the name and display name `Lobe Users`. Keep the rest as default.
2. In `Authentication -> Apps`, add a new application.
-- Name and display name should be `LobeChat`.
+- Name and display name should be `LobeHub`.
- Organization should be `Lobe Users`.
- Add a line in Redirect URLs as `https://lobe.example.com/api/auth/callback/casdoor`.
- Disable all login methods except password.
@@ -651,7 +651,7 @@ After logging in, perform the following actions:
This article uses MinIO as an example to explain the configuration process. If you are using another S3 service provider, please refer to their documentation for configuration.
- Please remember to configure the corresponding S3 service provider's CORS settings to ensure that LobeChat can access the S3 service correctly.
+ Please remember to configure the corresponding S3 service provider's CORS settings to ensure that LobeHub can access the S3 service correctly.
In this document, you need to allow cross-origin requests from `https://lobe.example.com`. This can either be configured in MinIO WebUI under `Configuration - API - Cors Allow Origin`, or in the Docker Compose configuration under `minio - environment - MINIO_API_CORS_ALLOW_ORIGIN`.
@@ -667,20 +667,20 @@ You first need to access the WebUI for configuration:
2. In the left panel under User / Access Keys, click `Create New Access Key`, no additional modifications needed, and fill the generated `Access Key` and `Secret Key` into your `.env` file under `S3_ACCESS_KEY_ID` and `S3_SECRET_ACCESS_KEY`.
-
+
-3. Restart the LobeChat service:
+3. Restart the LobeHub service:
```sh
docker compose up -d
```
-At this point, you have successfully deployed the LobeChat database version, and you can access your LobeChat service at `https://lobe.example.com`.
+At this point, you have successfully deployed the LobeHub database version, and you can access your LobeHub service at `https://lobe.example.com`.
#### Configuring Internal Server Communication with `INTERNAL_APP_URL`
- If you are deploying LobeChat behind a CDN (like Cloudflare) or reverse proxy, you may want to configure internal server-to-server communication to bypass the CDN/proxy layer for better performance.
+ If you are deploying LobeHub behind a CDN (like Cloudflare) or reverse proxy, you may want to configure internal server-to-server communication to bypass the CDN/proxy layer for better performance.
You can configure the `INTERNAL_APP_URL` environment variable:
@@ -733,7 +733,7 @@ CASDOOR_PORT=8000
MINIO_PORT=9000
# Postgres related, which are the necessary environment variables for DB
-LOBE_DB_NAME=lobechat
+LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC
# Casdoor secret
@@ -762,7 +762,7 @@ services:
- '${MINIO_PORT}:${MINIO_PORT}' # MinIO API
- '9001:9001' # MinIO Console
- '${CASDOOR_PORT}:${CASDOOR_PORT}' # Casdoor
- - '${LOBE_PORT}:3210' # LobeChat
+ - '${LOBE_PORT}:3210' # LobeHub
command: tail -f /dev/null
networks:
- lobe-network
diff --git a/docs/self-hosting/server-database/docker-compose.zh-CN.mdx b/docs/self-hosting/server-database/docker-compose.zh-CN.mdx
index e187c034c5..10b269462b 100644
--- a/docs/self-hosting/server-database/docker-compose.zh-CN.mdx
+++ b/docs/self-hosting/server-database/docker-compose.zh-CN.mdx
@@ -1,14 +1,14 @@
---
-title: 通过 Docker Compose 部署 LobeChat
-description: 学习如何使用 Docker Compose 部署 LobeChat 服务,包括各种服务的配置教程。
+title: 通过 Docker Compose 部署 LobeHub
+description: 学习如何使用 Docker Compose 部署 LobeHub 服务,包括各种服务的配置教程。
tags:
- Docker Compose
- - LobeChat
+ - LobeHub
- Docker 容器
- 部署指引
---
-# 使用 Docker Compose 部署 LobeChat 服务端数据库版本
+# 使用 Docker Compose 部署 LobeHub 服务端数据库版本
[![][docker-release-shield]][docker-release-link]
@@ -66,11 +66,11 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
你需要在脚本运行结束后查看配置生成报告,包括 Casdoor 管理员的帐号、用户账号和它们的初始登录密码。
- 请使用用户账号登录 LobeChat,管理员账号仅用于管理 Casdoor。
+ 请使用用户账号登录 LobeHub,管理员账号仅用于管理 Casdoor。
```log
安全密钥生成结果如下:
- LobeChat:
+ LobeHub:
- URL: http://localhost:3210
- Username: user
- Password: c66f8c
@@ -112,7 +112,7 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
### 访问应用
- 通过 [http://localhost:3210](http://localhost:3210) 访问你的 LobeChat 服务。应用的账号密码在步骤`2`的报告中。
+ 通过 [http://localhost:3210](http://localhost:3210) 访问你的 LobeHub 服务。应用的账号密码在步骤`2`的报告中。
### 端口模式
@@ -129,11 +129,11 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
你需要在脚本运行结束后查看配置生成报告,包括 Casdoor 管理员的帐号、用户账号和它们的初始登录密码。
- 请使用用户账号登录 LobeChat,管理员账号仅用于管理 Casdoor。
+ 请使用用户账号登录 LobeHub,管理员账号仅用于管理 Casdoor。
```log
安全密钥生成结果如下:
- LobeChat:
+ LobeHub:
- URL: http://your_server_ip:3210
- Username: user
- Password: 837e26
@@ -174,7 +174,7 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
### 访问应用
- 你可以通过 `http://your_server_ip:3210` 访问你的 LobeChat 服务。应用的账号密码在步骤`2`的报告中。
+ 你可以通过 `http://your_server_ip:3210` 访问你的 LobeHub 服务。应用的账号密码在步骤`2`的报告中。
请注意,如果你的服务能够被公网访问,我们强烈建议你参考 [文档](https://lobehub.com/docs/self-hosting/advanced/auth/next-auth/casdoor) 关闭注册功能。
@@ -218,7 +218,7 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
在域名模式中,你需要根据脚本提示完成:
- - LobeChat 服务的域名设置:`lobe.example.com`
+ - LobeHub 服务的域名设置:`lobe.example.com`
- Minio 服务的域名设置:`minio.example.com`
- Casdoor 服务的域名设置:`auth.example.com`
- 选择访问协议:`http` 或 `https`
@@ -238,11 +238,11 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
你需要在脚本运行结束后查看配置生成报告,包括 Casdoor 管理员的初始登录密码。
- 请使用用户账号登录 LobeChat,管理员账号仅用于管理 Casdoor。
+ 请使用用户账号登录 LobeHub,管理员账号仅用于管理 Casdoor。
```log
安全密钥生成结果如下:
- LobeChat:
+ LobeHub:
- URL: https://lobe.example.com
- Username: user
- Password: 837e26
@@ -283,7 +283,7 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
### 访问应用
- 你可以通过 `https://lobe.example.com` 访问你的 LobeChat 服务。应用的账号密码在步骤`3`的报告中。
+ 你可以通过 `https://lobe.example.com` 访问你的 LobeHub 服务。应用的账号密码在步骤`3`的报告中。
请注意,如果你的服务能够被公网访问,我们强烈建议你参考 [文档](https://lobehub.com/docs/self-hosting/advanced/auth/next-auth/casdoor) 关闭注册功能。
@@ -292,7 +292,7 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
## 自定义部署
-该章节主要为你介绍在不同的网络环境下自定义部署 LobeChat 服务必须要修改的配置。在开始前,你可以先下载 [Docker Compose 配置文件](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml) 以及 [环境变量配置文件](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/.env.zh-CN.example)。
+该章节主要为你介绍在不同的网络环境下自定义部署 LobeHub 服务必须要修改的配置。在开始前,你可以先下载 [Docker Compose 配置文件](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml) 以及 [环境变量配置文件](https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/.env.zh-CN.example)。
```sh
curl -O https://raw.githubusercontent.com/lobehub/lobe-chat/HEAD/docker-compose/local/docker-compose.yml
@@ -307,14 +307,14 @@ mv .env.zh-CN.example .env
### 预备知识
-一般来讲,想要完整的运行 LobeChat 数据库版本,你需要至少拥有如下四个服务
+一般来讲,想要完整的运行 LobeHub 数据库版本,你需要至少拥有如下四个服务
-- LobeChat 数据库版本自身
+- LobeHub 数据库版本自身
- 带有 PGVector 插件的 PostgreSQL 数据库
- 支持 S3 协议的对象存储服务
-- 受 LobeChat 支持的 SSO 登录鉴权服务
+- 受 LobeHub 支持的 SSO 登录鉴权服务
-这些服务可以通过自建或者在线云服务组合搭配,以满足不同层次的部署需求。本文中,我们提供了完全基于开源自建服务的 Docker Compose 配置,你可以直接使用这份配置文件来启动 LobeChat 数据库版本,也可以对之进行修改以适应你的需求。
+这些服务可以通过自建或者在线云服务组合搭配,以满足不同层次的部署需求。本文中,我们提供了完全基于开源自建服务的 Docker Compose 配置,你可以直接使用这份配置文件来启动 LobeHub 数据库版本,也可以对之进行修改以适应你的需求。
我们默认使用 [MinIO](https://github.com/minio/minio) 作为本地 S3 对象存储服务,使用 [Casdoor](https://github.com/casdoor/casdoor) 作为本地鉴权服务。
@@ -328,15 +328,15 @@ mv .env.zh-CN.example .env
1. Casdoor
-- LobeChat 需要与 Casdoor 通讯,因此你需要配置 Casdoor 的 Issuer 。
+- LobeHub 需要与 Casdoor 通讯,因此你需要配置 Casdoor 的 Issuer 。
```env
AUTH_CASDOOR_ISSUER=https://auth.example.com
```
-该配置会影响 LobeChat 的登录鉴权服务,你需要确保 Casdoor 服务的地址正确。你可以在 [常见问题](#常见问题) 中找到该配置错误的常见现象及解决方案。
+该配置会影响 LobeHub 的登录鉴权服务,你需要确保 Casdoor 服务的地址正确。你可以在 [常见问题](#常见问题) 中找到该配置错误的常见现象及解决方案。
-- 同时,你也需要在 Casdoor 中允许回调地址为 LobeChat 的地址:
+- 同时,你也需要在 Casdoor 中允许回调地址为 LobeHub 的地址:
请在 Casdoor 的 Web 面板的 `身份认证 -> 应用` -> `<应用ID,默认为 app-built-in>` -> `重定向URL` 中添加一行:
@@ -352,7 +352,7 @@ origin=https://auth.example.com
2. MinIO
-- LobeChat 需要为 LLM 服务提供商提供文件对象的公网访问地址,因此你需要配置 MinIO 的 Endpoint 。
+- LobeHub 需要为 LLM 服务提供商提供文件对象的公网访问地址,因此你需要配置 MinIO 的 Endpoint 。
```env
S3_PUBLIC_DOMAIN=https://minio.example.com
@@ -403,13 +403,13 @@ lobe-chat | [auth][error] r3: "response" is not a conform Authorization Ser
lobe-chat | [auth][error] TypeError: fetch failed
```
-成因:LobeChat 无法访问鉴权服务。
+成因:LobeHub 无法访问鉴权服务。
解决方案:
-- 请检查你的鉴权服务是否正常运行,以及 LobeChat 所在的网络是否能够访问到鉴权服务。
+- 请检查你的鉴权服务是否正常运行,以及 LobeHub 所在的网络是否能够访问到鉴权服务。
-- 一个直接的排查方式,你可以在 LobeChat 容器的终端中,使用 `curl` 命令访问你的鉴权服务 `https://auth.example.com/.well-known/openid-configuration`,如果返回了 JSON 格式的数据,则说明你的鉴权服务正常运行。
+- 一个直接的排查方式,你可以在 LobeHub 容器的终端中,使用 `curl` 命令访问你的鉴权服务 `https://auth.example.com/.well-known/openid-configuration`,如果返回了 JSON 格式的数据,则说明你的鉴权服务正常运行。
#### 反向代理下 OAuth 令牌交换失败
@@ -424,7 +424,7 @@ docker compose up -d
## 拓展配置
-为了完善你的 LobeChat 服务,你可以根据你的需求进行以下拓展配置。
+为了完善你的 LobeHub 服务,你可以根据你的需求进行以下拓展配置。
### 使用 MinIO 存储 Casdoor 头像
@@ -473,7 +473,7 @@ docker compose up -d
3. 在 Casdoor 的 `身份认证 -> 提供商` 中关联 MinIO S3 服务,以下是一个示例配置:
- 
+ 
其中,客户端 ID、客户端密钥为上一步创建的访问密钥中的 `Access Key` 和 `Secret Key`,`192.168.31.251` 应当被替换为 `your_server_ip`。
@@ -496,7 +496,7 @@ docker compose up -d
域名和配套服务端口说明如下:
-- `lobe.example.com`:为你的 LobeChat 服务端域名,需要反向代理到 LobeChat 服务端口,默认为 `3210`
+- `lobe.example.com`:为你的 LobeHub 服务端域名,需要反向代理到 LobeHub 服务端口,默认为 `3210`
- `auth.example.com`:为你的 Logto UI 域名,需要反向代理到 Logto WebUI 服务端口,默认为 `8000`
- `minio.example.com`:为你的 MinIO API 域名,需要反向代理到 MinIO API 服务端口,默认为 `9000`
- `minio-ui.example.com`:可选,为你的 MinIO UI 域名,需要反向代理到 MinIO WebUI 服务端口,默认为 `9001`
@@ -609,7 +609,7 @@ docker compose up -d # 重新启动
1. 在 `用户管理 -> 组织` 中,添加一个新的组织。名称与显示名称为 `Lobe Users`。其余保持默认即可。
2. 在 `身份认证 -> 应用` 中,添加一个新的应用。
-- 名称与显示名称为 `LobeChat`。
+- 名称与显示名称为 `LobeHub`。
- 组织为 `Lobe Users`。
- 重定向 URLS 中添加一行 为 `https://lobe.example.com/api/auth/callback/casdoor`。
- 关闭除密码外的登录方式 。
@@ -626,7 +626,7 @@ docker compose up -d # 重新启动
本文以 MinIO 为例,解释配置过程,如果你使用的是其他 S3 服务商,请参照其文档进行配置。
- 请记得注意配置对应 S3 服务商的 CORS 跨域配置,以确保 LobeChat 能够正常访问 S3 服务。
+ 请记得注意配置对应 S3 服务商的 CORS 跨域配置,以确保 LobeHub 能够正常访问 S3 服务。
在本文中,你需要允许 `https://lobe.example.com` 的跨域请求。这既可以在 MinIO WebUI 的 `Configuration - API - Cors Allow Origin` 中配置,也可以在 Docker Compose 中的 `minio - environment - MINIO_API_CORS_ALLOW_ORIGIN` 中配置。
@@ -642,20 +642,20 @@ docker compose up -d # 重新启动
2. 在左侧面板 User / Access Keys 处,点击 `Create New Access Key`,无需额外修改,将生成的 `Access Key` 和 `Secret Key` 填入你的 `.env` 文件中的 `S3_ACCESS_KEY_ID` 和 `S3_SECRET_ACCESS_KEY` 中
-
+
-3. 重启 LobeChat 服务:
+3. 重启 LobeHub 服务:
```sh
docker compose up -d
```
-至此,你已经成功部署了 LobeChat 数据库版本,你可以通过 `https://lobe.example.com` 访问你的 LobeChat 服务。
+至此,你已经成功部署了 LobeHub 数据库版本,你可以通过 `https://lobe.example.com` 访问你的 LobeHub 服务。
#### 使用 `INTERNAL_APP_URL` 配置内部服务器通信
- 如果你在 CDN(如 Cloudflare)或反向代理后部署 LobeChat,你可以配置内部服务器到服务器通信以绕过 CDN / 代理层,以获得更好的性能。
+ 如果你在 CDN(如 Cloudflare)或反向代理后部署 LobeHub,你可以配置内部服务器到服务器通信以绕过 CDN / 代理层,以获得更好的性能。
你可以配置 `INTERNAL_APP_URL` 环境变量:
@@ -708,7 +708,7 @@ CASDOOR_PORT=8000
MINIO_PORT=9000
# Postgres related, which are the necessary environment variables for DB
-LOBE_DB_NAME=lobechat
+LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC
# Casdoor secret
@@ -737,7 +737,7 @@ services:
- '${MINIO_PORT}:${MINIO_PORT}' # MinIO API
- '9001:9001' # MinIO Console
- '${CASDOOR_PORT}:${CASDOOR_PORT}' # Casdoor
- - '${LOBE_PORT}:3210' # LobeChat
+ - '${LOBE_PORT}:3210' # LobeHub
command: tail -f /dev/null
networks:
- lobe-network
diff --git a/docs/self-hosting/server-database/docker.mdx b/docs/self-hosting/server-database/docker.mdx
index 43e80ca4e7..ad7c1bdf98 100644
--- a/docs/self-hosting/server-database/docker.mdx
+++ b/docs/self-hosting/server-database/docker.mdx
@@ -1,10 +1,10 @@
---
-title: Deploying LobeChat Database with Docker
+title: Deploying LobeHub Database with Docker
description: >-
- Learn how to deploy the LobeChat server database version using Docker on Linux
+ Learn how to deploy the LobeHub server database version using Docker on Linux
and local machines.
tags:
- - LobeChat
+ - LobeHub
- Docker
- Database Deployment
- Postgres
@@ -22,16 +22,21 @@ tags:
This article assumes that you are familiar with the basic principles and processes of deploying
- the LobeChat server database version, so it only includes content related to core environment
- variable configuration. If you are not familiar with the deployment principles of the LobeChat
+ the LobeHub server database version, so it only includes content related to core environment
+ variable configuration. If you are not familiar with the deployment principles of the LobeHub
server database version, please refer to [Deploying Server
Database](/docs/self-hosting/server-database) first.
+
+ Due to the inability to expose `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` using Docker environment variables, you cannot use Clerk as an authentication service when deploying LobeHub using Docker / Docker Compose.
+
+ If you do need Clerk as an authentication service, you might consider deploying using Vercel or building your own image.
+
## Deploying on a Linux Server
-Here is the process for deploying the LobeChat server database version on a Linux server:
+Here is the process for deploying the LobeHub server database version on a Linux server:
### Create a Postgres Database Instance
@@ -48,7 +53,7 @@ Here is the process for deploying the LobeChat server database version on a Linu
The pgvector plugin provides vector search capabilities for Postgres, which is an important
- component for LobeChat to implement RAG.
+ component for LobeHub to implement RAG.
@@ -79,8 +84,8 @@ Here is the process for deploying the LobeChat server database version on a Linu
S3_ACCESS_KEY_ID=xxxxxxxxxx
S3_SECRET_ACCESS_KEY=xxxxxxxxxx
S3_ENDPOINT=https://xxxxxxxxxx.r2.cloudflarestorage.com
- S3_BUCKET=lobechat
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+ S3_BUCKET=lobechat
+ S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
```
@@ -120,7 +125,7 @@ Here is the process for deploying the LobeChat server database version on a Linu
## Using Locally (Mac / Windows)
-The data version of LobeChat also supports direct use on a local Mac/Windows machine.
+The data version of LobeHub also supports direct use on a local Mac/Windows machine.
Here, we assume that you have a pg instance available on port 5432 locally on your Mac/Windows, with the account `postgres` and password `mysecretpassword`, accessible at `localhost:5432`.
@@ -136,8 +141,8 @@ $ docker run -it -d --name lobe-chat-database -p 3210:3210 \
-e S3_ACCESS_KEY_ID=xxxxxxxxxx \
-e S3_SECRET_ACCESS_KEY=xxxxxxxxxx \
-e S3_ENDPOINT=https://xxxxxxxxxx.r2.cloudflarestorage.com \
- -e S3_BUCKET=lobechat \
- -e S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com \
+ -e S3_BUCKET=lobechat \
+ -e S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com \
lobehub/lobe-chat-database
```
diff --git a/docs/self-hosting/server-database/docker.zh-CN.mdx b/docs/self-hosting/server-database/docker.zh-CN.mdx
index d59597b4d4..acf4b8b18f 100644
--- a/docs/self-hosting/server-database/docker.zh-CN.mdx
+++ b/docs/self-hosting/server-database/docker.zh-CN.mdx
@@ -1,9 +1,9 @@
---
-title: 使用 Docker 部署 LobeChat 数据库
-description: 详细步骤教你如何在 Docker 中部署 LobeChat 服务端数据库。
+title: 使用 Docker 部署 LobeHub 数据库
+description: 详细步骤教你如何在 Docker 中部署 LobeHub 服务端数据库。
tags:
- Docker
- - LobeChat
+ - LobeHub
- 数据库部署
- Postgres
---
@@ -19,17 +19,23 @@ tags:
- 本文已经假定你了解了 LobeChat 服务端数据库版本(下简称 DB
- 版)的部署基本原理和流程,因此只包含核心环境变量配置的内容。如果你还不了解 LobeChat DB
+ 本文已经假定你了解了 LobeHub 服务端数据库版本(下简称 DB
+ 版)的部署基本原理和流程,因此只包含核心环境变量配置的内容。如果你还不了解 LobeHub DB
版的部署原理,请先查阅 [使用服务端数据库部署](/zh/docs/self-hosting/server-database) 。
此外,针对国内的腾讯云储存桶用户,可查询[配置腾讯云 COS
存储服务](/zh/docs/self-hosting/advanced/s3/tencent-cloud)。
+
+ 由于无法使用 Docker 环境变量暴露 `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY`,使用 Docker / Docker Compose
+ 部署 LobeHub 时,你不能使用 Clerk 作为登录鉴权服务。
+
+ 如果你确实需要 Clerk 作为登录鉴权服务,你可以考虑使用 Vercel 部署或者自行构建镜像。
+
## 在 Linux 服务器上部署
-以下是在 Linux 服务器上部署 LobeChat DB 版的流程:
+以下是在 Linux 服务器上部署 LobeHub DB 版的流程:
### 创建 Postgres 数据库实例
@@ -45,7 +51,7 @@ tags:
上述指令会创建一个名为 `my-postgres`,并且网络为 `pg` 的 PG 实例,其中 `pgvector/pgvector:pg16` 是一个 Postgres 16 的镜像,且默认安装了 pgvector 插件。
- pgvector 插件为 Postgres 提供了向量搜索的能力,是 LobeChat 实现 RAG 的重要构件之一。
+ pgvector 插件为 Postgres 提供了向量搜索的能力,是 LobeHub 实现 RAG 的重要构件之一。
@@ -77,9 +83,9 @@ tags:
S3_SECRET_ACCESS_KEY=xxxxxxxxxx
# 用于 S3 API 访问的域名
S3_ENDPOINT=https://xxxxxxxxxx.r2.cloudflarestorage.com
- S3_BUCKET=lobechat
+ S3_BUCKET=lobechat
# 用于外网访问 S3 的公共域名,需配置 CORS
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+ S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
# S3_REGION=ap-chengdu # 如果需要指定地域
```
@@ -120,7 +126,7 @@ tags:
## 在本地(Mac / Windows) 上使用
-LobeChat 的 DB 版也支持直接在本地的 Mac/Windows 本地使用。
+LobeHub 的 DB 版也支持直接在本地的 Mac/Windows 本地使用。
在此我们已假设你的本地有一个 5432 端口可用,账号为 `postgres` ,密码是 `mysecretpassword` 的 pg 实例,它在 `localhost:5432` 可用。
@@ -136,8 +142,8 @@ $ docker run -it -d --name lobe-chat-database -p 3210:3210 \
-e S3_ACCESS_KEY_ID=xxxxxxxxxx \
-e S3_SECRET_ACCESS_KEY=xxxxxxxxxx \
-e S3_ENDPOINT=https://xxxxxxxxxx.r2.cloudflarestorage.com \
- -e S3_BUCKET=lobechat \
- -e S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com \
+ -e S3_BUCKET=lobechat \
+ -e S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com \
lobehub/lobe-chat-database
```
diff --git a/docs/self-hosting/server-database/dokploy.mdx b/docs/self-hosting/server-database/dokploy.mdx
index 15d534713d..9cb8dd5441 100644
--- a/docs/self-hosting/server-database/dokploy.mdx
+++ b/docs/self-hosting/server-database/dokploy.mdx
@@ -1,18 +1,18 @@
---
-title: Deploy LobeChat with database on Dokploy
+title: Deploy LobeHub with database on Dokploy
description: >-
- Learn how to deploy LobeChat with database on Dokploy with ease, including:
+ Learn how to deploy LobeHub with database on Dokploy with ease, including:
database, authentication and S3 storage service.
tags:
- - Deploy LobeChat
- - Dokploy Deployment
- - Better Auth
- - S3 Storage
+ - Deploy LobeHub
+ - Dokploy Deployment
+ - Better Auth
+ - S3 Storage
---
# Deploying Server Database Version on Dokploy.
-This article will detail how to deploy the server database version of LobeChat.
+This article will detail how to deploy the server database version of LobeHub.
## 1. Preparation Work
@@ -24,11 +24,11 @@ curl -sSL https://dokploy.com/install.sh | sh
1. Connect your GitHub to Dokploy in the Settings / Git section according to the prompt.
-
+
2. Enter the Projects interface to create a Project.
-
+
### Configure S3 Storage Service
@@ -69,9 +69,9 @@ You also need to configure the `JWKS_KEY` environment variable for signing and v
Enter the previously created Project, click on Create Service, and select Database. In the Database interface, choose PostgreSQL, then set the database name, user, and password. In the Docker image field, enter `pgvector/pgvector:pg17`, and finally click Create to create the database.
-
+
-Enter the created database and set an unused port in External Credentials to allow external access; otherwise, LobeChat will not be able to connect to the database. You can view the Postgres database connection URL in External Host, as shown below:
+Enter the created database and set an unused port in External Credentials to allow external access; otherwise, LobeHub will not be able to connect to the database. You can view the Postgres database connection URL in External Host, as shown below:
```shell
postgresql://postgres:wAbLxfXSwkxxxxxx@45.577.281.48:5432/postgres
@@ -79,21 +79,21 @@ postgresql://postgres:wAbLxfXSwkxxxxxx@45.577.281.48:5432/postgres
Finally, click Deploy to deploy the database.
-
+
-## Deploy LobeChat on Dokploy.
+## Deploy LobeHub on Dokploy.
-Click "Create Service", select "Application", and create the LobeChat application.
+Click "Create Service", select "Application", and create the LobeHub application.
-
+
-Enter the created LobeChat application, select the forked lobe-chat project and branch, and click Save to save.
+Enter the created LobeHub application, select the forked lobe-chat project and branch, and click Save to save.
-
+
Switch to the Environment section, fill in the environment variables, and click Save.
-
+
```shell
# Environment variables required for building
@@ -125,14 +125,14 @@ S3_ENABLE_PATH_STYLE=
After adding the environment variables and saving, click Deploy to initiate the deployment. You can check the deployment progress and log information under Deployments.
-
+
-After a successful deployment, bind your own domain to your LobeChat application and request a certificate on the Domains page.
+After a successful deployment, bind your own domain to your LobeHub application and request a certificate on the Domains page.
-
+
-## Check if LobeChat is working properly.
+## Check if LobeHub is working properly.
-Go to your LobeChat website, and if you click on the login button in the upper left corner and the login pop-up appears normally, it means you have configured it successfully. Enjoy it to the fullest!
+Go to your LobeHub website, and if you click on the login button in the upper left corner and the login pop-up appears normally, it means you have configured it successfully. Enjoy it to the fullest!
-
+
diff --git a/docs/self-hosting/server-database/dokploy.zh-CN.mdx b/docs/self-hosting/server-database/dokploy.zh-CN.mdx
index 11464ab967..3c33ec2067 100644
--- a/docs/self-hosting/server-database/dokploy.zh-CN.mdx
+++ b/docs/self-hosting/server-database/dokploy.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 Dokploy 上部署 LobeChat 的服务端数据库版本
-description: 本文详细介绍如何在 Dokploy 中部署服务端数据库版 LobeChat,包括数据库配置、身份验证服务配置的设置步骤。
+title: 在 Dokploy 上部署 LobeHub 的服务端数据库版本
+description: 本文详细介绍如何在 Dokploy 中部署服务端数据库版 LobeHub,包括数据库配置、身份验证服务配置的设置步骤。
tags:
- 服务端数据库
- Postgres
@@ -13,7 +13,7 @@ tags:
# 在 Dokploy 上部署服务端数据库版
-本文将详细介绍如何在 Dokploy 中部署服务端数据库版 LobeChat。
+本文将详细介绍如何在 Dokploy 中部署服务端数据库版 LobeHub。
## 一、准备工作
@@ -25,11 +25,11 @@ curl -sSL https://dokploy.com/install.sh | sh
1. 在 Dokploy 的 Settings / Git 处根据提示将 Github 绑定到 Dokploy
-
+
2. 进入 Projects 界面创建一个 Project
-
+
### 配置 S3 存储服务
@@ -70,9 +70,9 @@ S3_ENABLE_PATH_STYLE=
进入前面创建的 Project,点击 Create Service 选择 Database,在 Database 界面选择 PostgreSQL ,然后设置数据库名、用户、密码,在 Docker image 中填入 `pgvector/pgvector:pg17` 最后点击 Create 创建数据库。
-
+
-进入创建的数据库,在 External Credentials 设置一个未被占用的端口,使其能能通过外部访问,否则 LobeChat 将无法连接到该数据库。你可以在 External Host 查看 Postgres 数据库连接 URL ,如下:
+进入创建的数据库,在 External Credentials 设置一个未被占用的端口,使其能通过外部访问,否则 LobeHub 将无法连接到该数据库。你可以在 External Host 查看 Postgres 数据库连接 URL ,如下:
```shell
postgresql://postgres:wAbLxfXSwkxxxxxx@45.577.281.48:5432/postgres
@@ -80,21 +80,21 @@ postgresql://postgres:wAbLxfXSwkxxxxxx@45.577.281.48:5432/postgres
最后点击 Deploy 部署数据库
-
+
-## 在 Dokploy 上部署 LobeChat
+## 在 Dokploy 上部署 LobeHub
-点击 Create Service 选择 Application,创建 LobeChat 应用
+点击 Create Service 选择 Application,创建 LobeHub 应用
-
+
-进入创建的 LobeChat 应用,选择你 fork 的 lobe-chat 项目及分支,点击 Save 保存
+进入创建的 LobeHub 应用,选择你 fork 的 lobe-chat 项目及分支,点击 Save 保存
-
+
切换到 Environment ,在其中填入环境变量,点击保存。
-
+
```shell
# 构建所必需的环境变量
@@ -126,14 +126,14 @@ S3_ENABLE_PATH_STYLE=
添加完环境变量并保存后,点击 Deploy 进行部署,你可以在 Deployments 处查看部署进程及日志信息
-
+
-部署成功后在 Domains 页面,为你的 LobeChat 应用绑定自己的域名并申请证书。
+部署成功后在 Domains 页面,为你的 LobeHub 应用绑定自己的域名并申请证书。
-
+
-## 验证 LobeChat 是否正常工作
+## 验证 LobeHub 是否正常工作
-进入你的 LobeChat 网址,如果你点击左上角登录,可以正常显示登录弹窗,那么说明你已经配置成功了,尽情享用吧~
+进入你的 LobeHub 网址,如果你点击左上角登录,可以正常显示登录弹窗,那么说明你已经配置成功了,尽情享用吧~
-
+
diff --git a/docs/self-hosting/server-database/netlify.mdx b/docs/self-hosting/server-database/netlify.mdx
index bcc0e0fd82..0213750662 100644
--- a/docs/self-hosting/server-database/netlify.mdx
+++ b/docs/self-hosting/server-database/netlify.mdx
@@ -1,13 +1,13 @@
---
-title: Deploy LobeChat with Database on Netlify
+title: Deploy LobeHub with Database on Netlify
description: >-
- Learn how to deploy LobeChat on Netlify with ease, including: database,
+ Learn how to deploy LobeHub on Netlify with ease, including: database,
authentication and S3 storage service.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Netlify Deployment
---
-# Deploy LobeChat with Database on Netlify
+# Deploy LobeHub with Database on Netlify
TODO
diff --git a/docs/self-hosting/server-database/netlify.zh-CN.mdx b/docs/self-hosting/server-database/netlify.zh-CN.mdx
index c71f604fa2..61dc3b92ce 100644
--- a/docs/self-hosting/server-database/netlify.zh-CN.mdx
+++ b/docs/self-hosting/server-database/netlify.zh-CN.mdx
@@ -1,16 +1,16 @@
---
-title: 在 Netlify 上部署 LobeChat 服务端数据库版
+title: 在 Netlify 上部署 LobeHub 服务端数据库版
description: >-
- 学习如何在 Netlify 上部署 LobeChat,包括 Fork 仓库、准备 OpenAI API Key、导入到 Netlify
+ 学习如何在 Netlify 上部署 LobeHub,包括 Fork 仓库、准备 OpenAI API Key、导入到 Netlify
工作台、配置站点名称与环境变量等步骤。
tags:
- Netlify
- - LobeChat
+ - LobeHub
- 部署教程
- OpenAI API Key
- 环境配置
---
-# 使用 Netlify 部署 LobeChat 数据库版
+# 使用 Netlify 部署 LobeHub 数据库版
TODO
diff --git a/docs/self-hosting/server-database/railway.mdx b/docs/self-hosting/server-database/railway.mdx
index d59f1243dd..cd6c5bb95a 100644
--- a/docs/self-hosting/server-database/railway.mdx
+++ b/docs/self-hosting/server-database/railway.mdx
@@ -1,13 +1,13 @@
---
-title: Deploy LobeChat with Database on Railway
+title: Deploy LobeHub with Database on Railway
description: >-
- Learn how to deploy LobeChat on Railway with ease, including: database,
+ Learn how to deploy LobeHub on Railway with ease, including: database,
authentication and S3 storage service.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Railway Deployment
---
-# Deploy LobeChat with Database on Railway
+# Deploy LobeHub with Database on Railway
TODO
diff --git a/docs/self-hosting/server-database/railway.zh-CN.mdx b/docs/self-hosting/server-database/railway.zh-CN.mdx
index c51def9a1b..6347cf6e64 100644
--- a/docs/self-hosting/server-database/railway.zh-CN.mdx
+++ b/docs/self-hosting/server-database/railway.zh-CN.mdx
@@ -1,15 +1,15 @@
---
-title: 在 Railway 上部署 LobeChat 数据库版
-description: 学习如何在 Railway 上部署 LobeChat 应用,包括准备 OpenAI API Key、点击按钮进行部署、绑定自定义域名等步骤。
+title: 在 Railway 上部署 LobeHub 数据库版
+description: 学习如何在 Railway 上部署 LobeHub 应用,包括准备 OpenAI API Key、点击按钮进行部署、绑定自定义域名等步骤。
tags:
- Railway
- 部署
- - LobeChat
+ - LobeHub
- OpenAI
- API Key
- 自定义域名
---
-# 使用 Railway 部署 LobeChat 数据库版
+# 使用 Railway 部署 LobeHub 数据库版
TODO
diff --git a/docs/self-hosting/server-database/repocloud.mdx b/docs/self-hosting/server-database/repocloud.mdx
index ee95f9601f..7d7b745b53 100644
--- a/docs/self-hosting/server-database/repocloud.mdx
+++ b/docs/self-hosting/server-database/repocloud.mdx
@@ -1,18 +1,18 @@
---
-title: Deploy LobeChat with Database on RepoCloud
+title: Deploy LobeHub with Database on RepoCloud
description: >-
- Learn how to deploy LobeChat on RepoCloud with ease, including database,
+ Learn how to deploy LobeHub on RepoCloud with ease, including database,
authentication and S3 storage service.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- RepoCloud Deployment
- OpenAI API Key
- Custom Domain Binding
---
-# Deploying LobeChat Database Edition with RepoCloud
+# Deploying LobeHub Database Edition with RepoCloud
-If you want to deploy LobeChat Database Edition on RepoCloud, you can follow the steps below:
+If you want to deploy LobeHub Database Edition on RepoCloud, you can follow the steps below:
## RepoCloud Deployment Process
diff --git a/docs/self-hosting/server-database/repocloud.zh-CN.mdx b/docs/self-hosting/server-database/repocloud.zh-CN.mdx
index dd9bfbc730..e2a23c5efc 100644
--- a/docs/self-hosting/server-database/repocloud.zh-CN.mdx
+++ b/docs/self-hosting/server-database/repocloud.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: 在 RepoCloud 上部署 LobeChat 数据库版
-description: 学习如何在 RepoCloud 上部署 LobeChat 应用,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
+title: 在 RepoCloud 上部署 LobeHub 数据库版
+description: 学习如何在 RepoCloud 上部署 LobeHub 应用,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
tags:
- RepoCloud
- - LobeChat
+ - LobeHub
- 部署流程
- OpenAI API Key
- 自定义域名
---
-# 在 RepoCloud 上部署 LobeChat 数据库版
+# 在 RepoCloud 上部署 LobeHub 数据库版
-如果您想在 RepoCloud 上部署 LobeChat 数据库版,可以按照以下步骤进行操作:
+如果您想在 RepoCloud 上部署 LobeHub 数据库版,可以按照以下步骤进行操作:
## RepoCloud 部署流程
diff --git a/docs/self-hosting/server-database/sealos.mdx b/docs/self-hosting/server-database/sealos.mdx
index 3b58162377..5bf6e8a9ce 100644
--- a/docs/self-hosting/server-database/sealos.mdx
+++ b/docs/self-hosting/server-database/sealos.mdx
@@ -1,10 +1,10 @@
---
title: Deploy Lobe Chat Database Version on Sealos
description: >-
- Learn how to deploy LobeChat on Sealos with ease. Follow the provided steps to
- set up LobeChat and start using it efficiently.
+ Learn how to deploy LobeHub on Sealos with ease. Follow the provided steps to
+ set up LobeHub and start using it efficiently.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Sealos Deployment
- OpenAI API Key
- Custom Domain Binding
@@ -14,8 +14,8 @@ tags:
This article assumes that you are familiar with the basic principles and processes of deploying
- the LobeChat server database version, so it only includes content related to core environment
- variable configuration. If you are not familiar with the deployment principles of the LobeChat
+ the LobeHub server database version, so it only includes content related to core environment
+ variable configuration. If you are not familiar with the deployment principles of the LobeHub
server database version, please refer to [Deploying Server
Database](/docs/self-hosting/server-database) first.
diff --git a/docs/self-hosting/server-database/sealos.zh-CN.mdx b/docs/self-hosting/server-database/sealos.zh-CN.mdx
index 5ee18d91ef..95901922dc 100644
--- a/docs/self-hosting/server-database/sealos.zh-CN.mdx
+++ b/docs/self-hosting/server-database/sealos.zh-CN.mdx
@@ -1,15 +1,15 @@
---
-title: 在 Sealos 上部署 LobeChat 数据库版
-description: 学习如何在 Sealos 上部署 LobeChat,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
+title: 在 Sealos 上部署 LobeHub 数据库版
+description: 学习如何在 Sealos 上部署 LobeHub,包括准备 OpenAI API Key、点击部署按钮、绑定自定义域名等操作。
tags:
- Sealos
- - LobeChat
+ - LobeHub
- OpenAI API Key
- 部署流程
- 自定义域名
---
-# 使用 Sealos 部署 LobeChat 数据库版
+# 使用 Sealos 部署 LobeHub 数据库版
本文假设你已经熟悉 Lobe Chat
@@ -22,7 +22,7 @@ tags:
- Logto 提供身份校验(需额外部署)
- 带有 Vector 插件的 PostgreSQL 来做数据存储和向量化
- 一个对象存储 Bucket
-- LobeChat Database 的实例
+- LobeHub Database 的实例
这里是在 Sealos 上部署 Lobe Chat 服务器数据库版的流程:
diff --git a/docs/self-hosting/server-database/vercel.mdx b/docs/self-hosting/server-database/vercel.mdx
index 971a04832c..ac6f6d30db 100644
--- a/docs/self-hosting/server-database/vercel.mdx
+++ b/docs/self-hosting/server-database/vercel.mdx
@@ -1,10 +1,10 @@
---
-title: Deploy LobeChat with database on Vercel
+title: Deploy LobeHub with database on Vercel
description: >-
- Learn how to deploy LobeChat with database on Vercel with ease, including:
+ Learn how to deploy LobeHub with database on Vercel with ease, including:
database, authentication and S3 storage service.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Vercel Deployment
- Better Auth
- S3 Storage
@@ -12,7 +12,7 @@ tags:
# Deploying Server Database Version on Vercel
-This article will detail how to deploy the server database version of LobeChat on Vercel, including: 1) database configuration; 2) identity authentication service configuration; 3) steps for setting up the S3 storage service.
+This article will detail how to deploy the server database version of LobeHub on Vercel, including: 1) database configuration; 2) identity authentication service configuration; 3) steps for setting up the S3 storage service.
Before proceeding, please make sure of the following:
@@ -53,7 +53,7 @@ This article will detail how to deploy the server database version of LobeChat o
An example of filling in Vercel is as follows:
-
+
@@ -69,7 +69,7 @@ This article will detail how to deploy the server database version of LobeChat o
An example of filling in Vercel is as follows:
-
+
@@ -89,7 +89,7 @@ This article will detail how to deploy the server database version of LobeChat o
### Add the `APP_URL` Environment Variable
- Finally, you need to add the `APP_URL` environment variable, which specifies the URL address of the LobeChat application.
+ Finally, you need to add the `APP_URL` environment variable, which specifies the URL address of the LobeHub application.
## 2. Configure Authentication Service
@@ -114,6 +114,55 @@ The server-side database needs to be paired with a user authentication service t
For advanced features like SSO providers, magic link login, and email verification, see [Authentication Service](/docs/self-hosting/advanced/auth).
+
+ ### Add Public and Private Key Environment Variables in Vercel
+
+ In Vercel's deployment environment variables, add the `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` and `CLERK_SECRET_KEY` environment variables. You can click on "API Keys" in the menu, then copy the corresponding values and paste them into Vercel's environment variables.
+
+
+
+ The environment variables required for this step are as follows:
+
+ ```shell
+ NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=pk_live_xxxxxxxxxxx
+ CLERK_SECRET_KEY=sk_live_xxxxxxxxxxxxxxxxxxxxxx
+ ```
+
+ Add the above variables to Vercel:
+
+
+
+ ### Create and Configure Webhook in Clerk
+
+ Since we let Clerk fully handle user authentication and management, we need Clerk to notify our application and store data in the database when there are changes in the user's lifecycle (create, update, delete). We achieve this requirement through the Webhook provided by Clerk.
+
+ We need to add an endpoint in Clerk's Webhooks to inform Clerk to send notifications to this endpoint when a user's information changes.
+
+
+
+ Fill in the endpoint with the URL of your Vercel project, such as `https://your-project.vercel.app/api/webhooks/clerk`. Then, subscribe to events by checking the three user events (`user.created`, `user.deleted`, `user.updated`), and click create.
+
+
+ The `https://` in the URL is essential to maintain the integrity of the URL.
+
+
+
+
+ ### Add Webhook Secret to Vercel Environment Variables
+
+ After creation, you can find the secret of this Webhook in the bottom right corner:
+
+
+
+ The environment variable corresponding to this secret is `CLERK_WEBHOOK_SECRET`:
+
+ ```shell
+ CLERK_WEBHOOK_SECRET=whsec_xxxxxxxxxxxxxxxxxxxxxx
+ ```
+
+ Add it to Vercel's environment variables:
+
+
By completing these steps, you have successfully configured the authentication service. Next, we will configure the S3 storage service.
@@ -135,27 +184,27 @@ In the server-side database, we need to configure the S3 storage service to stor
The interface of Cloudflare R2 is shown below:
-
+
When creating a storage bucket, specify its name and then click create.
-
+
### Obtain Environment Variables for the Bucket
In the settings of the R2 storage bucket, you can view the bucket configuration information:
-
+
The corresponding environment variables are:
```shell
# Storage bucket name
- S3_BUCKET=lobechat
+   S3_BUCKET=lobehub
# Storage bucket request endpoint (note that the path in this link includes the bucket name, which must be removed, or use the link provided on the S3 API token application page)
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# Public access domain for the storage bucket
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+   S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
```
@@ -164,21 +213,21 @@ In the server-side database, we need to configure the S3 storage service to stor
### Obtain S3 Key Environment Variables
- You need to obtain the access key for S3 so that the LobeChat server has permission to access the S3 storage service. In R2, you can configure the access key in the account details:
+ You need to obtain the access key for S3 so that the LobeHub server has permission to access the S3 storage service. In R2, you can configure the access key in the account details:
-
+
Click the button in the upper right corner to create an API token and enter the create API Token page.
-
+
Since our server-side database needs to read and write to the S3 storage service, the permission needs to be set to `Object Read and Write`, then click create.
-
+
After creation, you can see the corresponding S3 API token.
-
+
The corresponding environment variables are:
@@ -201,7 +250,7 @@ In the server-side database, we need to configure the S3 storage service to stor
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# Bucket name
- S3_BUCKET=lobechat
+   S3_BUCKET=lobehub
# Bucket request endpoint
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# Public domain for bucket access
@@ -213,7 +262,7 @@ In the server-side database, we need to configure the S3 storage service to stor
Then, insert the above environment variables into Vercel's environment variables:
-
+
### Configuring Cross-Origin Resource Sharing (CORS)
@@ -221,11 +270,11 @@ In the server-side database, we need to configure the S3 storage service to stor
In R2, you can find the CORS configuration in the bucket settings:
-
+
Add a CORS rule to allow requests from your domain (in this case, `https://your-project.vercel.app`):
-
+
Example configuration:
@@ -244,22 +293,22 @@ In the server-side database, we need to configure the S3 storage service to stor
## Four, Deployment and Verification
-After completing the steps above, the configuration of the server-side database should be done. Next, we can deploy LobeChat to Vercel and then visit your Vercel link to verify if the server-side database is working correctly.
+After completing the steps above, the configuration of the server-side database should be done. Next, we can deploy LobeHub to Vercel and then visit your Vercel link to verify if the server-side database is working correctly.
### Redeploy the latest commit
After configuring the environment variables, you need to redeploy the latest commit and wait for the deployment to complete.
-
+
### Check if the features are working properly
If you click on the login button in the top left corner and the login popup appears normally, then you have successfully configured it. Enjoy using it\~
-
+
-
+
## Appendix
@@ -285,11 +334,11 @@ S3_ACCESS_KEY_ID=9998d6757e276cf9f1edbd325b7083a6
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# Bucket name
-S3_BUCKET=lobechat
+S3_BUCKET=lobehub
# Bucket request endpoint
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# Public access domain for the bucket
-S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
# Bucket region, such as us-west-1, generally not needed to add, but some service providers may require configuration
# S3_REGION=us-west-1
```
diff --git a/docs/self-hosting/server-database/vercel.zh-CN.mdx b/docs/self-hosting/server-database/vercel.zh-CN.mdx
index b5d74a3c66..455172fdaf 100644
--- a/docs/self-hosting/server-database/vercel.zh-CN.mdx
+++ b/docs/self-hosting/server-database/vercel.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 Vercel 上部署 LobeChat 的服务端数据库版本
-description: 本文详细介绍如何在 Vercel 中部署服务端数据库版 LobeChat,包括数据库配置、身份验证服务配置和 S3 存储服务的设置步骤。
+title: 在 Vercel 上部署 LobeHub 的服务端数据库版本
+description: 本文详细介绍如何在 Vercel 中部署服务端数据库版 LobeHub,包括数据库配置、身份验证服务配置和 S3 存储服务的设置步骤。
tags:
- 服务端数据库
- Postgres
@@ -14,7 +14,7 @@ tags:
# 在 Vercel 上部署服务端数据库版
-本文将详细介绍如何在 Vercel 中部署服务端数据库版 LobeChat,包括: 1)数据库配置;2)身份验证服务配置;3) S3 存储服务的设置步骤。
+本文将详细介绍如何在 Vercel 中部署服务端数据库版 LobeHub,包括: 1)数据库配置;2)身份验证服务配置;3) S3 存储服务的设置步骤。
进行后续操作前,请务必确认以下事项:
@@ -54,7 +54,7 @@ tags:
在 Vercel 中填写的示例如下:
-
+
@@ -70,7 +70,7 @@ tags:
在 Vercel 中填写的示例如下:
-
+
@@ -89,7 +89,7 @@ tags:
### 添加 `APP_URL` 环境变量
- 该部分最后需要添加 `APP_URL` 环境变量,用于指定 LobeChat 应用的 URL 地址。
+ 该部分最后需要添加 `APP_URL` 环境变量,用于指定 LobeHub 应用的 URL 地址。
## 二、 配置身份验证服务
@@ -114,6 +114,53 @@ tags:
如需 SSO 登录、魔法链接登录、邮箱验证等高级功能,请参阅 [身份验证服务](/zh/docs/self-hosting/advanced/auth)。
+
+ ### 在 Vercel 中添加公、私钥环境变量
+
+ 在 Vercel 的部署环境变量中,添加 `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` 和 `CLERK_SECRET_KEY` 环境变量。你可以在菜单中点击「API Keys」,然后复制对应的值填入 Vercel 的环境变量中。
+
+
+
+ 此步骤所需的环境变量如下:
+
+ ```shell
+ NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=pk_live_xxxxxxxxxxx
+ CLERK_SECRET_KEY=sk_live_xxxxxxxxxxxxxxxxxxxxxx
+ ```
+
+ 添加上述变量到 Vercel 中:
+
+
+
+ ### 在 Clerk 中创建并配置 Webhook
+
+ 由于我们让 Clerk 完全接管用户鉴权与管理,因此我们需要在 Clerk 用户生命周期变更时(创建、更新、删除)中通知我们的应用并存储落库。我们通过 Clerk 提供的 Webhook 来实现这一诉求。
+
+ 我们需要在 Clerk 的 Webhooks 中添加一个端点(Endpoint),告诉 Clerk 当用户发生变更时,向这个端点发送通知。
+
+
+
+ 在 endpoint 中填写你的 Vercel 项目的 URL,如 `https://your-project.vercel.app/api/webhooks/clerk`。然后在订阅事件(Subscribe to events)中,勾选 user 的三个事件(`user.created` 、`user.deleted`、`user.updated`),然后点击创建。
+
+ URL 的`https://`不可缺失,须保持 URL 的完整性
+
+
+
+ ### 将 Webhook 秘钥添加到 Vercel 环境变量
+
+ 创建完毕后,可以在右下角找到该 Webhook 的秘钥:
+
+
+
+ 这个秘钥所对应的环境变量名为 `CLERK_WEBHOOK_SECRET`:
+
+ ```shell
+ CLERK_WEBHOOK_SECRET=whsec_xxxxxxxxxxxxxxxxxxxxxx
+ ```
+
+ 将其添加到 Vercel 的环境变量中:
+
+
这样,你已经成功配置了身份验证服务。接下来我们将配置 S3 存储服务。
@@ -134,48 +181,48 @@ tags:
下图是 Cloudflare R2 的界面:
-
+
创建存储桶时将指定其名称,然后点击创建。
-
+
### 获取存储桶相关环境变量
在 R2 存储桶的设置中,可以看到桶配置的信息:
-
+
其对应的环境变量为:
```shell
# 存储桶的名称
- S3_BUCKET=lobechat
+   S3_BUCKET=lobehub
# 存储桶的请求端点(注意此处链接的路径带存储桶名称,必须删除该路径,或使用申请 S3 API token 页面所提供的链接)
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# 存储桶对外的访问域名
- S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+   S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
```
`S3_ENDPOINT`必须删除其路径,否则会无法访问所上传文件
### 获取 S3 密钥环境变量
- 你需要获取 S3 的访问密钥,以便 LobeChat 的服务端有权限访问 S3 存储服务。在 R2 中,你可以在账户详情中配置访问密钥:
+ 你需要获取 S3 的访问密钥,以便 LobeHub 的服务端有权限访问 S3 存储服务。在 R2 中,你可以在账户详情中配置访问密钥:
-
+
点击右上角按钮创建 API token,进入创建 API Token 页面
-
+
鉴于我们的服务端数据库需要读写 S3 存储服务,因此权限需要选择`对象读与写`,然后点击创建。
-
+
创建完成后,就可以看到对应的 S3 API token
-
+
其对应的环境变量为:
@@ -196,7 +243,7 @@ tags:
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# 存储桶的名称
- S3_BUCKET=lobechat
+   S3_BUCKET=lobehub
# 存储桶的请求端点
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# 存储桶对外的访问域名
@@ -208,7 +255,7 @@ tags:
然后将上述环境变量填入 Vercel 的环境变量中:
-
+
### 配置跨域
@@ -216,11 +263,11 @@ tags:
在 R2 中,你可以在存储桶的设置中找到跨域配置:
-
+
添加跨域规则,允许你的域名(在上文是 `https://your-project.vercel.app`)来源的请求:
-
+
示例配置如下:
@@ -239,22 +286,22 @@ tags:
## 四、部署并验证
-通过上述步骤之后,我们应该就完成了服务端数据库的配置。接下来我们可以将 LobeChat 部署到 Vercel 上,然后访问你的 Vercel 链接,验证服务端数据库是否正常工作。
+通过上述步骤之后,我们应该就完成了服务端数据库的配置。接下来我们可以将 LobeHub 部署到 Vercel 上,然后访问你的 Vercel 链接,验证服务端数据库是否正常工作。
### 重新部署最新的 commit
配置好环境变量后,你需要重新部署最新的 commit,并等待部署完成。
-
+
### 检查功能是否正常
如果你点击左上角登录,可以正常显示登录弹窗,那么说明你已经配置成功了,尽情享用吧~
-
+
-
+
## 附录
@@ -280,11 +327,11 @@ S3_ACCESS_KEY_ID=9998d6757e276cf9f1edbd325b7083a6
S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
# 存储桶的名称
-S3_BUCKET=lobechat
+S3_BUCKET=lobehub
# 存储桶的请求端点
S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
# 存储桶对外的访问域名
-S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
+S3_PUBLIC_DOMAIN=https://s3-for-lobehub.your-domain.com
# 桶的区域,如 us-west-1,一般来说不需要添加,但某些服务商则需要配置
# S3_REGION=us-west-1
```
diff --git a/docs/self-hosting/server-database/zeabur.mdx b/docs/self-hosting/server-database/zeabur.mdx
index 5f9dc5503c..99e52fb37f 100644
--- a/docs/self-hosting/server-database/zeabur.mdx
+++ b/docs/self-hosting/server-database/zeabur.mdx
@@ -1,21 +1,21 @@
---
-title: Deploying LobeChat Database on Zeabur
+title: Deploying LobeHub Database on Zeabur
description: >-
- Learn how to deploy LobeChat on Zeabur with ease. Follow the provided steps to
+ Learn how to deploy LobeHub on Zeabur with ease. Follow the provided steps to
set up your chat application seamlessly.
tags:
- - Deploy LobeChat
+ - Deploy LobeHub
- Zeabur Deployment
- OpenAI API Key
- Custom Domain Binding
---
-# Deploying LobeChat Database on Zeabur
+# Deploying LobeHub Database on Zeabur
This article assumes that you are familiar with the basic principles and processes of deploying
- the LobeChat server database version, so it only includes content related to core environment
- variable configuration. If you are not familiar with the deployment principles of the LobeChat
+ the LobeHub server database version, so it only includes content related to core environment
+ variable configuration. If you are not familiar with the deployment principles of the LobeHub
server database version, please refer to [Deploying Server
Database](/docs/self-hosting/server-database) first.
@@ -25,16 +25,16 @@ The template on Zeabur includes 4 services:
- Logto for authrization.
- PostgreSQL with Vector plugin for data storage and indexing.
- MinIO for image storage.
-- LobeChat database version.
+- LobeHub database version.
## Deploying on Zeabur
-Here is the process for deploying the LobeChat server database version on Zeabur:
+Here is the process for deploying the LobeHub server database version on Zeabur:
### Go to the template page on Zeabur
- Go to the [LobeChat Database template page](https://zeabur.com/templates/RRSPSD) on Zeabur and click on the "Deploy" button.
+ Go to the [LobeHub Database template page](https://zeabur.com/templates/RRSPSD) on Zeabur and click on the "Deploy" button.
### Fill in the required environment variables
@@ -44,7 +44,7 @@ Here is the process for deploying the LobeChat server database version on Zeabur
- OpenAI API key: Your OpenAI API key to get responses from OpenAI.
- - LobeChat Domain: A free subdomain with `.zeabur.app` suffix.
+ - LobeHub Domain: A free subdomain with `.zeabur.app` suffix.
- MinIO Public Domain: A free subdomain with `.zeabur.app` suffix for yout MinIO web port to enable public access for the uploaded files.
@@ -54,7 +54,7 @@ Here is the process for deploying the LobeChat server database version on Zeabur
### Select a region and deploy
- After you fill all the required environment variables, select a region where you want to deploy your LobeChat Database and click on the "Deploy" button.
+ After you fill all the required environment variables, select a region where you want to deploy your LobeHub Database and click on the "Deploy" button.
You will see another modal pop-up where you can see the deployment progress.
@@ -64,13 +64,13 @@ Here is the process for deploying the LobeChat server database version on Zeabur
Access your Logto console with the console domain you just binded, and then create a `Next.js 14(App router)` application to get the client ID and client secret, and fill in the cors and callback URLs. You can check [this document](../advanced/auth.mdx) for a more detailed guide.
- Fill in those variables into your LobeChat service on Zeabur, here is a more detailed guide for [editing environment variables on Zeabur](https://zeabur.com/docs/deploy/variables).
+ Fill in those variables into your LobeHub service on Zeabur, here is a more detailed guide for [editing environment variables on Zeabur](https://zeabur.com/docs/deploy/variables).
For detailed configuration of Logto, refer to [this document](/docs/self-hosting/advanced/auth/next-auth/logto).
- ### Access your LobeChat Instance
+ ### Access your LobeHub Instance
- Press on the `LobeChat-Database` and you can see the public domain you just created, click on it to access your LobeChat Database.
+ Press on the `LobeHub-Database` and you can see the public domain you just created, click on it to access your LobeHub Database.
You can also bind a custom domain for your services if you want, here is a guide on how to [bind a custom domain on Zeabur](https://zeabur.com/docs/deploy/domain-binding).
diff --git a/docs/self-hosting/server-database/zeabur.zh-CN.mdx b/docs/self-hosting/server-database/zeabur.zh-CN.mdx
index a1d7fab2ae..16561609de 100644
--- a/docs/self-hosting/server-database/zeabur.zh-CN.mdx
+++ b/docs/self-hosting/server-database/zeabur.zh-CN.mdx
@@ -1,19 +1,19 @@
---
-title: 在 Zeabur 上部署 LobeChat
-description: 按照指南准备 OpenAI API Key 并点击按钮进行部署。在部署完成后,即可开始使用 LobeChat 并选择是否绑定自定义域名。
+title: 在 Zeabur 上部署 LobeHub
+description: 按照指南准备 OpenAI API Key 并点击按钮进行部署。在部署完成后,即可开始使用 LobeHub 并选择是否绑定自定义域名。
tags:
- Zeabur
- - LobeChat
+ - LobeHub
- OpenAI API Key
- 部署流程
- 自定义域名
---
-# 使用 Zeabur 部署 LobeChat 数据库版
+# 使用 Zeabur 部署 LobeHub 数据库版
- 本文假设你已经熟悉 LobeChat
- 服务器数据库版的部署基本原理和流程,因此只包含与核心环境变量配置相关的内容。如果你对 LobeChat
+ 本文假设你已经熟悉 LobeHub
+ 服务器数据库版的部署基本原理和流程,因此只包含与核心环境变量配置相关的内容。如果你对 LobeHub
服务器数据库版的部署原理不熟悉,请先参考[部署服务器数据库](/zh/docs/self-hosting/server-database)。
@@ -22,16 +22,16 @@ tags:
- Logto 提供身份校验
- 带有 Vector 插件的 PostgreSQL 来做数据存储和向量化
- MinIO 作为对象存储
-- LobeChat Database 的实例
+- LobeHub Database 的实例
## 在 Zeabur 上部署
-这里是在 Zeabur 上部署 LobeChat 服务器数据库版的流程:
+这里是在 Zeabur 上部署 LobeHub 服务器数据库版的流程:
### 前往 Zeabur 上的模板页面
- 前往 [Zeabur 上的 LobeChat 数据库模板页面](https://zeabur.com/templates/RRSPSD) 并点击 "Deploy" 按钮。
+ 前往 [Zeabur 上的 LobeHub 数据库模板页面](https://zeabur.com/templates/RRSPSD) 并点击 "Deploy" 按钮。
### 填写必要的环境变量
@@ -40,14 +40,14 @@ tags:
以下是你需要填写的环境变量:
- OpenAI API key: 你的 OpenAI API key 用于获取模型的访问权限。
- - LobeChat Domain: 一个免费的 `.zeabur.app` 后缀的域名。
+ - LobeHub Domain: 一个免费的 `.zeabur.app` 后缀的域名。
- MinIO Public Domain: 一个免费的 `.zeabur.app` 后缀的域名为了暴露 MinIO 服务以公开访问资源。
- Logto Console Domain: 一个免费的 `.zeabur.app` 后缀的域名来访问 Logto 的控制台。
- Logto API Domain: 一个免费的 `.zeabur.app` 后缀的域名来访问 Logto 的 API。
### 选择一个区域并部署
- 在你填写完所有必要的环境变量后,选择一个你想要部署 LobeChat 数据库的区域并点击 “部署” 按钮。
+ 在你填写完所有必要的环境变量后,选择一个你想要部署 LobeHub 数据库的区域并点击 “部署” 按钮。
你会看到另一个模态弹窗,你可以在这里看到部署的进度。
@@ -55,13 +55,13 @@ tags:
当部署完成后,你会被自动导航到你在 Zeabur 控制台上刚刚创建的项目。你需要再进一步配置你的 Logto 服务。
- 使用你刚绑定的域名来访问你的 Logto 控制台,创建一个新项目以获得对应的客户端 ID 与密钥,将它们填入你的 LobeChat 服务的变量中。关于如何填入变量,可以参照 [Zeabur 的官方文档](https://zeabur.com/docs/deploy/variables)。
+ 使用你刚绑定的域名来访问你的 Logto 控制台,创建一个新项目以获得对应的客户端 ID 与密钥,将它们填入你的 LobeHub 服务的变量中。关于如何填入变量,可以参照 [Zeabur 的官方文档](https://zeabur.com/docs/deploy/variables)。
Logto 的详细配置可以参考[这篇文档](/zh/docs/self-hosting/advanced/auth/next-auth/logto)。
- ### 访问你的 LobeChat
+ ### 访问你的 LobeHub
- 按下 `LobeChat-Database` 你会看到你刚刚创建的公共域名,点击它以访问你的 LobeChat 数据库。
+ 按下 `LobeHub-Database` 你会看到你刚刚创建的公共域名,点击它以访问你的 LobeHub 数据库。
你可以选择绑定一个自定义域名,这里有一个关于如何在 Zeabur 上[绑定自定义域名](https://zeabur.com/docs/deploy/domain-binding)的指南。
diff --git a/docs/self-hosting/start.mdx b/docs/self-hosting/start.mdx
index 4f5aa21e8a..7d2305f731 100644
--- a/docs/self-hosting/start.mdx
+++ b/docs/self-hosting/start.mdx
@@ -1,8 +1,8 @@
---
-title: Build Your Own LobeChat - Choose Your Deployment Platform
+title: Build Your Own LobeHub - Choose Your Deployment Platform
description: >-
Explore multiple deployment platforms like Vercel, Docker, Docker Compose, and
- more to deploy LobeChat. Choose the platform that best suits your needs.
+ more to deploy LobeHub. Choose the platform that best suits your needs.
tags:
- Lobe Chat
- Deployment Platform
@@ -14,13 +14,13 @@ tags:
# Build Your Own Lobe Chat
-LobeChat supports various deployment platforms, including Vercel, Docker, and Docker Compose. You can choose a deployment platform that suits you to build your own Lobe Chat.
+LobeHub supports various deployment platforms, including Vercel, Docker, and Docker Compose. You can choose a deployment platform that suits you to build your own LobeHub.
## Quick Deployment
-For users who are new to LobeChat, we recommend using the client-side database mode for quick deployment. The advantage of this mode is that deployment can be quickly completed with just one command/button, making it easy for you to quickly get started and experience LobeChat.
+For users who are new to LobeHub, we recommend using the client-side database mode for quick deployment. The advantage of this mode is that deployment can be quickly completed with just one command/button, making it easy for you to quickly get started and experience LobeHub.
-You can follow the guide below for quick deployment of LobeChat:
+You can follow the guide below for quick deployment of LobeHub:
@@ -32,7 +32,7 @@ You can follow the guide below for quick deployment of LobeChat:
## Advanced Mode: Server-Side Database
-For users who are already familiar with LobeChat or need cross-device synchronization, you can deploy a version with a server-side database to access a more complete and powerful LobeChat.
+For users who are already familiar with LobeHub or need cross-device synchronization, you can deploy a version with a server-side database to access a more complete and powerful LobeHub.
diff --git a/docs/self-hosting/start.zh-CN.mdx b/docs/self-hosting/start.zh-CN.mdx
index bad58495d9..beb6ea97b5 100644
--- a/docs/self-hosting/start.zh-CN.mdx
+++ b/docs/self-hosting/start.zh-CN.mdx
@@ -1,5 +1,5 @@
---
-title: 构建属于自己的 LobeChat - 自选部署平台
+title: 构建属于自己的 LobeHub - 自选部署平台
description: >-
选择适合自己的部署平台,构建个性化的 Lobe Chat。支持 Docker、Docker
Compose、Netlify、Railway、Repocloud、Sealos、Vercel 和 Zeabur 部署。
@@ -18,13 +18,13 @@ tags:
# 构建属于自己的 Lobe Chat
-LobeChat 支持多种部署平台,包括 Vercel、Docker、 Docker Compose 、阿里云计算巢 和腾讯轻量云 等,你可以选择适合自己的部署平台进行部署,构建属于自己的 Lobe Chat。
+LobeHub 支持多种部署平台,包括 Vercel、Docker、 Docker Compose 、阿里云计算巢 和腾讯轻量云 等,你可以选择适合自己的部署平台进行部署,构建属于自己的 LobeHub。
## 快速部署
-对于第一次了解 LobeChat 的用户,我们推荐使用客户端数据库的模式快速部署,该模式的优势是一行指令 / 一个按钮即可快捷完成部署,便于你快速上手与体验 LobeChat。
+对于第一次了解 LobeHub 的用户,我们推荐使用客户端数据库的模式快速部署,该模式的优势是一行指令 / 一个按钮即可快捷完成部署,便于你快速上手与体验 LobeHub。
-你可以通过以下指南快速部署 LobeChat:
+你可以通过以下指南快速部署 LobeHub:
@@ -34,7 +34,7 @@ LobeChat 支持多种部署平台,包括 Vercel、Docker、 Docker Compose 、
## 进阶模式:服务端数据库
-针对已经了解 LobeChat 的用户,或需要多端同步的用户,可以自行部署带有服务端数据库的版本,进而获得更完整、功能更强大的 LobeChat。
+针对已经了解 LobeHub 的用户,或需要多端同步的用户,可以自行部署带有服务端数据库的版本,进而获得更完整、功能更强大的 LobeHub。
diff --git a/docs/usage/agent/agent-team.mdx b/docs/usage/agent/agent-team.mdx
new file mode 100644
index 0000000000..e94b21d17a
--- /dev/null
+++ b/docs/usage/agent/agent-team.mdx
@@ -0,0 +1,66 @@
+---
+title: Agent Teams
+description: >-
+  Bring multiple specialized AI assistants together in one group chat to
+  collaborate, discuss, and solve complex tasks.
+tags:
+ - LobeHub
+  - Agent Teams
+ - AI Assistant
+ - Assistant Organization
+ - Group Settings
+ - Assistant Search
+ - Assistant Pinning
+---
+
+# Agent Teams
+
+Sometimes, one assistant's perspective just isn't enough. Complex problems require multifaceted thinking, creative projects thrive on diverse expertise, and learning discussions benefit from multiple viewpoints. Agent Group Chat brings together multiple specialized assistants to collaborate just like in a real group chat — a translation assistant, a coding assistant, and a product manager assistant sitting around the table, each contributing their strengths to solve your problem. You're not just getting an answer — you're engaging in a conversation. Here, different perspectives collide, expertise complements one another, and the insights generated through AI collaboration go far beyond what a single assistant can offer.
+
+Agent Group Chat is a collaborative space for multiple specialized assistants. You pose a question or task, and each assistant offers insights from their area of expertise. They can discuss, supplement, and even debate with one another.
+
+## Limitations of a Single Assistant
+
+- Can only analyze problems from one perspective
+- Expertise is limited to its predefined role
+- Lacks the richness of diverse viewpoints
+
+## Advantages of Agent Group Chat
+
+- Multiple assistants contribute their strengths and collaborate
+- Diverse professional backgrounds lead to comprehensive solutions
+- Discussions among assistants spark deeper insights
+- A built-in moderator ensures orderly and focused conversations
+
+## About the Moderator
+
+Every Agent Group Chat includes a built-in moderator responsible for:
+
+- Understanding your needs and assigning discussion tasks
+- Coordinating the speaking order of assistants
+- Summarizing the discussion and extracting key conclusions
+- Keeping the conversation organized and on-topic
+
+## Creating an Agent Group Chat
+
+Click "Create Group" in the left sidebar to get started.
+
+
+
+When creating a group chat, you can use existing templates or assemble your own team of AI assistants. You can also choose whether to include a moderator and select the model for the moderator.
+
+
+
+## Configuring an Agent Group Chat
+
+In the group chat session, use the left sidebar to select an assistant. You can easily switch their model or remove them from the group.
+
+
+
+Also in the left sidebar, click the "Add Member" button to bring additional assistants into the group chat.
+
+
+
+Go to "Group Profile" in the left sidebar to edit the group prompt, add plugins, or change the moderator model. You can also use the Agent Builder on the right panel for intelligent group creation. Agent Builder is LobeHub’s built-in assistant — simply chat with it, describe your needs, and it will automatically generate a complete group chat configuration, including group settings, system prompts, and plugin setup.
+
+
diff --git a/docs/usage/agent/agent-team.zh-CN.mdx b/docs/usage/agent/agent-team.zh-CN.mdx
new file mode 100644
index 0000000000..98cd667359
--- /dev/null
+++ b/docs/usage/agent/agent-team.zh-CN.mdx
@@ -0,0 +1,64 @@
+---
+title: Agent 团队
+description: 了解如何使用 Agent 群聊,让多个专业助理协作讨论,包括创建群聊、配置成员与主持人设置。
+tags:
+  - LobeHub
+  - AI 助手
+  - Agent 群聊
+  - 多助理协作
+  - 主持人
+
+# Agent 团队
+
+有时候,一个助理的视角是不够的。复杂问题需要多角度思考,创意项目需要不同专业背景的碰撞,学习讨论需要多方辩论。Agent 群聊让多个专业助理聚在一起,像真实群聊一样协作 —— 翻译助理、编程助理、产品经理助理围坐一起,各自发挥专长,共同解决你的问题。你不再是获得一个答案,而是参与一场对话。不同观点在这里碰撞,专业知识在这里互补,AI 助理协同工作时产生的洞察,远超任何单一助理所能提供的。
+
+Agent 群聊是多个专业助理的协作空间。你提出一个问题或任务,不同助理从各自的专业角度给出见解,它们之间可以互相讨论、补充、甚至辩论。
+
+## 单个助理的局限
+
+- 只能从一个角度分析问题
+- 专业领域受限于预设角色
+- 缺少多元视角的碰撞
+
+## Agent 群聊的优势
+
+- 多个助理各取所长,协同工作
+- 不同专业背景带来全面的解决方案
+- 助理之间的讨论激发更深入的洞察
+- 内置主持人确保讨论有序进行
+
+## 关于主持人
+
+每个 Agent 群聊都有一个内置的主持人。它负责:
+
+- 理解你的需求,分配讨论任务
+- 协调各个助理的发言顺序
+- 总结讨论结果,提炼关键结论
+- 确保对话围绕主题有序展开
+
+## 创建 Agent 群聊
+
+在左侧边栏选择「创建群组」即可进入创建。
+
+
+
+创建群聊时,你可以使用现有模版,也可以选择自己的 AI 助理组建群聊。同时,你可以选择是否使用主持人,并为主持人选择模型。
+
+
+
+## 配置 Agent 群聊
+
+在群聊会话左侧边栏,选中助理,可以便捷更换助理的模型和移除助理。
+
+
+
+同样在左侧边栏,点击添加成员按钮可以添加需要的助理到群聊中。
+
+
+
+在左侧边栏进入「群聊档案」,你可以编写群聊提示词,为群聊添加插件,更换主持人模型。你也可以使用右侧面板的 Agent Builder 进行智能创建。Agent Builder 是 LobeHub 的内置助理,只需与 Agent Builder 对话,描述你的需求,它就能理解并自动生成完整的群聊配置 —— 包括群聊设定、系统提示词、插件配置。
+
+
diff --git a/docs/usage/agent/gtd.mdx b/docs/usage/agent/gtd.mdx
new file mode 100644
index 0000000000..05e84f9a1b
--- /dev/null
+++ b/docs/usage/agent/gtd.mdx
@@ -0,0 +1,35 @@
+---
+title: GTD Tools
+description: >-
+  Learn how to use GTD Tools, LobeHub's built-in task management plugin,
+  including enabling it, creating tasks, and completing tasks.
+tags:
+  - GTD Tools
+  - Task Management
+  - Create
+  - Complete
+---
+
+# GTD Tools
+
+GTD Tools is a built-in plugin in LobeHub that deeply integrates the classic GTD (Getting Things Done) time management methodology into your conversational experience. Once enabled, your assistant transforms into a professional task management expert, helping you offload mental clutter and focus on what truly matters—your creativity and deep thinking. With GTD Tools, you can manage your schedule directly through natural language in conversations. Whether it's a sudden burst of inspiration, household chores, or serious work plans, your assistant can accurately record and track your progress.
+
+## Enabling GTD Tools
+
+GTD Tools is a built-in plugin in LobeHub and must be enabled for your assistant before use.
+
+### Enable via Assistant Profile
+
+Go to the assistant profile page, click on "+ Integrate Plugin," and check the "GTD Tools" plugin to activate it.
+
+### Enable in Conversation
+
+In the conversation window, click the plugin icon below the chat box and check the "GTD Tools" plugin to enable it.
+
+### Creating Tasks
+
+You can simply send your plans in the conversation, and the assistant will automatically recognize and confirm the task.
+
+### Completing Tasks
+
+You can update tasks through conversational commands, and the assistant will handle the updates automatically.
diff --git a/docs/usage/agent/gtd.zh-CN.mdx b/docs/usage/agent/gtd.zh-CN.mdx
new file mode 100644
index 0000000000..e04eefe3b2
--- /dev/null
+++ b/docs/usage/agent/gtd.zh-CN.mdx
@@ -0,0 +1,33 @@
+---
+title: GTD 工具
+description: 了解如何使用 GTD 工具,LobeHub 内置的任务管理插件,包括启用插件、创建任务与完成任务。
+tags:
+  - GTD 工具
+  - 任务管理
+  - 创建
+  - 完成
+---
+
+# GTD 工具
+
+GTD Tools 是 LobeHub 内置的插件,将经典的 GTD(Getting Things Done)时间管理方法论深度集成到你的对话体验中。开启该插件后,你的助理将化身为专业的任务管理专家,帮助你将大脑从琐碎的杂事中解放出来,专注于当下的创作与思考。通过 GTD Tools,你可以直接在会话中以自然语言管理日程。无论是突如其来的灵感、待办的家务,还是严肃的工作计划,助理都能为你精准记录并追踪进度。
+
+## 启用 GTD Tools
+
+GTD Tools 是 LobeHub 的内置插件,需要为助理启用后才能使用。
+
+### 在助理档案中启用
+
+进入助理档案页面,点击「+ 集成插件」,勾选「GTD Tools」插件即可开启。
+
+### 在会话中启用
+
+进入会话页面,点击对话框下方插件图标,勾选「GTD Tools」插件即可。
+
+### 创建任务
+
+你可以在会话中直接发送你的计划,助理会自动识别并确认记录。
+
+### 完成任务
+
+你可以在会话中通过对话指令让助理自动更新。
diff --git a/docs/usage/agent/notebook.mdx b/docs/usage/agent/notebook.mdx
new file mode 100644
index 0000000000..a16958ec42
--- /dev/null
+++ b/docs/usage/agent/notebook.mdx
@@ -0,0 +1,65 @@
+---
+title: Notebook
+description: >-
+  Learn how to use the Notebook to save, manage, and edit documents within your
+  conversations.
+tags:
+  - Notebook
+  - Documents
+  - Notes
+  - Save
+---
+
+# Notebook
+
+LobeHub offers a powerful Notebook feature that allows you to save and manage documents directly within your conversations. No more worrying about losing important information—your assistant can help you take notes, save reports, and organize research materials. Everything stays within the current topic and is always accessible. The Notebook breaks the limitations of fleeting conversations by turning valuable insights into structured, manageable documents. From meeting minutes to study notes, research reports to to-do lists, the Notebook helps you build knowledge in a more systematic and lasting way.
+
+The Notebook serves as a topic-level document storage space. When valuable content arises during a conversation, your assistant can save it to the Notebook, creating a structured document library. These documents are linked to the current topic, making it easy to reference and revisit them in future discussions.
+
+## What You Can Do with the Notebook
+
+### Save Notes and Reminders
+
+Your assistant can quickly jot down ideas, to-dos, and flashes of inspiration. Just say "Take a note for me," and the content will be saved to your Notebook.
+
+### Organize Research Materials
+
+When your assistant helps you search for information or analyze data, you can save the valuable findings. The next time you continue your research, everything will be right where you left it.
+
+### Generate Reports and Articles
+
+Your assistant can help you draft structured reports, analytical documents, or long-form articles, and save them directly to the Notebook. You can view, edit, and expand on them anytime.
+
+### Manage Document Versions
+
+You can ask your assistant to update existing documents by adding new content or modifying what's already there. Documents in the Notebook evolve over time, always staying up to date.
+
+### Enable the Notebook Plugin
+
+Notebook is a built-in plugin. You’ll need to enable it for your assistant to use document management features.
+
+### Enable in Assistant Profile
+
+- Go to the assistant profile page, click "+ Integrate Skills," and check "Notebook" to enable it.
+
+### Enable in a Conversation
+
+- In a conversation, click the skill icon below the chat box and check "Notebook" to activate it.
+
+### Using the Notebook
+
+You can ask your assistant to read document content:
+
+- "Show me the notes we saved earlier"
+- "What did that report say?"
+
+To delete a document when it's no longer needed, you can either instruct your assistant to remove it or open the Notebook panel and delete it manually.
+
+### View and Manage Documents
+
+All saved documents appear in the Notebook panel on the right side of the topic. Click the document icon in the top-right corner to open the panel. You can:
+
+- Browse the document list to view titles and summaries
+- Click a document to view its full content
+- Edit documents directly within the panel
+- Click "Edit in Drafts" to sync the document with the "Drafts" section in the conversation
diff --git a/docs/usage/agent/notebook.zh-CN.mdx b/docs/usage/agent/notebook.zh-CN.mdx
new file mode 100644
index 0000000000..6c03ca3677
--- /dev/null
+++ b/docs/usage/agent/notebook.zh-CN.mdx
@@ -0,0 +1,56 @@
+---
+title: 笔记本
+description: 了解如何使用笔记本功能,在会话中保存、管理和编辑文档。
+tags:
+  - 笔记本
+  - 文档管理
+  - 笔记
+  - 保存
+---
+
+# 笔记本
+
+LobeHub 支持笔记本功能(Notebook),让你在对话中随时保存和管理文档。不再担心重要内容被遗忘,助理可以帮你记录笔记、保存报告、整理研究资料 —— 所有内容都留存在当前话题中,随时查阅。Notebook 突破了对话内容转瞬即逝的限制,将有价值的信息沉淀为可管理的文档。从会议纪要到学习笔记,从研究报告到待办清单,Notebook 让知识积累更系统、更持久。
+
+Notebook 是话题级别的文档存储空间。当对话中产生了值得保留的内容,助理可以将其保存到 Notebook 中,形成结构化的文档库。这些文档与当前话题关联,方便你在后续对话中查阅和引用。
+
+## Notebook 能做什么
+
+### 保存笔记和备忘
+
+助理可以帮你快速记录想法、待办事项、灵感片段。你只需说 "帮我记一下",内容就会保存到 Notebook 中。
+
+### 整理研究资料
+
+当你让助理搜索信息或分析资料时,可以将有价值的结果保存下来。下次继续研究时,这些资料随时可用。
+
+### 生成报告和文章
+
+助理可以帮你撰写结构化的报告、分析文档、长篇文章,并直接保存到 Notebook。你能随时查看、编辑、补充内容。
+
+### 管理文档版本
+
+你可以让助理更新已有文档,追加新内容或修改现有内容。文档在 Notebook 中持续演进,保持最新状态。
+
+### 启用 Notebook 插件
+
+Notebook 是内置插件,需要为助理启用后才能使用文档管理功能。
+
+### 在助理档案中启用
+
+- 进入助理档案页面,点击「+ 集成技能」,勾选「Notebook」即可开启。
+
+### 在会话中启用
+
+- 进入会话页面,点击对话框下方技能图标,勾选「Notebook」即可。
+
+### 使用 Notebook
+
+你可以让助理读取文档内容:
+
+- "看一下之前保存的笔记"
+- "那份报告写了什么"
+
+当文档不再需要时,可以给助理发送指令删除,或展开 Notebook 面板手动删除。
+
+### 查看和管理文档
+
+所有保存的文档都会显示在话题右侧 Notebook 面板中。右上角点击文稿图标即可呼出面板。你可以:
+
+- 浏览文档列表,查看标题和摘要
+- 点击文档查看完整内容
+- 直接在面板中编辑文档
+- 点击「在文稿中编辑」,将会话内文稿同步到「文稿」板块
diff --git a/docs/usage/agent/sandbox.mdx b/docs/usage/agent/sandbox.mdx
new file mode 100644
index 0000000000..36d0b20fb9
--- /dev/null
+++ b/docs/usage/agent/sandbox.mdx
@@ -0,0 +1,79 @@
+---
+title: Cloud Sandbox
+description: >-
+  Learn how to use Cloud Sandbox to execute code, generate files, and process
+  data in a securely isolated cloud environment.
+tags:
+  - Cloud Sandbox
+  - Code Execution
+  - File Generation
+  - Data Processing
+---
+
+# Cloud Sandbox
+
+LobeHub supports the Cloud Sandbox feature, enabling AI assistants to execute code and process files in a securely isolated cloud environment. Instead of merely providing code snippets, the assistant can directly run code, generate documents, and create charts — delivering downloadable results that you can iterate on in real time. Cloud Sandbox breaks the boundaries of traditional conversations, extending AI output from suggestions to actual execution. From data analysis to document generation, from code debugging to file conversion, Cloud Sandbox transforms AI into a true execution assistant.
+
+## Understanding Cloud Sandbox
+
+Cloud Sandbox is a securely isolated cloud-based execution environment. When you need more than just code snippets and want actual execution results, the assistant will run the code in the Cloud Sandbox and return the output.
+
+## What Can Cloud Sandbox Do?
+
+### Execute Code
+
+The assistant can run Python, JavaScript, and TypeScript code within the sandbox and return the results. You’ll see real execution output, not just code text.
+
+### Generate Files
+
+The assistant can create various types of files — PDF documents, Excel spreadsheets, Word documents, images, charts, and more — and provide download links. You can download and use them directly.
+
+### Process Data
+
+The assistant can read, analyze, and transform data files. Upload CSV, JSON, or other data formats, and the assistant can help clean, summarize, and visualize the data.
+
+### Run Commands
+
+The assistant can execute shell commands to install dependencies, manipulate files, and perform complex operations.
+
+### Enabling Cloud Sandbox
+
+Cloud Sandbox is a built-in plugin that must be enabled for the assistant to use its features. You can enable the Cloud Sandbox plugin from the "Assistant Profile" page under the plugin section, or directly within a conversation by checking the plugin option in the chat interface.
+
+### Using Cloud Sandbox
+
+#### Ask the Assistant to Execute Code
+
+Simply describe the task you want to accomplish, and the assistant will write and run the code in the Cloud Sandbox:
+
+- “Write a Python script to calculate the average and standard deviation of this dataset.”
+- “Implement a quicksort algorithm in JavaScript and run a test.”
+- “Run this code for me and show the output.”
+
+#### Ask the Assistant to Generate Documents
+
+Describe the content you need, and the assistant will generate the document and provide a download link:
+
+- “Generate a PDF report with the analysis of this data.”
+- “Convert this content into a Word document.”
+- “Create an Excel spreadsheet to organize this information.”
+
+#### Ask the Assistant to Process Data
+
+Provide data or files, and the assistant will process and return the results:
+
+- “Analyze this sales data and create a trend chart.”
+- “Convert this JSON file to CSV format.”
+- “Clean this dataset by removing duplicates.”
+
+### Understanding the Cloud Sandbox Environment
+
+The Cloud Sandbox runs in a securely isolated cloud server, completely separate from your local machine. All operations performed by the assistant in the sandbox will not affect your local file system.
+
+### Session-Based File Storage
+
+Files in the Cloud Sandbox are temporary and tied to the current conversation session. If the session ends or remains inactive for a long time, the sandbox files may be cleared. If you need to keep the files, be sure to download them using the export links provided by the assistant.
+
+### Automatic Export of Results
+
+When the assistant generates files in the Cloud Sandbox, they are automatically exported with download links. No extra steps are needed — you’ll receive documents, charts, data files, and other outputs directly from the code execution.
diff --git a/docs/usage/agent/sandbox.zh-CN.mdx b/docs/usage/agent/sandbox.zh-CN.mdx
new file mode 100644
index 0000000000..878bd4806f
--- /dev/null
+++ b/docs/usage/agent/sandbox.zh-CN.mdx
@@ -0,0 +1,73 @@
+---
+title: 云沙箱
+description: 了解如何使用云沙箱,在安全隔离的云端环境中执行代码、生成文件、处理数据。
+tags:
+  - 云沙箱
+  - 代码执行
+  - 文件生成
+  - 数据处理
+---
+
+# 云沙箱
+
+LobeHub 支持 Cloud Sandbox ,让 AI 助理能在安全隔离的云端环境中执行代码、处理文件。不只是给出代码片段,助理可以直接运行代码、生成文档、创建图表 —— 你能立即获得可下载的成果,实时迭代调整。云沙箱突破了传统对话的限制,将 AI 的输出从建议扩展到执行。从数据分析到文档生成,从代码调试到文件转换,云沙箱让 AI 真正成为你的执行助手。
+
+## 理解 Cloud Sandbox
+
+Cloud Sandbox 是一个安全隔离的云端执行环境。当你需要的不只是代码片段,而是实际运行结果时,助理会在 Cloud Sandbox 中执行代码并返回输出。
+
+## Cloud Sandbox 能做什么
+
+### 执行代码
+
+助理可以运行 Python、JavaScript、TypeScript 代码,在沙盒中执行并返回结果。你能看到真实的运行输出,而不只是代码文本。
+
+### 生成文件
+
+助理可以创建各种文件 ——PDF 文档、Excel 表格、Word 文档、图片、图表等,并提供下载链接。你能直接下载使用。
+
+### 数据处理
+
+助理可以读取、分析、转换数据文件。上传 CSV、JSON 等数据,助理能帮你清洗、统计、可视化。
+
+### 运行命令
+
+助理可以执行 Shell 命令,安装依赖包、处理文件、执行复杂操作。
+
+### 启用 Cloud Sandbox
+
+Cloud Sandbox 是内置插件,需要为助理启用后才能使用云沙箱功能。你可以在「助理档案」页面添加插件处启用 Cloud Sandbox 插件,也可以进入会话,在对话框勾选插件处启用 Cloud Sandbox 插件。
+
+### 使用 Cloud Sandbox
+
+### 让助理执行代码
+
+直接描述你想完成的任务,助理会编写代码并在云沙箱中运行:
+
+- 「帮我写个 Python 脚本,计算这组数据的平均值和标准差」
+- 「用 JavaScript 实现一个快速排序算法,运行测试一下」
+- 「帮我跑一下这段代码,看看输出是什么」
+
+### 让助理生成文档
+
+描述你需要的文档内容,助理会生成并提供下载:
+
+- 「帮我生成一份 PDF 报告,包含这些数据的分析结果」
+- 「把这段内容做成 Word 文档」
+- 「创建一个 Excel 表格,整理这些信息」
+
+### 让助理处理数据
+
+提供数据或文件,助理会处理并返回结果:
+
+- 「分析这份销售数据,画个趋势图」
+- 「把这个 JSON 转换成 CSV 格式」
+- 「帮我清洗这份数据,去除重复项」
+
+### 了解云沙箱环境
+
+云沙箱运行在安全隔离的云端服务器上,与你的本地电脑完全分离。助理在云沙箱中的所有操作都不会影响你的本地文件系统。
+
+### 会话级文件存储
+
+云沙箱中的文件是临时的,与当前对话会话绑定。会话结束或长时间不活动后,沙箱中的文件可能会被清理。如果你需要保留文件,请及时下载助理提供的导出链接。
+
+### 自动导出成果
+
+当助理在云沙箱中生成文件时,会自动导出并提供下载链接。你无需额外操作,即可获得代码运行产生的文档、图表、数据文件等成果。
diff --git a/docs/usage/agent/scheduled-task.mdx b/docs/usage/agent/scheduled-task.mdx
new file mode 100644
index 0000000000..08f67de5ee
--- /dev/null
+++ b/docs/usage/agent/scheduled-task.mdx
@@ -0,0 +1,27 @@
+---
+title: Scheduled Tasks
+description: >-
+ Learn how to use scheduled tasks, including how to create, edit, and delete
+ them.
+tags:
+ - LobeHub
+ - CronJob
+ - Scheduled Tasks
+ - Create
+ - Edit
+ - Delete
+---
+
+# Scheduled Tasks
+
+Scheduled tasks are jobs that run periodically in the cloud.
+
+In simple terms, you can configure an Agent to execute tasks based on your prompt at regular intervals—for example, checking social media content and sending notifications on a schedule.
+
+## Creating a Task
+
+
+
+To create a task, simply enter a prompt, name, and schedule.
+
+## Deleting a Task
diff --git a/docs/usage/agent/scheduled-task.zh-CN.mdx b/docs/usage/agent/scheduled-task.zh-CN.mdx
new file mode 100644
index 0000000000..9b1fdfc4d1
--- /dev/null
+++ b/docs/usage/agent/scheduled-task.zh-CN.mdx
@@ -0,0 +1,25 @@
+---
+title: 定时任务
+description: 了解如何使用定时任务,包括创建、编辑、删除等。
+tags:
+ - LobeHub
+  - CronJob
+ - 定时任务
+ - 创建
+ - 编辑
+ - 删除
+---
+
+# 定时任务
+
+定时任务是定期在云端执行的任务。
+
+简单来说,你可以让 Agent 定期根据你的 Prompt 去执行任务,例如定期检查社交媒体内容并发送通知。
+
+## 创建任务
+
+
+
+输入提示词、名称和日期即可创建任务。
+
+## 删除任务
diff --git a/docs/usage/agent/share.mdx b/docs/usage/agent/share.mdx
new file mode 100644
index 0000000000..c5bdbf2963
--- /dev/null
+++ b/docs/usage/agent/share.mdx
@@ -0,0 +1,92 @@
+---
+title: Share Conversations
+description: >-
+ Learn how to share conversation records using LobeHub's sharing features,
+ including screenshot sharing and ShareGPT links. Easily share your dialogues
+ with others.
+tags:
+ - LobeHub
+ - Share Conversation Records
+ - Screenshot Sharing
+ - Conversation Sharing
+---
+
+# Share Conversation Records
+
+You can share your current conversation with others by clicking the Share button in the top-right corner of the chat window. LobeHub supports two sharing methods: Screenshot Sharing and Shareable Link Generation.
+
+LobeHub allows you to export and share your conversations in various formats, including screenshots, plain text, PDF, or JSON, making it easy to save and share your dialogues.
+
+## Screenshot Sharing
+
+Export your conversation as an image. Available display modes:
+
+- Wide Screen Mode: Optimized for desktop viewing
+- Narrow Screen Mode: Optimized for mobile viewing
+
+Optional content settings:
+
+- Include Assistant Role Settings: Display system prompts and configuration for the assistant
+- Include Footer: Show information at the bottom of the page
+- Image Format Options: JPG / PNG / SVG / WEBP
+- Sharing Options:
+ - Copy Image: Copy to clipboard for direct pasting into other apps
+ - Download Screenshot: Save the image file locally
+
+
+
+## Text Sharing
+
+Export your conversation as plain text. Optional content settings:
+
+- Include Assistant Role Settings: Include system prompts and assistant configuration
+- Include Message Roles: Show the sender of each message
+- Include User Info: Include user-related information
+- Include Plugin Info: Include details of plugin calls
+- Sharing Options:
+ - Copy Text: Copy to clipboard
+ - Download File: Save as a text file
+
+
+
+## PDF Sharing
+
+Export your conversation as a PDF document. Optional content settings:
+
+- Include Assistant Role Settings: Include system prompts and assistant configuration
+- Include Message Roles: Show the sender of each message
+- Include User Info: Include user-related information
+- Include Plugin Info: Include details of plugin calls
+
+Generation Method: After selecting your options, click the Generate button to create and download the PDF file.
+
+
+
+## JSON Sharing
+
+Export your conversation in JSON format, ideal for developers or integration with other systems.
+
+Available export modes:
+
+- Default: LobeHub's standard JSON format
+- OpenAI Compatible: JSON format compatible with the OpenAI API
+
+Optional content settings:
+
+- Include Role Settings: Include assistant role configuration
+
+Sharing Options:
+
+- Copy: Copy JSON content to clipboard
+- Download File: Save as a JSON file
+
+## Use Cases
+
+- Screenshot Sharing: Great for sharing conversations on social media or messaging apps—visually intuitive and easy to read.
+- Text Sharing: Ideal for editing or quoting conversations; small file size.
+- PDF Sharing: Suitable for formal use cases like work reports, study notes, or archival.
+- JSON Sharing: Best for developers—can be imported into other systems or used for data analysis.
+
+## Shareable Link
+
+Coming soon...
diff --git a/docs/usage/agent/share.zh-CN.mdx b/docs/usage/agent/share.zh-CN.mdx
new file mode 100644
index 0000000000..2d229847c8
--- /dev/null
+++ b/docs/usage/agent/share.zh-CN.mdx
@@ -0,0 +1,60 @@
+---
+title: 分享会话
+description: 了解如何通过 LobeHub 的分享功能分享会话记录,包括截图分享和 ShareGPT 分享方式。通过分享功能,轻松与他人分享您的对话。
+tags:
+ - LobeHub
+ - 分享会话记录
+ - 截图分享
+ - 对话分享
+---
+
+# 分享会话记录
+
+通过会话窗口右上角的`分享`按钮,您可以将当前会话记录分享给其他人。LobeHub 支持两种分享方式:`截图分享`和 `生成分享链接`。
+
+LobeHub 支持将会话内容分享给他人。你可以将对话导出为截图、文本、PDF 或 JSON 格式,方便保存、分享。
+
+## 截图分享
+
+将会话导出为图片格式。可选显示模式:
+
+- 宽屏模式:适合电脑屏幕查看
+- 窄屏模式:适合手机屏幕查看
+
+可选内容选项:
+
+- 是否包含助手角色设定:显示助手的系统提示词等设定信息
+- 是否包含页脚:显示页面底部信息
+- 可选图片格式:JPG/PNG/SVG/WEBP
+- 可选分享方式:
+  - 复制图片:复制到剪贴板,可以直接粘贴到其他应用
+  - 下载截图:保存图片文件到本地
+
+
+
+## 文本分享
+
+将会话导出为纯文本格式。可选内容选项:
+
+- 是否包含助手角色设定:包含助手的系统提示词等设定
+- 是否包含消息角色:显示每条消息的发送者
+- 是否包含用户信息:包含用户相关信息
+- 是否包含插件信息:包含插件调用的相关信息
+- 可选分享方式:
+  - 复制文本:复制到剪贴板
+  - 下载文件:保存为文本文件
+
+
+
+## PDF 分享
+
+将会话导出为 PDF 文档。可选内容选项:
+
+- 是否包含助手角色设定:包含助手的系统提示词等设定
+- 是否包含消息角色:显示每条消息的发送者
+- 是否包含用户信息:包含用户相关信息
+- 是否包含插件信息:包含插件调用的相关信息
+
+生成方式:选择完选项后,点击生成按钮,直接生成 PDF 文件并下载。
+
+
+
+## JSON 分享
+
+将会话导出为 JSON 格式,适合开发者或需要在其他系统中使用。
+
+可选导出模式:
+
+- 默认:LobeHub 的标准 JSON 格式
+- OpenAI 兼容:兼容 OpenAI API 格式的 JSON
+
+可选内容选项:
+
+- 是否包含角色设定:包含助手的角色设定信息
+
+可选分享方式:
+
+- 复制:复制 JSON 内容到剪贴板
+- 下载文件:保存为 JSON 文件
+
+## 使用场景
+
+- 截图分享:适合在社交媒体、聊天工具中分享对话内容,直观易读。
+- 文本分享:适合需要编辑或引用对话内容的场景,文件体积小。
+- PDF 分享:适合正式场合,如工作报告、学习笔记、存档记录。
+- JSON 分享:适合开发者,可以导入到其他系统或进行数据分析。
+
+## 分享链接
diff --git a/docs/usage/agent/topic.mdx b/docs/usage/agent/topic.mdx
new file mode 100644
index 0000000000..f11ab63902
--- /dev/null
+++ b/docs/usage/agent/topic.mdx
@@ -0,0 +1,59 @@
+---
+title: Topics
+description: >-
+  Learn how to manage conversation topics in LobeHub, including searching,
+  renaming, duplicating, favoriting, and deleting topics.
+tags:
+  - LobeHub
+  - Topics
+  - Search
+  - Rename
+  - Favorite
+  - Delete
+---
+
+# Topics
+
+In LobeHub, each conversation with the assistant is organized into a topic. You can search, rename, favorite, or delete topics to better manage and retrieve your past conversations.
+
+## Search Topics
+
+On the conversation page, you can search for previous topics you've discussed with the assistant. Enter the conversation view, click "More" in the topic list, and use the search function to quickly locate the desired conversation.
+
+
+
+## Rename Topics
+
+You can give topics more meaningful names to make them easier to identify.
+
+- Manual Rename: Locate the topic you want to rename, click "Rename", enter a new name, and save it.
+- Smart Rename: Click "Smart Rename" on the topic you want to rename, and the system will automatically generate a suitable name based on the conversation content.
+
+## Duplicate Topics
+
+You can create a duplicate of any topic to preserve the original conversation while branching off into a new direction. Find the topic you want to copy and click "Duplicate". The system will create an identical copy of the topic. This is useful for exploring different discussion paths based on the same starting point.
+
+## Favorite Topics
+
+For important or frequently used topics, you can mark them as favorites. Click the favorite icon on the topic you want to save. Favorited topics will appear in the "Favorites" section of the topic list for quick access.
+
+
+
+## Delete Topics
+
+You can delete topics you no longer need. Locate the topic, click "Delete", and confirm the action. The topic will be removed from your list.
+
+## Topic List
+
+Topics are organized by time to help you easily find conversations from different periods.
+
+- Favorites: Displays all your favorited topics.
+- Time-Based List: Other topics are automatically grouped by their creation time. Expand a time group to view all topics from that period.
+
+If you prefer not to use time-based grouping, you can switch to "Ungrouped" view.
diff --git a/docs/usage/agent/topic.zh-CN.mdx b/docs/usage/agent/topic.zh-CN.mdx
new file mode 100644
index 0000000000..70284981aa
--- /dev/null
+++ b/docs/usage/agent/topic.zh-CN.mdx
@@ -0,0 +1,57 @@
+---
+title: 话题
+description: 了解如何在 LobeHub 中管理话题,包括搜索、重命名、创建副本、收藏、删除等。
+tags:
+  - LobeHub
+  - 话题
+  - 搜索
+  - 重命名
+  - 收藏
+  - 删除
+---
+
+# 话题
+
+在 LobeHub 中,每次与助手的对话会形成一个话题。你可以搜索、重命名、收藏、删除话题,方便管理和查找历史对话。
+
+## 搜索话题
+
+在会话页面,你可以搜索与助手聊过的话题。进入会话,点击话题列表的「更多」,通过搜索话题快速找到需要的历史对话。
+
+
+
+## 重命名话题
+
+你可以为话题设置更有意义的名称,方便识别。
+
+- 手动重命名:找到要重命名的话题后点击「重命名」,输入新的话题名称并即可保存。
+- 智能重命名:找到要重命名的话题后点击「智能重命名」,系统会根据对话内容自动生成合适的名称。
+
+## 创建话题副本
+
+你可以创建某个话题的副本,保留原对话的同时创建新的分支。找到要复制的话题后点击「创建副本」,系统会创建一个完全相同的话题副本。适合在原对话基础上尝试不同的讨论方向。
+
+## 收藏话题
+
+对于重要或常用的话题,可以点击收藏按钮进行收藏。找到要收藏的话题后点击收藏按钮,话题会被标记为收藏。收藏的话题会显示在话题列表的「收藏」列表里,方便快速访问。
+
+
+
+## 删除话题
+
+不需要的话题可以删除。找到要删除的话题后点击「删除」,确认删除后话题将被移除。
+
+## 话题列表
+
+话题列表按时间组织,方便查找不同时期的对话。
+
+- 收藏列表:显示所有收藏的话题。
+- 时间列表:其他话题默认根据创建时间自动分组,展开对应时间的列表,可以查看该时间段的所有话题。
+
+如果不想使用时间分组,可以选择「不分组」。
diff --git a/docs/usage/agent/translate.mdx b/docs/usage/agent/translate.mdx
new file mode 100644
index 0000000000..48ba039363
--- /dev/null
+++ b/docs/usage/agent/translate.mdx
@@ -0,0 +1,32 @@
+---
+title: Conversation Translation
+description: >-
+ LobeHub allows users to instantly translate conversation content into a
+ selected language, displaying the results in real time. Learn how to configure
+ your translation model for an optimized experience.
+tags:
+ - LobeHub
+ - Conversation Translation
+ - Real-Time Translation
+ - Translation Model Configuration
+---
+
+# Translate Conversation History
+
+
+
+## Translate Content Within Conversations
+
+LobeHub enables users to translate conversation content into a target language with a single click. Once a language is selected, LobeHub will use the pre-configured AI model to perform the translation and display the results in real time within the chat window.
+
+
+
+## Configure Translation Model
+
+You can specify which model you'd like to use as your translation assistant in the settings.
+
+
+
+- Open the `Settings` panel
+- Navigate to the `System Assistant` section and find the `Translation Settings` option
+- Assign a model to serve as your `Translation Assistant`
diff --git a/docs/usage/agent/translate.zh-CN.mdx b/docs/usage/agent/translate.zh-CN.mdx
new file mode 100644
index 0000000000..f093774ff3
--- /dev/null
+++ b/docs/usage/agent/translate.zh-CN.mdx
@@ -0,0 +1,29 @@
+---
+title: 会话翻译
+description: LobeHub 支持用户一键将对话内容翻译成指定语言,实时显示翻译结果。了解如何设置翻译模型以优化翻译体验。
+tags:
+ - LobeHub
+ - 会话翻译
+ - 实时翻译
+ - 翻译模型设置
+---
+
+# 翻译会话记录
+
+
+
+## 翻译对话中的内容
+
+LobeHub 支持用户一键将对话内容翻译成指定语言。选择目标语言后,LobeHub 将调用预先设置的 AI 模型进行翻译,并将翻译结果实时显示在聊天窗口中。
+
+
+
+## 翻译模型设置
+
+你可以在设置中指定您希望使用的模型作为翻译助手。
+
+
+
+- 打开`设置`面板
+- 在`系统助手`中找到`翻译设置`选项
+- 为你的`翻译助手`指定一个模型
diff --git a/docs/usage/agent/tts-stt.mdx b/docs/usage/agent/tts-stt.mdx
new file mode 100644
index 0000000000..d355842d64
--- /dev/null
+++ b/docs/usage/agent/tts-stt.mdx
@@ -0,0 +1,38 @@
+---
+title: Text-to-Speech & Speech-to-Text
+description: >-
+ Learn how to use the text-to-speech (TTS) and speech-to-text (STT) features in
+ LobeHub, including how to configure your preferred voice model.
+tags:
+ - LobeHub
+ - Text-to-Speech
+ - TTS
+ - STT
+ - Voice Model
+---
+
+# Guide to Text-to-Speech & Speech-to-Text
+
+LobeHub supports text and voice conversion features, allowing users to input content via speech and have AI responses read aloud using voice synthesis.
+
+## Text-to-Speech (TTS)
+
+To have AI read text aloud, simply highlight any content in the chat window and select `Text-to-Speech`. The AI will use a TTS model to convert the selected text into speech.
+
+
+
+## Speech-to-Text (STT)
+
+To input text using your voice, click the voice input option in the message box. LobeHub will convert your speech into text and insert it into the input field. Once you're done, you can send it directly to the AI.
+
+
+
+## Configuring Voice Conversion Settings
+
+You can customize the voice conversion experience by selecting your preferred models in the settings.
+
+
+
+- Open the `Settings` panel
+- Navigate to the `Text-to-Speech` section
+- Choose your desired voice service and AI model
diff --git a/docs/usage/agent/tts-stt.zh-CN.mdx b/docs/usage/agent/tts-stt.zh-CN.mdx
new file mode 100644
index 0000000000..f666065848
--- /dev/null
+++ b/docs/usage/agent/tts-stt.zh-CN.mdx
@@ -0,0 +1,36 @@
+---
+title: 文字语音转换
+description: 了解如何在 LobeHub 中使用文字语音转换功能,包括文字转语音(TTS)和语音转文字(STT),以及设置您喜欢的语音模型。
+tags:
+ - LobeHub
+ - 文字语音转换
+ - TTS
+ - STT
+ - 语音模型
+---
+
+# 文字语音转换使用指南
+
+LobeHub 支持文字语音转换功能,允许用户通过语音输入内容,以及将 AI 输出的内容通过语音播报。
+
+## 文字转语音(TTS)
+
+在对话窗口中选中任意内容,选择`文字转语音`,AI 将通过 TTS 模型对文本内容进行语音播报。
+
+
+
+## 语音转文字(STT)
+
+在输入窗口中选择语音输入功能,LobeHub 将您的语音转换为文字并输入到文本框中,完成输入后可以直接发送给 AI。
+
+
+
+## 文字语音转换设置
+
+你可以在设置中为文字语音转换功能指定您希望使用的模型。
+
+
+
+- 打开`设置`面板
+- 找到`文字转语音`设置
+- 选择您所需的语音服务和 AI 模型
diff --git a/docs/usage/agents/agent-organization.mdx b/docs/usage/agents/agent-organization.mdx
deleted file mode 100644
index 5525ed1bff..0000000000
--- a/docs/usage/agents/agent-organization.mdx
+++ /dev/null
@@ -1,53 +0,0 @@
----
-title: Efficiently Organize Your AI Assistants with LobeChat
-description: >-
- Learn how to use LobeChat's grouping, search, and pinning functions to efficiently organize and locate your AI assistants.
-
-tags:
- - LobeChat
- - AI assistants
- - assistant organization
- - grouping
- - search function
- - pinning function
----
-
-# Assistant Organization Guide
-
-
-
-LobeChat provides a rich variety of AI assistant resources. Users can easily add various assistants through the assistant market, offering a wide range of application scenarios for AI applications.
-
-When you have added a large number of assistants, finding a specific assistant in the list may become challenging. LobeChat provides `search`, `grouping`, and `pinning` functions to help you better organize assistants and improve efficiency in locating them.
-
-## Assistant Grouping
-
-Firstly, LobeChat's AI assistants support organization through grouping. You can categorize assistants of the same type together and easily search for the required assistants by collapsing and expanding groups.
-
-### Assistant Settings
-
-
-
-- In the menu of an individual assistant, selecting the `Move to Group` option can quickly categorize the assistant into the specified group.
-- If you don't find the group you want, you can choose `Add Group` to quickly create a new group.
-
-### Group Settings
-
-
-
-- In the group menu, you can quickly create a new assistant under that group.
-- Clicking the `Group Management` button allows you to `rename`, `delete`, `sort`, and perform other operations on all groups.
-
-## Assistant Search
-
-
-
-- At the top of the assistant list, you can use the assistant search function to easily locate the assistant you need using keywords.
-
-## Assistant Pinning
-
-
-
-- In the assistant menu, you can use the `Pin` function to pin the assistant to the top.
-- After pinning an assistant, a pinned area will appear at the top of the assistant list, displaying all pinned assistants.
-- For pinned assistants, you can choose `Unpin` to remove them from the pinned area.
diff --git a/docs/usage/agents/agent-organization.zh-CN.mdx b/docs/usage/agents/agent-organization.zh-CN.mdx
deleted file mode 100644
index 5bff869c49..0000000000
--- a/docs/usage/agents/agent-organization.zh-CN.mdx
+++ /dev/null
@@ -1,51 +0,0 @@
----
-title: LobeChat 助手组织指南 - 提升助手管理效率
-description: 了解如何通过分组、搜索和固定功能更好地组织 LobeChat 的 AI 助手,提升助手管理效率和定位效率。
-tags:
- - LobeChat
- - AI 助手
- - 助手组织
- - 分组设置
- - 助手搜索
- - 助手固定
----
-
-# 助手组织指南
-
-
-
-LobeChat 提供了丰富的 AI 助手资源,用户可以通过助手市场方便地添加各类助手,为 AI 应用提供了广泛的应用场景。
-
-当你添加了大量助手后,在列表中寻找特定助手可能会变得比较困难。LobeChat 提供了`搜索`、`分组`和`固定`功能,帮助您更好地组织助手,提升定位效率。
-
-## 助手分组
-
-首先 LobeChat 的 AI 助手支持以分组的方式进行组织。你可以将同类型的助手归类到一起,并通过折叠和展开分组的方式方便地查询所需助手。
-
-### 助手设置
-
-
-
-- 在单个助手的菜单中,选择`移动到分组`选项可以快速将该助手归类到指定分组。
-- 如果没有你想要的分组,可以选择`添加分组`,快速创建一个新的分组。
-
-### 分组设置
-
-
-
-- 在分组菜单中,可以快速在该分组下新建助手
-- 点击`分组管理`按钮可以对所有分组进行`重命名`、`删除`、`排序`等操作。
-
-## 助手搜索
-
-
-
-- 在助手列表的顶部,您可以通过助手搜索功能,方便地使用关键词定位到您所需的助手。
-
-## 助手固定
-
-
-
-- 在助手菜单中,你可以使用`固定`功能将该助手固定在顶部。
-- 固定助手后,助手列表的上方将出现一个固定区域,显示所有已固定的助手列表。
-- 对于已固定的助手,你可以选择`解除固定`,将其移出固定区域。
diff --git a/docs/usage/agents/concepts.mdx b/docs/usage/agents/concepts.mdx
deleted file mode 100644
index e87fc4f03e..0000000000
--- a/docs/usage/agents/concepts.mdx
+++ /dev/null
@@ -1,40 +0,0 @@
----
-title: Improving User Interaction Efficiency with Agents in LobeChat
-description: >-
- Discover how LobeChat's innovative approach with Agents enhances user experience by providing dedicated functional modules for efficient task handling and quick access to historical conversations.
-
-tags:
- - LobeChat
- - Agents
- - User Interaction Efficiency
- - Task Handling
- - Historical Conversations
----
-
-# Topics and Assistants
-
-## ChatGPT and "Topics"
-
-In the official ChatGPT application, there is only the concept of "topics." As shown in the image, the user's historical conversation topics are listed in the sidebar.
-
-
-
-However, in our usage, we have found that this model has many issues. For example, the information indexing of historical conversations is too scattered. Additionally, when dealing with repetitive tasks, it is difficult to have a stable entry point. For instance, if I want ChatGPT to help me translate a document, in this model, I would need to constantly create new topics and then set up the translation prompt I had previously created. When there are high-frequency tasks, this will result in a very inefficient interaction format.
-
-## Topics and "Agent"
-
-Therefore, in LobeChat, we have introduced the concept of **Agents**. An agent is a complete functional module, each with its own responsibilities and tasks. Assistants can help you handle various tasks and provide professional advice and guidance.
-
-
-
-At the same time, we have integrated topics into each agent. The benefit of this approach is that each agent has an independent topic list. You can choose the corresponding agent based on the current task and quickly switch between historical conversation records. This method is more in line with users' habits in common chat software, improving interaction efficiency.
diff --git a/docs/usage/agents/concepts.zh-CN.mdx b/docs/usage/agents/concepts.zh-CN.mdx
deleted file mode 100644
index 566b701459..0000000000
--- a/docs/usage/agents/concepts.zh-CN.mdx
+++ /dev/null
@@ -1,38 +0,0 @@
----
-title: 在 LobeChat 中进行话题与助手的革新
-description: 了解 LobeChat 中的话题与助手概念,如何提高用户交互效率并解决历史对话信息索引分散的问题。
-tags:
- - LobeChat
- - 话题与助手
- - 交互效率
- - 历史对话记录
- - 信息索引
----
-
-# 话题与助手
-
-## ChatGPT 与「话题」
-
-在 ChatGPT 官方应用中,只存在话题的概念,如图所示,在侧边栏中是用户的历史对话话题列表。
-
-
-
-但在我们的使用过程中其实会发现这种模式存在很多问题,比如历史对话的信息索引过于分散问题,同时当处理一些重复任务时很难有一个稳定的入口,比如我希望有一个稳定的入口可以让 ChatGPT 帮助我翻译文档,在这个模式下,我需要不断新建新的话题同时再设置我之前创建好的翻译 Prompt 设定,当有高频任务存在时,这将是一个效率很低的交互形式。
-
-## 「话题」与「助手」
-
-因此在 LobeChat 中,我们引入了 **助手** 的概念。助手是一个完整的功能模块,每个助手都有自己的职责和任务。助手可以帮助你处理各种任务,并提供专业的建议和指导。
-
-
-
-与此同时,我们将话题索引到每个助手内部。这样做的好处是,每个助手都有一个独立的话题列表,你可以根据当前任务选择对应的助手,并快速切换历史对话记录。这种方式更符合用户对常见聊天软件的使用习惯,提高了交互的效率。
diff --git a/docs/usage/agents/custom-agent.mdx b/docs/usage/agents/custom-agent.mdx
deleted file mode 100644
index 1272867118..0000000000
--- a/docs/usage/agents/custom-agent.mdx
+++ /dev/null
@@ -1,57 +0,0 @@
----
-title: Custom LobeChat Assistant Guide - Adding and Iterating Assistants
-description: >-
- Learn how to add assistants to your favorites list in LobeChat through the role market or by creating custom assistants. Explore detailed steps for creating custom assistants and quick setup tips.
-
-tags:
- - LobeChat
- - Adding Assistants
- - Custom Assistant
- - Role Market
- - Creating Assistants
- - Assistant Configuration
----
-
-# Custom Assistant Guide
-
-As the basic functional unit of LobeChat, adding and iterating assistants is very important. Now you can add assistants to your favorites list in two ways.
-
-## `A` Add through the role market
-
-If you are a beginner in Prompt writing, you might want to browse the assistant market of LobeChat first. Here, you can find commonly used assistants submitted by others and easily add them to your list with just one click, which is very convenient.
-
-
-
-## `B` Create a custom assistant
-
-When you need to handle specific tasks, you need to consider creating a custom assistant to help you solve the problem. You can add and configure the assistant in detail in the following ways.
-
-
-
-
-
-
-
-
- **Quick Setup Tip**: You can conveniently modify the Prompt through the quick edit button in the
- sidebar.
-
-
-
-
-
-
-
-
-If you want to understand Prompt writing tips and common model parameter settings, you can continue to view:
-
-
-
-
-
-
diff --git a/docs/usage/agents/custom-agent.zh-CN.mdx b/docs/usage/agents/custom-agent.zh-CN.mdx
deleted file mode 100644
index 9f11f29198..0000000000
--- a/docs/usage/agents/custom-agent.zh-CN.mdx
+++ /dev/null
@@ -1,53 +0,0 @@
----
-title: LobeChat 自定义助手指南 - 添加和配置助手的最佳方法
-description: 了解如何通过角色市场或新建自定义助手将助手添加到你的常用列表中。快捷设置技巧和常见的模型参数设置也包括在内。
-tags:
- - 自定义助手
- - LobeChat
- - 添加助手
- - 配置助手
- - 角色市场
- - 快捷设置
- - 模型参数设置
----
-
-# 自定义助手指南
-
-作为 LobeChat 的基础职能单位,助手的添加和迭代是非常重要的。现在你可以通过两种方式将助手添加到你的常用列表中
-
-## `A` 通过角色市场添加
-
-如果你是一个 Prompt 编写的新手,不妨先浏览一下 LobeChat 的助手市场。在这里,你可以找到其他人提交的常用助手,并且只需一键添加到你的列表中,非常方便。
-
-
-
-## `B` 通过新建自定义助手
-
-当你需要处理一些特定的任务时,你就需要考虑创建一个自定义助手来帮助你解决问题。可以通过以下方式添加并进行助手的详细配置
-
-
-
-
-
-
-
-**快捷设置技巧**: 可以通过侧边栏的快捷编辑按钮进行 Prompt 的便捷修改
-
-
-
-
-
-
-
-如果你希望理解 Prompt 编写技巧和常见的模型参数设置,可以继续查看:
-
-
-
-
-
-
diff --git a/docs/usage/agents/model.mdx b/docs/usage/agents/model.mdx
deleted file mode 100644
index 61afd6cd68..0000000000
--- a/docs/usage/agents/model.mdx
+++ /dev/null
@@ -1,95 +0,0 @@
----
-title: LobeChat Model Config Guide
-description: >-
- Explore the capabilities of ChatGPT models from gpt-3.5-turbo to gpt-4-32k, understanding their speed, context limits, and cost. Learn about model parameters like temperature and top-p for better output.
-
-tags:
- - ChatGPT Models
- - Model Parameters
- - Neural Networks
- - Language Understanding
- - Generation Capabilities
----
-
-# Model Guide
-
-## ChatGPT
-
-- **gpt-3.5-turbo**: Currently the fastest generating ChatGPT model, it is faster but may sacrifice some text quality, with a context length of 4k.
-- **gpt-4**: ChatGPT 4.0 has improved language understanding and generation capabilities compared to 3.5. It can better understand context and generate more accurate and natural responses. This is thanks to improvements in the GPT-4 model, including better language modeling and deeper semantic understanding, but it may be slower than other models, with a context length of 8k.
-- **gpt-4-32k**: Similar to gpt-4, the context limit is increased to 32k tokens, with a higher cost.
-
-## Concept of Model Parameters
-
-LLM seems magical, but it is essentially a probability problem. The neural network generates a bunch of candidate words from the pre-trained model based on the input text and selects the high-probability ones as output. Most of the related parameters are associated with sampling (i.e., how to select the output from the candidate words).
-
-### `temperature`
-
-This parameter controls the randomness of the model's output. The higher the value, the greater the randomness. Generally, when the same prompt is input multiple times, the model's output varies each time.
-
-- Set to 0: Generates a fixed output for each prompt
-- Lower values: More concentrated and deterministic output
-- Higher values: More random output (more creative)
-
-
- Generally, the longer and clearer the prompt, the better the quality and confidence of the model's
- output. In such cases, the temperature value can be adjusted appropriately. Conversely, if the
- prompt is short and ambiguous, setting a relatively high temperature value will result in unstable
- model output.
-
-
-
-
-### `top_p`
-
-`top_p` is also a sampling parameter, but it differs from temperature in its sampling method. Before outputting, the model generates a bunch of tokens, and these tokens are ranked based on their quality. In the top-p sampling mode, the candidate word list is dynamic, and tokens are selected from the tokens based on a percentage. Top\_p introduces randomness in token selection, allowing other high-scoring tokens to have a chance of being selected, rather than always choosing the highest-scoring one.
-
-
- `top_p` is similar to randomness, and it is generally not recommended to change it together with
- the randomness of temperature.
-
-
-
-
-### `presence_penalty`
-
-The presence penalty parameter can be seen as a punishment for repetitive content in the generated text. When this parameter is set high, the generation model will try to avoid producing repeated words, phrases, or sentences. Conversely, if the presence penalty parameter is set low, the generated text may contain more repetitive content. By adjusting the value of the presence penalty parameter, control over the originality and diversity of the generated text can be achieved. The importance of this parameter is mainly reflected in the following aspects:
-
-- Enhancing the originality and diversity of the generated text: In certain applications, such as creative writing or generating news headlines, it is necessary for the generated text to have high originality and diversity. By increasing the value of the presence penalty parameter, the amount of repeated content in the generated text can be effectively reduced, thereby enhancing its originality and diversity.
-- Preventing the generation of loops and meaningless content: In some cases, the generation model may produce repetitive or meaningless text that usually fails to convey useful information. By appropriately increasing the value of the presence penalty parameter, the probability of generating such meaningless content can be reduced, thereby improving the readability and practicality of the generated text.
-
-
- It is worth noting that the presence penalty parameter, in conjunction with other parameters such
- as temperature and top-p, collectively influences the quality of the generated text. Compared to
- other parameters, the presence penalty parameter primarily focuses on the originality and
- repetitiveness of the text, while the temperature and top-p parameters more significantly affect
- the randomness and determinism of the generated text. By adjusting these parameters reasonably,
- comprehensive control over the quality of the generated text can be achieved.
-
-
-### `frequency_penalty`
-
-It is a mechanism that penalizes frequently occurring new vocabulary in the text to reduce the likelihood of the model repeating the same word. The larger the value, the more likely it is to reduce repeated words.
-
-- `-2.0` When the morning news started broadcasting, I found that my TV now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now now **(The highest frequency word is "now", accounting for 44.79%)**
-- `-1.0` He always watches the news in the early morning, in front of the TV watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch watch **(The highest frequency word is "watch", accounting for 57.69%)**
-- `0.0` When the morning sun poured into the small diner, a tired postman appeared at the door, carrying a bag of letters in his hands. The owner warmly prepared a breakfast for him, and he started sorting the mail while enjoying his breakfast. **(The highest frequency word is "of", accounting for 8.45%)**
-- `1.0` A girl in deep sleep was woken up by a warm ray of sunshine, she saw the first ray of morning light, surrounded by birdsong and flowers, everything was full of vitality. (The highest frequency word is "of", accounting for 5.45%)
-- `2.0` Every morning, he would sit on the balcony to have breakfast. Under the soft setting sun, everything looked very peaceful. However, one day, when he was about to pick up his breakfast, an optimistic little bird flew by, bringing him a good mood for the day. (The highest frequency word is "of", accounting for 4.94%)
-
-
-
-### `reasoning_effort`
-
-The `reasoning_effort` parameter controls the strength of the reasoning process. This setting affects the depth of reasoning the model performs when generating a response. The available values are **`low`**, **`medium`**, and **`high`**, with the following meanings:
-
-- **low**: Lower reasoning effort, resulting in faster response times. Suitable for scenarios where quick responses are needed, but it may sacrifice some reasoning accuracy.
-- **medium** (default): Balances reasoning accuracy and response speed, suitable for most scenarios.
-- **high**: Higher reasoning effort, producing more detailed and complex responses, but slower response times and greater token consumption.
-
-By adjusting the `reasoning_effort` parameter, you can find an appropriate balance between response speed and reasoning depth based on your needs. For example, in conversational scenarios, if fast responses are a priority, you can choose low reasoning effort; if more complex analysis or reasoning is needed, you can opt for high reasoning effort.
-
-
- This parameter is only applicable to reasoning models, such as OpenAI's `o1`, `o1-mini`,
- `o3-mini`, etc.
-
diff --git a/docs/usage/agents/model.zh-CN.mdx b/docs/usage/agents/model.zh-CN.mdx
deleted file mode 100644
index 8ec1dc1afc..0000000000
--- a/docs/usage/agents/model.zh-CN.mdx
+++ /dev/null
@@ -1,88 +0,0 @@
----
-title: ChatGPT 模型指南:参数概念与应用
-description: >-
- 了解 ChatGPT 模型的不同版本及参数概念,包括 temperature、top_p、presence_penalty 和 frequency_penalty。
-
-tags:
- - ChatGPT
- - 模型指南
- - 参数概念
- - LLM
- - 生成模型
----
-
-# 模型指南
-
-## ChatGPT
-
-- **gpt-3.5-turbo**:目前最生成速度最快的 chatgpt 模型更快,但可能会牺牲一些生成文本的质量,上下文长度为 4k。
-- **gpt-4**:ChatGPT 4.0 在语言理解和生成能力方面相对于 3.5 有所提升。它可以更好地理解上下文和语境,并生成更准确、自然的回答。这得益于 GPT-4 模型的改进,包括更好的语言建模和更深入的语义理解,但它的速度可能比其他模型慢,上下文长度为 8k。
-- **gpt-4-32k**:同 gpt-4,上下文限制增加到 32k token,同时费率更高。
-
-## 模型参数概念
-
-LLM 看似很神奇,但本质还是一个概率问题,神经网络根据输入的文本,从预训练的模型里面生成一堆候选词,选择概率高的作为输出,相关的参数,大多都是跟采样有关(也就是要如何从候选词里选择输出)。
-
-### `temperature`
-
-用于控制模型输出的结果的随机性,这个值越大随机性越大。一般我们多次输入相同的 prompt 之后,模型的每次输出都不一样。
-
-- 设置为 0,对每个 prompt 都生成固定的输出
-- 较低的值,输出更集中,更有确定性
-- 较高的值,输出更随机(更有创意 )
-
-
- 一般来说,prompt 越长,描述得越清楚,模型生成的输出质量就越好,置信度越高,这时可以适当调高
- temperature 的值;反过来,如果 prompt 很短,很含糊,这时再设置一个比较高的 temperature
- 值,模型的输出就很不稳定了。
-
-
-
-
-### `top_p`
-
-核采样 `top_p` 也是采样参数,跟 temperature 不一样的采样方式。模型在输出之前,会生成一堆 token,这些 token 根据质量高低排名,核采样模式中候选词列表是动态的,从 tokens 里按百分比选择候选词。 top\_p 为选择 token 引入了随机性,让其他高分的 token 有被选择的机会,不会总是选最高分的。
-
-`top_p` 与随机性类似,一般来说不建议和随机性 temperature 一起更改
-
-
-
-### `presence_penalty`
-
-Presence Penalty 参数可以看作是对生成文本中重复内容的一种惩罚。当该参数设置较高时,生成模型会尽量避免产生重复的词语、短语或句子。相反,如果 Presence Penalty 参数较低,则生成的文本可能会包含更多重复的内容。通过调整 Presence Penalty 参数的值,可以实现对生成文本的原创性和多样性的控制。参数的重要性主要体现在以下几个方面:
-
-- 提高生成文本的独创性和多样性:在某些应用场景下,如创意写作、生成新闻标题等,需要生成的文本具有较高的独创性和多样性。通过增加 Presence Penalty 参数的值,可以有效减少生成文本中的重复内容,从而提高文本的独创性和多样性。
-- 防止生成循环和无意义的内容:在某些情况下,生成模型可能会产生循环、重复的文本,这些文本通常无法传达有效的信息。通过适当增加 Presence Penalty 参数的值,可以降低生成这类无意义内容的概率,提高生成文本的可读性和实用性。
-
-
- 值得注意的是,Presence Penalty 参数与其他参数(如 Temperature 和
- top-p)共同影响着生成文本的质量。对比其他参数,Presence Penalty
- 参数主要关注文本的独创性和重复性,而 Temperature 和 top-p
- 参数则更多地影响着生成文本的随机性和确定性。通过合理地调整这些参数,可以实现对生成文本质量的综合控制
-
-
-
-
-### `frequency_penalty`
-
-是一种机制,通过对文本中频繁出现的新词汇施加惩罚,以减少模型重复同一词语的可能性,值越大,越有可能降低重复字词。
-
-- `-2.0` 当早间新闻开始播出,我发现我家电视现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在现在 *(频率最高的词是 “现在”,占比 44.79%)*
-- `-1.0` 他总是在清晨看新闻,在电视前看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看看 *(频率最高的词是 “看”,占比 57.69%)*
-- `0.0` 当清晨的阳光洒进小餐馆时,一名疲倦的邮递员出现在门口,他的手中提着一袋信件。店主热情地为他准备了一份早餐,他在享用早餐的同时开始整理邮件。**(频率最高的词是 “的”,占比 8.45%)**
-- `1.0` 一个深度睡眠的女孩被一阵温暖的阳光唤醒,她看到了早晨的第一缕阳光,周围是鸟语花香,一切都充满了生机。*(频率最高的词是 “的”,占比 5.45%)*
-- `2.0` 每天早上,他都会在阳台上坐着吃早餐。在柔和的夕阳照耀下,一切看起来都非常宁静。然而有一天,当他准备端起早餐的时候,一只乐观的小鸟飞过,给他带来了一天的好心情。 *(频率最高的词是 “的”,占比 4.94%)*
-
-
-
-### `reasoning_effort`
-
-`reasoning_effort` 参数用于控制推理过程的强度。此参数的设置会影响模型在生成回答时的推理深度。可选值包括 **`low`**、**`medium`** 和 **`high`**,具体含义如下:
-
-- **low(低)**:推理强度较低,生成速度较快,适用于需要快速响应的场景,但可能牺牲一定的推理精度。
-- **medium(中,默认值)**:平衡推理精度与响应速度,适用于大多数场景。
-- **high(高)**:推理强度较高,生成更为详细和复杂的回答,但响应时间较长,且消耗更多的 Token。
-
-通过调整 `reasoning_effort` 参数,可以根据需求在生成速度与推理深度之间找到适合的平衡。例如,在对话场景中,如果更关注快速响应,可以选择低推理强度;如果需要更复杂的分析或推理,可以选择高推理强度。
-
-该参数仅适用于推理模型,如 OpenAI 的 `o1`、`o1-mini`、`o3-mini` 等。
diff --git a/docs/usage/agents/prompt.mdx b/docs/usage/agents/prompt.mdx
deleted file mode 100644
index bce297d49b..0000000000
--- a/docs/usage/agents/prompt.mdx
+++ /dev/null
@@ -1,108 +0,0 @@
----
-title: >-
- Guide to Using Prompts in LobeChat - How to Write Effective Instructions for Generative AI
-
-description: >-
- Learn the basic concepts of prompts and how to write well-structured and effective instructions for generative AI. Improve the quality and effectiveness of prompts to guide AI models accurately.
-
-tags:
- - Generative AI
- - Prompts
- - Writing Instructions
- - Structured Prompts
- - Improving AI Output
----
-
-# Guide to Using Prompts
-
-## Basic Concepts of Prompts
-
-Generative AI is very useful, but it requires human guidance. In most cases, generative AI can be as capable as a new intern at a company, but it needs clear instructions to perform well. The ability to guide generative AI correctly is a very powerful skill. You can guide generative AI by sending a prompt, which is usually a text instruction. A prompt is the input provided to the assistant, and it will affect the output. A good prompt should be structured, clear, concise, and directive.
-
-## How to Write a Well-Structured Prompt
-
-
- A structured prompt refers to the construction of the prompt having a clear logic and structure.
- For example, if you want the model to generate an article, your prompt may need to include the
- article's topic, outline, and style.
-
-
-Let's look at a basic discussion prompt example:
-
-> *"What are the most urgent environmental issues facing our planet, and what actions can individuals take to help address these issues?"*
-
-We can convert it into a simple prompt for the assistant to answer the following questions: placed at the front.
-
-```prompt
-Answer the following questions:
-What are the most urgent environmental issues facing our planet, and what actions can individuals take to help address these issues?
-```
-
-Since the results generated by this prompt are not consistent, some are only one or two sentences. A typical discussion response should have multiple paragraphs, so these results are not ideal. A good prompt should provide **specific formatting and content instructions**. You need to eliminate ambiguity in the language to improve consistency and quality. Here is a better prompt.
-
-```prompt
-Write a highly detailed paper, including an introduction, body, and conclusion, to answer the following questions:
-What are the most urgent environmental issues facing our planet,
-and what actions can individuals take to help address these issues?
-```
-
-The second prompt generates longer output and better structure. The use of the term "paper" in the prompt is intentional, as the assistant can understand the definition of a paper, making it more likely to generate coherent, structured responses.
-
-## How to Improve Quality and Effectiveness
-
-
- There are several ways to improve the quality and effectiveness of prompts:
-
- - **Be Clear About Your Needs:** The model's output will strive to meet your needs, so if your needs are not clear, the output may not meet expectations.
- - **Use Correct Grammar and Spelling:** The model will try to mimic your language style, so if your language style is problematic, the output may also be problematic.
- - **Provide Sufficient Contextual Information:** The model will generate output based on the contextual information you provide, so if the information is insufficient, it may not produce the desired results.
-
-
-After formulating effective prompts for discussing issues, you now need to refine the generated results. This may involve adjusting the output to fit constraints such as word count or combining concepts from different generated results.
-
-A simple method of iteration is to generate multiple outputs and review them to understand the concepts and structures being used. Once the outputs have been evaluated, you can select the most suitable ones and combine them into a coherent response. Another iterative method is to start small and **gradually expand**. This requires more than one prompt: an initial prompt for drafting the initial one or two paragraphs, followed by additional prompts to expand on the content already written. Here is a potential philosophical discussion prompt:
-
-> *"Is mathematics an invention or a discovery? Use careful reasoning to explain your answer."*
-
-Add it to a simple prompt as follows:
-
-```prompt
-Write an opening paragraph to answer the following question:
-Is mathematics an invention or a discovery? Use careful reasoning to explain your answer.
-```
-
-After generating some responses, we found one that we like:
-
-```prompt
-Philosophers and mathematicians have long debated whether mathematics is an invention or a discovery.
-Some argue that the concepts and principles of mathematics exist independently of human thought, and therefore can be considered discovered rather than invented.
-For example, the concepts of numbers, ratios, and geometric shapes are observed in the natural world and can be abstracted and systematically studied.
-```
-
-It's a good start. Then, use another prompt to expand:
-
-```prompt
-I am writing a detailed essay to answer the following prompt:
-Is mathematics an invention or a discovery? Use careful reasoning to explain your answer. Here is my current situation: Philosophers and mathematicians have long debated whether mathematics is an invention or a discovery.
-Some argue that the concepts and principles of mathematics exist independently of human thought, and therefore can be considered discovered rather than invented.
-For example, the concepts of numbers, ratios, and geometric shapes are observed in the natural world and can be abstracted and systematically studied. Write the next paragraph of my essay.
-```
-
-I used this prompt to generate several extensions and selected one that I liked, resulting in the following:
-
-```prompt
-Philosophers and mathematicians have long debated whether mathematics is invented or discovered.
-Some argue that the concepts and principles of mathematics exist independently of human thought, and therefore can be considered to be discovered rather than invented.
-For example, the concepts of numbers, ratios, and geometric shapes are observed in the natural world and can be abstracted and systematically studied.
-On the other hand, some believe that mathematics is largely an invention of human thought.
-Mathematical principles are based on logical reasoning, which is a characteristic of human thought.
-For instance, Euclidean geometry is based on abstract axioms and premises, accepted as true without the need for proof.
-Therefore, geometry can be considered an invention of human thought rather than a discovery.
-Similarly, mathematical formulas and equations are used to model and predict physical phenomena, which are the result of human reasoning.
-```
-
-Using the prompt extensions, we can iteratively write and iterate at each step. This is very useful for situations that require **generating higher quality output and incremental modifications**.
-
-## Further Reading
-
-- **Learn Prompting**: [https://learnprompting.org/en-US/docs/intro](https://learnprompting.org/en-US/docs/intro)
diff --git a/docs/usage/agents/prompt.zh-CN.mdx b/docs/usage/agents/prompt.zh-CN.mdx
deleted file mode 100644
index 5c2e7de6cf..0000000000
--- a/docs/usage/agents/prompt.zh-CN.mdx
+++ /dev/null
@@ -1,105 +0,0 @@
----
-title: 如何通过 LobeChat 写好结构化 Prompt - 提高生成式 AI 输出质量的关键
-description: 学习如何撰写结构化 Prompt 可以提高生成式 AI 输出的质量和效果。本文介绍了撰写有效 Prompt 的方法和技巧,以及如何逐步扩展和优化生成的结果。
-tags:
- - 结构化 Prompt
- - 生成式AI
- - 提高输出质量
- - 撰写技巧
- - 逐步扩展
----
-
-# Prompt 使用指南
-
-## Prompt 基本概念
-
-生成式 AI 非常有用,但它需要人类指导。通常情况下,生成式 AI 能就像公司新来的实习生一样,非常有能力,但需要清晰的指示才能做得好。能够正确地指导生成式 AI 是一项非常强大的技能。你可以通过发送一个 prompt 来指导生成式 AI,这通常是一个文本指令。Prompt 是向助手提供的输入,它会影响输出结果。一个好的 Prompt 应该是结构化的,清晰的,简洁的,并且具有指向性。
-
-## 如何写好一个结构化 prompt
-
-
- 结构化 prompt 是指 prompt 的构造应该有明确的逻辑和结构。例如,如果你想让模型生成一篇文章,你的
- prompt 可能需要包括文章的主题,文章的大纲,文章的风格等信息。
-
-
-让我们看一个基本的讨论问题的例子:
-
-> *"我们星球面临的最紧迫的环境问题是什么,个人可以采取哪些措施来帮助解决这些问题?"*
-
-我们可以将其转化为简单的助手提示,将回答以下问题:放在前面。
-
-```prompt
-回答以下问题:
-我们星球面临的最紧迫的环境问题是什么,个人可以采取哪些措施来帮助解决这些问题?
-```
-
-由于这个提示生成的结果并不一致,有些只有一两个句子。一个典型的讨论回答应该有多个段落,因此这些结果并不理想。一个好的提示应该给出**具体的格式和内容指令**。您需要消除语言中的歧义以提高一致性和质量。这是一个更好的提示。
-
-```prompt
-写一篇高度详细的论文,包括引言、正文和结论段,回答以下问题:
-我们星球面临的最紧迫的环境问题是什么,
-个人可以采取哪些措施来帮助解决这些问题?
-```
-
-第二个提示生成了更长的输出和更好的结构。提示中使用 “论文” 一词是有意的,因为助手可以理解论文的定义,因此更有可能生成连贯的、结构化的回答。
-
-
-
-## 如何提升其质量和效果
-
-
- 提升 prompt 质量和效果的方法主要有以下几点:
-
- - **尽量明确你的需求:** 模型的输出会尽可能满足你的需求,所以如果你的需求不明确,输出可能会不如预期。
- - **使用正确的语法和拼写:** 模型会尽可能模仿你的语言风格,所以如果你的语言风格有问题,输出可能也会有问题。
- - **提供足够的上下文信息:** 模型会根据你提供的上下文信息生成输出,所以如果你提供的上下文信息不足,可能无法生成你想要的结果。
-
-
-在为讨论问题制定有效的提示后,您现在需要细化生成的结果。这可能涉及到调整输出以符合诸如字数等限制,或将不同生成的结果的概念组合在一起。
-
-迭代的一个简单方法是生成多个输出并查看它们,以了解正在使用的概念和结构。一旦评估了输出,您就可以选择最合适的输出并将它们组合成一个连贯的回答。另一种迭代的方法是逐步开始,然后**逐步扩展**。这需要不止一个提示:一个起始提示,用于撰写最初的一两段,然后是其他提示,以扩展已经写过的内容。以下是一个潜在的哲学讨论问题:
-
-> *"数学是发明还是发现?用仔细的推理来解释你的答案。"*
-
-将其添加到一个简单的提示中,如下所示:
-
-```prompt
-写一个起始段回答以下问题:
-数学是发明还是发现?用仔细的推理来解释你的答案。
-```
-
-生成了一些回答,找到了一个我们喜欢的:
-
-```prompt
-哲学家和数学家之间争论不休,数学是发明还是发现。
-一些人认为,数学的概念和原则独立于人类思维存在,因此可以被认为是被发现而不是被发明的。
-例如,数字、比率和几何形状的概念在自然界中被观察到,可以被抽象化和系统化地研究。
-```
-
-是一个不错的开端。然后使用另一个提示来扩展:
-
-```prompt
-我正在撰写一篇详细的短文,回答以下提示:
-数学是发明还是发现?用仔细的推理来解释你的答案。这是我目前的情况: 哲学家和数学家之间争论不休,数学是发明还是发现。
-一些人认为,数学的概念和原则独立于人类思维存在,因此可以被认为是被发现而不是被发明的。
-例如,数字、比率和几何形状的概念在自然界中被观察到,可以被抽象化和系统化地研究。写下我文章的下一段。
-```
-
-使用此提示生成了几个扩展,并选择了一个我喜欢的,最终得到了以下结果:
-
-```prompt
-哲学家和数学家之间争论不休,数学是发明还是发现。
-一些人认为,数学的概念和原则独立于人类思维存在,因此可以被认为是被发现而不是被发明的。
-例如,数字、比率和几何形状的概念在自然界中被观察到,可以被抽象化和系统化地研究。
-另一方面,有人认为数学在很大程度上是人类思维的发明。
-数学原理基于逻辑推理,这是人类思维的一个特征。
-例如,欧几里德几何基于抽象的公理和前提,被接受为真实而无需证明。
-因此,可以认为几何是人类思维的发明,而不是一种发现。
-同样,数学公式和方程用于模拟和预测物理现象,这又是人类推理的结果。
-```
-
-使用扩展提示,我们可以逐步地写作并在每个步骤上进行迭代。这对于需要**生成更高质量的输出并希望逐步修改**的情况非常有用。
-
-## 扩展阅读
-
-- **Learn Prompting**: [https://learnprompting.org/zh-Hans/docs/intro](https://learnprompting.org/zh-Hans/docs/intro)
diff --git a/docs/usage/agents/topics.mdx b/docs/usage/agents/topics.mdx
deleted file mode 100644
index 0f7bb762c5..0000000000
--- a/docs/usage/agents/topics.mdx
+++ /dev/null
@@ -1,26 +0,0 @@
----
-title: LobeChat Topic Usage Guide
-description: >-
- Learn how to save and manage topics during conversations in LobeChat, including saving topics, accessing the topic list, and pinning favorite topics.
-
-tags:
- - Topic Usage
- - Conversation Management
- - Save Topic
- - Topic List
- - Favorite Topics
----
-
-# Topic Usage Guide
-
-
-
-- **Save Topic:** During a conversation, if you want to save the current context and start a new topic, you can click the save button next to the send button.
-- **Topic List:** Clicking on a topic in the list allows for quick switching of historical conversation records and continuing the conversation. You can also use the star icon ⭐️ to pin favorite topics to the top, or use the more button on the right to rename or delete topics.
diff --git a/docs/usage/agents/topics.zh-CN.mdx b/docs/usage/agents/topics.zh-CN.mdx
deleted file mode 100644
index d272700ed3..0000000000
--- a/docs/usage/agents/topics.zh-CN.mdx
+++ /dev/null
@@ -1,25 +0,0 @@
----
-title: LobeChat 话题使用指南 - 保存话题、快速切换历史记录
-description: 学习如何在 LobeChat 中保存话题、快速切换历史记录,并对话题进行收藏、重命名和删除操作。
-tags:
- - 话题使用指南
- - 保存话题
- - 快速切换历史记录
- - 话题收藏
- - 话题重命名
- - 话题删除
----
-
-# 话题使用指南
-
-
-
-- **保存话题:** 在聊天过程中,如果想要保存当前上下文并开启新的话题,可以点击发送按钮旁边的保存按钮。
-- **话题列表:** 点击列表中的话题可以快速切换历史对话记录,并继续对话。你还可以通过点击星标图标 ⭐️ 将话题收藏置顶,或者通过右侧更多按钮对话题进行重命名和删除操作。
diff --git a/docs/usage/community/agent-market.mdx b/docs/usage/community/agent-market.mdx
new file mode 100644
index 0000000000..09783f5b8c
--- /dev/null
+++ b/docs/usage/community/agent-market.mdx
@@ -0,0 +1,44 @@
+---
+title: Assistant Marketplace
+description: >-
+ LobeHub's Assistant Marketplace is a vibrant and innovative community that
+ brings together a wide range of thoughtfully designed assistants to enhance
+ productivity in work and learning environments. You're welcome to submit your
+ own assistant creations and help build a collection of useful, creative, and
+ cutting-edge tools.
+tags:
+ - LobeHub
+ - LobeHub
+ - Assistant Marketplace
+ - Innovation Community
+ - Collaborative Space
+ - Assistant Creations
+ - Automated Internationalization
+ - Multilingual Versions
+---
+
+# Assistant Marketplace
+
+The LobeHub Assistant Marketplace features high-quality AI assistants created by developers and enthusiasts from around the world. Whether you're looking for a coding assistant, writing advisor, language tutor, or a consultant in a specialized field, you'll find the right tool here. These assistants are carefully crafted by community members and tested in real-world scenarios, making them ready for immediate use in both work and study settings.
+
+The marketplace is more than just a resource hub—it's an open platform for creativity. You can publish your own custom assistants and share your ideas and expertise with users across the globe.
+
+## Explore the Assistant Marketplace
+
+To access the Assistant Marketplace, click on "Community" → "Assistants" in the left sidebar.
+
+
+
+Assistants are organized by category, making it easy to quickly find the type of assistant you need.
+
+## Installing and Using Assistants
+
+View and install assistants
+
+Click on any assistant card to open its detail page. Here you'll find an overview, settings, capabilities, and version history. Once you've confirmed the assistant meets your needs, click "Add Assistant & Start Chatting" to begin.
+
+
+
+### Using Assistants
+
+After selecting "Add Assistant & Start Chatting," you can immediately begin interacting with the assistant to test its features and performance. You can customize system prompts, choose different models, configure plugins, and more. You also have the option to create a copy of any marketplace assistant, allowing you to personalize it while retaining its original capabilities.
diff --git a/docs/usage/community/agent-market.zh-CN.mdx b/docs/usage/community/agent-market.zh-CN.mdx
new file mode 100644
index 0000000000..0e854b8e48
--- /dev/null
+++ b/docs/usage/community/agent-market.zh-CN.mdx
@@ -0,0 +1,41 @@
+---
+title: 助手市场
+description: >-
+ LobeHub
+ 助手市场是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
+tags:
+ - LobeHub
+ - LobeHub
+ - 助手市场
+ - 创新社区
+ - 协作空间
+ - 助手作品
+ - 自动化国际化
+ - 多语言版本
+---
+
+# 助手市场
+
+LobeHub 的助手市场汇聚了来自全球创作者的优质 AI 助手。无论你需要编程助手、写作顾问、语言教师,还是专业领域的咨询专家,都能在这里找到合适的助手。这些助手由社区成员精心打造,经过实际使用验证,能直接投入工作和学习场景。
+
+助手市场不只是获取资源的地方,更是一个开放的创作平台。你可以发布自己定制的助手,与全球用户分享你的创意和专业知识。
+
+## 浏览助手市场
+
+点击左侧边栏的「社区」→「助手」,进入助手市场主页。
+
+
+
+助手市场按类别组织,方便你快速找到所需的助手类型。
+
+## 安装和使用助手
+
+查看和安装助手
+
+点击任意助手卡片,进入详情页面。这里展示了助手的概览、设定、能力和版本历史等信息。确认助手符合你的需求后,可以在此页面「添加助手并会话」。
+
+
+
+### 使用助手
+
+选择「添加助手并会话」后,你可以立即开始与助手对话,测试助手的功能和效果。根据需要调整助手的系统提示词、模型选择、插件配置等。你也可以基于市场助手创建副本,在保留原有能力的基础上进行个性化修改。
diff --git a/docs/usage/community/become-a-creator.mdx b/docs/usage/community/become-a-creator.mdx
new file mode 100644
index 0000000000..c67dfb9e80
--- /dev/null
+++ b/docs/usage/community/become-a-creator.mdx
@@ -0,0 +1,27 @@
+---
+title: Community Creators
+description: >-
+ The LobeHub Community is a vibrant and innovative space that brings together a
+ wide range of thoughtfully designed assistants to enhance productivity in work
+ and learning scenarios. You're welcome to submit your own assistant creations
+ and collaborate to build more interesting, practical, and creative tools.
+tags:
+ - LobeHub
+ - Community Creators
+ - Innovation Community
+ - Collaborative Space
+---
+
+# LobeHub Community Creators
+
+The LobeHub Community is a vibrant and innovative space that brings together a wide range of thoughtfully designed assistants to enhance productivity in work and learning scenarios. You're welcome to submit your own assistant creations and collaborate to build more interesting, practical, and creative tools.
+
+## Join the Community
+
+Click on the sidebar: **Community** → **Assistants** → then click **Become a Creator** in the top right corner.
+
+
+
+## Personal Profile
+
+Your personal profile will showcase your creator information and all the content you’ve published.
diff --git a/docs/usage/community/become-a-creator.zh-CN.mdx b/docs/usage/community/become-a-creator.zh-CN.mdx
new file mode 100644
index 0000000000..40abc9f5ec
--- /dev/null
+++ b/docs/usage/community/become-a-creator.zh-CN.mdx
@@ -0,0 +1,25 @@
+---
+title: 社区创作者
+description: >-
+ LobeHub
+ 社区是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手
+tags:
+ - LobeHub
+ - 社区创作者
+ - 创新社区
+ - 协作空间
+---
+
+# LobeHub 社区创作者
+
+LobeHub 社区是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
+
+## 加入
+
+点击左侧边栏的「社区」→「助手」→右上角「成为创作者」。
+
+
+
+## 个人主页
+
+你的个人主页会展示你的创作者信息和已发布的内容。
diff --git a/docs/usage/community/custom-plugin.mdx b/docs/usage/community/custom-plugin.mdx
new file mode 100644
index 0000000000..08f0377362
--- /dev/null
+++ b/docs/usage/community/custom-plugin.mdx
@@ -0,0 +1,37 @@
+---
+title: Custom Plugins
+description: >-
+ Learn how to install custom plugins and develop LobeHub plugins to extend the
+ capabilities of your AI assistant.
+tags:
+ - Custom Plugins
+ - LobeHub
+ - Plugin Installation
+ - Plugin Development
+ - AI Assistant
+---
+
+# Custom Plugins
+
+## Installing Custom Plugins
+
+If you'd like to install a plugin that isn't available in the LobeHub Plugin Store—such as one you've developed yourself—you can do so by clicking on "Custom Plugin":
+
+
+
+LobeHub's plugin system is also compatible with ChatGPT plugins, allowing you to install them with a single click.
+
+To try installing a custom plugin manually, you can use the following links:
+
+- `Custom Lobe Plugin` Mock Credit Card: [https://lobe-plugin-mock-credit-card.vercel.app/manifest.json](https://lobe-plugin-mock-credit-card.vercel.app/manifest.json)
+- `ChatGPT Plugin` Access Links: [https://www.accesslinks.ai/.well-known/ai-plugin.json](https://www.accesslinks.ai/.well-known/ai-plugin.json)
+
+
+
+
+
+
+
+## Developing Custom Plugins
+
+If you're interested in developing your own LobeHub plugin, check out the [Plugin Development Guide](/en/docs/usage/plugins/development) to push the boundaries of what's possible with your AI assistant!
diff --git a/docs/usage/community/custom-plugin.zh-CN.mdx b/docs/usage/community/custom-plugin.zh-CN.mdx
new file mode 100644
index 0000000000..6ca41bdbed
--- /dev/null
+++ b/docs/usage/community/custom-plugin.zh-CN.mdx
@@ -0,0 +1,35 @@
+---
+title: 自定义插件
+description: 学习如何安装自定义插件和开发 LobeHub 插件,扩展你的 AI 智能助手的功能。
+tags:
+ - 自定义插件
+ - LobeHub
+ - 插件安装
+ - 插件开发
+ - AI 智能助手
+---
+
+# 自定义插件
+
+## 安装自定义插件
+
+如果你希望安装一个不在 LobeHub 插件商店中的插件,例如自己开发的插件,你可以点击「自定义插件」进行安装:
+
+
+
+此外,LobeHub 的插件机制兼容了 ChatGPT 的插件,因此你可以一键安装相应的 ChatGPT 插件。
+
+如果你希望尝试自行安装自定义插件,你可以使用以下链接来尝试:
+
+- `自定义 Lobe 插件` Mock Credit Card:[https://lobe-plugin-mock-credit-card.vercel.app/manifest.json](https://lobe-plugin-mock-credit-card.vercel.app/manifest.json)
+- `ChatGPT 插件` Access Links:[https://www.accesslinks.ai/.well-known/ai-plugin.json](https://www.accesslinks.ai/.well-known/ai-plugin.json)
+
+
+
+
+
+
+
+## 开发自定义插件
+
+如果你希望自行开发一个 LobeHub 的插件,欢迎查阅 [插件开发指南](/zh/docs/usage/plugins/development) 以扩展你的 AI 智能助手的可能性边界!
diff --git a/docs/usage/community/mcp-market.mdx b/docs/usage/community/mcp-market.mdx
new file mode 100644
index 0000000000..a1b4f07fcd
--- /dev/null
+++ b/docs/usage/community/mcp-market.mdx
@@ -0,0 +1,32 @@
+---
+title: MCP Marketplace
+description: >-
+ The MCP Marketplace is LobeHub's official hub, featuring a wide range of
+ meticulously crafted MCPs designed to enhance productivity in both work and
+ learning environments. You're welcome to submit your own MCP creations and
+ help build a more innovative, practical, and engaging ecosystem.
+tags:
+ - LobeHub
+ - MCP Marketplace
+ - Innovation Community
+ - Collaborative Space
+ - MCP Creations
+ - Automated Internationalization
+ - Multilingual Support
+---
+
+# MCP Marketplace
+
+The MCP Marketplace by LobeHub brings together high-quality MCPs from creators around the world.
+
+## Explore the MCP Marketplace
+
+Click on "Community" → "MCP" in the left sidebar to access the MCP Marketplace homepage.
+
+
+
+The MCP Marketplace is organized by category, making it easy to quickly find the type of MCP you need.
+
+## Installing an MCP
+
+Please exercise caution when using MCPs from unknown sources. LobeHub cannot guarantee the safety of all MCPs.
diff --git a/docs/usage/community/mcp-market.zh-CN.mdx b/docs/usage/community/mcp-market.zh-CN.mdx
new file mode 100644
index 0000000000..335d6bcf59
--- /dev/null
+++ b/docs/usage/community/mcp-market.zh-CN.mdx
@@ -0,0 +1,30 @@
+---
+title: MCP 市场
+description: >-
+ MCP 市场是 LobeHub 的官方市场,汇聚了众多精心设计的 MCP,为工作场景和学习提供便利。欢迎提交你的 MCP
+ 作品,共同创造更多有趣、实用且具有创新性的 MCP。
+tags:
+ - LobeHub
+ - MCP 市场
+ - 创新社区
+ - 协作空间
+ - MCP 作品
+ - 自动化国际化
+ - 多语言版本
+---
+
+# MCP 市场
+
+LobeHub 的 MCP 市场汇聚了来自全球创作者的优质 MCP。
+
+## 浏览 MCP 市场
+
+点击左侧边栏的「社区」→「MCP」,进入 MCP 市场主页。
+
+
+
+MCP 市场按类别组织,方便你快速找到所需的 MCP 类型。
+
+## 安装 MCP
+
+请谨慎使用来路不明的 MCP,LobeHub 无法确保所有 MCP 的安全性。
diff --git a/docs/usage/community/publish-agent.mdx b/docs/usage/community/publish-agent.mdx
new file mode 100644
index 0000000000..b9cea33fd4
--- /dev/null
+++ b/docs/usage/community/publish-agent.mdx
@@ -0,0 +1,30 @@
+---
+title: Publish Your Agent on the Marketplace
+description: >-
+ The LobeHub Assistant Marketplace is a vibrant and innovative community that
+ brings together a wide range of thoughtfully designed assistants to enhance
+ productivity and learning. You're welcome to submit your own assistant and
+ contribute to a growing collection of creative, practical, and cutting-edge
+ tools.
+tags:
+ - LobeHub
+ - LobeHub
+ - Assistant Marketplace
+ - Innovation Community
+ - Collaborative Space
+ - Assistant Creations
+ - Automated Internationalization
+ - Multilingual Support
+---
+
+# Submit Your Agent
+
+If you've created a high-quality Agent, you can submit it to the marketplace and share it with users around the world.
+
+If this is your first time submitting an Agent, you'll need to [create a community profile](/en/docs/usage/community/become-a-creator) first. This allows you to manage your submissions and published content within the community.
+
+Once you're a registered creator, go to the Agent you want to publish, open its "Agent Profile," and click "Share Assistant to Community."
+
+
+
+After your submission is reviewed and approved, your Agent will appear in the marketplace, available to users worldwide. LobeHub welcomes all users to join this ever-growing ecosystem and take part in the continuous improvement and evolution of assistants. Together, we can build a diverse, practical, and innovative collection of tools that enrich the assistant experience for everyone.
diff --git a/docs/usage/community/publish-agent.zh-CN.mdx b/docs/usage/community/publish-agent.zh-CN.mdx
new file mode 100644
index 0000000000..5584dfe832
--- /dev/null
+++ b/docs/usage/community/publish-agent.zh-CN.mdx
@@ -0,0 +1,27 @@
+---
+title: 在市场发布 Agent
+description: >-
+ LobeHub
+ 助手市场是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
+tags:
+ - LobeHub
+ - LobeHub
+ - 助手市场
+ - 创新社区
+ - 协作空间
+ - 助手作品
+ - 自动化国际化
+ - 多语言版本
+---
+
+# 提交你的 Agent
+
+如果你创建了优质的 Agent,可以提交到市场与全球用户分享。
+
+如果你是首次提交 Agent,需要先[创建社区个人档案](/zh/docs/usage/community/become-a-creator),以便在社区上提交和管理上架信息。
+
+当你已成为创作者,选择需要发布的 Agent 进入「Agent 档案」,点击「分享助理到社区」即可。
+
+
+
+通过审核后,你的 Agent 会出现在市场中,供全球用户使用。LobeHub 欢迎所有用户加入这个不断成长的生态系统,共同参与到助理的迭代与优化中来。共同创造出更多有趣、实用且具有创新性的助理,进一步丰富助理的多样性和实用性。
diff --git a/docs/usage/community/pulgin-development.mdx b/docs/usage/community/pulgin-development.mdx
new file mode 100644
index 0000000000..273b910804
--- /dev/null
+++ b/docs/usage/community/pulgin-development.mdx
@@ -0,0 +1,281 @@
+---
+title: Plugin Development
+description: >-
+ Learn how to add and use custom plugins in LobeHub, including creating a
+ plugin project, adding local plugins in role settings, testing plugin
+ functionality, and understanding the development and deployment process.
+tags:
+ - LobeHub
+ - LobeHub
+ - Plugin Development
+ - Custom Plugins
+ - Plugin Deployment
+ - Plugin Publishing
+ - Plugin UI
+ - Plugin SDK
+---
+
+# Plugin Development
+
+## Plugin Structure
+
+A LobeHub plugin consists of the following components:
+
+1. **Plugin Index**: Displays basic plugin information, including name, description, author, version, and a link to the plugin manifest. The official plugin index is hosted at [lobe-chat-plugins](https://github.com/lobehub/lobe-chat-plugins). To publish your plugin to the official marketplace, you need to [submit a PR](https://github.com/lobehub/lobe-chat-plugins/pulls) to this repository.
+2. **Plugin Manifest**: Describes the plugin's functionality, including backend services, frontend UI, version, and more. For a detailed explanation, see [manifest][manifest-docs-url].
+3. **Plugin Services**: Implements the backend and optional frontend modules described in the manifest:
+ - **Backend**: Implements the `api` section defined in the manifest.
+ - **Frontend UI** (optional): Implements the `ui` section, allowing for richer message displays beyond plain text.
+
+## Custom Plugin Workflow
+
+This section walks you through how to add and use a custom plugin in LobeHub.
+
+
+ ### Create and Start a Plugin Project
+
+ First, create a local plugin project using our template: [lobe-chat-plugin-template][lobe-chat-plugin-template-url]
+
+ ```bash
+ $ git clone https://github.com/lobehub/chat-plugin-template.git
+ $ cd chat-plugin-template
+ $ npm i
+ $ npm run dev
+ ```
+
+ Once you see `ready started server on 0.0.0.0:3400, url: http://localhost:3400`, your plugin service is running locally.
+
+
+
+ ### Add Local Plugin in LobeHub Role Settings
+
+ Next, go to LobeHub, create a new assistant, and open its session settings:
+
+
+
+ Click the Add button on the right side of the plugin list to open the custom plugin dialog:
+
+
+
+ Enter `http://localhost:3400/manifest-dev.json` in the **Plugin Manifest URL** field. This is the local address of your plugin manifest.
+
+ You should now see the plugin identifier auto-filled as `chat-plugin-template`. Fill in the remaining fields (only the title is required), then click Save to complete the setup.
+
+
+
+ Once added, the plugin will appear in the list. To edit its configuration, click the Settings button on the far right.
+
+
+
+ ### Test Plugin Functionality in Chat
+
+ Now let’s test if the plugin works as expected.
+
+ Click Back to return to the chat area, and send a message like: “What should I wear?” The assistant will ask for your gender and current mood.
+
+
+
+ After you respond, the assistant will call the plugin, fetch clothing recommendations based on your gender and mood, and present the results along with a summary.
+
+
+
+ After completing these steps, you now understand the basic process of adding and using a custom plugin in LobeHub.
+
+
+## Local Plugin Development
+
+Now that you know how to add and use a plugin, let’s dive into the development process.
+
+### Manifest
+
+The `manifest` defines how the plugin works. The key fields are `api` and `ui`, which describe the backend API and frontend UI respectively.
+
+Here’s an example from the template:
+
+```json
+{
+ "api": [
+ {
+ "url": "http://localhost:3400/api/clothes",
+ "name": "recommendClothes",
+ "description": "Recommend clothes based on the user's mood",
+ "parameters": {
+ "properties": {
+ "mood": {
+ "description": "User's current mood. Options: happy, sad, anger, fear, surprise, disgust",
+            "enum": ["happy", "sad", "anger", "fear", "surprise", "disgust"],
+ "type": "string"
+ },
+ "gender": {
+ "type": "string",
+ "enum": ["man", "woman"],
+ "description": "User's gender, must be asked before calling the API"
+ }
+ },
+ "required": ["mood", "gender"],
+ "type": "object"
+ }
+ }
+ ],
+ "gateway": "http://localhost:3400/api/gateway",
+ "identifier": "chat-plugin-template",
+ "ui": {
+ "url": "http://localhost:3400",
+ "height": 200
+ },
+ "version": "1"
+}
+```
+
+Key fields:
+
+1. `identifier`: Unique plugin ID. Must be globally unique.
+2. `api`: Array of API definitions. Each includes `url`, `name`, `description`, and `parameters`. The `description` and `parameters` are used in GPT’s [Function Call](https://sspai.com/post/81986) and must follow [JSON Schema](https://json-schema.org/).
+3. `ui`: Defines the plugin’s frontend interface, loaded via iframe. You can specify height and width.
+4. `gateway`: Specifies the API gateway. LobeHub’s default is a cloud service, but for local plugins, this should point to your local server.
+5. `version`: Plugin version (currently unused).
+
+For a full breakdown of manifest fields, see: [manifest][manifest-docs-url].
+
+### Project Structure
+
+The [lobe-chat-plugin-template][lobe-chat-plugin-template-url] uses Next.js. Its structure:
+
+```
+➜ chat-plugin-template
+├── public
+│ └── manifest-dev.json # Manifest file
+├── src
+│ └── pages
+│ │ ├── api # Next.js API routes
+│ │ │ ├── clothes.ts # recommendClothes API
+│ │ │ └── gateway.ts # Local plugin gateway
+│ │ └── index.tsx # Frontend UI
+```
+
+You can use any framework or language, as long as it implements the manifest functionality.
+
+We also welcome contributions of templates in other frameworks and languages.
+
+### Backend
+
+The backend implements the APIs defined in the manifest. The template uses Vercel’s [Edge Runtime](https://nextjs.org/docs/pages/api-reference/edge) for serverless deployment.
+
+#### API Implementation
+
+We provide a `createErrorResponse` method in `@lobehub/chat-plugin-sdk` for handling errors. See [PluginErrorType][plugin-error-type-url] for available types.
+
+Example implementation of the clothes API:
+
+```ts
+export default async (req: Request) => {
+ if (req.method !== 'POST') return createErrorResponse(PluginErrorType.MethodNotAllowed);
+
+ const { gender, mood } = (await req.json()) as RequestData;
+
+ const clothes = gender === 'man' ? manClothes : womanClothes;
+
+ const result: ResponseData = {
+ clothes: clothes[mood] || [],
+ mood,
+ today: Date.now(),
+ };
+
+ return new Response(JSON.stringify(result));
+};
+```
+
+`manClothes` and `womanClothes` are mock data. Replace with real data sources as needed.
+
+#### Plugin Gateway
+
+LobeHub’s default plugin gateway is `/api/plugins`, which sends requests to the `api.url` in the manifest.
+
+For local plugins, set the `gateway` to a local address (e.g., `http://localhost:3400/api/gateway`). Then implement the gateway:
+
+```ts
+import { createLobeChatPluginGateway } from '@lobehub/chat-plugins-gateway';
+
+export const config = {
+  runtime: 'edge',
+};
+
+export default createLobeChatPluginGateway();
+```
+
+[`@lobehub/chat-plugins-gateway`](https://github.com/lobehub/chat-plugins-gateway) provides the gateway implementation used in LobeHub. Use it to route requests to your local plugin service.
+
+### Plugin UI
+
+The UI is optional. For example, the official plugin [🧩 / 🕸 Web Content Extractor](https://github.com/lobehub/chat-plugin-web-crawler) has no UI.
+
+
+
+If you want to display rich content or interactive elements, you can build a custom UI. For example, the [Search Engine](https://github.com/lobehub/chat-plugin-search-engine) plugin:
+
+
+
+#### Implementing the UI
+
+LobeHub loads plugin UIs via `iframe` and uses `postMessage` for communication. You can use any frontend framework or language.
+
+
+
+The template uses React + Next.js + [antd](https://ant.design/). See [`src/pages/index.tsx`](https://github.com/lobehub/chat-plugin-template/blob/main/src/pages/index.tsx) for the UI implementation.
+
+To simplify communication, use `fetchPluginMessage` from [`@lobehub/chat-plugin-sdk`](https://github.com/lobehub/chat-plugin-sdk). It fetches the current plugin message from LobeHub. See [fetchPluginMessage][fetch-plugin-message-url] for details.
+
+```tsx
+import { fetchPluginMessage } from '@lobehub/chat-plugin-sdk';
+import { memo, useEffect, useState } from 'react';
+
+import { ResponseData } from '@/type';
+
+const Render = memo(() => {
+ const [data, setData] = useState();
+
+ useEffect(() => {
+ fetchPluginMessage().then((e: ResponseData) => {
+ setData(e);
+ });
+ }, []);
+
+  return <>...</>;
+});
+
+export default Render;
+```
+
+## Plugin Deployment & Publishing
+
+Once your plugin is ready, deploy it using your preferred method—Vercel, Docker, etc.
+
+To share your plugin with others, consider [submitting it](https://github.com/lobehub/lobe-chat-plugins) to the official plugin marketplace.
+
+[![][submit-plugin-shield]][submit-plugin-url]
+
+### Plugin Shield
+
+[](https://github.com/lobehub/lobe-chat-plugins)
+
+```markdown
+[](https://github.com/lobehub/lobe-chat-plugins)
+```
+
+## Links
+
+- **📘 Plugin SDK Docs**: [https://chat-plugin-sdk.lobehub.com](https://chat-plugin-sdk.lobehub.com)
+- **🚀 chat-plugin-template**: [https://github.com/lobehub/chat-plugin-template](https://github.com/lobehub/chat-plugin-template)
+- **🧩 chat-plugin-sdk**: [https://github.com/lobehub/chat-plugin-sdk](https://github.com/lobehub/chat-plugin-sdk)
+- **🚪 chat-plugin-gateway**: [https://github.com/lobehub/chat-plugins-gateway](https://github.com/lobehub/chat-plugins-gateway)
+- **🏪 lobe-chat-plugins**: [https://github.com/lobehub/lobe-chat-plugins](https://github.com/lobehub/lobe-chat-plugins)
+
+
+
+[fetch-plugin-message-url]: https://github.com/lobehub/chat-plugin-template
+[lobe-chat-plugin-template-url]: https://github.com/lobehub/chat-plugin-template
+[manifest-docs-url]: https://chat-plugin-sdk.lobehub.com/guides/plugin-manifest
+[plugin-error-type-url]: https://github.com/lobehub/chat-plugin-template
+[submit-plugin-shield]: https://img.shields.io/badge/🧩/🏪_submit_plugin-%E2%86%92-95f3d9?labelColor=black&style=for-the-badge
+[submit-plugin-url]: https://github.com/lobehub/lobe-chat-plugins
+
diff --git a/docs/usage/plugins/development.zh-CN.mdx b/docs/usage/community/pulgin-development.zh-CN.mdx
similarity index 78%
rename from docs/usage/plugins/development.zh-CN.mdx
rename to docs/usage/community/pulgin-development.zh-CN.mdx
index 7ac82ce7bd..2c7b99e265 100644
--- a/docs/usage/plugins/development.zh-CN.mdx
+++ b/docs/usage/community/pulgin-development.zh-CN.mdx
@@ -1,8 +1,9 @@
---
-title: LobeChat 插件开发指南
-description: 学习如何在 LobeChat 中添加和使用自定义插件,包括创建插件项目、在角色设置中添加本地插件、测试插件功能以及插件开发流程和部署。
+title: 插件开发
+description: 学习如何在 LobeHub 中添加和使用自定义插件,包括创建插件项目、在角色设置中添加本地插件、测试插件功能以及插件开发流程和部署。
tags:
- - LobeChat
+ - LobeHub
+ - LobeHub
- 插件开发
- 自定义插件
- 插件部署
@@ -11,11 +12,11 @@ tags:
- 插件SDK
---
-# 插件开发指南
+# 插件开发
## 插件构成
-一个 LobeChat 的插件由以下几个部分组成:
+一个 LobeHub 的插件由以下几个部分组成:
1. **插件索引**:用于展示插件的基本信息,包括插件名称、描述、作者、版本、插件描述清单的链接,官方的插件索引地址:[lobe-chat-plugins](https://github.com/lobehub/lobe-chat-plugins)。若想上架插件到官方插件市场,需要 [提交 PR](https://github.com/lobehub/lobe-chat-plugins/pulls) 到该仓库;
2. **插件描述清单 (manifest)**:用于描述插件的功能实现,包含了插件的服务端描述、前端展示信息、版本号等。关于 manifest 的详细介绍,详见 [manifest][manifest-docs-url];
@@ -25,7 +26,7 @@ tags:
## 自定义插件流程
-本节将会介绍如何在 LobeChat 中添加和使用一个自定义插件。
+本节将会介绍如何在 LobeHub 中添加和使用一个自定义插件。
### 创建并启动插件项目
@@ -43,25 +44,25 @@ tags:
- ### 在 LobeChat 角色设置中添加本地插件
+ ### 在 LobeHub 角色设置中添加本地插件
- 接下来进入到 LobeChat 中,创建一个新的助手,并进入它的会话设置页:
+ 接下来进入到 LobeHub 中,创建一个新的助手,并进入它的会话设置页:
-
+
点击插件列表右侧的 添加 按钮,打开自定义插件添加弹窗:
-
+
在 **插件描述文件 Url** 地址 中填入 `http://localhost:3400/manifest-dev.json` ,这是我们本地启动的插件描述清单地址。
此时,你应该可以看到看到插件的标识符一栏已经被自动识别为 `chat-plugin-template`。接下来你需要填写剩下的表单字段(只有标题必填),然后点击 保存 按钮,即可完成自定义插件添加。
-
+
完成添加后,在插件列表中就能看到刚刚添加的插件,如果需要修改插件的配置,可以点击最右侧的 设置 按钮进行修改。
-
+
### 会话测试插件功能
@@ -75,7 +76,7 @@ tags:
- 当完成这些操作后,你已经了解了添加自定义插件,并在 LobeChat 中使用的基础流程。
+ 当完成这些操作后,你已经了解了添加自定义插件,并在 LobeHub 中使用的基础流程。
## 本地插件开发
@@ -127,8 +128,8 @@ tags:
1. `identifier`:这是插件的唯一标识符,用来区分不同的插件,这个字段需要全局唯一。
2. `api`:这是一个数组,包含了插件的所有 API 接口信息。每个接口都包含了 url、name、description 和 parameters 字段,均为必填项。其中 `description` 和 `parameters` 两个字段,将会作为 [Function Call](https://sspai.com/post/81986) 的 `functions` 参数发送给 gpt, parameters 需要符合 [JSON Schema](https://json-schema.org/) 规范。 在这个例子中,api 接口名为 `recommendClothes` ,这个接口的功能是根据用户的心情和性别来推荐衣服。接口的参数包括用户的心情和性别,这两个参数都是必填项。
-3. `ui`:这个字段包含了插件的用户界面信息,指明了 LobeChat 从哪个地址加载插件的前端界面。由于 LobeChat 插件界面加载是基于 iframe 实现的,因此可以按需指定插件界面的高度、宽度。
-4. `gateway`:这个字段指定了 LobeChat 查询 api 接口的网关。LobeChat 默认的插件网关是云端服务,而自定义插件的请求需要发给本地启动的服务,远端调用本地地址,一般调用不通。gateway 字段解决了该问题。通过在 manifest 中指定 gateway,LobeChat 将会向该地址发送插件请求,本地的网关地址将会调度请求到本地的插件服务。发布到线上的插件可以不用指定该字段。
+3. `ui`:这个字段包含了插件的用户界面信息,指明了 LobeHub 从哪个地址加载插件的前端界面。由于 LobeHub 插件界面加载是基于 iframe 实现的,因此可以按需指定插件界面的高度、宽度。
+4. `gateway`:这个字段指定了 LobeHub 查询 api 接口的网关。LobeHub 默认的插件网关是云端服务,而自定义插件的请求需要发给本地启动的服务,远端调用本地地址,一般调用不通。gateway 字段解决了该问题。通过在 manifest 中指定 gateway,LobeHub 将会向该地址发送插件请求,本地的网关地址将会调度请求到本地的插件服务。发布到线上的插件可以不用指定该字段。
5. `version`:这是插件的版本号,现阶段暂时没有作用;
在实际开发中,你可以根据自己的需求,修改插件的描述清单,声明想要实现的功能。 关于 manifest 各个字段的完整介绍,参见:[manifest][manifest-docs-url]。
@@ -185,21 +186,21 @@ export default async (req: Request) => {
#### Plugin Gateway
-由于 LobeChat 默认的插件网关是云端服务 `/api/plugins`,云端服务通过 manifest 上的 `api.url` 地址发送请求,以解决跨域问题。
+由于 LobeHub 默认的插件网关是云端服务 `/api/plugins`,云端服务通过 manifest 上的 `api.url` 地址发送请求,以解决跨域问题。
-针对自定义插件,插件请求需要发送给本地服务, 因此通过在 manifest 中指定网关 ([http://localhost:3400/api/gateway),LobeChat](http://localhost:3400/api/gateway\),LobeChat) 将会直接请求该地址,然后只需要在该地址下创建对应的网关即可。
+针对自定义插件,插件请求需要发送给本地服务, 因此通过在 manifest 中指定网关 ([http://localhost:3400/api/gateway),LobeHub](http://localhost:3400/api/gateway\),LobeHub) 将会直接请求该地址,然后只需要在该地址下创建对应的网关即可。
```ts
-import { createLobeChatPluginGateway } from '@lobehub/chat-plugins-gateway';
+import { createLobeHubPluginGateway } from '@lobehub/chat-plugins-gateway';
export const config = {
runtime: 'edge',
};
-export default createLobeChatPluginGateway();
+export default createLobeHubPluginGateway();
```
-[`@lobehub/chat-plugins-gateway`](https://github.com/lobehub/chat-plugins-gateway) 包含了 LobeChat 中插件网关的[实现](https://github.com/lobehub/lobe-chat/blob/main/src/pages/api/plugins.api.ts),你可以直接使用该包创建网关,进而让 LobeChat 访问到本地的插件服务。
+[`@lobehub/chat-plugins-gateway`](https://github.com/lobehub/chat-plugins-gateway) 包含了 LobeHub 中插件网关的[实现](https://github.com/lobehub/lobe-chat/blob/main/src/pages/api/plugins.api.ts),你可以直接使用该包创建网关,进而让 LobeHub 访问到本地的插件服务。
### 插件 UI 界面
@@ -213,13 +214,13 @@ export default createLobeChatPluginGateway();
#### 插件 UI 界面实现
-LobeChat 通过 `iframe` 实现插件 ui 的加载,使用 `postMessage` 实现主体与插件的通信。因此, 插件 UI 的实现方式与普通的网页开发一致,你可以使用任何你熟悉的前端框架与开发语言。
+LobeHub 通过 `iframe` 实现插件 ui 的加载,使用 `postMessage` 实现主体与插件的通信。因此, 插件 UI 的实现方式与普通的网页开发一致,你可以使用任何你熟悉的前端框架与开发语言。
在我们提供的模板中使用了 React + Next.js + [antd](https://ant.design/) 作为前端界面框架,你可以在 [`src/pages/index.tsx`](https://github.com/lobehub/chat-plugin-template/blob/main/src/pages/index.tsx) 中找到用户界面的实现。
-其中关于插件通信,我们在 [`@lobehub/chat-plugin-sdk`](https://github.com/lobehub/chat-plugin-sdk) 提供了相关方法,用于简化插件与 LobeChat 的通信。你可以通过 `fetchPluginMessage` 方法主动向 LobeChat 获取当前消息的数据。关于该方法的详细介绍,参见:[fetchPluginMessage][fetch-plugin-message-url]。
+其中关于插件通信,我们在 [`@lobehub/chat-plugin-sdk`](https://github.com/lobehub/chat-plugin-sdk) 提供了相关方法,用于简化插件与 LobeHub 的通信。你可以通过 `fetchPluginMessage` 方法主动向 LobeHub 获取当前消息的数据。关于该方法的详细介绍,参见:[fetchPluginMessage][fetch-plugin-message-url]。
```tsx
import { fetchPluginMessage } from '@lobehub/chat-plugin-sdk';
@@ -231,7 +232,7 @@ const Render = memo(() => {
const [data, setData] = useState();
useEffect(() => {
- // 从 LobeChat 获取当前插件的消息
+ // 从 LobeHub 获取当前插件的消息
fetchPluginMessage().then((e: ResponseData) => {
setData(e);
});
diff --git a/docs/usage/features/agent-market.mdx b/docs/usage/features/agent-market.mdx
deleted file mode 100644
index eb445f4e0a..0000000000
--- a/docs/usage/features/agent-market.mdx
+++ /dev/null
@@ -1,32 +0,0 @@
----
-title: Find best Assistants in LobeChat Assistant Market
-description: >-
- Explore a vibrant community of carefully designed assistants in LobeChat's Assistant Market. Contribute your wisdom and share your personally developed assistants in this collaborative space.
-
-tags:
- - LobeChat
- - Assistant Market
- - Community
- - Collaboration
- - Assistants
----
-
-# Assistant Market
-
-
-
-In LobeChat's Assistant Market, creators can discover a vibrant and innovative community that brings together numerous carefully designed assistants. These assistants not only play a crucial role in work scenarios but also provide great convenience in the learning process. Our market is not just a showcase platform, but also a collaborative space. Here, everyone can contribute their wisdom and share their personally developed assistants.
-
-
- By [🤖/🏪 submitting agents](https://github.com/lobehub/lobe-chat-agents), you can easily submit
- your assistant works to our platform. We particularly emphasize that LobeChat has established a
- sophisticated automated internationalization (i18n) workflow, which excels in seamlessly
- converting your assistants into multiple language versions. This means that regardless of the
- language your users are using, they can seamlessly experience your assistant.
-
-
-
- We welcome all users to join this ever-growing ecosystem and participate in the iteration and
- optimization of assistants. Together, let's create more interesting, practical, and innovative
- assistants, further enriching the diversity and practicality of assistants.
-
diff --git a/docs/usage/features/agent-market.zh-CN.mdx b/docs/usage/features/agent-market.zh-CN.mdx
deleted file mode 100644
index a9d26d158a..0000000000
--- a/docs/usage/features/agent-market.zh-CN.mdx
+++ /dev/null
@@ -1,31 +0,0 @@
----
-title: 在 LobeChat 助手市场找到创新 AI 助手
-description: >-
- LobeChat助手市场是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
-
-tags:
- - LobeChat
- - 助手市场
- - 创新社区
- - 协作空间
- - 助手作品
- - 自动化国际化
- - 多语言版本
----
-
-# 助手市场
-
-
-
-在 LobeChat 的助手市场中,创作者们可以发现一个充满活力和创新的社区,它汇聚了众多精心设计的助手,这些助手不仅在工作场景中发挥着重要作用,也在学习过程中提供了极大的便利。我们的市场不仅是一个展示平台,更是一个协作的空间。在这里,每个人都可以贡献自己的智慧,分享个人开发的助手。
-
-
- 通过 [🤖/🏪 提交助手](https://github.com/lobehub/lobe-chat-agents)
- ,你可以轻松地将你的助手作品提交到我们的平台。我们特别强调的是,LobeChat
- 建立了一套精密的自动化国际化(i18n)工作流程,
- 它的强大之处在于能够无缝地将你的助手转化为多种语言版本。这意味着,不论你的用户使用何种语言,他们都能无障碍地体验到你的助手。
-
-
-
- 我们欢迎所有用户加入这个不断成长的生态系统,共同参与到助手的迭代与优化中来。共同创造出更多有趣、实用且具有创新性的助手,进一步丰富助手的多样性和实用性。
-
diff --git a/docs/usage/features/agent-team.mdx b/docs/usage/features/agent-team.mdx
deleted file mode 100644
index dfb22668a0..0000000000
--- a/docs/usage/features/agent-team.mdx
+++ /dev/null
@@ -1,52 +0,0 @@
----
-title: Agent Team
-description: Turn any conversation into a team effort. Multiple AI agents collaborate naturally to give you richer, more insightful responses.
-tags:
- - Agent Team
- - Multi-Agent
- - AI Orchestration
- - Agent Coordination
----
-
-# Agent Team
-
-
-
-Sometimes one perspective isn't enough. Agent Team brings together multiple AI agents, each with their own expertise, to collaborate on your conversations. Richer discussions, diverse viewpoints, and solutions you wouldn't get from any single agent.
-
-## Highlights
-
-- Multiple assistants with specialized knowledge work together, each contributing their strengths
-- A built-in host ensures the agent team conversation runs smoothly and organized
-- Private messaging allows seamless coordination between assistants
-- You get comprehensive answers from multiple perspectives
-- Ready to use with your own assistants or rich agent team templates
-
-## Use Cases
-
-**Learning and Research**: Different assistants gather different materials through different tools, then come together for spontaneous discussion.
-
-**Entertainment**: Multiplayer language games like Werewolf, Model United Nations, and Turtle Soup.
-
-**Brainstorming**: Diverse perspectives spark better ideas.
-
-**Problem Solving**: Benefit from insights across different professional fields, with different tools and MCPs, agent team allows you to have the perfect AI team.
-
-## Quick Start
-
-Click the "Create Agent Team" button, you can choose to create directly from preset agent team templates, or select your own assistants to form an agent team.
-
-You can use @ to mention a team member in the agent team, or click their avatar to send them a private message. Everything works just like in a real chat room.
-
-### Interrupt and Resume Agent Team
-
-You can interrupt the host's thinking at any time, and the agent team will "pause" after interruption. You can start the moderator at any time, and the agent team will continue.
-
-Of course, conversations may also stop naturally.
-
-### Advanced Options
-
-- Agent Team Speed: Customize the response speed of the agent team
-- Custom Moderator: Guide the moderator's behavior according to specific needs
-
-Agent Team transforms how you interact with AI. Instead of getting one answer, you participate in a conversation—complete with different viewpoints, collaborative problem-solving, and profound insights that emerge when AI agents work together.
diff --git a/docs/usage/features/agent-team.zh-CN.mdx b/docs/usage/features/agent-team.zh-CN.mdx
deleted file mode 100644
index ee8ff150d0..0000000000
--- a/docs/usage/features/agent-team.zh-CN.mdx
+++ /dev/null
@@ -1,52 +0,0 @@
----
-title: Agent 团队
-description: 让对话变成团队协作。多个 AI 智能体自然配合,为你提供更丰富、更有洞察力的回答。
-tags:
- - 群组对话
- - 多智能体
- - AI 编排
- - 智能体协调
----
-
-# Agent 团队
-
-
-
-有时候,一个视角远远不够。Agent 团队功能让多个拥有不同知识和技能的 AI 智能体聚在一起,协作参与你的对话。更丰富的讨论、多元的观点,以及任何单一智能体都无法提供的解决方案。
-
-## 亮点
-
-- 多个具有专业知识的助手协同工作,各取所长
-- 团队内置的主持人将确保整个团队聊天有条不紊的进行
-- 私信功能让助手间无缝协调配合
-- 你将从多个视角获得全面的答案
-- 开箱即用,由你自己的助手组成,或者使用丰富的团队模版
-
-## 适用场景
-
-**学习研究**:不同的助手通过不同的工具收集不同的资料,汇聚一处并自发讨论。
-
-**娱乐**:狼人杀、模拟联合国、海龟汤等多人语聊游戏。
-
-**头脑风暴**:多元观点激发更好的想法。
-
-**问题解决**:受益于不同专业领域的见解,以及不同的工具和 MCP,Agent 团队允许你拥有完美的 AI 团队。
-
-## 快速开始
-
-点击「创建 Agent 团队」按钮,你可以选择直接从预设的团队模版创建,或选择你自己的助手组成团队。
-
-你可以在团队聊天中输入 @ 来提及某个成员,或者点击他们的头像对其私聊。一切就像是在真实的聊天室中一样。
-
-### 中断与继续团队聊天
-
-你可以随时打断主持人的思考,打断后团队聊天将会「暂停」。你可以随时开启主持人,团队聊天便会继续。
-
-当然,对话也有可能自然地停止。
-
-### 高级选项
-
-- 团队速度:自定义团队的回答速度
-- 自定义主持人:根据特定需要指导主持人的行为
-
-Agent 团队改变了你与 AI 的互动方式。不再是获得一个答案,而是参与一场对话 —— 包含不同观点、协作解决问题,以及 AI 智能体协同工作时产生的深刻洞察。
diff --git a/docs/usage/features/artifacts.mdx b/docs/usage/features/artifacts.mdx
deleted file mode 100644
index 9bd7870274..0000000000
--- a/docs/usage/features/artifacts.mdx
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: Artifacts Support
-description: >-
- Discover the power of Claude Artifacts for dynamic content creation and visualization.
-
-tags:
- - Claude Artifacts
- - LobeChat
- - AI Interaction
- - Dynamic Content
----
-
-# Artifacts Support
-
-
-
-Experience the power of Claude Artifacts, now integrated into LobeChat. This revolutionary feature expands the boundaries of AI-human interaction, enabling real-time creation and visualization of diverse content formats.
-
-Create and visualize with unprecedented flexibility:
-
-- Generate and display dynamic SVG graphics
-- Build and render interactive HTML pages in real-time
-- Produce professional documents in multiple formats
diff --git a/docs/usage/features/artifacts.zh-CN.mdx b/docs/usage/features/artifacts.zh-CN.mdx
deleted file mode 100644
index 07b2b3e7f4..0000000000
--- a/docs/usage/features/artifacts.zh-CN.mdx
+++ /dev/null
@@ -1,22 +0,0 @@
----
-title: 支持白板 (Artifacts)
-description: 体验 LobeChat 的 Claude Artifacts,实时创建和可视化内容。
-tags:
- - Claude Artifacts
- - LobeChat
- - 实时创作
- - 动态 SVG
- - 交互式 HTML
----
-
-# 支持白板 (Artifacts)
-
-
-
-体验集成于 LobeChat 的 Claude Artifacts 能力。这项革命性功能突破了 AI 人机交互的边界,让您能够实时创建和可视化各种格式的内容。
-
-以前所未有的灵活度进行创作与可视化:
-
-- 生成并展示动态 SVG 图形
-- 实时构建与渲染交互式 HTML 页面
-- 输出多种格式的专业文档
diff --git a/docs/usage/features/auth.mdx b/docs/usage/features/auth.mdx
deleted file mode 100644
index 8a705d8a18..0000000000
--- a/docs/usage/features/auth.mdx
+++ /dev/null
@@ -1,76 +0,0 @@
----
-title: User Authentication and Account Management
-description: >-
- Explore LobeChat's user authentication solutions with Better Auth for flexible and secure user management. Learn about features like email/password login, magic link, SSO, and account deletion.
-
-tags:
- - User Authentication
- - Better Auth
- - SSO
- - Account Management
- - Session Management
----
-
-# User Authentication and Account Management
-
-
-
-LobeChat uses [Better Auth](https://www.better-auth.com) as its authentication solution, providing comprehensive, secure, and flexible identity verification services.
-
-## Authentication Methods
-
-LobeChat supports multiple authentication methods to meet diverse user needs:
-
-### Email / Password
-
-Traditional email and password login with secure password hashing. You can register a new account with your email address and set a password.
-
-### Magic Link
-
-Passwordless authentication via email magic link. Simply enter your email, click the link sent to your inbox, and you're logged in - no password required.
-
-### Social Login (SSO)
-
-Quick login via popular identity providers:
-
-- **Google** - Sign in with your Google account
-- **GitHub** - Sign in with your GitHub account
-- **Microsoft** - Sign in with your Microsoft account
-- **Apple** - Sign in with your Apple ID
-- And more...
-
-## Account Management
-
-### Profile Settings
-
-You can manage your account profile in **Settings > Profile**:
-
-
-
-- Update your avatar
-- Change your display name
-- Set your username
-- Manage connected SSO providers
-- Change your password (if using email/password login)
-
-### Account Deletion
-
-If you wish to delete your account, you can request account deletion in **Settings > Data Storage**:
-
-1. Click **Request Account Deletion**
-2. Enter the reason for deletion
-3. Check the confirmation box
-4. Submit your request
-
-
- After submission, there is a **72-hour cooling-off period** during which you can cancel at any time. Once the period ends:
-
- - Your account data will be permanently deleted
- - Unpaid invoices will be canceled immediately
- - Paid fees will not be refunded
- - Your registration and deletion records will be retained for compliance purposes
-
-
-
- For self-hosting deployment, please refer to [Authentication Services](/docs/self-hosting/advanced/auth) for configuration details.
-
diff --git a/docs/usage/features/auth.zh-CN.mdx b/docs/usage/features/auth.zh-CN.mdx
deleted file mode 100644
index e078858ac0..0000000000
--- a/docs/usage/features/auth.zh-CN.mdx
+++ /dev/null
@@ -1,74 +0,0 @@
----
-title: 用户认证与账号管理
-description: 了解 LobeChat 基于 Better Auth 的用户认证方案,支持邮箱密码、魔法链接、SSO 登录及账号注销等功能。
-tags:
- - 用户认证
- - Better Auth
- - SSO
- - 账号管理
- - 会话管理
----
-
-# 用户认证与账号管理
-
-
-
-LobeChat 使用 [Better Auth](https://www.better-auth.com) 作为身份验证解决方案,提供全面、安全、灵活的身份验证服务。
-
-## 认证方式
-
-LobeChat 支持多种认证方式,满足不同用户的需求:
-
-### 邮箱 / 密码
-
-传统的邮箱和密码登录方式,采用安全的密码哈希算法。您可以使用邮箱地址注册新账号并设置密码。
-
-### 魔法链接
-
-通过邮件魔法链接实现无密码认证。只需输入邮箱,点击收件箱中的链接即可登录,无需记忆密码。
-
-### 社交登录 (SSO)
-
-通过主流身份提供商快速登录:
-
-- **Google** - 使用 Google 账号登录
-- **GitHub** - 使用 GitHub 账号登录
-- **Microsoft** - 使用 Microsoft 账号登录
-- **Apple** - 使用 Apple ID 登录
-- 更多...
-
-## 账号管理
-
-### 个人资料设置
-
-您可以在 **设置 > 个人资料** 中管理您的账号资料:
-
-
-
-- 更新头像
-- 修改显示名称
-- 设置用户名
-- 管理已连接的 SSO 提供商
-- 修改密码(如果使用邮箱密码登录)
-
-### 账号注销
-
-如果您希望注销账号,可以在 **设置 > 数据存储** 中申请注销:
-
-1. 点击 **申请注销账号**
-2. 填写注销原因
-3. 勾选确认复选框
-4. 提交申请
-
-
- 提交后将进入 **72 小时冷静期**,期间可随时取消。冷静期结束后:
-
- - 您的账号数据将被永久删除
- - 未支付的账单将立即取消
- - 已支付的费用不予退还
- - 您的注册和注销记录将被保留以满足合规要求
-
-
-
- 如需自托管部署,请参阅 [身份验证服务](/zh/docs/self-hosting/advanced/auth) 了解配置详情。
-
diff --git a/docs/usage/features/branching-conversations.mdx b/docs/usage/features/branching-conversations.mdx
deleted file mode 100644
index 1cc5d4f135..0000000000
--- a/docs/usage/features/branching-conversations.mdx
+++ /dev/null
@@ -1,21 +0,0 @@
----
-title: Branching Conversations
-description: Explore dynamic AI chats with Branching Conversations for deeper interactions.
-tags:
- - Branching Conversations
- - AI Chat
- - Dynamic Conversations
----
-
-# Branching Conversations
-
-
-
-Introducing a more natural and flexible way to chat with AI. With Branch Conversations, your discussions can flow in multiple directions, just like human conversations do. Create new conversation branches from any message, giving you the freedom to explore different paths while preserving the original context.
-
-Choose between two powerful modes:
-
-- **Continuation Mode:** Seamlessly extend your current discussion while maintaining valuable context
-- **Standalone Mode:** Start fresh with a new topic based on any previous message
-
-This groundbreaking feature transforms linear conversations into dynamic, tree-like structures, enabling deeper exploration of ideas and more productive interactions.
diff --git a/docs/usage/features/branching-conversations.zh-CN.mdx b/docs/usage/features/branching-conversations.zh-CN.mdx
deleted file mode 100644
index 16caf102f5..0000000000
--- a/docs/usage/features/branching-conversations.zh-CN.mdx
+++ /dev/null
@@ -1,21 +0,0 @@
----
-title: 分支对话
-description: 探索分支对话功能,提升 AI 交流的自然性与灵活性。
-tags:
- - 分支对话
- - AI 交流
- - 对话模式
----
-
-# 分支对话
-
-
-
-为您带来更自然、更灵活的 AI 对话方式。通过分支对话功能,您的讨论可以像人类对话一样自然延伸。在任意消息处创建新的对话分支,让您在保留原有上下文的同时,自由探索不同的对话方向。
-
-两种强大模式任您选择:
-
-- **延续模式**:无缝延展当前讨论,保持宝贵的对话上下文
-- **独立模式**:基于任意历史消息,开启全新话题探讨
-
-这项突破性功能将线性对话转变为动态的树状结构,让您能够更深入地探索想法,实现更高效的互动体验。
diff --git a/docs/usage/features/cot.mdx b/docs/usage/features/cot.mdx
deleted file mode 100644
index 7d96f2ed92..0000000000
--- a/docs/usage/features/cot.mdx
+++ /dev/null
@@ -1,18 +0,0 @@
----
-title: Chain of Thought
-description: >-
- Explore AI's decision-making with Chain of Thought visualization for clear insights.
-
-tags:
- - AI Reasoning
- - Chain of Thought
- - CoT Visualization
----
-
-# Chain of Thought
-
-
-
-Experience AI reasoning like never before. Watch as complex problems unfold step by step through our innovative Chain of Thought (CoT) visualization. This breakthrough feature provides unprecedented transparency into AI's decision-making process, allowing you to observe how conclusions are reached in real-time.
-
-By breaking down complex reasoning into clear, logical steps, you can better understand and validate the AI's problem-solving approach. Whether you're debugging, learning, or simply curious about AI reasoning, CoT visualization transforms abstract thinking into an engaging, interactive experience.
diff --git a/docs/usage/features/cot.zh-CN.mdx b/docs/usage/features/cot.zh-CN.mdx
deleted file mode 100644
index c2890b4614..0000000000
--- a/docs/usage/features/cot.zh-CN.mdx
+++ /dev/null
@@ -1,18 +0,0 @@
----
-title: 思维链 (CoT)
-description: 体验思维链 (CoT) 的 AI 推理过程,了解复杂问题的解析步骤。
-tags:
- - 思维链
- - AI 推理
- - 可视化
- - 逻辑步骤
- - 决策过程
----
-
-# 思维链 (CoT)
-
-
-
-体验前所未有的 AI 推理过程。通过创新的思维链(CoT)可视化功能,您可以实时观察复杂问题是如何一步步被解析的。这项突破性的功能为 AI 的决策过程提供了前所未有的透明度,让您能够清晰地了解结论是如何得出的。
-
-通过将复杂的推理过程分解为清晰的逻辑步骤,您可以更好地理解和验证 AI 的解题思路。无论您是在调试问题、学习知识,还是单纯对 AI 推理感兴趣,思维链可视化都能将抽象思维转化为一种引人入胜的互动体验。
diff --git a/docs/usage/features/database.mdx b/docs/usage/features/database.mdx
deleted file mode 100644
index f5f9f28dcc..0000000000
--- a/docs/usage/features/database.mdx
+++ /dev/null
@@ -1,56 +0,0 @@
----
-title: Local / Cloud Database Solutions for LobeChat
-description: >-
- Explore the options of local and server-side databases for LobeChat, offering data control, privacy protection, and convenient user experiences.
-
-tags:
- - Local Database
- - Server-Side Database
- - Data Privacy
- - Data Control
- - CRDT Technology
- - PostgreSQL
- - Dirzzle ORM
- - Clerk Authentication
----
-
-# Local / Cloud Database
-
-
-
-In modern application development, the choice of data storage solution is crucial. To meet the needs of different users, LobeChat offers flexible configurations that support both local and server-side databases. Whether you prioritize data privacy and control or seek a convenient user experience, LobeChat can provide excellent solutions for you.
-
-## Local Database: Data Control and Privacy Protection
-
-For users who prefer more control over their data and value privacy protection, LobeChat offers support for local databases. By using IndexedDB as the storage solution and combining it with dexie as an Object-Relational Mapping (ORM) tool, LobeChat achieves efficient data management.
-
-Additionally, we have introduced Conflict-Free Replicated Data Type (CRDT) technology to ensure a seamless multi-device synchronization experience. This experimental feature aims to provide users with greater autonomy and data security.
-
-
- LobeChat defaults to the local database solution to reduce the onboarding cost for new users.
-
-
-Furthermore, we have attempted to introduce CRDT technology to achieve cross-device synchronization based on the local database. This experimental feature aims to provide users with greater autonomy and data security.
-
-## Server-Side Database: Convenient and Efficient User Experience
-
-For users who seek a convenient user experience, LobeChat supports PostgreSQL as the server-side database. By managing data with Dirzzle ORM and combining it with Clerk for authentication, LobeChat can offer users an efficient and reliable server-side data management solution.
-
-### Server-Side Database Technology Stack
-
-- **DB**: PostgreSQL (Neon is the default)
-- **ORM**: Dirzzle ORM
-- **Auth**: Clerk
-- **Server Router**: tRPC
-
-## Deployment Solution Selection Guide
-
-### 1. Local Database
-
-The local database solution is suitable for users who wish to have strict control over their data. With LobeChat's support for local databases, you can securely store and manage data without relying on external servers. This solution is particularly suitable for users with high requirements for data privacy.
-
-### 2. Server-Side Database
-
-The server-side database solution is ideal for users who want to simplify data management processes and enjoy a convenient user experience. Through server-side databases and user authentication, LobeChat can ensure the security and efficiency of data. If you want to learn how to configure a server-side database, please refer to our [detailed documentation](/docs/self-hosting/advanced/server-database).
-
-Whether you choose a local database or a server-side database, LobeChat can provide you with an excellent user experience.
diff --git a/docs/usage/features/database.zh-CN.mdx b/docs/usage/features/database.zh-CN.mdx
deleted file mode 100644
index 68b7dc4734..0000000000
--- a/docs/usage/features/database.zh-CN.mdx
+++ /dev/null
@@ -1,50 +0,0 @@
----
-title: LobeChat 支持本地 / 云端数据库存储
-description: LobeChat 支持本地 / 云端数据存储,既能实现 Local First,同时支持数据云同步。
-tags:
- - LobeChat
- - IndexedDB
- - Postgres
- - Local First
- - 数据云同步
- - 数据库
----
-
-# 本地 / 云端数据存储
-
-
-
-在现代应用开发中,数据存储方案的选择至关重要。为了满足不同用户的需求,LobeChat 提供了同时支持本地数据库和服务端数据库的灵活配置。无论您是注重数据隐私与掌控,还是追求便捷的使用体验,LobeChat 都能为您提供卓越的解决方案。
-
-## 本地数据库:数据掌控与隐私保护
-
-对于希望对数据有更多掌控感和隐私保护的用户,LobeChat 提供了本地数据库支持。采用 IndexedDB 作为存储解决方案,并结合 dexie 作为 ORM(对象关系映射),LobeChat 实现了高效的数据管理。
-
-同时,我们引入了 CRDT(Conflict-Free Replicated Data Type)技术,确保多端同步功能的无缝体验。这一实验性功能旨在为用户提供更高的自主性和数据安全性。
-
-LobeChat 默认采取本地数据库方案,以降低新用户的上手成本。
-
-此外,我们尝试引入了 CRDT(Conflict-Free Replicated Data Type)技术,在本地数据库基础上实现了跨端同步。这一实验性功能旨在为用户提供更高的自主性和数据安全性。
-
-## 服务端数据库:便捷与高效的使用体验
-
-对于追求便捷使用体验的用户,LobeChat 支持 PostgreSQL 作为服务端数据库。通过 Drizzle ORM 管理数据,结合 Clerk 进行身份验证,LobeChat 能够为用户提供高效、可靠的服务端数据管理方案。
-
-### 服务端数据库技术栈
-
-- **DB**: PostgreSQL(默认使用 Neon)
-- **ORM**: Drizzle ORM
-- **Auth**: Clerk
-- **Server Router**: tRPC
-
-## 部署方案选择指南
-
-### 1. 本地数据库
-
-本地数据库方案适用于那些希望对数据进行严格控制的用户。通过 LobeChat 的本地数据库支持,您可以在不依赖外部服务器的情况下,安全地存储和管理数据。这一方案特别适合对数据隐私有高要求的用户。
-
-### 2. 服务端数据库
-
-服务端数据库方案则适合那些希望简化数据管理流程,享受便捷使用体验的用户。通过服务端数据库与用户身份验证,LobeChat 能够确保数据的安全性与高效性。如果您希望了解如何配置服务端数据库,请参考我们的[详细文档](/zh/docs/self-hosting/advanced/server-database)。
-
-无论选择本地数据库还是服务端数据库,LobeChat 都能为你提供卓越的用户体验。
diff --git a/docs/usage/features/desktop.mdx b/docs/usage/features/desktop.mdx
deleted file mode 100644
index 9a048c7662..0000000000
--- a/docs/usage/features/desktop.mdx
+++ /dev/null
@@ -1,53 +0,0 @@
----
-title: LobeChat Desktop Application
-description: >-
- Experience the full power of LobeChat without browser limitations. A
- lightweight, focused, and always-ready desktop app offering a dedicated
- environment and optimal performance.
-tags:
- - Desktop Application
- - Native App
- - Performance Optimization
- - Dedicated Environment
- - Offline Use
- - System Integration
- - User Experience
----
-
-# Desktop Application
-
-
-
-**Peak Performance, Zero Distractions**
-
-Unlock the full LobeChat experience without the constraints of a browser — lightweight, focused, and always ready. Our desktop application provides a dedicated environment for your AI interactions, ensuring optimal performance with minimal distractions.
-
-Enjoy faster response times, better resource management, and a more stable connection to your AI assistant. The desktop app is designed for users who demand the best performance from their AI tools.
-
-## Why Choose the Desktop Application
-
-### 🚀 Superior Performance
-
-
-The desktop app delivers faster response times and a smoother user experience compared to the browser version.
-
-
-- **Dedicated Process**: Runs independently, free from browser limitations
-- **Memory Optimization**: More efficient memory management and resource allocation
-- **GPU Acceleration**: Fully leverages hardware acceleration capabilities
-- **Low Latency**: Reduces network delays and page load times
-
-### 🎯 Focused Experience
-
-- **Distraction-Free Environment**: Eliminates interruptions from browser tabs, bookmarks bar, and more
-- **Full-Screen Mode**: Supports immersive, full-screen usage
-- **Quick Launch**: Auto-start on boot, ready whenever you are
-- **Keyboard Shortcuts**: Extensive shortcut support to boost productivity
-
-### 🔒 Secure and Reliable
-
-- **Local Storage**: Data securely stored locally for enhanced privacy
-- **Offline Capability**: Partial functionality available offline
-- **Automatic Updates**: Always up to date without manual intervention
-- **Data Backup**: Comprehensive data backup and recovery features
-
diff --git a/docs/usage/features/desktop.zh-CN.mdx b/docs/usage/features/desktop.zh-CN.mdx
deleted file mode 100644
index 7058ac80ab..0000000000
--- a/docs/usage/features/desktop.zh-CN.mdx
+++ /dev/null
@@ -1,49 +0,0 @@
----
-title: LobeChat 桌面应用
-description: 获得完整的 LobeChat 体验,摆脱浏览器限制。轻量级、专注且随时就绪的桌面应用,提供专用环境和最佳性能。
-tags:
- - 桌面应用
- - 原生应用
- - 性能优化
- - 专用环境
- - 离线使用
- - 系统集成
- - 用户体验
----
-
-# 桌面应用
-
-
-
-**巅峰性能,零干扰**
-
-获得完整的 LobeChat 体验,摆脱浏览器限制 —— 轻量级、专注且随时就绪。我们的桌面应用程序为你的 AI 交互提供专用环境,确保最佳性能和最小干扰。
-
-体验更快的响应时间、更好的资源管理和与 AI 助手的更稳定连接。桌面应用专为要求 AI 工具最佳性能的用户设计。
-
-## 为什么选择桌面应用
-
-### 🚀 卓越性能
-
-
- 桌面应用提供比浏览器版本更快的响应速度和更流畅的用户体验。
-
-
-- **专用进程**:独立运行,不受浏览器限制
-- **内存优化**:更高效的内存管理和资源分配
-- **GPU 加速**:充分利用硬件加速能力
-- **低延迟**:减少网络延迟和页面加载时间
-
-### 🎯 专注体验
-
-- **无干扰环境**:摆脱浏览器标签页、书签栏等干扰
-- **全屏模式**:支持全屏和沉浸式体验
-- **快速启动**:开机自启动,随时可用
-- **键盘快捷键**:丰富的快捷键支持,提高操作效率
-
-### 🔒 安全可靠
-
-- **本地存储**:数据安全存储在本地,隐私保护更好
-- **离线能力**:部分功能支持离线使用
-- **自动更新**:无需手动更新,始终保持最新版本
-- **数据备份**:完整的数据备份和恢复功能
diff --git a/docs/usage/features/knowledge-base.mdx b/docs/usage/features/knowledge-base.mdx
deleted file mode 100644
index d9534756d9..0000000000
--- a/docs/usage/features/knowledge-base.mdx
+++ /dev/null
@@ -1,25 +0,0 @@
----
-title: File Upload / Knowledge Base
-description: >-
- Discover LobeChat's file upload and knowledge base features for enhanced user experience.
-
-tags:
- - File Upload
- - Knowledge Base
- - LobeChat
- - User Management
- - File Management
----
-
-# File Upload / Knowledge Base
-
-
-
-LobeChat supports file upload and knowledge base functionality. You can upload various types of files including documents, images, audio, and video, as well as create knowledge bases, making it convenient for users to manage and search for files. Additionally, you can utilize files and knowledge base features during conversations, enabling a richer dialogue experience.
-
-
-
-
- Learn more on [📘 LobeChat Knowledge Base Launch — From Now On, Every Step
- Counts](https://lobehub.com/blog/knowledge-base)
-
diff --git a/docs/usage/features/knowledge-base.zh-CN.mdx b/docs/usage/features/knowledge-base.zh-CN.mdx
deleted file mode 100644
index 115706674a..0000000000
--- a/docs/usage/features/knowledge-base.zh-CN.mdx
+++ /dev/null
@@ -1,22 +0,0 @@
----
-title: 文件上传 / 知识库
-description: 了解LobeChat的文件上传与知识库功能,提升对话体验。
-tags:
- - 文件上传
- - 知识库
- - LobeChat
- - 对话体验
----
-
-# 文件上传 / 知识库
-
-
-
-LobeChat 支持文件上传与知识库功能,你可以上传文件、图片、音频、视频等多种类型的文件,以及创建知识库,方便用户管理和查找文件。同时在对话中使用文件和知识库功能,实现更加丰富的对话体验。
-
-
-
-
- 查阅 [📘 LobeChat 知识库上线 —— 此刻起,跬步千里](https://lobehub.com/zh/blog/knowledge-base)
- 了解详情。
-
diff --git a/docs/usage/features/local-llm.mdx b/docs/usage/features/local-llm.mdx
deleted file mode 100644
index 5d93da2292..0000000000
--- a/docs/usage/features/local-llm.mdx
+++ /dev/null
@@ -1,51 +0,0 @@
----
-title: Using Local LLM in LobeChat
-description: >-
- Experience groundbreaking AI support with a local LLM in LobeChat powered by Ollama AI. Start conversations effortlessly and enjoy unprecedented interaction speed!
-
-tags:
- - Local Large Language Model
- - Ollama AI
- - LobeChat
- - AI communication
- - Natural Language Processing
- - Docker deployment
----
-
-# Local Large Language Model (LLM) Support
-
-
-
-Available in >=0.127.0, currently only supports Docker deployment
-
-With the release of LobeChat v0.127.0, we are excited to introduce a groundbreaking feature - Ollama AI support! 🤯 With the powerful infrastructure of [Ollama AI](https://ollama.ai/) and the [community's collaborative efforts](https://github.com/lobehub/lobe-chat/pull/1265), you can now engage in conversations with a local LLM (Large Language Model) in LobeChat! 🤩
-
-We are thrilled to introduce this revolutionary feature to all LobeChat users at this special moment. The integration of Ollama AI not only signifies a significant technological leap for us but also reaffirms our commitment to continuously pursue more efficient and intelligent communication.
-
-### How to Start a Conversation with Local LLM?
-
-The startup process is exceptionally simple! By running the following Docker command, you can experience conversations with a local LLM in LobeChat:
-
-```bash
-docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434/v1 lobehub/lobe-chat
-```
-
-Yes, it's that simple! 🤩 You don't need to go through complicated configurations or worry about a complex installation process. We have prepared everything for you. With just one command, you can engage in deep conversations with a local AI.
-
-### Experience Unprecedented Interaction Speed
-
-With the powerful capabilities of Ollama AI, LobeChat has greatly improved its efficiency in natural language processing. Both processing speed and response time have reached new heights. This means that your conversational experience will be smoother, without any waiting, and with instant responses.
-
-### Why Choose a Local LLM?
-
-Compared to cloud-based solutions, a local LLM provides higher privacy and security. All your conversations are processed locally, without passing through any external servers, ensuring the security of your data. Additionally, local processing can reduce network latency, providing you with a more immediate communication experience.
-
-### Embark on Your LobeChat & Ollama AI Journey
-
-Now, let's embark on this exciting journey together! Through the collaboration of LobeChat and Ollama AI, explore the endless possibilities brought by AI. Whether you are a tech enthusiast or simply curious about AI communication, LobeChat will offer you an unprecedented experience.
-
-
-
-
-
-
diff --git a/docs/usage/features/local-llm.zh-CN.mdx b/docs/usage/features/local-llm.zh-CN.mdx
deleted file mode 100644
index b4c044e0b8..0000000000
--- a/docs/usage/features/local-llm.zh-CN.mdx
+++ /dev/null
@@ -1,44 +0,0 @@
----
-title: 在 LobeChat 中使用本地大语言模型(LLM)
-description: LobeChat 支持本地 LLM,使用 Ollama AI集成带来高效智能沟通。体验本地大语言模型的隐私性、安全性和即时交流
-tags:
- - '本地大语言模型,LLM,LobeChat v0.127.0,Ollama AI,Docker 部署'
----
-
-# 支持本地大语言模型(LLM)
-
-
-
-在 >=v0.127.0 版本中可用,目前仅支持 Docker 部署
-
-随着 LobeChat v0.127.0 的发布,我们迎来了一个激动人心的特性 —— Ollama AI 支持!🤯 在 [Ollama AI](https://ollama.ai/) 强大的基础设施和 [社区的共同努力](https://github.com/lobehub/lobe-chat/pull/1265) 下,现在您可以在 LobeChat 中与本地 LLM (Large Language Model) 进行交流了!🤩
-
-我们非常高兴能在这个特别的时刻,向所有 LobeChat 用户介绍这项革命性的特性。Ollama AI 的集成不仅标志着我们技术上的一个巨大飞跃,更是向用户承诺,我们将不断追求更高效、更智能的沟通方式。
-
-### 如何启动与本地 LLM 的对话?
-
-启动过程异常简单!您只需运行以下 Docker 命令行,就可以在 LobeChat 中体验与本地 LLM 的对话了:
-
-```bash
-docker run -d -p 3210:3210 -e OLLAMA_PROXY_URL=http://host.docker.internal:11434/v1 lobehub/lobe-chat
-```
-
-是的,就是这么简单!🤩 您不需要进行繁杂的配置,也不必担心复杂的安装过程。我们已经为您准备好了一切,只需一行命令,即可开启与本地 AI 的深度对话。
-
-### 体验前所未有的交互速度
-
-借助 Ollama AI 的强大能力,LobeChat 在进行自然语言处理方面的效率得到了极大的提升。无论是处理速度还是响应时间,都达到了新的高度。这意味着您的对话体验将更加流畅,无需等待,即时得到回应。
-
-### 为什么选择本地 LLM?
-
-与基于云的解决方案相比,本地 LLM 提供了更高的隐私性和安全性。您的所有对话都在本地处理,不经过任何外部服务器,确保了您的数据安全性。此外,本地处理还能减少网络延迟,为您带来更加即时的交流体验。
-
-### 开启您的 LobeChat & Ollama AI 之旅
-
-现在,就让我们一起开启这段激动人心的旅程吧!通过 LobeChat 与 Ollama AI 的协作,探索 AI 带来的无限可能。无论您是技术爱好者,还是对 AI 交流充满好奇,LobeChat 都将为您提供一场前所未有的体验。
-
-
-
-
-
-
diff --git a/docs/usage/features/mcp-market.mdx b/docs/usage/features/mcp-market.mdx
deleted file mode 100644
index ccf125671c..0000000000
--- a/docs/usage/features/mcp-market.mdx
+++ /dev/null
@@ -1,26 +0,0 @@
----
-title: 'MCP Marketplace - Discover, Connect, Expand'
-description: >-
- Browse the ever-growing MCP plugin library to effortlessly enhance your AI
- capabilities and streamline workflows. Explore the MCP Marketplace to find
- curated collections of integrations.
-tags:
- - MCP Marketplace
- - Plugin Library
- - Integrations
- - Extensions
- - Workflows
- - Community
- - Developers
----
-
-# MCP Marketplace
-
-
-
-**Discover, Connect, Expand**
-
-Browse the ever-growing MCP plugin library to effortlessly enhance your AI capabilities and streamline your workflows. Visit [lobehub.com/mcp](https://lobehub.com/mcp) to explore the MCP Marketplace, featuring curated collections of integrations that empower your AI to collaborate seamlessly with a variety of tools and services.
-
-From productivity tools to development environments, discover new ways to extend AI’s reach and efficiency. Connect with the community to find the perfect plugin tailored to your specific needs.
-
diff --git a/docs/usage/features/mcp-market.zh-CN.mdx b/docs/usage/features/mcp-market.zh-CN.mdx
deleted file mode 100644
index 5e7ba05677..0000000000
--- a/docs/usage/features/mcp-market.zh-CN.mdx
+++ /dev/null
@@ -1,22 +0,0 @@
----
-title: MCP 市场 - 发现、连接、扩展
-description: 浏览不断增长的 MCP 插件库,轻松扩展你的 AI 能力并简化工作流程。探索 MCP 市场,发现精选的集成集合。
-tags:
- - MCP 市场
- - 插件库
- - 集成
- - 扩展
- - 工作流程
- - 社区
- - 开发者
----
-
-# MCP 市场
-
-
-
-**发现、连接、扩展**
-
-浏览不断增长的 MCP 插件库,轻松扩展你的 AI 能力并简化工作流程。访问 [lobehub.com/mcp](https://lobehub.com/mcp) 探索 MCP 市场,提供精选的集成集合,增强你的 AI 与各种工具和服务协作的能力。
-
-从生产力工具到开发环境,发现扩展 AI 覆盖范围和效率的新方式。与社区连接,找到满足特定需求的完美插件。
diff --git a/docs/usage/features/mcp.mdx b/docs/usage/features/mcp.mdx
deleted file mode 100644
index 161bcc7134..0000000000
--- a/docs/usage/features/mcp.mdx
+++ /dev/null
@@ -1,58 +0,0 @@
----
-title: MCP Plugin One-Click Installation - Seamlessly Connect AI with the World
-description: >-
- Unlock the full potential of AI through the MCP (Model Context Protocol)
- plugin system, enabling smooth, secure, and dynamic interactions with external
- tools, data sources, and services.
-tags:
- - MCP
- - Model Context Protocol
- - Plugin System
- - One-Click Installation
- - Tool Integration
- - Workflow
- - External Services
----
-
-# MCP Plugin One-Click Installation
-
- />
-
-**Seamlessly Connect Your AI with the World**
-
-Unlock the full potential of your AI by enabling smooth, secure, and dynamic interactions with external tools, data sources, and services. The MCP (Model Context Protocol)-based plugin system breaks down barriers between AI and the digital ecosystem, delivering unprecedented connectivity and functionality.
-
-Transform conversations into powerful workflows by connecting databases, APIs, file systems, and more. Experience an AI Agent that truly understands and interacts with your world.
-
-## What is MCP (Model Context Protocol)?
-
-MCP (Model Context Protocol) is an open protocol standard that provides AI models with a standardized way to access and interact with external resources. Through MCP, AI assistants can:
-
-- 🔗 **Secure Connections**: Establish secure links with various tools and services
-- 🔄 **Dynamic Interaction**: Retrieve and update external data in real time
-- 🛡️ **Permission Control**: Manage access rights with fine-grained precision
-- 📊 **Context Awareness**: Maintain rich conversational context information
-
-## Key Features
-
-### 🚀 One-Click Installation Experience
-
-
-No complicated setup required—installing and configuring MCP plugins takes just a few clicks.
-
-
-- **Rapid Deployment**: From discovery to use in under one minute
-- **Automatic Configuration**: The system handles connection and permission settings automatically
-- **Instant Activation**: Ready to use in conversations immediately after installation
-
-### 🔌 Extensive Connectivity
-
-MCP plugins support connections to a wide variety of external resources:
-
-- **Databases**: MySQL, PostgreSQL, MongoDB, and more
-- **API Services**: REST APIs, GraphQL, WebSocket
-- **File Systems**: Local files, cloud storage, version control
-- **Development Tools**: GitHub, GitLab, Jira, Slack
-- **Office Suites**: Google Workspace, Microsoft 365
-- **Professional Tools**: Docker, Kubernetes, Jenkins
-
diff --git a/docs/usage/features/mcp.zh-CN.mdx b/docs/usage/features/mcp.zh-CN.mdx
deleted file mode 100644
index 51ad1acbe7..0000000000
--- a/docs/usage/features/mcp.zh-CN.mdx
+++ /dev/null
@@ -1,54 +0,0 @@
----
-title: MCP 插件一键安装 - 无缝连接 AI 与世界
-description: 通过 MCP(模型上下文协议)插件系统,释放 AI 的全部潜力,实现与外部工具、数据源和服务的平滑、安全和动态交互。
-tags:
- - MCP
- - 模型上下文协议
- - 插件系统
- - 一键安装
- - 工具集成
- - 工作流程
- - 外部服务
----
-
-# MCP 插件一键安装
-
-
-
-**无缝连接你的 AI 与世界**
-
-通过启用与外部工具、数据源和服务的平滑、安全和动态交互,释放你的 AI 的全部潜力。基于 MCP(模型上下文协议)的插件系统打破了 AI 与数字生态系统之间的壁垒,实现了前所未有的连接性和功能性。
-
-将对话转化为强大的工作流程,连接数据库、API、文件系统等。体验真正理解并与你的世界互动的 AI Agent。
-
-## 什么是 MCP(模型上下文协议)?
-
-MCP(Model Context Protocol)是一个开放的协议标准,它为 AI 模型提供了一个标准化的方式来访问和交互外部资源。通过 MCP,AI 助手可以:
-
-- 🔗 **安全连接**:与各种工具和服务建立安全的连接
-- 🔄 **动态交互**:实时获取和更新外部数据
-- 🛡️ **权限控制**:精细化的访问权限管理
-- 📊 **上下文感知**:维护丰富的对话上下文信息
-
-## 主要特性
-
-### 🚀 一键式安装体验
-
-
- 无需复杂的配置过程,只需几次点击即可完成 MCP 插件的安装和配置。
-
-
-- **快速部署**:从发现到使用,整个过程不超过 1 分钟
-- **自动配置**:系统自动处理连接和权限设置
-- **即时生效**:安装完成后立即可在对话中使用
-
-### 🔌 广泛的连接能力
-
-MCP 插件支持连接各种类型的外部资源:
-
-- **数据库**:MySQL、PostgreSQL、MongoDB 等
-- **API 服务**:REST API、GraphQL、WebSocket
-- **文件系统**:本地文件、云存储、版本控制
-- **开发工具**:GitHub、GitLab、Jira、Slack
-- **办公软件**:Google Workspace、Microsoft 365
-- **专业工具**:Docker、Kubernetes、Jenkins
diff --git a/docs/usage/features/mobile.mdx b/docs/usage/features/mobile.mdx
deleted file mode 100644
index d7a02ae41e..0000000000
--- a/docs/usage/features/mobile.mdx
+++ /dev/null
@@ -1,20 +0,0 @@
----
-title: LobeChat with Mobile Device Adaptation
-description: >-
- Explore the enhanced mobile user experience at LobeChat with optimized designs for smoother interactions. Share your feedback on GitHub!
-
-tags:
- - Mobile Device Adaptation
- - User Experience
- - Optimized Designs
- - Feedback
- - GitHub
----
-
-# Mobile Device Adaptation
-
-
-
-LobeChat has undergone a series of optimized designs for mobile devices to enhance the user's mobile experience.
-
-Currently, we are iterating on the user experience for mobile devices to achieve a smoother and more intuitive interaction. If you have any suggestions or ideas, we warmly welcome your feedback through GitHub Issues or Pull Requests.
diff --git a/docs/usage/features/mobile.zh-CN.mdx b/docs/usage/features/mobile.zh-CN.mdx
deleted file mode 100644
index f90421cc62..0000000000
--- a/docs/usage/features/mobile.zh-CN.mdx
+++ /dev/null
@@ -1,19 +0,0 @@
----
-title: LobeChat 移动设备适配 - 提升用户移动体验
-description: LobeChat针对移动设备进行优化设计,版本迭代以实现更流畅直观的交互。欢迎通过GitHub Issues或Pull Requests提供反馈。
-tags:
- - LobeChat
- - 移动设备适配
- - 用户体验
- - 版本迭代
- - GitHub
- - 反馈
----
-
-# 移动设备适配
-
-
-
-LobeChat 针对移动设备进行了一系列的优化设计,以提升用户的移动体验。
-
-目前,我们正在对移动端的用户体验进行版本迭代,以实现更加流畅和直观的交互。如果您有任何建议或想法,我们非常欢迎您通过 GitHub Issues 或者 Pull Requests 提供反馈。
diff --git a/docs/usage/features/more.mdx b/docs/usage/features/more.mdx
deleted file mode 100644
index 18ce2f3438..0000000000
--- a/docs/usage/features/more.mdx
+++ /dev/null
@@ -1,32 +0,0 @@
----
-title: More Features in LobeChat - Enhancing Design and Technical Capabilities
-description: >-
- Explore the additional features offered, including exquisite UI design, smooth conversation experience, fast deployment options, privacy and security measures, and custom domain support.
-
-tags:
- - UI Design
- - Conversation Experience
- - Deployment
- - Privacy
- - Custom Domain
----
-
-# More Features
-
-In addition to the above features, our design and technical capabilities will provide you with more assurance in usage:
-
-- [x] 💎 **Exquisite UI Design**: Carefully designed interface with elegant appearance and smooth interaction effects, supporting light and dark themes, and adaptable to mobile devices. Supports PWA, providing an experience closer to native applications.
-- [x] 🗣️ **Smooth Conversation Experience**: Responsive design brings a smooth conversation experience and supports full Markdown rendering, including code highlighting, LaTex formulas, Mermaid flowcharts, and more.
-- [x] 💨 **Fast Deployment**: Use the Vercel platform or our Docker image, simply click the deploy button, and deployment can be completed within 1 minute without complex configuration processes.
-- [x] 🔒 **Privacy and Security**: All data is stored locally in the user's browser, ensuring user privacy and security.
-- [x] 🌐 **Custom Domain**: If users have their own domain, they can bind it to the platform for quick access to the chat assistant from anywhere.
-
-> ✨ As the product continues to iterate, we will bring more exciting features!
-
----
-
-
- You can find our upcoming [Roadmap][github-project-link] plans in the Projects section.
-
-
-[github-project-link]: https://github.com/lobehub/lobe-chat/projects
diff --git a/docs/usage/features/more.zh-CN.mdx b/docs/usage/features/more.zh-CN.mdx
deleted file mode 100644
index 735db7628e..0000000000
--- a/docs/usage/features/more.zh-CN.mdx
+++ /dev/null
@@ -1,28 +0,0 @@
----
-title: 更多 LobeChat 特性 - 提供精致 UI 设计和流畅的对话体验
-description: 了解更多产品特性,包括精致 UI 设计、流畅的对话体验和快速部署功能,为用户带来更好的体验。
-tags:
- - 精致 UI 设计
- - 流畅对话体验
- - 快速部署
- - 隐私安全
- - 自定义域名
----
-
-# 更多特性
-
-除了上述功能特性以外,我们的所具有的设计和技术能力将为你带来了更多使用保障:
-
-- [x] 💎 **精致 UI 设计**:经过精心设计的界面,具有优雅的外观和流畅的交互效果,支持亮暗色主题,适配移动端。支持 PWA,提供更加接近原生应用的体验。
-- [x] 🗣️ **流畅的对话体验**:流式响应带来流畅的对话体验,并且支持完整的 Markdown 渲染,包括代码高亮、LaTex 公式、Mermaid 流程图等。
-- [x] 💨 **快速部署**:使用 Vercel 平台或者我们的 Docker 镜像,只需点击一键部署按钮,即可在 1 分钟内完成部署,无需复杂的配置过程。
-- [x] 🔒 **隐私安全**:所有数据保存在用户浏览器本地,保证用户的隐私安全。
-- [x] 🌐 **自定义域名**:如果用户拥有自己的域名,可以将其绑定到平台上,方便在任何地方快速访问对话助手。
-
-> ✨ 随着产品迭代持续更新,我们将会带来更多更多令人激动的功能!
-
----
-
-你可以在 Projects 中找到我们后续的 [Roadmap][github-project-link] 计划
-
-[github-project-link]: https://github.com/lobehub/lobe-chat/projects
diff --git a/docs/usage/features/multi-ai-providers.mdx b/docs/usage/features/multi-ai-providers.mdx
deleted file mode 100644
index 0431ba4414..0000000000
--- a/docs/usage/features/multi-ai-providers.mdx
+++ /dev/null
@@ -1,57 +0,0 @@
----
-title: LobeChat with Multi AI Providers
-description: >-
- Discover how LobeChat offers diverse model service provider support, including AWS Bedrock, Google AI Gemini series, ChatGLM, and Moonshot AI, to cater to various user needs. Explore local model support with Ollama integration.
-
-tags:
- - LobeChat
- - model service providers
- - AWS Bedrock
- - Google AI Gemini
- - ChatGLM
- - Moonshot AI
- - Together AI
- - local model support
- - Ollama
----
-
-# Multi-Model Service Provider Support
-
-
-
-Available in version 0.123.0 and later
-
-In the continuous development of LobeChat, we deeply understand the importance of diversity in model service providers for meeting the needs of the community when providing AI conversation services. Therefore, we have expanded our support to multiple model service providers, rather than being limited to a single one, in order to offer users a more diverse and rich selection of conversations.
-
-In this way, LobeChat can more flexibly adapt to the needs of different users, while also providing developers with a wider range of choices.
-
-## Supported Model Service Providers
-
-We have implemented support for the following model service providers:
-
-- **AWS Bedrock**: Integrated with AWS Bedrock service, supporting models such as **Claude / LLama2**, providing powerful natural language processing capabilities. [Learn more](https://aws.amazon.com/cn/bedrock)
-- **Anthropic (Claude)**: Accessed Anthropic's **Claude** series models, including Claude 3 and Claude 2, with breakthroughs in multi-modal capabilities and extended context, setting a new industry benchmark. [Learn more](https://www.anthropic.com/claude)
-- **Google AI (Gemini Pro, Gemini Vision)**: Access to Google's **Gemini** series models, including Gemini and Gemini Pro, to support advanced language understanding and generation. [Learn more](https://deepmind.google/technologies/gemini/)
-- **ChatGLM**: Added the **ChatGLM** series models from Zhipuai (GLM-4/GLM-4-vision/GLM-3-turbo), providing users with another efficient conversation model choice. [Learn more](https://www.zhipuai.cn/)
-- **Moonshot AI (Dark Side of the Moon)**: Integrated with the Moonshot series models, an innovative AI startup from China, aiming to provide deeper conversation understanding. [Learn more](https://www.moonshot.cn/)
-- **Groq**: Accessed Groq's AI models, efficiently processing message sequences and generating responses, capable of multi-turn dialogues and single-interaction tasks. [Learn more](https://groq.com/)
-- **OpenRouter**: Supports routing of models including **Claude 3**, **Gemma**, **Mistral**, **Llama2** and **Cohere**, with intelligent routing optimization to improve usage efficiency, open and flexible. [Learn more](https://openrouter.ai/)
-- **01.AI (Yi Model)**: Integrated the 01.AI models, with series of APIs featuring fast inference speed, which not only shortened the processing time, but also maintained excellent model performance. [Learn more](https://01.ai/)
-- **Together.ai**: Over 100 leading open-source Chat, Language, Image, Code, and Embedding models are available through the Together Inference API. For these models you pay just for what you use. [Learn more](https://www.together.ai/)
-- **Minimax**: Integrated the Minimax models, including the MoE model **abab6**, offers a broader range of choices. [Learn more](https://www.minimaxi.com/)
-- **DeepSeek**: Integrated with the DeepSeek series models, an innovative AI startup from China, The product has been designed to provide a model that balances performance with price. [Learn more](https://www.deepseek.com/)
-- **Qwen**: Integrated with the Qwen series models, including the latest **qwen-turbo**, **qwen-plus** and **qwen-max**. [Learn more](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction)
-
-At the same time, we are also planning to support more model service providers, such as Replicate and Perplexity, to further enrich our service provider library. If you would like LobeChat to support your favorite service provider, feel free to join our [community discussion](https://github.com/lobehub/lobe-chat/discussions/6157).
-
-## Local Model Support
-
-
-
-To meet the specific needs of users, LobeChat also supports the use of local models based on [Ollama](https://ollama.ai), allowing users to flexibly use their own or third-party models. For more details, see [Local Model Support](/docs/usage/features/local-llm).
-
-
-
-
-
-
diff --git a/docs/usage/features/multi-ai-providers.zh-CN.mdx b/docs/usage/features/multi-ai-providers.zh-CN.mdx
deleted file mode 100644
index 8d490d402e..0000000000
--- a/docs/usage/features/multi-ai-providers.zh-CN.mdx
+++ /dev/null
@@ -1,58 +0,0 @@
----
-title: LobeChat 支持多模型服务商
-description: 了解 LobeChat 在多模型服务商支持方面的最新进展,包括已支持的模型服务商和计划中的扩展,以及本地模型支持的使用方式。
-tags:
- - LobeChat
- - AI 会话服务
- - 模型服务商
- - 多模型支持
- - 本地模型支持
- - AWS Bedrock
- - Google AI
- - ChatGLM
- - Moonshot AI
- - 01 AI
- - Together AI
- - Ollama
----
-
-# 多模型服务商支持
-
-
-
-在 0.123.0 及以后版本中可用
-
-在 LobeChat 的不断发展过程中,我们深刻理解到在提供 AI 会话服务时模型服务商的多样性对于满足社区需求的重要性。因此,我们不再局限于单一的模型服务商,而是拓展了对多种模型服务商的支持,以便为用户提供更为丰富和多样化的会话选择。
-
-通过这种方式,LobeChat 能够更灵活地适应不同用户的需求,同时也为开发者提供了更为广泛的选择空间。
-
-## 已支持的模型服务商
-
-我们已经实现了对以下模型服务商的支持:
-
-- **AWS Bedrock**:集成了 AWS Bedrock 服务,支持了 **Claude / LLama2** 等模型,提供了强大的自然语言处理能力。[了解更多](https://aws.amazon.com/cn/bedrock)
-- **Google AI (Gemini Pro、Gemini Vision)**:接入了 Google 的 **Gemini** 系列模型,包括 Gemini 和 Gemini Pro,以支持更高级的语言理解和生成。[了解更多](https://deepmind.google/technologies/gemini/)
-- **Anthropic (Claude)**:接入了 Anthropic 的 **Claude** 系列模型,包括 Claude 3 和 Claude 2,多模态突破,超长上下文,树立行业新基准。[了解更多](https://www.anthropic.com/claude)
-- **ChatGLM**:加入了智谱的 **ChatGLM** 系列模型(GLM-4/GLM-4-vision/GLM-3-turbo),为用户提供了另一种高效的会话模型选择。[了解更多](https://www.zhipuai.cn/)
-- **Moonshot AI (月之暗面)**:集成了 Moonshot 系列模型,这是一家来自中国的创新性 AI 创业公司,旨在提供更深层次的会话理解。[了解更多](https://www.moonshot.cn/)
-- **Together.ai**:集成部署了数百种开源模型和向量模型,无需本地部署即可随时访问这些模型。[了解更多](https://www.together.ai/)
-- **01.AI (零一万物)**:集成了零一万物模型,系列 API 具备较快的推理速度,这不仅缩短了处理时间,同时也保持了出色的模型效果。[了解更多](https://www.lingyiwanwu.com/)
-- **Groq**:接入了 Groq 的 AI 模型,高效处理消息序列,生成回应,胜任多轮对话及单次交互任务。[了解更多](https://groq.com/)
-- **OpenRouter**:其支持包括 **Claude 3**,**Gemma**,**Mistral**,**Llama2**和**Cohere**等模型路由,支持智能路由优化,提升使用效率,开放且灵活。[了解更多](https://openrouter.ai/)
-- **Minimax**: 接入了 Minimax 的 AI 模型,包括 MoE 模型 **abab6**,提供了更多的选择空间。[了解更多](https://www.minimaxi.com/)
-- **DeepSeek**: 接入了 DeepSeek 的 AI 模型,包括最新的 **DeepSeek-V2**,提供兼顾性能与价格的模型。[了解更多](https://www.deepseek.com/)
-- **Qwen (通义千问)**: 接入了 Qwen 的 AI 模型,包括最新的 **qwen-turbo**,**qwen-plus** 和 **qwen-max** 等模型。[了解更多](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction)
-
-同时,我们也在计划支持更多的模型服务商,如 Replicate 和 Perplexity 等,以进一步丰富我们的服务商库。如果你希望让 LobeChat 支持你喜爱的服务商,欢迎加入我们的[社区讨论](https://github.com/lobehub/lobe-chat/discussions/6157)。
-
-## 本地模型支持
-
-
-
-为了满足特定用户的需求,LobeChat 还基于 [Ollama](https://ollama.ai) 支持了本地模型的使用,让用户能够更灵活地使用自己的或第三方的模型,详见 [本地模型支持](/zh/docs/usage/features/local-llm)。
-
-
-
-
-
-
diff --git a/docs/usage/features/plugin-system.mdx b/docs/usage/features/plugin-system.mdx
deleted file mode 100644
index a6f5166160..0000000000
--- a/docs/usage/features/plugin-system.mdx
+++ /dev/null
@@ -1,80 +0,0 @@
----
-title: Plugin System in LobeChat
-description: >-
- Explore the diverse plugin ecosystem of LobeChat, extending its capabilities to provide real-time information, interact with various platforms, and simplify user interactions.
-
-tags:
- - LobeChat
- - Plugin Ecosystem
- - Assistant Functionality
- - Plugin Development
- - SDK
- - Chat Application
----
-
-# Plugin System
-
-
-
-The plugin ecosystem of LobeChat is an important extension of its core functionality, greatly enhancing the practicality and flexibility of the LobeChat assistant.
-
-
-
-By utilizing plugins, LobeChat assistants can obtain and process real-time information, such as searching for web information and providing users with instant and relevant news.
-
-In addition, these plugins are not limited to news aggregation, but can also extend to other practical functions, such as quickly searching documents, generating images, obtaining data from various platforms like Bilibili, Steam, and interacting with various third-party services.
-
-Learn more about [plugin usage](/docs/usage/plugins/basic-usage) by checking it out.
-
-
- To help developers better participate in this ecosystem, we provide comprehensive development
- resources. This includes detailed component development documentation, a fully-featured software
- development kit (SDK), and template examples, all aimed at simplifying the development process and
- lowering the entry barrier for developers.
-
-
-
- We welcome developers to utilize these resources, unleash their creativity, and write
- feature-rich, user-friendly plugins. Through collective efforts, we can continuously expand the
- functional boundaries of the chat application and explore a more intelligent and efficient
- creativity platform.
-
-
-## Plugin Ecosystem
-
-
- If you are interested in plugin development, please refer to our [📘 Plugin Development
- Guide](/docs/usage/plugins/development) in the Wiki.
-
-
-- [lobe-chat-plugins][lobe-chat-plugins]: This is the plugin index for LobeChat. It retrieves the list of plugins from the index.json of this repository and displays them to the users.
-- [chat-plugin-template][chat-plugin-template]: Chat Plugin plugin development template, you can quickly create a new plugin project through the project template.
-- [@lobehub/chat-plugin-sdk][chat-plugin-sdk]: The LobeChat plugin SDK can help you create excellent Lobe Chat plugins.
-- [@lobehub/chat-plugins-gateway][chat-plugins-gateway]: The LobeChat plugin gateway is a backend service that serves as the gateway for LobeChat plugins. We deploy this service using Vercel.
-
-### Roadmap Progress
-
-The plugin system of LobeChat has now entered a stable stage, and we have basically completed most of the functionality required by the plugin system. However, we are still planning and considering the new possibilities that plugins can bring to us. You can learn more in the following Issues:
-
-
- ### ✅ Phase One of Plugins
-
- Implementing the separation of plugins from the main body, splitting the plugins into independent repositories for maintenance, and implementing dynamic loading of plugins. [**#73**](https://github.com/lobehub/lobe-chat/issues/73)
-
- ### ✅ Phase Two of Plugins
-
- The security and stability of plugin usage, more accurate presentation of abnormal states, maintainability and developer-friendliness of the plugin architecture. [**#97**](https://github.com/lobehub/lobe-chat/issues/97)
-
- ### ✅ Phase Three of Plugins
-
- Higher-level and improved customization capabilities, support for OpenAPI schema invocation, compatibility with ChatGPT plugins, and the addition of Midjourney plugins. [**#411**](https://github.com/lobehub/lobe-chat/discussions/#411)
-
- ### 💭 Phase Four of Plugins
-
- Comprehensive authentication, visual configuration of plugin definitions, Plugin SDK CLI, Python language development template, any other ideas? Join the discussion: [**#1310**](https://github.com/lobehub/lobe-chat/discussions/#1310)
-
-
-[chat-plugin-sdk]: https://github.com/lobehub/chat-plugin-sdk
-[chat-plugin-template]: https://github.com/lobehub/chat-plugin-template
-[chat-plugins-gateway]: https://github.com/lobehub/chat-plugins-gateway
-[lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins
diff --git a/docs/usage/features/plugin-system.zh-CN.mdx b/docs/usage/features/plugin-system.zh-CN.mdx
deleted file mode 100644
index 627349102d..0000000000
--- a/docs/usage/features/plugin-system.zh-CN.mdx
+++ /dev/null
@@ -1,70 +0,0 @@
----
-title: LobeChat 插件生态系统 - 功能扩展与开发资源
-description: 了解 LobeChat 插件生态系统如何增强 LobeChat 助手的实用性和灵活性,以及提供的开发资源和插件开发指南。
-tags:
- - LobeChat
- - 插件生态系统
- - 开发资源
- - 插件开发指南
----
-
-# 插件系统
-
-
-
-LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地增强了 LobeChat 助手的实用性和灵活性。
-
-
-
-通过利用插件,LobeChat 的助手们能够实现实时信息的获取和处理,例如搜索网络信息,为用户提供即时且相关的资讯。
-
-此外,这些插件不仅局限于新闻聚合,还可以扩展到其他实用的功能,如快速检索文档、生成图片、获取 Bilibili 、Steam 等各种平台数据,以及与其他各式各样的第三方服务交互。
-
-通过查看 [插件使用](/zh/docs/usage/plugins/basic-usage) 了解更多。
-
-
- 为了帮助开发者更好地参与到这个生态中来,我们在提供了全面的开发资源。这包括详尽的组件开发文档、功能齐全的软件开发工具包(SDK),以及样板示例,这些都是为了简化开发过程,降低开发者的入门门槛。
-
-
-
- 我们欢迎开发者利用这些资源,发挥创造力,编写出功能丰富、用户友好的插件。通过共同的努力,我们可以不断扩展聊天应用的功能界限,探索一个更加智能、高效的创造力平台。
-
-
-## 插件生态体系
-
-
- 如果你对插件开发感兴趣,请在 Wiki 中查阅我们的 [📘
- 插件开发指南](/zh/docs/usage/plugins/development)。
-
-
-- [lobe-chat-plugins][lobe-chat-plugins]:这是 LobeChat 的插件索引。它从该仓库的 index.json 中获取插件列表并显示给用户。
-- [chat-plugin-template][chat-plugin-template]: Chat Plugin 插件开发模版,你可以通过项目模版快速新建插件项目。
-- [@lobehub/chat-plugin-sdk][chat-plugin-sdk]:LobeChat 插件 SDK 可帮助您创建出色的 Lobe Chat 插件。
-- [@lobehub/chat-plugins-gateway][chat-plugins-gateway]:LobeChat 插件网关是一个后端服务,作为 LobeChat 插件的网关。我们使用 Vercel 部署此服务。
-
-### 路线进展
-
-LobeChat 的插件系统目前已初步进入一个稳定阶段,我们已基本完成大部分插件系统所需的功能,但我们仍然在规划与思考插件能为我们带来的全新可能性。您可以在以下 Issues 中了解更多信息:
-
-
- ### ✅ 插件一期
-
- 实现插件与主体分离,将插件拆分为独立仓库维护,并实现插件的动态加载。 [**#73**](https://github.com/lobehub/lobe-chat/issues/73)
-
- ### ✅ 插件二期
-
- 插件的安全性与使用的稳定性,更加精准地呈现异常状态,插件架构的可维护性与开发者友好。[**#97**](https://github.com/lobehub/lobe-chat/issues/97)
-
- ### ✅ 插件三期
-
- 更高阶与完善的自定义能力,支持 OpenAPI schema 调用、兼容 ChatGPT 插件、新增 Midjourney 插件。 [**#411**](https://github.com/lobehub/lobe-chat/discussions/#411)
-
- ### 💭 插件四期
-
- 完善的鉴权、可视化配置插件定义、 Plugin SDK CLI 、 Python 语言研发模板、还有什么想法?欢迎参与讨论: [**#1310**](https://github.com/lobehub/lobe-chat/discussions/#1310)
-
-
-[chat-plugin-sdk]: https://github.com/lobehub/chat-plugin-sdk
-[chat-plugin-template]: https://github.com/lobehub/chat-plugin-template
-[chat-plugins-gateway]: https://github.com/lobehub/chat-plugins-gateway
-[lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins
diff --git a/docs/usage/features/pwa.mdx b/docs/usage/features/pwa.mdx
deleted file mode 100644
index d5664feb85..0000000000
--- a/docs/usage/features/pwa.mdx
+++ /dev/null
@@ -1,60 +0,0 @@
----
-title: LobeChat support Progressive Web Apps (PWA)
-description: >-
- Discover how LobeChat utilizes Progressive Web App (PWA) technology to provide a seamless and near-native app experience on both desktop and mobile devices. Learn how to install LobeChat as a desktop app for enhanced convenience.
-
-tags:
- - Progressive Web App
- - PWA
- - LobeChat
- - Web Applications
- - User Experience
----
-
-# Progressive Web App (PWA)
-
-
-
-We understand the importance of providing a seamless experience for users in today's multi-device environment. To achieve this, we have adopted Progressive Web App [PWA](https://support.google.com/chrome/answer/9658361) technology, which is a modern web technology that elevates web applications to a near-native app experience. Through PWA, LobeChat is able to provide a highly optimized user experience on both desktop and mobile devices, while maintaining lightweight and high performance characteristics. Visually and perceptually, we have also carefully designed it to ensure that its interface is indistinguishable from a native app, providing smooth animations, responsive layouts, and adaptation to different screen resolutions of various devices.
-
-If you are unfamiliar with the installation process of PWA, you can follow the steps below to add LobeChat as a desktop app (also applicable to mobile devices):
-
-## Running on Chrome / Edge
-
-
- On macOS, when using a Chrome-installed PWA, it is required that Chrome be open, otherwise Chrome
- will automatically open and then launch the PWA app.
-
-
-
- ### Run Chrome or Edge browser on your computer
-
- ### Visit the LobeChat webpage
-
- ### In the top right corner of the address bar, click the Install icon
-
- ### Follow the on-screen instructions to complete the PWA installation
-
-
-## Running on Safari
-
-Safari PWA requires macOS Ventura or later. The PWA installed by Safari does not require Safari to be open; you can directly open the PWA app.
-
-
- ### Run Safari browser on your computer
-
- ### Visit the LobeChat webpage
-
- ### In the top right corner of the address bar, click the Share icon
-
- ### Click Add to Dock
-
- ### Follow the on-screen instructions to complete the PWA installation
-
-
-
- The default installed LobeChat PWA icon has a black background, you can use cmd +{' '}
- i to paste the following image to replace it with a white background.
-
-
-
diff --git a/docs/usage/features/pwa.zh-CN.mdx b/docs/usage/features/pwa.zh-CN.mdx
deleted file mode 100644
index 53dd372bd1..0000000000
--- a/docs/usage/features/pwa.zh-CN.mdx
+++ /dev/null
@@ -1,61 +0,0 @@
----
-title: LobeChat 支持渐进式 Web 应用(PWA)- 提升用户体验
-description: 了解渐进式 Web 应用(PWA)技术如何提升网页应用至接近原生应用体验,以及如何在桌面和移动设备上提供优化的用户体验。
-tags:
- - 渐进式 Web 应用
- - PWA 技术
- - 用户体验
- - 桌面应用
- - 移动设备
- - 轻量级
- - 高性能
- - 响应式布局
----
-
-# 渐进式 Web 应用(PWA)
-
-
-
-我们深知在当今多设备环境下为用户提供无缝体验的重要性。为此,我们采用了渐进式 Web 应用 [PWA](https://support.google.com/chrome/answer/9658361) 技术,这是一种能够将网页应用提升至接近原生应用体验的现代 Web 技术。通过 PWA,LobeChat 能够在桌面和移动设备上提供高度优化的用户体验,同时保持轻量级和高性能的特点。在视觉和感觉上,我们也经过精心设计,以确保它的界面与原生应用无差别,提供流畅的动画、响应式布局和适配不同设备的屏幕分辨率。
-
-若您未熟悉 PWA 的安装过程,您可以按照以下步骤将 LobeChat 添加为您的桌面应用(也适用于移动设备):
-
-## Chrome / Edge 浏览器上运行
-
-
- macOS 下,使用 Chrome 安装的 PWA 时,必须要求 Chrome 是打开状态,否则会自动打开 Chrome 再打开 PWA
- 应用。
-
-
-
- ### 在电脑上运行 Chrome 或 Edge 浏览器
-
- ### 访问 LobeChat 网页
-
- ### 在地址栏的右上角,单击 安装 图标
-
- ### 根据屏幕上的指示完成 PWA 的安装
-
-
-## Safari 浏览器上运行
-
-Safari PWA 需要 macOS Ventura 或更高版本。Safari 安装的 PWA 并不要求 Safari 是打开状态,可以直接打开 PWA 应用。
-
-
- ### 在电脑上运行 Safari 浏览器
-
- ### 访问 LobeChat 网页
-
- ### 在地址栏的右上角,单击 分享 图标
-
- ### 点选 添加到程序坞
-
- ### 根据屏幕上的指示完成 PWA 的安装
-
-
-
- 默认安装的 LobeChat PWA 图标是黑色背景的,您可以在自行使用 cmd + i{' '}
- 粘贴如下图片替换为白色背景的。
-
-
-
diff --git a/docs/usage/features/search.mdx b/docs/usage/features/search.mdx
deleted file mode 100644
index 53310e9099..0000000000
--- a/docs/usage/features/search.mdx
+++ /dev/null
@@ -1,56 +0,0 @@
----
-title: 'Intelligent Connected Search - Online Knowledge, On Demand'
-description: >-
- Stay synchronized with the world through real-time online access. Obtain the
- latest information, verify facts, and explore current events without leaving
- the conversation.
-tags:
- - Connected Search
- - Real-time Information
- - Search Engines
- - Information Retrieval
- - Fact Verification
- - Real-time Data
- - Knowledge Updates
----
-
-# Intelligent Connected Search
-
- />
-
-**Online Knowledge, On Demand**
-
-Stay in sync with the world through real-time online access — news, data, trends, and more. Keep your information up to date and access the latest available data, enabling your AI to provide accurate and current responses.
-
-Access real-time information, verify facts, and explore ongoing events without leaving the conversation. Your AI becomes a gateway to the world’s knowledge, always up-to-date and comprehensive.
-
-## Features Overview
-
-### 🌐 Real-Time Information Access
-
-
-Intelligent Connected Search empowers AI to access the latest internet information, ensuring accuracy and timeliness in responses.
-
-
-- **Breaking News**: Get the latest news reports and event updates
-- **Market Data**: Real-time stock prices, exchange rates, cryptocurrency values
-- **Weather Information**: Global weather forecasts and meteorological data
-- **Traffic Conditions**: Live traffic updates and road status
-- **Sports Events**: Match results, schedules, and statistics
-
-### 🔍 Multi-Source Search Engines
-
-Supports multiple search engines to ensure comprehensive and accurate information:
-
-- **Google**: The world’s largest search engine
-- **Bing**: Microsoft’s search engine
-- **DuckDuckGo**: Privacy-focused search engine
-
-### 📊 Structured Data Retrieval
-
-- **Academic Papers**: Access academic databases and journals
-- **Technical Documentation**: Obtain the latest technical documents and API references
-- **Statistical Data**: Government statistics, survey reports, market research
-- **Product Information**: Product details and reviews from e-commerce platforms
-- **Laws and Regulations**: Latest legal texts and policy interpretations
-
diff --git a/docs/usage/features/search.zh-CN.mdx b/docs/usage/features/search.zh-CN.mdx
deleted file mode 100644
index 51fad75f37..0000000000
--- a/docs/usage/features/search.zh-CN.mdx
+++ /dev/null
@@ -1,52 +0,0 @@
----
-title: 智能联网搜索 - 在线知识,按需获取
-description: 通过实时联网访问,你的 AI 与世界保持同步。获取最新信息,验证事实,探索当前事件,无需离开对话。
-tags:
- - 联网搜索
- - 实时信息
- - 搜索引擎
- - 信息获取
- - 事实验证
- - 实时数据
- - 知识更新
----
-
-# 智能联网搜索
-
-
-
-**在线知识,按需获取**
-
-通过实时联网访问,你的 AI 与世界保持同步 —— 新闻、数据、趋势等。保持信息更新,获取最新可用信息,使你的 AI 能够提供准确和最新的回复。
-
-访问实时信息,验证事实,探索当前事件,无需离开对话。你的 AI 成为通向世界知识的门户,始终保持最新和全面。
-
-## 功能概述
-
-### 🌐 实时信息获取
-
-
- 智能联网搜索让 AI 能够访问最新的互联网信息,确保回答的准确性和时效性。
-
-
-- **实时新闻**:获取最新的新闻报道和事件动态
-- **市场数据**:实时股价、汇率、加密货币价格
-- **天气信息**:全球天气预报和气象数据
-- **交通状况**:实时路况和交通信息
-- **体育赛事**:比赛结果、赛程和统计数据
-
-### 🔍 多源搜索引擎
-
-支持多个搜索引擎,确保信息的全面性和准确性:
-
-- **Google**:全球最大的搜索引擎
-- **Bing**:Microsoft 搜索引擎
-- **DuckDuckGo**:注重隐私的搜索引擎
-
-### 📊 结构化数据获取
-
-- **学术论文**:访问学术数据库和期刊
-- **技术文档**:获取最新的技术文档和 API 资料
-- **统计数据**:政府统计、调查报告、市场研究
-- **产品信息**:电商平台的产品详情和评价
-- **法律法规**:最新的法律条文和政策解读
diff --git a/docs/usage/features/text-to-image.mdx b/docs/usage/features/text-to-image.mdx
deleted file mode 100644
index ff9064cb57..0000000000
--- a/docs/usage/features/text-to-image.mdx
+++ /dev/null
@@ -1,18 +0,0 @@
----
-title: Text to Image in LobeChat
-description: >-
- Transform your ideas into images with the latest text-to-image generation technology integrated into LobeChat AI Assistant. Experience a private and immersive creative process.
-
-tags:
- - Text to Image Generation
- - LobeChat AI Assistant
- - DALL-E 3
- - MidJourney
- - Pollinations
----
-
-# Text to Image Generation
-
-
-
-Supporting the latest text-to-image generation technology, LobeChat now enables users to directly utilize the Text to Image tool during conversations with the assistant. By harnessing the capabilities of AI tools such as [DALL-E 3](https://openai.com/dall-e-3), [MidJourney](https://www.midjourney.com/), and [Pollinations](https://pollinations.ai/), assistants can now transform your ideas into images. This allows for a more private and immersive creative process.
diff --git a/docs/usage/features/text-to-image.zh-CN.mdx b/docs/usage/features/text-to-image.zh-CN.mdx
deleted file mode 100644
index 3262cd94c3..0000000000
--- a/docs/usage/features/text-to-image.zh-CN.mdx
+++ /dev/null
@@ -1,19 +0,0 @@
----
-title: LobeChat 文生图:文本转图片生成技术
-description: >-
- LobeChat 现在支持最新的文本到图片生成技术,让用户可以在与助手对话中直接调用文生图工具进行创作。利用 DALL-E 3、MidJourney 和 Pollinations 等 AI 工具,助手们可以将你的想法转化为图像,让创作过程更私密和沉浸式。
-
-tags:
- - LobeChat
- - 文生图
- - DALL-E 3
- - MidJourney
- - Pollinations
- - AI工具
----
-
-# Text to Image 文生图
-
-
-
-支持最新的文本到图片生成技术,LobeChat 现在能够让用户在与助手对话中直接调用文成图工具进行创作。通过利用 [`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/) 和 [`Pollinations`](https://pollinations.ai/) 等 AI 工具的能力, 助手们现在可以将你的想法转化为图像。同时可以更私密和沉浸式的完成你的创造过程。
diff --git a/docs/usage/features/theme.mdx b/docs/usage/features/theme.mdx
deleted file mode 100644
index 6294d67555..0000000000
--- a/docs/usage/features/theme.mdx
+++ /dev/null
@@ -1,28 +0,0 @@
----
-title: LobeChat support Custom Themes
-description: >-
- Explore LobeChat's flexible theme modes and color customization options for a personalized interface design. Switch between light and dark modes, customize theme colors, and choose between conversation bubble and document modes.
-
-tags:
- - Custom Themes
- - Personalized User Experiences
- - Theme Modes
- - Color Customization
- - Interface Design
- - LobeChat
----
-
-# Custom Themes
-
-
-
-LobeChat places a strong emphasis on personalized user experiences in its interface design, and thus introduces flexible and diverse theme modes, including a light mode for daytime and a dark mode for nighttime.
-
-In addition to theme mode switching, we also provide a series of color customization options, allowing users to adjust the application's theme colors according to their preferences. Whether it's a stable deep blue, a lively peach pink, or a professional gray and white, users can find color choices in LobeChat that match their own style.
-
-
- The default configuration can intelligently identify the user's system color mode and
- automatically switch themes to ensure a consistent visual experience with the operating system.
-
-
-For users who prefer to manually adjust details, LobeChat also provides intuitive setting options and offers a choice between conversation bubble mode and document mode for chat scenes.
diff --git a/docs/usage/features/theme.zh-CN.mdx b/docs/usage/features/theme.zh-CN.mdx
deleted file mode 100644
index 1a392352ed..0000000000
--- a/docs/usage/features/theme.zh-CN.mdx
+++ /dev/null
@@ -1,25 +0,0 @@
----
-title: LobeChat 自定义主题 - 个性化体验
-description: 了解 LobeChat 的灵活多变主题模式,包括日间亮色模式和夜间深色模式,以及颜色定制选项,让用户根据喜好调整应用主题色彩。
-tags:
- - LobeChat
- - 自定义主题
- - 主题模式
- - 颜色定制
- - 界面设计
- - 个性化体验
----
-
-# 自定义主题
-
-
-
-LobeChat 在界面设计上十分考虑用户的个性化体验,因此引入了灵活多变的主题模式,其中包括日间的亮色模式和夜间的深色模式。
-
-除了主题模式的切换,我们还提供了一系列的颜色定制选项,允许用户根据自己的喜好来调整应用的主题色彩。无论是想要沉稳的深蓝,还是希望活泼的桃粉,或者是专业的灰白,用户都能够在 LobeChat 中找到匹配自己风格的颜色选择。
-
-
- 默认配置能够智能地识别用户系统的颜色模式,自动进行主题切换,以确保应用界面与操作系统保持一致的视觉体验。
-
-
-对于喜欢手动调控细节的用户,LobeChat 同样提供了直观的设置选项,针对聊天场景也提供了对话气泡模式和文档模式的选择。
diff --git a/docs/usage/features/tts.mdx b/docs/usage/features/tts.mdx
deleted file mode 100644
index 6141ef4e70..0000000000
--- a/docs/usage/features/tts.mdx
+++ /dev/null
@@ -1,40 +0,0 @@
----
-title: LobeChat support Speech Synthesis and Recognition (TTS & STT)
-description: >-
- Experience seamless Text-to-Speech (TTS) and Speech-to-Text (STT) technologies in LobeChat. Choose from a variety of high-quality voices for personalized communication. Learn more about Lobe TTS toolkit @lobehub/tts.
-
-tags:
- - LobeChat
- - TTS
- - STT
- - Voice Conversation
- - Lobe TTS
- - Text-to-Speech
- - Speech-to-Text
- - Voice Options
----
-
-# TTS & STT Voice Conversation
-
-
-
-LobeChat supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies. Our application can convert text information into clear voice output, allowing users to interact with our conversational agents as if they were talking to a real person. Users can choose from a variety of voices and pair the appropriate audio with the assistant. Additionally, for users who prefer auditory learning or need to obtain information while busy, TTS provides an excellent solution.
-
-In LobeChat, we have carefully selected a series of high-quality voice options (OpenAI Audio, Microsoft Edge Speech) to meet the needs of users from different regions and cultural backgrounds. Users can choose suitable voices based on personal preferences or specific scenarios, thereby obtaining a personalized communication experience.
-
-## Lobe TTS
-
-
-
-[`@lobehub/tts`](https://tts.lobehub.com) is a high-quality TTS toolkit developed using the TS language, supporting usage in both server and browser environments.
-
-- **Server**: With just 15 lines of code, it can achieve high-quality speech generation capabilities comparable to OpenAI TTS services. It currently supports EdgeSpeechTTS, MicrosoftTTS, OpenAITTS, and OpenAISTT.
-- **Browser**: It provides high-quality React Hooks and visual audio components, supporting common functions such as loading, playing, pausing, and dragging the timeline, and offering extensive audio track style adjustment capabilities.
-
-
- During the implementation of the TTS feature in LobeChat, we found that there was no good frontend
- TTS library on the market, which resulted in a lot of effort being spent on implementation,
- including data conversion, audio progress management, and speech visualization. Adhering to the
- "Community First" concept, we have polished and open-sourced this implementation, hoping to help
- community developers who want to implement TTS.
-
diff --git a/docs/usage/features/tts.zh-CN.mdx b/docs/usage/features/tts.zh-CN.mdx
deleted file mode 100644
index 1639520b69..0000000000
--- a/docs/usage/features/tts.zh-CN.mdx
+++ /dev/null
@@ -1,35 +0,0 @@
----
-title: LobeChat 支持 TTS & STT 语音会话
-description: LobeChat 支持文字转语音(TTS)和语音转文字(STT)技术,提供高品质声音选项,个性化交流体验。了解更多关于 Lobe TTS 工具包。
-tags:
- - TTS
- - STT
- - 语音会话
- - LobeChat
- - Lobe TTS
- - 文字转语音
- - 语音转文字
----
-
-# TTS & STT 语音会话
-
-
-
-LobeChat 支持文字转语音(Text-to-Speech,TTS)和语音转文字(Speech-to-Text,STT)技术,我们的应用能够将文本信息转化为清晰的语音输出,用户可以像与真人交谈一样与我们的对话代理进行交流。用户可以从多种声音中选择,给助手搭配合适的音源。 同时,对于那些倾向于听觉学习或者想要在忙碌中获取信息的用户来说,TTS 提供了一个极佳的解决方案。
-
-在 LobeChat 中,我们精心挑选了一系列高品质的声音选项 (OpenAI Audio, Microsoft Edge Speech),以满足不同地域和文化背景用户的需求。用户可以根据个人喜好或者特定场景来选择合适的语音,从而获得个性化的交流体验。
-
-## Lobe TTS
-
-
-
-[`@lobehub/tts`](https://tts.lobehub.com) 是一个使用 TS 语言开发的,高质量 TTS 工具包,支持在服务端和浏览器中使用。
-
-- **服务端**:只要使用 15 行代码,即可实现对标 OpenAI TTS 服务的高质量语音生成能力。目前支持 EdgeSpeechTTS 与 MicrosoftTTS 与 OpenAITTS、OpenAISTT。
-- **浏览器**:提供了高质量的 React Hooks 与可视化音频组件,支持加载、播放、暂停、拖动时间轴等常用功能,且提供了非常丰富的音轨样式调整能力。
-
-
- 我们在实现 LobeChat 的 TTS 功能过程中,发现市面上并没有一款很好的 TTS
- 前端库,导致在实现上耗费了很多精力,包括数据转换、音频进度管理、语音可视化等。秉承「 Community
- First 」 的理念,我们把这套实现打磨并开源了出来,希望能帮助到想要实现 TTS 的社区开发者们。
-
diff --git a/docs/usage/features/vision.mdx b/docs/usage/features/vision.mdx
deleted file mode 100644
index 968d2c8580..0000000000
--- a/docs/usage/features/vision.mdx
+++ /dev/null
@@ -1,20 +0,0 @@
----
-title: LobeChat support Vision Recognition
-description: >-
- Discover how LobeChat integrates visual recognition capabilities like OpenAI's gpt-4-vision and Google Gemini Pro vision for intelligent conversations based on uploaded images.
-
-tags:
- - LobeChat
- - Model Vision Recognition
- - Multimodal Interaction
- - Visual Elements
- - Intelligent Conversations
----
-
-# Model Vision Recognition
-
-
-
-LobeChat now supports large language models with visual recognition capabilities such as OpenAI's [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision), Google Gemini Pro vision, and Zhipu GLM-4 Vision, enabling LobeChat to have multimodal interaction capabilities. Users can easily upload or drag and drop images into the chat box, and the assistant will be able to recognize the content of the images and engage in intelligent conversations based on them, creating more intelligent and diverse chat scenarios.
-
-This feature opens up new ways of interaction, allowing communication to extend beyond text and encompass rich visual elements. Whether it's sharing images in daily use or interpreting images in specific industries, the assistant can provide an excellent conversational experience.
diff --git a/docs/usage/features/vision.zh-CN.mdx b/docs/usage/features/vision.zh-CN.mdx
deleted file mode 100644
index 0518c041e6..0000000000
--- a/docs/usage/features/vision.zh-CN.mdx
+++ /dev/null
@@ -1,18 +0,0 @@
----
-title: LobeChat 支持多模态交互:视觉识别助力智能对话
-description: LobeChat 支持多种具有视觉识别能力的大语言模型,用户可上传或拖拽图片,助手将识别内容并展开智能对话,打造更智能、多元化的聊天场景。
-tags:
- - LobeChat
- - 多模态交互
- - 视觉识别
- - 智能对话
- - 大语言模型
----
-
-# 模型视觉识别
-
-
-
-LobeChat 已经支持 OpenAI 的 [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) 、Google Gemini Pro vision、智谱 GLM-4 Vision 等具有视觉识别能力的大语言模型,这使得 LobeChat 具备了多模态交互的能力。用户可以轻松上传图片或者拖拽图片到对话框中,助手将能够识别图片内容,并在此基础上进行智能对话,构建更智能、更多元化的聊天场景。
-
-这一特性打开了新的互动方式,使得交流不再局限于文字,而是可以涵盖丰富的视觉元素。无论是日常使用中的图片分享,还是在特定行业内的图像解读,助手都能提供出色的对话体验。
diff --git a/docs/usage/foundation/basic.mdx b/docs/usage/foundation/basic.mdx
deleted file mode 100644
index b1bcb44aca..0000000000
--- a/docs/usage/foundation/basic.mdx
+++ /dev/null
@@ -1,43 +0,0 @@
----
-title: Basic Usage Guide for Conversations - Large Language Models (LLMs)
-description: >-
- Learn about the fundamental functions for interacting with Large Language Models (LLMs) and how to utilize features like model selection, file/image upload, temperature setting, and more.
-
-tags:
- - Large Language Models
- - Model Selection
- - File Upload
- - Temperature Setting
- - Voice Input
- - Plugin Setting
----
-
-# Basic Usage Guide for Conversations
-
-
-
-In general, the basic interaction with Large Language Models (LLMs) can be done through the fundamental functions provided in this area (as shown above).
-
-## Basic Function Description
-
-
-
-1. **Model Selection**: Choose the Large Language Model (LLM) to be used in the current conversation. For model settings, refer to [Model Providers](/docs/usage/providers).
-2. **File/Image Upload**: When the selected model supports file or image recognition, users can upload files or images during the conversation with the model.
-3. **Temperature Setting**: Adjust the randomness level of the model's output. The higher the value, the more random the output results. For detailed information, refer to the [Large Language Model Guide](/docs/usage/agents/model).
-4. **History Record Setting**: Set the number of chat records the model needs to remember in this conversation. The longer the history, the more conversation content the model can remember, but it will also consume more context tokens.
-5. **Voice Input**: Click this button to convert speech to text input. For more information, refer to [Speech-to-Text Conversion](/docs/usage/foundation/tts-stt).
-6. **Plugin Setting**: Choose the plugins to enable in this conversation. For more information, refer to [Plugin Usage](/docs/usage/plugins/basic-usage).
-7. **Token Usage**: Display the context length and token consumption of this conversation.
-8. **Start New Topic**: End the current conversation and start a new topic. For more information, refer to [Topic Usage](/docs/usage/agents/topics).
-9. **Send Button**: Send the current input content to the model. The dropdown menu provides additional send operation options.
-
-
-
-
- - **Send Shortcut**: Set a shortcut to send messages and line breaks using the Enter key or ⌘ +
- Enter key. - **Add an AI Message**: Manually add and edit a message input by an AI character in
- the conversation context, which will not trigger a model response. - **Add a User Message**: Add
- the current input content as a message input by the user character to the conversation context,
- which will not trigger a model response.
-
diff --git a/docs/usage/foundation/basic.zh-CN.mdx b/docs/usage/foundation/basic.zh-CN.mdx
deleted file mode 100644
index 5c858e7a9a..0000000000
--- a/docs/usage/foundation/basic.zh-CN.mdx
+++ /dev/null
@@ -1,46 +0,0 @@
----
-title: 会话基本使用指南 - 大型语言模型交互指南
-description: 了解如何使用大型语言模型进行基本交互,包括模型选择、文件/图片上传、温度设置、历史记录设置等。
-tags:
- - 大型语言模型
- - LLM
- - 模型选择
- - 文件上传
- - 温度设置
- - 历史记录设置
- - 语音输入
- - 插件设置
- - Token 用量
- - 新建话题
- - 发送按钮
----
-
-# 会话基本使用指南
-
-
-
-通常情况下,与大型语言模型 (LLMs) 的基本交互可以通过此区域(如上图)提供的基础功能进行。
-
-## 基本功能说明
-
-
-
-1. **模型选择**:选择当前对话所使用的大型语言模型 (LLM)。模型的设置详见[模型服务商](/zh/docs/usage/providers)。
-2. **文件 / 图片上传**:当所选模型支持文件或图片识别功能时,用户可以在与模型的对话中上传文件或图片。
-3. **温度设置**:调节模型输出的随机性程度。数值越高,输出结果越随机。详细说明请参考[大语言模型指南](/zh/docs/usage/agents/model)。
-4. **历史记录设置**:设定本次对话中模型需要记忆的聊天记录数量。历史记录越长,模型能够记忆的对话内容越多,但同时也会消耗更多的上下文 token。
-5. **语音输入**:点击该按钮后,可以将语音转换为文字输入。有关详细信息,请参考[语音文字转换](/zh/docs/usage/foundation/tts-stt)。
-6. **插件设置**:选择本次对话中需要启用的插件。有关详细信息,请参考[插件使用](/zh/docs/usage/plugins/basic-usage)。
-7. **Token 用量**:显示本次对话的上下文长度以及 Token 消耗情况。
-8. **新建话题**:结束当前对话并开启一个新的对话主题。有关详细信息,请参考[话题使用](/zh/docs/usage/agents/topics)。
-9. **发送按钮**:将当前输入内容发送至模型。下拉菜单提供额外的发送操作选项。
-
-
-
-
- - **发送快捷键**:设置使用 Enter 键或 ⌘ + Enter 键发送消息和换行的快捷方式。 -
- **添加一条 AI 消息**:在对话上下文中手动添加并编辑一条由 AI 角色输入的消息,该操作不会触发模型响应。
- -
-
- **添加一条用户消息**:将当前输入内容作为用户角色输入的消息添加到对话上下文中,该操作不会触发模型响应。
-
diff --git a/docs/usage/foundation/share.mdx b/docs/usage/foundation/share.mdx
deleted file mode 100644
index 12e9de3ff1..0000000000
--- a/docs/usage/foundation/share.mdx
+++ /dev/null
@@ -1,35 +0,0 @@
----
-title: Share Conversation Records - Screenshot Sharing & ShareGPT
-description: >-
- Learn how to share conversation records in LobeChat using Screenshot Sharing and ShareGPT methods. Capture conversation details in images or generate permanent links effortlessly.
-
-tags:
- - Share Conversation Records
- - Screenshot Sharing
- - ShareGPT
- - Conversation Sharing
- - AI Conversation Sharing
----
-
-# Share Conversation Records
-
-
-
-By clicking the `Share` button in the top right corner of the chat window, you can share the current conversation records with others. LobeChat supports two sharing methods: `Screenshot Sharing` and `ShareGPT Sharing`.
-
-## Screenshot Sharing
-
-
-
-The screenshot sharing feature will generate and save an image of the current conversation records, with the following options:
-
-- Include Assistant Role Settings: Display the assistant's Prompt information in the screenshot.
-- Include Background Image: Add a gradient background to the generated image.
-- Include Footer: Add LobeChat footer information to the generated image.
-- Image Format: Choose the format for saving the image.
-
-## ShareGPT
-
-
-
-[ShareGPT](https://sharegpt.com/) is an AI conversation sharing platform that allows users to easily share their conversations with Large Language Models (LLMs). Users can generate a permanent link with just one click, making it convenient to share these conversations with friends or others. By integrating ShareGPT functionality, LobeChat can generate links for conversation records with just one click, making sharing easy.
diff --git a/docs/usage/foundation/share.zh-CN.mdx b/docs/usage/foundation/share.zh-CN.mdx
deleted file mode 100644
index 397f4b41b3..0000000000
--- a/docs/usage/foundation/share.zh-CN.mdx
+++ /dev/null
@@ -1,33 +0,0 @@
----
-title: 分享会话记录 - LobeChat 分享功能介绍
-description: 了解如何通过 LobeChat 的分享功能分享会话记录,包括截图分享和 ShareGPT 分享方式。通过分享功能,轻松与他人分享您的对话。
-tags:
- - LobeChat
- - 分享会话记录
- - 截图分享
- - ShareGPT
- - 对话分享
----
-
-# 分享会话记录
-
-
-
-通过会话窗口右上角的`分享`按钮,您可以将当前会话记录分享给其他人。LobeChat 支持两种分享方式:`截图分享`和 `ShareGPT 分享`。
-
-## 截图分享
-
-
-
-截图分享功能将生成当前会话记录的图片并保存,其选项说明如下:
-
-- 包含助手角色设置:在截图中显示助手的 Prompt 信息。
-- 包含背景图:在生成的图片中添加渐变背景。
-- 包含页脚:在生成的图片中添加 LobeChat 页脚信息。
-- 图片格式:选择保存图片的格式。
-
-## ShareGPT
-
-
-
-[ShareGPT](https://sharegpt.com/) 是一个 AI 对话分享平台,允许用户便捷地分享他们与大型语言模型 (LLM) 的对话。用户只需点击即可生成永久链接,方便与朋友或其他人分享这些对话。LobeChat 通过集成 ShareGPT 功能,可以一键将对话记录生成链接,方便分享。
diff --git a/docs/usage/foundation/text2image.mdx b/docs/usage/foundation/text2image.mdx
deleted file mode 100644
index f9b6cb736d..0000000000
--- a/docs/usage/foundation/text2image.mdx
+++ /dev/null
@@ -1,39 +0,0 @@
----
-title: Guide to Using Text-to-Image Models in LobeChat
-description: >-
- Learn how to utilize text-to-image generation in LobeChat using DALL-E and Midjourney plugins. Generate images seamlessly with AI assistance.
-
-tags:
- - Text-to-Image Models
- - LobeChat
- - DALL-E
- - Midjourney
- - Plugin Installation
- - AI Assistance
----
-
-# Guide to Using Text-to-Image Models in LobeChat
-
-LobeChat supports text-to-image generation through a plugin mechanism. Currently, LobeChat comes with the built-in DALL-E plugin, which allows users to generate images using OpenAI's DALL-E model. Additionally, users can also install the official Midjourney plugin to utilize the Midjourney text-to-image feature.
-
-## DALL-E Model
-
-If you have configured the OpenAI API, you can enable the DALL-E plugin directly in the assistant interface and input prompts in the conversation for AI to generate images for you.
-
-
-
-If the DALL-E plugin is not available, please check if the OpenAI API key has been correctly configured.
-
-## Midjourney Model
-
-LobeChat also offers the Midjourney plugin, which generates images by calling the Midjourney API. Please install the Midjourney plugin in the plugin store beforehand.
-
-
-
-
- info For plugin installation, please refer to [Plugin Usage](/docs/usage/plugins/basic-usage)
-
-
-When using the Midjourney plugin for the first time, you will need to fill in your Midjourney API key in the plugin settings.
-
-
diff --git a/docs/usage/foundation/text2image.zh-CN.mdx b/docs/usage/foundation/text2image.zh-CN.mdx
deleted file mode 100644
index 18434dc6dc..0000000000
--- a/docs/usage/foundation/text2image.zh-CN.mdx
+++ /dev/null
@@ -1,35 +0,0 @@
----
-title: LobeChat 文生图模型使用指南
-description: 了解如何在 LobeChat 中使用 DALL-E 和 Midjourney 模型生成图片,配置插件并调用 API。
-tags:
- - LobeChat
- - 文生图模型
- - DALL-E
- - Midjourney
- - 插件
- - API
----
-
-# 文生图模型使用指南
-
-LobeChat 通过插件机制支持文本生成图片功能。目前,LobeChat 内置了 DALL-E 插件,支持调用 OpenAI 的 DALL-E 模型进行图片生成。此外,用户还可以安装官方提供的 Midjourney 插件,使用 Midjourney 文生图功能。
-
-## DALL-E 模型
-
-如果您已配置 OpenAI API,可以直接在助手界面启用 DALL-E 插件,并在对话中输入提示词,让 AI 为您生成图片。
-
-
-
-如果 DALL-E 插件不可用,请检查 OpenAI API 密钥是否已正确配置。
-
-## Midjourney 模型
-
-LobeChat 还提供 Midjourney 插件,通过 API 调用 Midjourney 生成图片。请提前在插件商店中安装 Midjourney 插件。
-
-
-
-插件安装请参考[插件使用](/zh/docs/usage/plugins/basic-usage)
-
-首次使用 Midjourney 插件时,您需要在插件设置中填写您的 Midjourney API 密钥。
-
-
diff --git a/docs/usage/foundation/translate.mdx b/docs/usage/foundation/translate.mdx
deleted file mode 100644
index 9e0ce125a1..0000000000
--- a/docs/usage/foundation/translate.mdx
+++ /dev/null
@@ -1,30 +0,0 @@
----
-title: Translation of Conversation Records - LobeChat
-description: >-
- Learn how to translate conversation content in LobeChat with just one click. Customize translation models for accurate results.
-
-tags:
- - Translation
- - Conversation Translation
- - AI Translation Model
----
-
-# Translation of Conversation Records
-
-
-
-## Translating Conversation Content
-
-LobeChat supports users to translate conversation content into a specified language with just one click. After selecting the target language, LobeChat will use a pre-set AI model for translation and display the translated results in real-time in the chat window.
-
-
-
-## Translation Model Settings
-
-You can specify the model you wish to use as a translation assistant in the settings.
-
-
-
-- Open the `Settings` panel
-- Find the `Translation Settings` option under `System Assistants`
-- Specify a model for your `Translation Assistant`
diff --git a/docs/usage/foundation/translate.zh-CN.mdx b/docs/usage/foundation/translate.zh-CN.mdx
deleted file mode 100644
index 3449ac5d07..0000000000
--- a/docs/usage/foundation/translate.zh-CN.mdx
+++ /dev/null
@@ -1,29 +0,0 @@
----
-title: LobeChat 会话翻译功能 - 一键实时翻译对话内容
-description: LobeChat 支持用户一键将对话内容翻译成指定语言,实时显示翻译结果。了解如何设置翻译模型以优化翻译体验。
-tags:
- - LobeChat
- - 会话翻译
- - 实时翻译
- - 翻译模型设置
----
-
-# 翻译会话记录
-
-
-
-## 翻译对话中的内容
-
-LobeChat 支持用户一键将对话内容翻译成指定语言。选择目标语言后,LobeChat 将调用预先设置的 AI 模型进行翻译,并将翻译结果实时显示在聊天窗口中。
-
-
-
-## 翻译模型设置
-
-你可以在设置中指定您希望使用的模型作为翻译助手。
-
-
-
-- 打开`设置`面板
-- 在`系统助手`中找到`翻译设置`选项
-- 为你的`翻译助手`指定一个模型
diff --git a/docs/usage/foundation/tts-stt.mdx b/docs/usage/foundation/tts-stt.mdx
deleted file mode 100644
index e4ab914b7f..0000000000
--- a/docs/usage/foundation/tts-stt.mdx
+++ /dev/null
@@ -1,38 +0,0 @@
----
-title: Guide to Text-to-Speech Conversion - LobeChat TTS Feature
-description: >-
- Learn how to use LobeChat's text-to-speech (TTS) feature for voice input and output. Explore speech-to-text (STT) functionality and customize TTS settings.
-
-tags:
- - Text-to-Speech
- - TTS feature
- - Speech-to-Text
- - STT feature
- - TTS settings
----
-
-# Guide to Text-to-Speech Conversion
-
-LobeChat supports text-to-speech conversion, allowing users to input content through voice and have the AI output read aloud through speech.
-
-## Text-to-Speech (TTS)
-
-Select any content in the chat window, choose `Text-to-Speech`, and the AI will use the TTS model to read the text content aloud.
-
-
-
-## Speech-to-Text (STT)
-
-Select the voice input feature in the input window, and LobeChat will convert your speech to text and input it into the text box. After completing the input, you can send it directly to the AI.
-
-
-
-## Text-to-Speech Conversion Settings
-
-You can specify the model you want to use for text-to-speech conversion in the settings.
-
-
-
-- Open the `Settings` panel
-- Find the `Text-to-Speech` settings
-- Select the speech service and AI model you prefer
diff --git a/docs/usage/foundation/tts-stt.zh-CN.mdx b/docs/usage/foundation/tts-stt.zh-CN.mdx
deleted file mode 100644
index f5199c9cb2..0000000000
--- a/docs/usage/foundation/tts-stt.zh-CN.mdx
+++ /dev/null
@@ -1,36 +0,0 @@
----
-title: LobeChat 文字语音转换功能指南
-description: 了解如何在 LobeChat 中使用文字语音转换功能,包括文字转语音(TTS)和语音转文字(STT),以及设置您喜欢的语音模型。
-tags:
- - LobeChat
- - 文字语音转换
- - TTS
- - STT
- - 语音模型
----
-
-# 文字语音转换使用指南
-
-LobeChat 支持文字语音转换功能,允许用户通过语音输入内容,以及将 AI 输出的内容通过语音播报。
-
-## 文字转语音(TTS)
-
-在对话窗口中选中任意内容,选择`文字转语音`,AI 将通过 TTS 模型对文本内容进行语音播报。
-
-
-
-## 语音转文字(STT)
-
-在输入窗口中选择语音输入功能,LobeChat 将您的语音转换为文字并输入到文本框中,完成输入后可以直接发送给 AI。
-
-
-
-## 文字语音转换设置
-
-你可以在设置中为文字语音转换功能指定您希望使用的模型。
-
-
-
-- 打开`设置`面板
-- 找到`文字转语音`设置
-- 选择您所需的语音服务和 AI 模型
diff --git a/docs/usage/foundation/vision.mdx b/docs/usage/foundation/vision.mdx
deleted file mode 100644
index e140f108ea..0000000000
--- a/docs/usage/foundation/vision.mdx
+++ /dev/null
@@ -1,36 +0,0 @@
----
-title: Enhancing Multimodal Interaction with Visual Recognition Models
-description: >-
- Explore how LobeChat integrates visual recognition capabilities into large language models, enabling multimodal interactions for enhanced user experiences.
-
-tags:
- - Visual Recognition
- - Multimodal Interaction
- - Large Language Models
- - LobeChat
- - Custom Model Configuration
----
-
-# Visual Model User Guide
-
-The ecosystem of large language models that support visual recognition is becoming increasingly rich. Starting from `gpt-4-vision`, LobeChat now supports various large language models with visual recognition capabilities, enabling LobeChat to have multimodal interaction capabilities.
-
-
-
-## Image Input
-
-If the model you are currently using supports visual recognition, you can input image content by uploading a file or dragging the image directly into the input box. The model will automatically recognize the image content and provide feedback based on your prompts.
-
-
-
-## Visual Models
-
-In the model list, models with a `👁️` icon next to their names indicate that the model supports visual recognition. Selecting such a model allows you to send image content.
-
-
-
-## Custom Model Configuration
-
-If you need to add a custom model that is not currently in the list and explicitly supports visual recognition, you can enable the `Visual Recognition` feature in the `Custom Model Configuration` to allow the model to interact with images.
-
-
diff --git a/docs/usage/foundation/vision.zh-CN.mdx b/docs/usage/foundation/vision.zh-CN.mdx
deleted file mode 100644
index 8df2678537..0000000000
--- a/docs/usage/foundation/vision.zh-CN.mdx
+++ /dev/null
@@ -1,33 +0,0 @@
----
-title: 视觉模型使用指南 - 支持多模态交互的大语言模型
-description: "了解如何在LobeChat中使用支持视觉识别功能的大语言模型,通过上传图片或拖拽图片到输入框进行交互,并选择带有\U0001F441️图标的模型进行图片内容交互。"
-tags:
- - 视觉模型
- - 多模态交互
- - 大语言模型
- - 自定义模型配置
----
-
-# 视觉模型使用指南
-
-当前支持视觉识别的大语言模型生态日益丰富。从 `gpt-4-vision` 开始,LobeChat 开始支持各类具有视觉识别能力的大语言模型,这使得 LobeChat 具备了多模态交互的能力。
-
-
-
-## 图片输入
-
-如果你当前使用的模型支持视觉识别功能,您可以通过上传文件或直接将图片拖入输入框的方式输入图片内容。模型会自动识别图片内容,并根据您的提示词给出反馈。
-
-
-
-## 视觉模型
-
-在模型列表中,模型名称后面带有`👁️`图标表示该模型支持视觉识别功能。选择该模型后即可发送图片内容。
-
-
-
-## 自定义模型配置
-
-如果您需要添加当前列表中没有的自定义模型,并且该模型明确支持视觉识别功能,您可以在`自定义模型配置`中开启`视觉识别`功能,使该模型能够与图片进行交互。
-
-
diff --git a/docs/usage/getting-started/agent.mdx b/docs/usage/getting-started/agent.mdx
new file mode 100644
index 0000000000..0e3de83a07
--- /dev/null
+++ b/docs/usage/getting-started/agent.mdx
@@ -0,0 +1,105 @@
+---
+title: Agent
+description: >-
+ Simple centralized configuration for prompts, model selection, knowledge
+ bases, plugins, and more.
+tags:
+ - LobeHub
+  - Agent
+ - AI Assistant
+ - Assistant Organization
+ - Group Settings
+ - Assistant Search
+ - Assistant Pinning
+---
+
+# Agent
+
+At LobeHub, you have the freedom to create and customize your own AI assistants—whether it's a translation assistant that remembers your terminology preferences, a coding assistant familiar with your programming style, or a writing assistant that understands your tone and voice. By configuring prompts, selecting models, adding plugins, and linking knowledge bases, you can build a truly personalized assistant tailored to your needs.
+
+This guide will walk you through how to create and configure your AI assistant.
+
+## Create a New AI Assistant
+
+There are three ways to create a new assistant: use the Agent Builder for smart creation, build one from scratch, or quickly add one from the community. No matter which method you choose, your assistant can be fine-tuned to match your workflow.
+
+### Smart Creation with Agent Builder
+
+Agent Builder is LobeHub’s built-in assistant that helps you create AI assistants. Simply chat with Agent Builder and describe your needs—it will understand and automatically generate a complete assistant configuration, including role settings, system prompts, and plugin setup.
+
+- Click the "Create Agent" button in the left sidebar on the homepage, or use the "Create Agent" option below the chat box.
+- On the assistant profile page, chat with Agent Builder on the right side to describe your needs and complete the smart creation process.
+- After the assistant is generated, you can still manually fine-tune the settings.
+
+
+
+### Create a Custom Assistant
+
+Custom assistants offer the highest level of personalization. You can set the assistant’s avatar, name, prompts, preferred AI model, and plugins to create a truly unique AI assistant. All customization can be done manually on the assistant profile page.
+
+### Add from the Community
+
+If you want a ready-to-use assistant with a complete configuration, you can add one directly from the community. The community offers a wide variety of high-quality assistants—design experts, coding helpers, copywriters, academic mentors, and more—ready to use with a single click.
+
+- Go to "Community" → "Assistants" in the left sidebar, or scroll down the main interface to find "Community Assistants."
+- Choose an assistant, click to view its details, and then click "Add Assistant and Start Chat" to add it.
+
+
+
+## Configure Your AI Assistant
+
+Once your assistant is created, you can enhance its capabilities by adding plugins and linking external workflows. Plugins provide extended functionality, while external workflows allow the assistant to access and interact with your favorite tools. These enhancements can significantly improve your productivity.
+
+### Edit Assistant Information
+
+You can edit any assistant information at any time to keep it aligned with your evolving needs. Enter a chat with the assistant, then click "Assistant Profile" in the left sidebar to make changes.
+
+### Add Plugins and Link Tools
+
+Adding plugins and linking tools makes collaboration with your assistant more efficient.
+
+Enter a chat with the assistant, then go to "Assistant Profile" → "+ Integrate Plugin" in the left sidebar.
+
+
+
+### Link Knowledge Bases
+
+By linking a knowledge base during a conversation, your assistant can provide more accurate and personalized responses based on your data.
+
+In the chat interface, use the appropriate button to select and link a knowledge base.
+
+
+
+
+  For more on using knowledge bases, see [Knowledge Base](/docs/usage/getting-started/resource).
+
+
+## Manage Agents
+
+When you have many assistants and group chats, organizing them into groups is the most intuitive way to manage them. It keeps your assistant list clean and makes switching between them easier.
+
+### Create a Group
+
+On the LobeHub homepage, open the assistant list menu and select "Add New Group" to create a new group.
+
+
+
+### Move to a Group
+
+Select an existing assistant or group chat, then click "Move to Group" to organize it. You can also create new assistants or group chats directly within a group.
+
+### Manage Groups
+
+If you have multiple groups, go to the assistant list or group menu and select "Manage Groups" to easily rename or reorder them.
+
+### Pin Frequently Used Assistants
+
+You can pin frequently used assistants to the top of the list for quicker access. Select the assistant and choose "Pin" from the menu. Pinned assistants will stay at the top of the list for easy access.
+
+## Create a Copy
+
+You can duplicate an assistant to create a copy. Select the assistant and choose "Create Copy" from the menu. The system will generate an identical assistant, which is useful for creating variants with different configurations.
+
+## Delete an Assistant
+
+If you no longer need an assistant, you can delete it. Select the assistant, choose "Delete" from the menu, and confirm the deletion to remove it.
diff --git a/docs/usage/getting-started/agent.zh-CN.mdx b/docs/usage/getting-started/agent.zh-CN.mdx
new file mode 100644
index 0000000000..94c036fa64
--- /dev/null
+++ b/docs/usage/getting-started/agent.zh-CN.mdx
@@ -0,0 +1,103 @@
+---
+title: Agent
+description: 简单的集中配置,例如提示词,选择模型,知识库,插件等。
+tags:
+ - LobeHub
+  - Agent
+ - AI 助手
+ - 助手组织
+ - 分组设置
+ - 助手搜索
+ - 助手固定
+---
+
+# Agent
+
+在 LobeHub,你可以自由创建和定制 AI 助理 —— 翻译助理记得你的术语偏好,编程助理熟悉你的代码风格,写作助理了解你的表达习惯。通过配置提示词、选择模型、添加插件和关联资源库,你能打造真正符合需求的专属助理。
+
+本篇指南将详细介绍如何创建和配置你的 AI 助理。
+
+## 新建 AI 助理
+
+新建助理有三种方式:使用 Agent Builder 智能创建、从零开始打造专属助理,或从社区快速添加。无论哪种方式,都能让助理精准匹配你的工作需求。
+
+### 使用 Agent Builder 智能创建
+
+Agent Builder 是 LobeHub 的内置助理,可以帮助你完成 AI 助理的创建。只需与 Agent Builder 对话,描述你的需求,它就能理解并自动生成完整的助理配置 —— 包括角色设定、系统提示词、插件配置。
+
+- 在首页左侧边栏找到创建助理按键,或选择对话框下方 「创建 Agent」 开始创建。
+- 在助理档案页面右侧,你可以和 Agent Builder 对话,描述需求,完成智能创建。
+- 完成智能创建后,你也可以手动进行调整。
+
+
+
+### 创建自定义助理
+
+自定义助理能最大程度地贴合你的习惯。你可以自行定制助理的头像、名称、提示词,设置想使用的 AI 模型、插件,从而打造个人专属 AI 助理。在助理档案页面手动完成定制即可。
+
+### 从社区添加
+
+如果想快速获取配置完整的助理,可以选择从社区直接添加 AI 助理。社区有大量种类丰富的优质助理 —— 设计专家、代码助理、文案策划师、学术导师…… 添加即用。
+
+- 首页左侧边栏找到 「社区」→「助理」,或直接下滑主界面,找到「社区助理」。
+- 选择想添加的助理,点击进入详情界面了解具体信息,点击 「添加助理并会话」 即可完成添加。
+
+
+
+## 配置 AI 助理
+
+助理创建完成后,你可以通过添加插件和链接外部工作流来增强它的能力。插件让助理获得各种扩展功能,链接外部工作流让助理能直接读取或操作你的更多常用工具。强化 AI 助理能够极大程度优化你的工作流程。
+
+### 编辑助理信息
+
+你可以随时编辑助理的任何信息,让它持续跟随你的脚步。选择助理进入会话,选择左侧边栏「助理档案」即可编辑。
+
+### 添加插件和链接工具
+
+为助理添加插件,链接其他工具,能够让你们的配合更加高效。
+
+选择助理进入会话,选择左侧边栏「助理档案」→「+ 集成插件」。
+
+
+
+### 关联资源库
+
+在会话中为助理关联资源库后,助手能基于你的资源库提供更精准、更个性化的回答。
+
+进入会话页面,找到相应按键即可选择资源库进行关联。
+
+
+
+
+  关于资源库的使用,请参阅[资源库](/docs/usage/getting-started/resource)。
+
+
+## 管理 Agent
+
+助手和群聊数量过多时,分组是最直观的管理方式,能够让助手列表更加简洁清晰,切换助手更加轻松。
+
+### 创建分组
+
+在 LobeHub 首页助手列表菜单选择「添加新分组」以创建新的分组。
+
+
+
+### 移入分组
+
+选择已有助手和群聊菜单,点击「移动到分组」进行移入。也可以直接在分组内创建助手和群聊。
+
+### 管理分组
+
+存在多个分组时,在助手列表或分组菜单选择「分组管理」,可以便捷修改分组显示顺序、分组命名。
+
+### 置顶常用助手
+
+你可以把频繁使用的助手置顶在列表最上方,以节省查找和滚动的时间。选中助手,在菜单中选择「置顶」即可。置顶的助手会固定在列表顶部,方便快速访问。
+
+## 创建副本
+
+你可以复制一个助手,创建副本。选中助手,在菜单中选择「创建副本」,系统会创建一个完全相同的助手副本。适合在原助手基础上创建不同配置的变体。
+
+## 删除助手
+
+不需要的助手可以删除。选中助手,在菜单中选择「删除」,确认删除后助手将被移除。
diff --git a/docs/usage/getting-started/get-lobehub.mdx b/docs/usage/getting-started/get-lobehub.mdx
new file mode 100644
index 0000000000..15a94dc994
--- /dev/null
+++ b/docs/usage/getting-started/get-lobehub.mdx
@@ -0,0 +1,41 @@
+---
+title: Getting LobeHub
+description: >-
+ Learn how to get started with LobeHub, including downloading and installing
+ the app, signing up and logging in, and creating your assistant.
+tags:
+ - LobeHub
+  - App Download
+ - Getting LobeHub
+ - Download and Install
+ - Sign Up and Log In
+ - Create Assistant
+---
+
+# Getting LobeHub
+
+## Use LobeHub in Your Browser
+
+You can access LobeHub directly in your browser by visiting [app.lobehub.com](https://app.lobehub.com).
+
+## Install the macOS App
+
+Visit the [Download Page](https://lobehub.com/download) to get the macOS app.
+
+Currently, the app is not available on the Mac App Store.
+
+## Install the Windows App
+
+Visit the [Download Page](https://lobehub.com/download) to get the Windows app.
+
+Currently, the app is not available on the Microsoft Store.
+
+## Install the Android App
+
+Download the Android app from [Google Play](https://play.google.com/store/apps/details?id=com.lobehub.app).
+
+Alternatively, you can download the APK file from the [Download Page](https://lobehub.com/download) and install it manually.
+
+## Install the iOS App
+
+Download the iOS app from the [App Store](https://apps.apple.com/app/id6471212236).
diff --git a/docs/usage/getting-started/get-lobehub.zh-CN.mdx b/docs/usage/getting-started/get-lobehub.zh-CN.mdx
new file mode 100644
index 0000000000..45e45d3bba
--- /dev/null
+++ b/docs/usage/getting-started/get-lobehub.zh-CN.mdx
@@ -0,0 +1,39 @@
+---
+title: 获取 LobeHub
+description: 了解如何获取 LobeHub,包括下载安装、注册登录、创建助理等。
+tags:
+ - LobeHub
+  - 应用下载
+ - 获取 LobeHub
+ - 下载安装
+ - 注册登录
+ - 创建助理
+---
+
+# 获取 LobeHub
+
+## 在浏览器中使用 LobeHub
+
+你可以访问 [app.lobehub.com](https://app.lobehub.com) 在浏览器中使用 LobeHub。
+
+## 安装 macOS App
+
+前往[下载页](https://lobehub.com/download)即可下载 macOS App。
+
+目前不支持在 App Store 下载。
+
+## 安装 Windows App
+
+前往[下载页](https://lobehub.com/download)即可下载 Windows App。
+
+目前不支持在 Microsoft Store 下载。
+
+## 安装 Android App
+
+前往[Google Play](https://play.google.com/store/apps/details?id=com.lobehub.app)即可下载 Android App。
+
+或者,前往[下载页](https://lobehub.com/download)下载 APK 文件,然后手动安装。
+
+## 安装 iOS App
+
+前往[App Store](https://apps.apple.com/app/id6471212236)即可下载 iOS App。
diff --git a/docs/usage/getting-started/image-generation.mdx b/docs/usage/getting-started/image-generation.mdx
new file mode 100644
index 0000000000..f5049d3331
--- /dev/null
+++ b/docs/usage/getting-started/image-generation.mdx
@@ -0,0 +1,54 @@
+---
+title: Image Generation
+description: >-
+  Create images from text descriptions in LobeHub: choose a model, add
+  reference images, and set the aspect ratio and batch size.
+tags:
+ - LobeHub
+  - Image Generation
+ - AI Assistant
+ - Assistant Organization
+ - Group Settings
+ - Assistant Search
+ - Assistant Pinning
+---
+
+# Image Generation
+
+LobeHub offers an AI-powered image generation feature that allows you to create visuals from text descriptions. Whether you're working on product prototypes, design inspiration, illustrations, or creative exploration, AI image generation helps bring your ideas to life quickly and effortlessly. Simply enter a description, choose a model and parameters, and receive high-quality images within seconds. All generated images are automatically saved to your LobeHub asset library for easy access, download, and reuse. From concept to creation, the entire process is streamlined and efficient.
+
+## Get Started with Drawing
+
+Click on the "Drawing" section on the LobeHub main interface to access the AI image generation page.
+
+## Enter a Prompt
+
+Describe the image you want in the input box. The more detailed your description, the more accurate the generated image will be.
+
+## Choose an AI Model
+
+LobeHub offers multiple AI image generation models. Select the one that best fits your needs.
+
+
+
+## Select Reference Images
+
+If you have reference images, you can upload them to guide the generation process. Click the upload button or drag and drop your reference images directly. You can upload multiple reference images.
+
+
+
+## Choose Image Aspect Ratio
+
+Select an appropriate aspect ratio based on your intended use case.
+
+## Set Number of Images to Generate
+
+You can choose how many images to generate in one go.
+
+## View and Manage Images
+
+Once the images are generated, they will appear on the drawing page. You can preview them, click to view in full size, select your favorites, and download them.
+
+All generated images are automatically saved to your LobeHub asset library.
+
+
diff --git a/docs/usage/getting-started/image-generation.zh-CN.mdx b/docs/usage/getting-started/image-generation.zh-CN.mdx
new file mode 100644
index 0000000000..de9591a35e
--- /dev/null
+++ b/docs/usage/getting-started/image-generation.zh-CN.mdx
@@ -0,0 +1,52 @@
+---
+title: 图像生成
+description: 在 LobeHub 中通过文字描述生成图像,支持选择模型、参考图、图片比例与生成数量。
+tags:
+ - LobeHub
+  - 图像生成
+ - AI 助手
+ - 助手组织
+ - 分组设置
+ - 助手搜索
+ - 助手固定
+---
+
+# 图像生成
+
+LobeHub 提供 AI 画图功能,让你通过文字描述生成图像。无论是产品原型、设计灵感、插图配图,还是创意探索,AI 画图都能快速将你的想法可视化。输入描述,选择模型和参数,几秒钟内就能获得高质量的图像。生成的图片会自动保存到你的 LobeHub 资源库,方便随时查看、下载和使用。从创意构思到图像生成,整个过程简单高效。
+
+## 开始画图
+
+在 LobeHub 主界面点击「绘画」板块,进入 AI 画图页面。
+
+## 输入提示词
+
+在输入框中描述你想要的图像。描述越详细,生成的图像越符合预期。
+
+## 选择 AI 模型
+
+LobeHub 提供多个 AI 画图模型,选择最符合需求的模型即可。
+
+
+
+## 选择参考图片
+
+如果你有参考图片,可以上传作为生成的参考。点击上传参考图片按钮或直接把参考图片拖入即可。参考图片可以上传多张。
+
+
+
+## 选择图片比例
+
+你可以根据使用场景选择合适的图片比例。
+
+## 设置生成图片数量
+
+你可以选择一次生成多少张图片。
+
+## 查看和管理图片
+
+图像生成完成后,会显示在画图页面。你可以查看生成的图像、点击图像查看大图、选择满意的图像并下载。
+
+生成的图片会自动保存到你的 LobeHub 资源库。
+
+
diff --git a/docs/usage/getting-started/lobe-ai.mdx b/docs/usage/getting-started/lobe-ai.mdx
new file mode 100644
index 0000000000..9156b96d49
--- /dev/null
+++ b/docs/usage/getting-started/lobe-ai.mdx
@@ -0,0 +1,34 @@
+---
+title: Lobe AI
+description: >-
+ Use Lobe AI to handle your daily tasks, including checking the weather,
+ setting reminders, reading news, managing emails, and more.
+tags:
+ - LobeHub
+ - Lobe AI
+ - Daily Tasks
+---
+
+# Lobe AI
+
+Lobe AI is the official assistant from LobeHub, designed to help you accomplish a wide range of tasks efficiently.
+
+## Use Cases
+
+Lobe AI can assist you with various everyday tasks, such as:
+
+- Software Development: View code snippets, generate code, debug programs, and more.
+- Learning Support: Answer questions, provide study materials, generate practice exercises, etc.
+- Personal Assistant: Check the weather, set reminders, read the news, manage emails, and more.
+- Creative Writing: Generate story ideas, polish your writing, offer writing suggestions, etc.
+- Data Analysis: Create data reports, visualize data, provide analytical insights, and more.
+
+If you have specific needs, you can create a [custom assistant](/docs/usage/getting-started/agent) tailored to your personal requirements.
+
+## Built-in Tools
+
+Lobe AI comes with a variety of built-in tools to help you complete tasks more efficiently:
+
+- [GTD](/docs/usage/agents/gtd)
+- [Plan](/docs/usage/agents/plan)
+- [Notebook](/docs/usage/agents/notebook)
diff --git a/docs/usage/getting-started/lobe-ai.zh-CN.mdx b/docs/usage/getting-started/lobe-ai.zh-CN.mdx
new file mode 100644
index 0000000000..07ddafb15f
--- /dev/null
+++ b/docs/usage/getting-started/lobe-ai.zh-CN.mdx
@@ -0,0 +1,32 @@
+---
+title: Lobe AI
+description: 使用 Lobe AI 完成每日任务,包括查看天气、设置提醒、查看新闻、查看邮件等。
+tags:
+ - LobeHub
+ - Lobe AI
+ - 每日任务
+---
+
+# Lobe AI
+
+Lobe AI 是 LobeHub 的官方助手,可以帮助你完成各种任务。
+
+## 场景
+
+Lobe AI 可以帮助你完成多种日常任务,例如:
+
+- 软件开发:查看代码片段、生成代码、调试代码等。
+- 学习辅助:解答问题、提供学习资料、生成练习题等。
+- 生活助手:查看天气、设置提醒、查看新闻、查看邮件等。
+- 创意写作:生成故事情节、润色文章、提供写作建议等。
+- 数据分析:生成数据报告、可视化数据、提供分析建议等。
+
+如果你有特殊的需求,可以创建[自定义助手](/docs/usage/getting-started/agent),满足你的个性化需求。
+
+## 内置工具
+
+Lobe AI 集成了多种内置工具,帮助你更高效地完成任务:
+
+- [GTD](/docs/usage/agents/gtd)
+- [Plan](/docs/usage/agents/plan)
+- [Notebook](/docs/usage/agents/notebook)
diff --git a/docs/usage/getting-started/memory.mdx b/docs/usage/getting-started/memory.mdx
new file mode 100644
index 0000000000..2b7a213984
--- /dev/null
+++ b/docs/usage/getting-started/memory.mdx
@@ -0,0 +1,72 @@
+---
+title: Memory
+description: >-
+ Learn about LobeHub's memory feature, including types of memory, memory
+ sources, memory search, and more.
+tags:
+ - LobeHub
+  - Agent Memory
+ - Memory
+ - Types of Memory
+ - Memory Sources
+ - Memory Search
+---
+
+# Memory
+
+Traditional AI treats every conversation like the first time—you have to repeat the same information over and over. With the Agent Memory feature, your assistant truly gets to know you. It automatically identifies and remembers key information from your conversations, making each interaction more efficient and personalized.
+
+This isn’t just a simple chat history—it’s intelligent knowledge extraction and organization. The assistant recognizes your identity, work context, personal preferences, and practical experience, and stores them in a structured format. In future conversations, it can proactively recall this memory to provide responses tailored to your needs—just like a colleague who really understands you.
+
+## Understanding Agent Memory
+
+Agent Memory is a built-in plugin in LobeHub. It automatically identifies and extracts key information from your conversations with the assistant, forming a structured memory base. These memories are proactively used in future interactions, making communication more personalized and efficient. Instead of merely recording dialogue, the assistant understands and extracts actionable memory instructions. In later conversations, it references relevant memories to provide more personalized responses, avoid asking for known information again, and tailor its output based on your preferences.
+
+
+
+## Enabling Agent Memory
+
+Memory is a built-in plugin in LobeHub and must be enabled for each assistant. You can enable the memory plugin from the "Assistant Profile" page under the "Add Plugin" section, or directly within a conversation by selecting the plugin in the chat interface.
+
+### Enable from Assistant Profile
+
+Go to the Assistant Profile page, click "+ Add Plugin", and check the "Memory" plugin to enable it.
+
+
+
+### Enable in a Conversation
+
+Open a conversation, click the plugin icon below the chat box, and check the "Memory" plugin to activate it.
+
+
+
+## Managing Memory
+
+### Viewing Memory
+
+Click the "Memory" icon at the bottom of the left sidebar on the LobeHub homepage to open the memory management panel. This panel displays all memories extracted from your conversations. Memories are organized by type for easy browsing and management. Each memory includes the following details:
+
+- Memory Title: A concise summary of the memory’s core content, such as "Dislikes coffee".
+- Preference Weight: Indicates the importance of the memory. The higher the weight, the more the assistant prioritizes it in conversations.
+- Created Time: Shows when the memory was extracted from a conversation.
+- Memory Instruction: The behavioral rule derived from the conversation, which the assistant follows in future interactions. For example: "Avoid recommending coffee or coffee-based drinks unless the user explicitly asks for them; suggest non-coffee alternatives instead."
+- Possible Assistant Actions: Describes how the assistant might apply this memory, such as: "Offer tea, hot chocolate, juice, smoothies, or decaf/non-coffee options depending on context."
+- Tags: Tags related to the memory for easier categorization and search.
+
+
+
+### Searching Memory
+
+When you have many memories, use the search function to quickly locate specific ones. You can also open the command menu at any time to search for memories.
+
+### Editing Memory
+
+You can modify the memory instructions generated from conversations to better reflect your actual needs and preferences.
+
+
+
+### Deleting Memory
+
+Select the memory you want to delete and click the delete button.
+
+
diff --git a/docs/usage/getting-started/memory.zh-CN.mdx b/docs/usage/getting-started/memory.zh-CN.mdx
new file mode 100644
index 0000000000..d7aa2662a8
--- /dev/null
+++ b/docs/usage/getting-started/memory.zh-CN.mdx
@@ -0,0 +1,68 @@
+---
+title: 记忆
+description: 了解 LobeHub 的记忆功能,包括记忆的类型、记忆的来源、记忆的搜索等。
+tags:
+ - LobeHub
+  - Agent 记忆
+ - 记忆
+ - 记忆的类型
+ - 记忆的来源
+ - 记忆的搜索
+---
+
+# 记忆
+
+传统的 AI 每次对话都像第一次见面,你需要反复说明相同的信息。Agent 记忆功能让助手真正了解你,从对话中自动识别和记住关键信息,让每次交流都更高效、更个性化。
+
+这不是简单的对话历史记录,而是智能的知识提取和组织。助手会识别你的身份信息、工作情景、个人偏好、实践经验,并将它们结构化存储。下次对话时,助手能主动调用这些记忆,提供更贴合你需求的回答,就像一个真正了解你的工作伙伴。
+
+## 理解 Agent 记忆
+
+Agent 记忆是 LobeHub 的内置插件。它能从你与助手的对话中自动识别和提取关键信息,形成结构化的记忆库。这些记忆会在后续对话中被助手主动调用,让交流更加个性化和高效。助理不会简单记录对话内容,而是理解并提取关键信息,形成可执行的记忆指令。在后续对话中,助理会根据问题调用相关记忆,提供更个性化的回答,避免重复询问已知信息,基于你的偏好调整输出。
+
+
+
+## 启用 Agent 记忆
+
+记忆是 LobeHub 的内置插件,需要为助理启用后才能使用。你可以在「助理档案」页面添加插件处启用记忆插件,也可以进入会话,在对话框勾选插件处启用记忆插件。
+### 在助理档案中启用
+进入助理档案页面,点击「+ 集成插件」,勾选「记忆」插件即可开启。
+
+
+
+### 在会话中启用
+
+进入会话页面,点击对话框下方插件图标,勾选「记忆」插件即可。
+
+
+
+## 管理记忆
+
+### 查看记忆
+
+在 LobeHub 首页左侧边栏下方点击「记忆」图标,进入记忆管理面板。这里展示了助手从对话中提取的所有记忆。记忆面板按类型组织,方便查看和管理。每条记忆包含以下信息:
+
+- 记忆标题:简洁概括了记忆的核心内容,如 "Dislikes coffee"。
+- 偏好权重:显示该记忆的重要程度,权重越高,助理在对话中越会优先考虑这条记忆。
+- 创建时间:显示记忆是何时从对话中提取的。
+- 记忆指令:助理基于对话内容形成的行为指令,这是助理在后续对话中实际遵循的规则。例如:"Avoid recommending coffee or coffee-based drinks unless the user explicitly asks for them; suggest non-coffee alternatives instead."
+- 助理可能采取的行动:展示助理如何应用这条记忆,例如:"Offer tea, hot chocolate, juice, smoothies, or decaf/non-coffee options depending on context."
+- 标签:记忆相关的标签,方便分类和搜索。
+
+
+
+### 搜索记忆
+
+当记忆数量较多时,可以使用搜索功能快速定位。你也可以随时呼出命令菜单来查找记忆。
+
+### 编辑记忆
+
+你可以调整助手基于对话形成的记忆指令,让记忆更准确地反映你的真实需求。
+
+
+
+### 删除记忆
+
+选中想删除的记忆,点击删除即可。
+
+
diff --git a/docs/usage/getting-started/page.mdx b/docs/usage/getting-started/page.mdx
new file mode 100644
index 0000000000..b4c79781bc
--- /dev/null
+++ b/docs/usage/getting-started/page.mdx
@@ -0,0 +1,62 @@
+---
+title: Docs
+description: >-
+  Learn about the LobeHub Docs feature, a professional writing space with
+  Markdown support, real-time preview, and AI-assisted editing.
+tags:
+  - LobeHub
+  - Docs
+  - Writing
+  - Markdown
+  - Document Management
+  - AI Assistant
+  - Command Palette
+---
+
+# Docs
+
+LobeHub offers a professional writing space designed to help you focus on long-form content creation. Whether you're working on technical documentation, study notes, project proposals, or blog posts, the Docs feature has you covered. It supports Markdown formatting, provides real-time preview, and automatically saves every edit.
+
+You can summon the AI assistant at any time to help with writing. The assistant does more than just offer suggestions—it can directly edit your content, polish your writing, add paragraphs, improve structure, and insert examples.
+
+## Understanding the Docs Feature
+
+Docs is LobeHub’s built-in writing environment, tailored for long-form writing and editing. Unlike the chat feature, Docs provides:
+
+- A persistent writing space: Each document exists independently, allowing you to return and continue editing at any time. Your content is saved permanently.
+- Structured content organization: Full Markdown support lets you organize your content with headings, lists, code blocks, quotes, and more—perfect for creating professional documents.
+- Flexible file management: Create, import, rename, duplicate—comprehensive file management tools help you efficiently organize all your documents.
+
+## Creating and Importing Docs
+
+### Create a New Document
+
+From the LobeHub main interface, go to “Docs” and click “New Document,” or use the dropdown menu from the new file icon on the homepage to quickly create one. Once created, you can start writing immediately. Your work is auto-saved, so you can close it anytime and pick up where you left off later.
+
+### Import a Markdown File
+
+If you already have a Markdown document, you can import it directly. In the Docs section, click “Upload Markdown File,” select your file, and it will be loaded into a new document for immediate editing.
+
+### Editing Documents
+
+Docs are edited using Markdown. Markdown is a lightweight markup language that uses simple symbols to format text, allowing you to focus on content. You can also add an icon to your document, which will appear before the title.
+
+
+
+### Command Palette
+
+While editing, press the / key to open the command palette. This is a quick-access tool that lets you insert various formatting elements with ease.
+
+
+
+### Collaborate with Lobe AI
+
+While editing a document, click the AI assistant icon on the right side of the editor to open the assistant panel. The panel slides in from the right and displays alongside your document, allowing you to chat with the assistant while continuing to write.
+
+By default, the “Docs Assistant” is used—an AI assistant optimized specifically for document writing. If you need help in a specific domain, you can switch to a custom assistant you’ve created via the top-left corner of the assistant panel.
+
+The assistant can directly edit the content within your document.
+
+### Managing Documents
+
+In the document list, you can rename documents, create duplicates, copy the full content, or delete them.
diff --git a/docs/usage/getting-started/page.zh-CN.mdx b/docs/usage/getting-started/page.zh-CN.mdx
new file mode 100644
index 0000000000..dea954fe0a
--- /dev/null
+++ b/docs/usage/getting-started/page.zh-CN.mdx
@@ -0,0 +1,60 @@
+---
+title: 文稿
+description: 了解 LobeHub 的文稿功能,包括 Markdown 编辑、实时预览、命令面板、AI 协作编辑等。
+tags:
+  - LobeHub
+  - 文稿
+  - 写作
+  - Markdown
+  - 文稿管理
+  - AI 助手
+  - 命令面板
+---
+
+# 文稿
+
+LobeHub 提供专业的文稿编辑空间,让你能专注于长文本创作。无论是技术文档、学习笔记、项目方案,还是博客文章,文稿功能都能满足你的需求。支持 Markdown 格式,提供实时预览,自动保存每次编辑。
+
+你可以随时呼出 AI 助手参与编写。助手不只是提供建议,还能直接在文稿中编辑内容 —— 润色文字、补充段落、优化结构、添加示例。
+
+## 理解文稿功能
+
+文稿是 LobeHub 的内置写作空间,专为长文本创作和编辑设计。与对话功能不同,文稿提供了:
+
+- 持久的创作空间:文稿独立存在,你可以随时回到文稿继续编辑,内容永久保存。
+- 结构化的内容组织:支持完整的 Markdown 语法,让你能用标题、列表、代码块、引用等元素组织内容,呈现专业的文档效果。
+- 灵活的文件管理:创建、导入、重命名、复制 —— 完整的文件管理功能,让你高效组织所有文稿。
+
+## 创建和导入文稿
+
+### 新建文稿
+
+在 LobeHub 主界面进入「文稿」,点击「新建文稿」,或在主界面新建图标的下拉菜单处选择「创建文稿」快速创建。创建完成后,即可开始编写。文稿会自动保存,你可以随时关闭,下次继续编辑。
+
+### 导入 Markdown 文件
+
+如果你已经有现成的 Markdown 文档,可以直接导入。在文稿板块点击「上传 Markdown 文件」,选择并上传,文件内容会自动加载到新文稿中,你可以立即开始编辑。
+
+### 编辑文稿
+
+文稿使用 Markdown 格式编辑。Markdown 是一种轻量级标记语言,用简单的符号表示格式,让你专注于内容。在文稿标题上方,可以为文稿添加 icon。icon 会显示在文稿标题前。
+
+
+
+### 命令面板
+
+在编辑时按下 / 键,会呼出命令面板。这是一个快捷工具,让你能快速插入各种格式元素。
+
+
+
+### 与 Lobe AI 合作编辑
+
+在编辑文稿时,点击编辑器右侧的 AI 助手图标按钮,助手面板会从右侧滑出。面板与文稿并排显示,你可以一边编辑文稿,一边与助手对话。
+
+呼出助手面板后,默认使用「文稿助理」。这是专门为文稿编写优化的助手,如果需要特定领域的专业帮助,可以在助手面板左上方切换到自己创建的助手。
+
+助手可以直接在文稿中编辑内容。
+
+### 管理文稿
+
+在文稿列表,可以重命名文稿、创建副本、复制全文、删除文稿。
diff --git a/docs/usage/getting-started/resource.mdx b/docs/usage/getting-started/resource.mdx
new file mode 100644
index 0000000000..4439628241
--- /dev/null
+++ b/docs/usage/getting-started/resource.mdx
@@ -0,0 +1,67 @@
+---
+title: Resource Library
+description: >-
+  Learn about the LobeHub Resource Library, including resource sources, library
+  creation, Notion import, file chunking, and batch operations.
+tags:
+  - LobeHub
+  - Resource Library
+  - Knowledge Base
+  - File Management
+  - Notion Import
+  - File Chunking
+  - Batch Operations
+---
+
+# Resource Library
+
+The Resource Library in LobeHub is your personal knowledge hub. Documents uploaded during conversations, AI-generated images, and created drafts are all collected here. It strengthens your collaboration with AI by centralizing your knowledge, making it easier to manage and access anytime.
+
+## Sources of Resources
+
+- Conversation Uploads: Files uploaded during chats with assistants are automatically saved to the Resource Library. For example, if you upload a PDF for summarization, the file will be stored and available for future use.
+- AI-Generated Content: Images created in the drawing module are automatically saved to the Resource Library. You can view, download, and reuse them in conversations or drafts at any time.
+- Manual Uploads: You can directly upload files or folders to the Resource Library to actively build your knowledge base. Supported formats include documents, images, videos, audio, and more.
+- Draft Creation: Drafts created in the writing module are also stored in the Resource Library for unified management alongside other resources.
+- Notion Import: You can import content from Notion into the LobeHub Resource Library, enabling seamless knowledge management across platforms.
+- Accessing the Resource Library: Click the "Resources" icon at the bottom of the left sidebar on the LobeHub main interface to enter the Resource Library.
+
+## Creating a Resource Library
+
+You can organize your Resource Library by theme, project, type, and more. Creating separate libraries helps keep your resources well-structured.
+
+Click "New Library" in the left panel of the Resource Library page, enter a name, add an optional description, and click "Create" to get started.
+
+
+
+### Uploading Folders
+
+Choose to upload a folder to batch upload all files within it.
+
+### Connecting Notion
+
+Select and export documents from Notion, then import the Notion ZIP file via the "Connect Notion" page. Once imported, you can continue editing these documents within LobeHub.
+
+
+
+## Managing the Resource Library
+
+File Chunking: This process splits long documents into smaller text segments and vectorizes each one. It enables the AI assistant to better understand and retrieve relevant content.
+
+After chunking:
+
+- When you ask a question, the AI can quickly locate the relevant parts of a document instead of processing the entire file.
+- Vectorization allows the AI to grasp the semantic meaning of the text, not just match keywords.
+- The AI only processes relevant chunks, resulting in faster and more accurate responses.
+
+### Batch Operations
+
+Select multiple files and open the menu in the top-right corner to perform batch operations, including moving files to a library, chunking, or deleting them in bulk.
+
+
+
+### Cross-Feature Collaboration
+
+- Chat + Resource Library: Reference resources during conversations so the assistant can provide answers based on your knowledge.
+- Drafts + Resource Library: Use materials from the Resource Library while writing to enhance content creation efficiency.
+- Drawing + Resource Library: Save generated images to the Resource Library for centralized management of all visual assets.
diff --git a/docs/usage/getting-started/resource.zh-CN.mdx b/docs/usage/getting-started/resource.zh-CN.mdx
new file mode 100644
index 0000000000..638adb3024
--- /dev/null
+++ b/docs/usage/getting-started/resource.zh-CN.mdx
@@ -0,0 +1,65 @@
+---
+title: 资源库
+description: 了解 LobeHub 的资源库,包括资源来源、创建资源库、Notion 导入、文件分块与批量操作等。
+tags:
+  - LobeHub
+  - 资源库
+  - 知识库
+  - 文件管理
+  - Notion 导入
+  - 文件分块
+  - 批量操作
+---
+
+# 资源库
+
+LobeHub 的资源库是你的个人知识中心。对话中上传的文档、AI 生成的图片、创建的文稿,都会汇聚在这里。资源库让你和 AI 的协作更紧密,知识积累不再分散在各处,而是统一管理、随时可用。
+
+## 资源的来源
+
+- 对话上传:在与助手对话时上传的文件会自动保存到资源库。例如你上传一份 PDF 让助手总结,这份文件会保留在资源库中,下次可以继续使用。
+- AI 生成:在画图板块生成的图片会自动保存到资源库。你可以随时查看、下载、在对话和文稿中使用这些图片。
+- 主动上传:你可以在资源库直接上传文件或文件夹,主动构建知识库。支持文档、图片、视频、音频等多种格式。
+- 文稿创建:在文稿板块创建的文稿也会出现在资源库中,与其他资源统一管理。
+- Notion 导入:你可以将 Notion 中的内容导入到 LobeHub 资源库,打通两个平台的知识管理。
+- 进入资源库:在 LobeHub 主界面左侧边栏下方点击「资源」图标,进入资源库页面。
+
+## 创建资源库
+
+资源库可以按主题、项目、类型等方式组织。创建不同的资源库,让资源管理更有条理。
+
+在资源库页面左侧面板点击「新建资源库」,输入资源库名称,添加简介(可选),点击新建即可。
+
+
+
+### 上传文件夹
+
+选择上传文件夹,可以批量上传整个文件夹的文件。
+
+### 连接 Notion
+
+在 Notion 选择文档并导出,再在「连接 Notion」页面选择导入 Notion ZIP 即可。导入后,你可以在 LobeHub 中继续编辑这些文档。
+
+
+
+## 管理资源库
+
+文件分块:文件分块是将长文档拆分成多个较小的文本片段,并对每个片段进行向量化处理。这让 AI 助手能更精准地理解和检索文件内容。
+
+进行文件分块后:
+
+- 当你提问时,AI 能快速定位到文档中相关的部分,而不是处理整个文档。
+- 向量化让 AI 能理解文本的语义含义,而不只是关键词匹配。
+- AI 只需要处理相关的文本块,响应更快、更准确。
+
+### 批量操作
+
+勾选多个文件,展开右上角菜单可以进行批量操作,包括将多个文件批量移入资源库、批量分块和批量删除文件。
+
+
+
+### 跨功能协作
+
+- 对话 + 资源库:在对话中引用资源库,让助手基于你的知识提供回答。
+- 文稿 + 资源库:编写文稿时使用资源库中的素材,内容创作更高效。
+- 画图 + 资源库:将生成的图片保存到资源库,统一管理所有视觉素材。
diff --git a/docs/usage/help.mdx b/docs/usage/help.mdx
new file mode 100644
index 0000000000..3e72601c92
--- /dev/null
+++ b/docs/usage/help.mdx
@@ -0,0 +1,38 @@
+---
+title: Help & Support - LobeHub User Guide
+description: >-
+ Learn about help and support options for LobeHub, including community support
+ and how to contact us.
+tags:
+ - LobeHub
+  - User Guide
+ - Help & Support
+ - Community Support
+ - Contact Us
+---
+
+# Help & Support
+
+If you encounter any issues while using LobeHub, please refer to the following resources.
+
+## Common Guides
+
+- [Migrate from v1.x Local Database to v2.x (Cloud / Self-hosted)](/docs/usage/migrate-from-local-database)
+
+## Report an Issue
+
+- [GitHub Issues](https://github.com/lobehub/lobe-chat/issues/new/choose)
+
+## Community Support
+
+You're welcome to ask questions or help others in the following communities:
+
+- [Discord](https://discord.com/invite/AYFPHvv2jT)
+- [Reddit](https://www.reddit.com/r/LobeHub/)
+
+## Contact Us
+
+You can reach us through the following channels:
+
+- [hi@lobehub.com](mailto:hi@lobehub.com)
+- [X (formerly Twitter)](https://x.com/lobehub)
diff --git a/docs/usage/help.zh-CN.mdx b/docs/usage/help.zh-CN.mdx
new file mode 100644
index 0000000000..6327a3d6b0
--- /dev/null
+++ b/docs/usage/help.zh-CN.mdx
@@ -0,0 +1,36 @@
+---
+title: 帮助与支持 - LobeHub 用户指南
+description: 了解 LobeHub 的帮助与支持,包括社区支持、联系我们等。
+tags:
+ - LobeHub
+  - 用户指南
+ - 帮助与支持
+ - 社区支持
+ - 联系我们
+---
+
+# 帮助与支持
+
+如果你在使用 LobeHub 的过程中遇到任何问题,请参考以下内容。
+
+## 常用指南
+
+- [从 v1.x 本地数据库迁移到 v2.x(云端 / 自部署)](/docs/usage/migrate-from-local-database)
+
+## 反馈问题
+
+- [GitHub Issues](https://github.com/lobehub/lobe-chat/issues/new/choose)
+
+## 社区支持
+
+欢迎你在以下社区中提出问题或帮助他人。
+
+- [Discord](https://discord.com/invite/AYFPHvv2jT)
+- [Reddit](https://www.reddit.com/r/LobeHub/)
+
+## 联系我们
+
+你可以通过以下方式联系我们:
+
+- [hi@lobehub.com](mailto:hi@lobehub.com)
+- [X (Twitter)](https://x.com/lobehub)
diff --git a/docs/usage/migrate-from-local-database.mdx b/docs/usage/migrate-from-local-database.mdx
new file mode 100644
index 0000000000..771c5b982a
--- /dev/null
+++ b/docs/usage/migrate-from-local-database.mdx
@@ -0,0 +1,118 @@
+---
+title: Migrate from Local Database (v1.x) to Cloud (v2.x)
+description: >-
+ Export your data from the v1.x local database and import it into LobeHub v2.x
+ Cloud.
+tags:
+ - LobeHub
+ - Migration
+ - Cloud
+ - Desktop
+ - Local Database
+---
+
+# Migrate from Local Database (v1.x) to Cloud (v2.x)
+
+LobeHub v1.x Desktop supported a **local database** mode, which stored your data on your device. Starting from v2.x, we have moved to a **cloud-first architecture** and removed the local database so we can iterate faster and deliver a more consistent experience across platforms.
+
+
+ Why the change? The v1.x local database typically consumed more system resources, and only a small
+ portion of users relied on it. By simplifying the client and focusing on the cloud-first
+ experience, we can improve overall performance and ship new features faster.
+
+
+This guide explains how to **export your data from v1.x** and **import it into v2.x**.
+
+## Before you start
+
+- Keep your **v1.x client installed** until you confirm the migration is successful.
+- Prepare a **stable network connection** (importing may take time, especially for large histories).
+- Sign in to **LobeHub v2.x** (Cloud or your self-hosted instance).
+
+If you prefer self-hosting, you can follow the [Self-Hosting Guide](/docs/self-hosting/start) first, then import into your self-hosted v2.x instance.
+
+## If you already upgraded to v2.x by accident
+
+You can still migrate your data:
+
+1. Download and install a **v1.x build** from [GitHub Releases](https://github.com/lobehub/lobe-chat/releases).
+2. Use v1.x to export your data (`Settings -> Data Storage -> Export Data`), then import it in v2.x.
+
+Don’t worry: **upgrading to v2.x does not delete your local database files**. As long as the local database files are still there, your data is still there.
+
+### Local database location
+
+By default, the local database is stored at:
+
+- `${appData}/lobehub-storage/lobehub-local-db`
+
+## What will be migrated
+
+The exact items depend on the export format and your v1.x build, but typically include:
+
+- Your assistants/agents
+- Conversations / topics and messages
+- Prompt templates (if used in your v1.x build)
+- Basic preferences and settings (when supported)
+
+## What will NOT be migrated (common cases)
+
+- Provider API keys and secrets (recommended to re-enter them in v2.x)
+- Local-only files or cached data
+- Some device-specific settings
+
+
+ If your v1.x data contains sensitive content, keep the export file in a safe location. Do not
+ share it publicly.
+
+
+## Step 1: Export data from LobeHub v1.x
+
+In the v1.x Desktop app:
+
+1. Go to **Settings** → **Data Storage** → **Export Data**.
+2. You will get a JSON export file (for example: `2026-01-22-10-02_LobeHub-data.json`).
+3. Save the file to a location you can easily find (for example, Desktop).
+
+
+
+### Export tips
+
+- If you have a very large message history, consider exporting during idle time.
+
+## Step 2: Import data into LobeHub v2.x
+
+In any v2.x app (Desktop / Web / self-hosted):
+
+1. Sign in to your LobeHub account.
+2. Go to **Settings** → **Data Storage** → **Import Data**.
+3. Upload the export JSON file from Step 1.
+4. Wait for the import to complete, then refresh the page/app if needed.
+
+
+
+## Verify your migration
+
+After importing, check:
+
+- Agents/assistants are present and their prompts look correct
+- Recent conversations appear as expected
+- Key settings (language, theme, shortcuts, etc.) are set the way you want
+
+If anything is missing, keep the v1.x export file and retry the import after updating to the latest v2.x.
+
+## Troubleshooting
+
+### Import fails or gets stuck
+
+- Confirm your network is stable and try again.
+- Update to the latest v2.x and re-import.
+
+### Some data is missing after import
+
+- Different v1.x builds may store data slightly differently.
+- If you used multiple profiles/workspaces in v1.x, export and import them separately (when supported).
+
+## Getting help
+
+If you run into issues, please check [Help & Support](/docs/usage/help) or report an issue via [GitHub Issues](https://github.com/lobehub/lobe-chat/issues/new/choose).
diff --git a/docs/usage/migrate-from-local-database.zh-CN.mdx b/docs/usage/migrate-from-local-database.zh-CN.mdx
new file mode 100644
index 0000000000..d2d2d541d8
--- /dev/null
+++ b/docs/usage/migrate-from-local-database.zh-CN.mdx
@@ -0,0 +1,115 @@
+---
+title: 从本地数据库(v1.x)迁移到云端(v2.x)
+description: 在 v1.x 客户端导出本地数据库数据,并导入到 LobeHub v2.x 云端版本。
+tags:
+ - LobeHub
+ - 迁移
+ - Cloud
+ - 桌面端
+ - 本地数据库
+---
+
+# 从本地数据库(v1.x)迁移到云端(v2.x)
+
+在 LobeHub v1.x Desktop 中,我们曾提供 **本地数据库** 模式,将数据保存在当前设备上。自 v2.x 起,我们转向 **Cloud-first 架构** 并移除了本地数据库能力,以便后续更好、更快地迭代,并在不同平台上提供更一致的体验。
+
+
+ 为什么要这样做?v1.x
+ 的本地数据库通常会带来更高的资源占用,而实际使用它的用户占比较小。砍掉这条分支能力后,我们可以让客户端更轻量、更稳定,同时把更多精力投入到
+ Cloud-first 体验与新功能交付上。
+
+
+本文会引导你完成:**在 v1.x 导出数据** → **在 v2.x 导入数据**。
+
+## 开始前准备
+
+- 建议在迁移确认完成之前,**不要卸载 v1.x**。
+- 准备 **稳定的网络连接**(如果对话记录较多,导入可能需要更久)。
+- 登录 **LobeHub v2.x**(云端或你的自部署实例)。
+
+如果你希望把 Cloud 部署在自己的服务器上,也可以先参考 [私有化部署指南](/docs/self-hosting/start) 完成部署,再把数据导入到自部署的 v2.x 实例中。
+
+## 如果你已经不小心升级到了 v2.x
+
+你依然可以完成迁移:
+
+1. 前往 [GitHub Releases](https://github.com/lobehub/lobe-chat/releases) 下载并安装一个 **v1.x 版本**。
+2. 使用 v1.x 按照本文的导出流程导出数据(`Settings -> Data Storage -> Export Data`),再在 v2.x 中导入。
+
+请放心:**升级到 v2.x 的过程不会删除你的本地数据库文件**。只要本地数据库文件仍然存在,你的数据就依然存在。
+
+### 本地数据库文件位置
+
+本地数据库默认位于:
+
+- `${appData}/lobehub-storage/lobehub-local-db`
+
+## 通常可以迁移的内容
+
+可迁移内容会受导出格式与 v1.x 构建版本影响,但一般包括:
+
+- 助手 / Agents
+- 会话 / Topics 与消息记录
+- 提示词模板(如你的 v1.x 版本支持)
+- 部分基础偏好设置(如版本支持)
+
+## 通常不会迁移的内容(常见情况)
+
+- 各模型服务商的 API Key / Secret(更推荐在 v2.x 重新填写)
+- 仅存在于本地的临时文件、缓存数据
+- 一些与设备强绑定的设置项
+
+
+ 如果你的 v1.x 数据包含敏感内容,请妥善保管导出的迁移文件,避免在公共渠道分享。
+
+
+## 第一步:在 LobeHub v1.x 导出数据
+
+在 v1.x Desktop 客户端中:
+
+1. 进入 **Settings** → **Data Storage** → **Export Data**。
+2. 你会得到一个 JSON 导出文件(例如:`2026-01-22-10-02_LobeHub-data.json`)。
+3. 将导出的文件保存到你容易找到的位置(例如桌面)。
+
+
+
+### 导出建议
+
+- 如果你的历史记录非常多,建议在设备空闲时导出。
+
+## 第二步:在 LobeHub v2.x 导入数据
+
+在任意 v2.x 应用中(Desktop / Web / 自部署 WebApp):
+
+1. 登录你的 LobeHub 账号。
+2. 进入 **Settings** → **Data Storage** → **Import Data**。
+3. 上传你在第一步导出的 JSON 文件。
+4. 等待导入完成;必要时刷新页面 / 重启应用。
+
+
+
+## 迁移完成后如何验收
+
+导入完成后,建议你检查:
+
+- 助手是否齐全,提示词 / 配置是否正确
+- 最近的会话是否能正常打开、消息是否完整
+- 常用设置(语言、主题、快捷键等)是否符合预期
+
+如发现缺失,建议先升级到最新 v2.x 后再重试导入,同时保留 v1.x 的导出文件用于排查。
+
+## 常见问题排查
+
+### 导入失败或长时间卡住
+
+- 确认网络稳定后重试。
+- 升级到最新 v2.x 再导入一次。
+
+### 导入后部分数据缺失
+
+- 不同 v1.x 构建版本的数据结构可能略有差异。
+- 如果你在 v1.x 使用过多个配置 / 空间(若支持),可尝试分别导出并逐个导入(如版本支持)。
+
+## 获取帮助
+
+如果你遇到迁移问题,可以查看 [帮助与支持](/docs/usage/help) 或通过 [GitHub Issues](https://github.com/lobehub/lobe-chat/issues/new/choose) 反馈。
diff --git a/docs/usage/plugins/basic-usage.mdx b/docs/usage/plugins/basic-usage.mdx
deleted file mode 100644
index 3cd09a073d..0000000000
--- a/docs/usage/plugins/basic-usage.mdx
+++ /dev/null
@@ -1,45 +0,0 @@
----
-title: Enhance Your LobeChat Assistant with Plugins
-description: >-
- Learn how to expand your LobeChat assistant's capabilities by enabling and using various plugins. Access the Plugin Store, install plugins, and configure them to enhance your assistant's functionality.
-
-tags:
- - LobeChat plugins
- - Plugin Store
- - Using Plugins
- - Plugin Configuration
----
-
-# Plugin Usage
-
-The plugin system is a key element in expanding the capabilities of assistants in LobeChat. You can enhance the assistant's abilities by enabling a variety of plugins.
-
-Watch the following video to quickly get started with using LobeChat plugins:
-
-
-
-## Plugin Store
-
-You can access the Plugin Store by navigating to "Extension Tools" -> "Plugin Store" in the session toolbar.
-
-
-
-The Plugin Store allows you to directly install and use plugins within LobeChat.
-
-
-
-## Using Plugins
-
-After installing a plugin, simply enable it under the current assistant to use it.
-
-
-
-## Plugin Configuration
-
-Some plugins may require specific configurations, such as API keys.
-
-After installing a plugin, you can click on "Settings" to enter the plugin's settings and fill in the required configurations:
-
-
-
-
diff --git a/docs/usage/plugins/basic-usage.zh-CN.mdx b/docs/usage/plugins/basic-usage.zh-CN.mdx
deleted file mode 100644
index 6ec4ee51d7..0000000000
--- a/docs/usage/plugins/basic-usage.zh-CN.mdx
+++ /dev/null
@@ -1,44 +0,0 @@
----
-title: LobeChat 插件使用指南
-description: 了解如何在 LobeChat 中使用插件来增强助手功能,包括插件商店浏览、安装、配置等操作。
-tags:
- - LobeChat
- - 插件
- - 助手功能
- - 插件商店
- - 插件配置
----
-
-# 插件使用
-
-插件体系是 LobeChat 中扩展助理的能力的关键要素,你可以通过为助手启用各式各样的插件来增强助手的各项能力。
-
-查看以下视频,快速上手使用 LobeChat 插件:
-
-
-
-## 插件商店
-
-你可以在会话工具条中的 「扩展工具」 -> 「插件商店」,进入插件商店。
-
-
-
-插件商店中会在 LobeChat 中可以直接安装并使用的插件。
-
-
-
-## 使用插件
-
-安装完毕插件后,只需在当前助手下开启插件即可使用。
-
-
-
-## 插件配置
-
-部分插件可能需要你进行相应的配置,例如 API Key 等。
-
-你可以在安装插件后,点击设置进入插件的设置填写配置:
-
-
-
-
diff --git a/docs/usage/plugins/custom-plugin.mdx b/docs/usage/plugins/custom-plugin.mdx
deleted file mode 100644
index e34684212a..0000000000
--- a/docs/usage/plugins/custom-plugin.mdx
+++ /dev/null
@@ -1,37 +0,0 @@
----
-title: Custom LobeChat Plugins Installation and Development Guide
-description: >-
- Learn how to install custom plugins in LobeChat and develop your own plugins to enhance your AI assistant's capabilities.
-
-tags:
- - Custom Plugins
- - LobeChat
- - Plugin Installation
- - Plugin Development
- - ChatGPT Plugins
----
-
-# Custom Plugins
-
-## Installing Custom Plugins
-
-If you wish to install a plugin that is not available in the LobeChat plugin store, such as a custom-developed LobeChat plugin, you can click on "Custom Plugins" to install it:
-
-
-
-In addition, LobeChat's plugin mechanism is compatible with ChatGPT plugins, so you can easily install corresponding ChatGPT plugins.
-
-If you want to try installing custom plugins on your own, you can use the following links to try:
-
-- `Custom Lobe Plugin` Mock Credit Card: [https://lobe-plugin-mock-credit-card.vercel.app/manifest.json](https://lobe-plugin-mock-credit-card.vercel.app/manifest.json)
-- `ChatGPT Plugin` Access Links: [https://www.accesslinks.ai/.well-known/ai-plugin.json](https://www.accesslinks.ai/.well-known/ai-plugin.json)
-
-
-
-
-
-
-
-## Developing Custom Plugins
-
-If you wish to develop a LobeChat plugin on your own, feel free to refer to the [Plugin Development Guide](/docs/usage/plugins/development) to expand the possibilities of your AI assistant!
diff --git a/docs/usage/plugins/custom-plugin.zh-CN.mdx b/docs/usage/plugins/custom-plugin.zh-CN.mdx
deleted file mode 100644
index 8693557821..0000000000
--- a/docs/usage/plugins/custom-plugin.zh-CN.mdx
+++ /dev/null
@@ -1,35 +0,0 @@
----
-title: 自定义插件 - LobeChat 插件安装与开发指南
-description: 学习如何安装自定义插件和开发 LobeChat 插件,扩展你的 AI 智能助手的功能。
-tags:
- - 自定义插件
- - LobeChat
- - 插件安装
- - 插件开发
- - AI 智能助手
----
-
-# 自定义插件
-
-## 安装自定义插件
-
-如果你希望安装一个不在 LobeChat 插件商店中的插件,例如自己开发的 LobeChat,你可以点击「自定义插件」进行安装:
-
-
-
-此外,LobeChat 的插件机制兼容了 ChatGPT 的插件,因此你可以一键安装相应的 ChatGPT 插件。
-
-如果你希望尝试自行安装自定义插件,你可以使用以下链接来尝试:
-
-- `自定义 Lobe 插件` Mock Credit Card:[https://lobe-plugin-mock-credit-card.vercel.app/manifest.json](https://lobe-plugin-mock-credit-card.vercel.app/manifest.json)
-- `ChatGPT 插件` Access Links:[https://www.accesslinks.ai/.well-known/ai-plugin.json](https://www.accesslinks.ai/.well-known/ai-plugin.json)
-
-
-
-
-
-
-
-## 开发自定义插件
-
-如果你希望自行开发一个 LobeChat 的插件,欢迎查阅 [插件开发指南](/zh/docs/usage/plugins/development) 以扩展你的 AI 智能助手的可能性边界!
diff --git a/docs/usage/plugins/development.mdx b/docs/usage/plugins/development.mdx
deleted file mode 100644
index 4be3246341..0000000000
--- a/docs/usage/plugins/development.mdx
+++ /dev/null
@@ -1,279 +0,0 @@
----
-title: LobeChat Plugin Development Guide
-description: >-
- Learn how to create and integrate custom plugins in LobeChat, including plugin composition, custom plugin workflow, local plugin development, manifest structure, project structure, server-side implementation, plugin UI interface, deployment, and release.
-
-tags:
- - Plugin Development
- - LobeChat
- - Custom Plugins
- - Plugin Workflow
- - Manifest Structure
- - Server-side Implementation
- - Plugin UI Interface
- - Deployment
- - Release
----
-
-# Plugin Development Guide
-
-## Plugin Composition
-
-A LobeChat plugin consists of the following components:
-
-1. **Plugin Index**: Used to display basic information about the plugin, including the plugin name, description, author, version, and a link to the plugin manifest. The official plugin index can be found at [lobe-chat-plugins](https://github.com/lobehub/lobe-chat-plugins). If you want to publish a plugin to the official plugin marketplace, you need to [submit a PR](https://github.com/lobehub/lobe-chat-plugins/pulls) to this repository.
-2. **Plugin Manifest**: Used to describe the functionality of the plugin, including the server-side description, frontend display information, and version number. For a detailed introduction to the manifest, see [manifest][manifest-docs-url].
-3. **Plugin Services**: Used to implement the server-side and frontend modules described in the plugin manifest, as follows:
- - **Server-side**: Needs to implement the interface capabilities described in the `api` section of the manifest.
- - **Frontend UI** (optional): Needs to implement the interface described in the `ui` section of the manifest. This interface will be displayed in plugin messages, allowing for a richer display of information than plain text.
-
-## Custom Plugin Workflow
-
-This section will introduce how to add and use a custom plugin in LobeChat.
-
-
- ### Create and Launch Plugin Project
-
- You need to first create a plugin project locally, you can use the template we have prepared [lobe-chat-plugin-template][lobe-chat-plugin-template-url]
-
- ```bash
- $ git clone https://github.com/lobehub/chat-plugin-template.git
- $ cd chat-plugin-template
- $ npm i
- $ npm run dev
- ```
-
- When you see `ready started server on 0.0.0.0:3400, url: http://localhost:3400`, it means the plugin service has been successfully launched locally.
-
-
-
- ### Add Local Plugin in LobeChat Role Settings
-
- Next, go to LobeChat, create a new assistant, and go to its session settings page:
-
-
-
- Click the Add button on the right of the plugin list to open the custom plugin adding popup:
-
-
-
- Fill in the **Plugin Description File Url** with `http://localhost:3400/manifest-dev.json`, which is the manifest address of the plugin we started locally.
-
- At this point, you should see that the identifier of the plugin has been automatically recognized as `chat-plugin-template`. Next, you need to fill in the remaining form fields (only the title is required), and then click the Save button to complete the custom plugin addition.
-
-
-
- After adding, you can see the newly added plugin in the plugin list. If you need to modify the plugin configuration, you can click the Settings button on the far right to make changes.
-
-
-
- ### Test Plugin Function in Session
-
- Next, we need to test whether the plugin's function is working properly.
-
- Click the Back button to return to the session area, and then send a message to the assistant: "What should I wear?" At this point, the assistant will try to ask you about your gender and current mood.
-
-
-
- After answering, the assistant will initiate the plugin call, retrieve recommended clothing data from the server based on your gender and mood, and push it to you. Finally, it will provide a text summary based on this information.
-
-
-
- After completing these operations, you have understood the basic process of adding custom plugins and using them in LobeChat.
-
-
-## Local Plugin Development
-
-In the above process, we have learned how to add and use plugins. Next, we will focus on the process of developing custom plugins.
-
-### Manifest
-
-The `manifest` aggregates information on how the plugin's functionality is implemented. The core fields are `api` and `ui`, which respectively describe the server-side interface capabilities and the front-end rendering interface address of the plugin.
-
-Taking the `manifest` in the template we provided as an example:
-
-```json
-{
- "api": [
- {
- "url": "http://localhost:3400/api/clothes",
- "name": "recommendClothes",
- "description": "Recommend clothes to the user based on their mood",
- "parameters": {
- "properties": {
- "mood": {
- "description": "The user's current mood, with optional values: happy, sad, anger, fear, surprise, disgust",
- "enums": ["happy", "sad", "anger", "fear", "surprise", "disgust"],
- "type": "string"
- },
- "gender": {
- "type": "string",
- "enum": ["man", "woman"],
- "description": "The user's gender, which needs to be asked for from the user to obtain this information"
- }
- },
- "required": ["mood", "gender"],
- "type": "object"
- }
- }
- ],
- "gateway": "http://localhost:3400/api/gateway",
- "identifier": "chat-plugin-template",
- "ui": {
- "url": "http://localhost:3400",
- "height": 200
- },
- "version": "1"
-}
-```
-
-In this manifest, it mainly includes the following parts:
-
-1. `identifier`: This is the unique identifier of the plugin, used to distinguish different plugins. This field needs to be globally unique.
-2. `api`: This is an array containing all the API interface information of the plugin. Each interface includes the url, name, description, and parameters fields, all of which are required. The `description` and `parameters` fields will be sent to GPT as the `functions` parameter of the [Function Call](https://sspai.com/post/81986), and the parameters need to comply with the [JSON Schema](https://json-schema.org/) specification. In this example, the API interface is named `recommendClothes`, and its function is to recommend clothes based on the user's mood and gender. The interface parameters include the user's mood and gender, both of which are required.
-3. `ui`: This field contains information about the plugin's user interface, indicating from which address LobeChat loads the plugin's front-end interface. Since LobeChat plugin interface loading is implemented based on iframes, the height and width of the plugin interface can be specified as needed.
-4. `gateway`: This field specifies the gateway for LobeChat to query the plugin's API interface. LobeChat's default plugin gateway is a cloud-based service, and requests for custom plugins need to be sent to a locally launched service. Remote calls to a local address are generally not feasible. The `gateway` field solves this problem. By specifying the gateway in the manifest, LobeChat will send plugin requests to this address, and the local gateway address will dispatch requests to the local plugin service. Published online plugins do not need to specify this field.
-5. `version`: This is the version number of the plugin, which currently has no effect.
-
-In actual development, you can modify the plugin's description list according to your own needs to declare the functionality you want to implement. For a complete introduction to each field in the manifest, see: [manifest][manifest-docs-url].
-
-### Project Structure
-
-The [lobe-chat-plugin-template][lobe-chat-plugin-template-url] template project uses Next.js as the development framework, and its core directory structure is as follows:
-
-```
-➜ chat-plugin-template
-├── public
-│ └── manifest-dev.json # Manifest file
-├── src
-│ └── pages
-│ │ ├── api # Next.js server-side folder
-│ │ │ ├── clothes.ts # Implementation of the recommendClothes interface
-│ │ │ └── gateway.ts # Local plugin proxy gateway
-│ │ └── index.tsx # Front-end display interface
-```
-
-This template uses Next.js as the development framework. You can use any development framework and language you are familiar with, as long as it can implement the functionality described in the manifest.
-
-Contributions of more plugin templates using different frameworks and languages are also welcome.
-
-### Server-Side
-
-The server-side needs to implement the API interfaces described in the manifest. In the template, we use Vercel's [Edge Runtime](https://nextjs.org/docs/pages/api-reference/edge) to eliminate the need for maintenance.
-
-#### API Implementation
-
-For the Edge Runtime, we provide the `createErrorResponse` method in `@lobehub/chat-plugin-sdk` to quickly return error responses. Currently, the provided error types are detailed in: [PluginErrorType][plugin-error-type-url].
-
-The implementation of the clothes interface in the template is as follows:
-
-```ts
-export default async (req: Request) => {
- if (req.method !== 'POST') return createErrorResponse(PluginErrorType.MethodNotAllowed);
-
- const { gender, mood } = (await req.json()) as RequestData;
-
- const clothes = gender === 'man' ? manClothes : womanClothes;
-
- const result: ResponseData = {
- clothes: clothes[mood] || [],
- mood,
- today: Date.now(),
- };
-
- return new Response(JSON.stringify(result));
-};
-```
-
-Where `manClothes` and `womanClothes` are mock data and can be replaced with database queries in actual scenarios.
-
-#### Plugin Gateway
-
-Since the default plugin gateway for LobeChat is a cloud service `/api/plugins`, the cloud service sends requests to the address specified in the manifest's `api.url` to solve cross-origin issues.
-
-For custom plugins, plugin requests need to be sent to the local service. Therefore, by specifying the gateway in the manifest ([http://localhost:3400/api/gateway](http://localhost:3400/api/gateway)), LobeChat> will directly request this address, and then only the corresponding gateway needs to be created at that address.
-
-```ts
-import { createLobeChatPluginGateway } from '@lobehub/chat-plugins-gateway';
-
-export const config = {
- runtime: 'edge',
-};
-
-export default createLobeChatPluginGateway();
-```
-
-[`@lobehub/chat-plugins-gateway`](https://github.com/lobehub/chat-plugins-gateway) contains the implementation of the plugin gateway in LobeChat [here](https://github.com/lobehub/lobe-chat/blob/main/src/pages/api/plugins.api.ts). You can use this package directly to create a gateway, allowing LobeChat to access the local plugin service.
-
-### Plugin UI Interface
-
-The custom UI interface for plugins is optional. For example, the official plugin [Web Content Extraction](https://github.com/lobehub/chat-plugin-web-crawler) does not have a corresponding user interface.
-
-
-
-If you want to display richer information in plugin messages or include some interactive operations, you can customize a user interface for the plugin. For example, the following image shows the user interface for the [Search Engine](https://github.com/lobehub/chat-plugin-search-engine) plugin.
-
-
-
-#### Implementation of Plugin UI Interface
-
-LobeChat implements the loading of plugin UI through `iframe` and uses `postMessage` to communicate with the plugin. Therefore, the implementation of the plugin UI is consistent with regular web development. You can use any frontend framework and development language you are familiar with.
-
-
-
-In the template we provide, we use React + Next.js + [antd](https://ant.design/) as the frontend interface framework. You can find the implementation of the user interface in [`src/pages/index.tsx`](https://github.com/lobehub/chat-plugin-template/blob/main/src/pages/index.tsx).
-
-As for plugin communication, we provide relevant methods in [`@lobehub/chat-plugin-sdk`](https://github.com/lobehub/chat-plugin-sdk) to simplify communication between the plugin and LobeChat. You can actively retrieve the current message data from LobeChat using the `fetchPluginMessage` method. For detailed information about this method, see: [fetchPluginMessage][fetch-plugin-message-url].
-
-```tsx
-import { fetchPluginMessage } from '@lobehub/chat-plugin-sdk';
-import { memo, useEffect, useState } from 'react';
-
-import { ResponseData } from '@/type';
-
-const Render = memo(() => {
- const [data, setData] = useState();
-
- useEffect(() => {
- // Retrieve the current plugin message from LobeChat
- fetchPluginMessage().then((e: ResponseData) => {
- setData(e);
- });
- }, []);
-
- return <>...>;
-});
-
-export default Render;
-```
-
-## Plugin Deployment and Release
-
-Once you have finished developing the plugin, you can deploy it using your preferred method, such as using Vercel or packaging it as a Docker container for release, and so on.
-
-If you want more people to use your plugin, feel free to [submit it for listing](https://github.com/lobehub/lobe-chat-plugins) on the plugin marketplace.
-
-[![][submit-plugin-shield]][submit-plugin-url]
-
-### Plugin Shield
-
-[](https://github.com/lobehub/lobe-chat-plugins)
-
-```md
-[](https://github.com/lobehub/lobe-chat-plugins)
-```
-
-## Links
-
-- **📘 Pluging SDK Documentation**: [https://chat-plugin-sdk.lobehub.com](https://chat-plugin-sdk.lobehub.com)
-- **🚀 chat-plugin-template**: [https://github.com/lobehub/chat-plugin-template](https://github.com/lobehub/chat-plugin-template)
-- **🧩 chat-plugin-sdk**: [https://github.com/lobehub/chat-plugin-sdk](https://github.com/lobehub/chat-plugin-sdk)
-- **🚪 chat-plugin-gateway**: [https://github.com/lobehub/chat-plugins-gateway](https://github.com/lobehub/chat-plugins-gateway)
-- **🏪 lobe-chat-plugins**: [https://github.com/lobehub/lobe-chat-plugins](https://github.com/lobehub/lobe-chat-plugins)
-
-[fetch-plugin-message-url]: https://github.com/lobehub/chat-plugin-template
-[lobe-chat-plugin-template-url]: https://github.com/lobehub/chat-plugin-template
-[manifest-docs-url]: https://chat-plugin-sdk.lobehub.com/guides/plugin-manifest
-[plugin-error-type-url]: https://github.com/lobehub/chat-plugin-template
-[submit-plugin-shield]: https://img.shields.io/badge/🧩/🏪_submit_plugin-%E2%86%92-95f3d9?labelColor=black&style=for-the-badge
-[submit-plugin-url]: https://github.com/lobehub/lobe-chat-plugins
diff --git a/docs/usage/plugins/store.mdx b/docs/usage/plugins/store.mdx
deleted file mode 100644
index 9f07d7b1f8..0000000000
--- a/docs/usage/plugins/store.mdx
+++ /dev/null
@@ -1,22 +0,0 @@
----
-title: LobeChat Plugin Store
-description: >-
- Learn how to access the Plugin Store in LobeChat to easily install and use various plugins for enhanced functionality.
-
-tags:
- - Plugin Store
- - LobeChat
- - Install Plugins
- - Extension Tools
- - Enhanced Functionality
----
-
-# Plugin Store
-
-You can access the plugin store by going to `Extension Tools` -> `Plugin Store` in the session toolbar.
-
-
-
-In the plugin store, you can directly install and use plugins in LobeChat.
-
-
diff --git a/docs/usage/plugins/store.zh-CN.mdx b/docs/usage/plugins/store.zh-CN.mdx
deleted file mode 100644
index 84842312e2..0000000000
--- a/docs/usage/plugins/store.zh-CN.mdx
+++ /dev/null
@@ -1,19 +0,0 @@
----
-title: LobeChat 插件商店
-description: 在 LobeChat 中浏览和安装各种实用插件,提升会话工具条的功能和体验。
-tags:
- - LobeChat
- - 插件商店
- - 扩展工具
- - 会话工具条
----
-
-# 插件商店
-
-你可以在会话工具条中的 `扩展工具` -> `插件商店`,进入插件商店。
-
-
-
-插件商店中会在 LobeChat 中可以直接安装并使用的插件。
-
-
diff --git a/docs/usage/providers.mdx b/docs/usage/providers.mdx
index 6926b1e576..a400f372c5 100644
--- a/docs/usage/providers.mdx
+++ b/docs/usage/providers.mdx
@@ -1,14 +1,16 @@
---
-title: Enhancing LobeChat with Multiple Model Providers for AI Conversations
+title: Using Multiple Model Providers in LobeHub
description: >-
- Discover how LobeChat offers diverse AI conversation options by supporting multiple model providers, providing flexibility and a wide range of choices for users and developers.
-
+ Learn about the latest developments in LobeHub's support for multiple model
+ providers, including currently supported providers, planned expansions, and
+ how to use local models.
tags:
- - LobeChat
- - AI Conversations
+ - LobeHub
+ - AI Chat Services
- Model Providers
- - Diversity
- - Flexibility
+ - Multi-Model Support
+ - Local Model Support
+ - AWS Bedrock
- Google AI
- ChatGLM
- Moonshot AI
@@ -17,14 +19,14 @@ tags:
- Ollama
---
-# Using Multiple Model Providers in LobeChat
+# Using Multiple Model Providers in LobeHub
-
+
-In the continuous development of LobeChat, we deeply understand the importance of diversity in model providers for providing AI conversation services to meet the needs of the community. Therefore, we have expanded our support to multiple model providers instead of being limited to a single one, in order to offer users a more diverse and rich selection of conversation options.
+As LobeHub continues to evolve, we’ve come to deeply understand the importance of supporting a diverse range of model providers to meet the needs of our community. Rather than relying on a single provider, we’ve expanded our support to include multiple AI model services, offering users a richer and more versatile chat experience.
-This approach allows LobeChat to adapt more flexibly to different user needs and provides developers with a wider range of choices.
+This approach allows LobeHub to better adapt to the varying needs of users while also giving developers a broader range of options to work with.
-## Tutorial on Using Model Providers
+## How to Use Model Providers
diff --git a/docs/usage/providers.zh-CN.mdx b/docs/usage/providers.zh-CN.mdx
index 0760cbdf85..f10f2fb967 100644
--- a/docs/usage/providers.zh-CN.mdx
+++ b/docs/usage/providers.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用多模型服务商
-description: 了解 LobeChat 在多模型服务商支持方面的最新进展,包括已支持的模型服务商和计划中的扩展,以及本地模型支持的使用方式。
+title: 在 LobeHub 中使用多模型服务商
+description: 了解 LobeHub 在多模型服务商支持方面的最新进展,包括已支持的模型服务商和计划中的扩展,以及本地模型支持的使用方式。
tags:
- - LobeChat
+ - LobeHub
- AI 会话服务
- 模型服务商
- 多模型支持
@@ -16,13 +16,13 @@ tags:
- Ollama
---
-# 在 LobeChat 中使用多模型服务商
+# 在 LobeHub 中使用多模型服务商
-
+
-在 LobeChat 的不断发展过程中,我们深刻理解到在提供 AI 会话服务时模型服务商的多样性对于满足社区需求的重要性。因此,我们不再局限于单一的模型服务商,而是拓展了对多种模型服务商的支持,以便为用户提供更为丰富和多样化的会话选择。
+在 LobeHub 的不断发展过程中,我们深刻理解到在提供 AI 会话服务时模型服务商的多样性对于满足社区需求的重要性。因此,我们不再局限于单一的模型服务商,而是拓展了对多种模型服务商的支持,以便为用户提供更为丰富和多样化的会话选择。
-通过这种方式,LobeChat 能够更灵活地适应不同用户的需求,同时也为开发者提供了更为广泛的选择空间。
+通过这种方式,LobeHub 能够更灵活地适应不同用户的需求,同时也为开发者提供了更为广泛的选择空间。
## 模型服务商使用教程
diff --git a/docs/usage/providers/ai21.mdx b/docs/usage/providers/ai21.mdx
index 60fefc912e..5b2039eb0f 100644
--- a/docs/usage/providers/ai21.mdx
+++ b/docs/usage/providers/ai21.mdx
@@ -1,48 +1,47 @@
---
-title: Using AI21 Labs in LobeChat
+title: Using AI21 Labs in LobeHub
description: >-
- Learn how to integrate and utilize AI21 Labs's language model APIs in LobeChat.
-
+ Learn how to configure and use your AI21 Labs API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
+ - LobeHub
- AI21 Labs
- API Key
- Web UI
---
-# Using AI21 Labs in LobeChat
+# Using AI21 Labs in LobeHub
-
+
-[AI21 Labs](https://www.ai21.com/) is a company focused on artificial intelligence, offering advanced language models and API services designed to help developers and businesses leverage natural language processing technology. Their flagship product, the "Jamba" series of models, can perform complex language understanding and generation tasks, widely utilized in fields such as content creation and conversational systems.
+[AI21 Labs](https://www.ai21.com/) is an AI-focused company that offers advanced language models and API services, designed to help developers and businesses leverage natural language processing technologies. Their flagship "Jamba" model series is capable of handling complex language understanding and generation tasks, widely used in content creation, conversational systems, and more.
-This article will guide you on how to use AI21 Labs within LobeChat.
+This guide will walk you through how to use AI21 Labs within LobeHub.
- ### Step 1: Obtain the AI21 Labs API Key
+ ### Step 1: Obtain an API Key from AI21 Labs
- - Register and log in to [AI21 Studio](https://studio.ai21.com)
- - Click on the `User Avatar` menu, then select `API Key`
+ - Sign up and log in to [AI21 Studio](https://studio.ai21.com)
+ - Click on your `user avatar` and select `API Key`
- Copy and save the generated API key
-
+
- ### Step 2: Configure AI21 Labs in LobeChat
+ ### Step 2: Configure AI21 Labs in LobeHub
- - Go to the `Settings` page in LobeChat
- - Under `AI Service Provider`, find the setting for `AI21 Labs`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `AI21labs`
-
+
- - Enter the API key you obtained
- - Choose an AI21 Labs model for your AI assistant to begin the conversation
+ - Paste the API key you obtained earlier
+ - Choose one of AI21 Labs' models for your AI assistant to start chatting
-
+
- During use, you may need to pay the API service provider; please refer to the relevant fee policy
- of AI21 Labs.
+ You may need to pay for API usage depending on your usage level. Please refer to AI21 Labs' pricing policy for more details.
-Now you are ready to engage in conversations using the models provided by AI21 Labs in LobeChat.
+And that’s it! You’re now ready to use AI21 Labs models for conversations in LobeHub.
diff --git a/docs/usage/providers/ai21.zh-CN.mdx b/docs/usage/providers/ai21.zh-CN.mdx
index 1cae62a8f5..93a533528c 100644
--- a/docs/usage/providers/ai21.zh-CN.mdx
+++ b/docs/usage/providers/ai21.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 AI21 Labs
-description: 学习如何在 LobeChat 中配置和使用 AI21 Labs 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 AI21 Labs
+description: 学习如何在 LobeHub 中配置和使用 AI21 Labs 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- AI21 Labs
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 AI21 Labs
+# 在 LobeHub 中使用 AI21 Labs
-
+
[AI21 Labs](https://www.ai21.com/) 是一家专注于人工智能的公司,提供先进的语言模型和 API 服务,旨在帮助开发者和企业利用自然语言处理技术。其旗舰产品 "Jamba" 系列模型能够进行复杂的语言理解和生成任务,广泛应用于内容创作、对话系统等领域。
-本文将指导你如何在 LobeChat 中使用 AI21 Labs。
+本文将指导你如何在 LobeHub 中使用 AI21 Labs。
### 步骤一:获得 AI21 Labs 的 API Key
@@ -23,23 +23,23 @@ tags:
- 点击 `用户头像` 菜单,点击 `API Key`
- 复制并保存生成的 API 密钥
-
+
- ### 步骤二:在 LobeChat 中配置 AI21 Labs
+ ### 步骤二:在 LobeHub 中配置 AI21 Labs
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `AI21labs` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 AI21 Labs 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 AI21 Labs 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 AI21 Labs 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 AI21 Labs 提供的模型进行对话了。
diff --git a/docs/usage/providers/ai302.mdx b/docs/usage/providers/ai302.mdx
index c0fbc71461..daa88b7c57 100644
--- a/docs/usage/providers/ai302.mdx
+++ b/docs/usage/providers/ai302.mdx
@@ -1,45 +1,47 @@
---
-title: Using 302.AI in LobeChat
-description: Learn how to configure and use 302.AI's API Key in LobeChat to start conversations and interactions.
+title: Using 302.AI in LobeHub
+description: >-
+ Learn how to configure and use your 302.AI API Key in LobeHub to start
+ chatting and interacting.
tags:
- - LobeChat
+ - LobeHub
- 302.AI
- API Key
- Web UI
---
-# Using 302.AI in LobeChat
+# Using 302.AI in LobeHub
-
+
-[302.AI](https://www.302.ai/) is a pay-as-you-go AI application platform that provides the most comprehensive AI APIs and AI online applications on the market.
+[302.AI](https://www.302.ai/) is a pay-as-you-go AI application platform that offers one of the most comprehensive collections of AI APIs and online AI tools available.
-This article will guide you on how to use 302.AI in LobeChat.
+This guide will walk you through how to use 302.AI within LobeHub.
- ### Step 1: Obtain [302.AI](https://www.302.ai/) API Key
+ ### Step 1: Get Your [302.AI](https://www.302.ai/) API Key
- - Click `Get Started`, register and log in to [302.AI](https://www.302.ai/)
- - Click `API Keys` on the left side
- - Click `Add API KEY`, copy and save the generated API key
+ - Click on `Get Started` to register and log in to [302.AI](https://www.302.ai/)
+ - Navigate to the `API Keys` section on the left sidebar
+ - Click `Add API KEY`, then copy and save the generated API key
-
+
- ### Step 2: Configure 302.AI in LobeChat
+ ### Step 2: Configure 302.AI in LobeHub
- - Access LobeChat's `Settings` interface
- - Find the `302.AI` configuration item under `Language Models`
+ - Go to the `Settings` page in LobeHub
+ - Under the `Language Model` section, find the configuration for `302.AI`
-
+
- - Enter the obtained API key
- - Select a 302.AI model for your AI assistant to start conversations
+ - Paste the API key you obtained earlier
+ - Choose a model from 302.AI for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to 302.AI's relevant pricing policy.
+ You may need to pay for API usage. Please refer to 302.AI’s pricing policy for more details.
-Now you can use 302.AI's models for conversations in LobeChat.
+And that’s it! You’re now ready to use 302.AI models within LobeHub for conversations and interactions.
diff --git a/docs/usage/providers/ai302.zh-CN.mdx b/docs/usage/providers/ai302.zh-CN.mdx
index aba052c591..7c80b2146b 100644
--- a/docs/usage/providers/ai302.zh-CN.mdx
+++ b/docs/usage/providers/ai302.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 302.AI
-description: 学习如何在 LobeChat 中配置和使用 302.AI 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 302.AI
+description: 学习如何在 LobeHub 中配置和使用 302.AI 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 302.AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 302.AI
+# 在 LobeHub 中使用 302.AI
[302.AI](https://www.302.ai/) 是一个按需付费的 AI 应用平台,提供市面上最全的 AI API 和 AI 在线应用。
-本文将指导你如何在 LobeChat 中使用 302.AI。
+本文将指导你如何在 LobeHub 中使用 302.AI。
### 步骤一:获得 [302.AI](https://www.302.ai/) 的 API Key
@@ -25,9 +25,9 @@ tags:
- ### 步骤二:在 LobeChat 中配置 302.AI
+ ### 步骤二:在 LobeHub 中配置 302.AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`语言模型`下找到 `302.AI` 的设置项
@@ -42,4 +42,4 @@ tags:
-至此你已经可以在 LobeChat 中使用 302.AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 302.AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/ai360.mdx b/docs/usage/providers/ai360.mdx
index 6964a09697..6fc8ea7f67 100644
--- a/docs/usage/providers/ai360.mdx
+++ b/docs/usage/providers/ai360.mdx
@@ -1,46 +1,47 @@
---
-title: Using the 360AI in LobeChat
-description: Learn how to integrate and utilize 360AI's language model APIs in LobeChat.
+title: Using 360 Zhinao in LobeHub
+description: >-
+ Learn how to configure and use the 360 Zhinao API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
- - 360AI
+ - LobeHub
+ - 360 Zhinao
- API Key
- Web UI
---
-# Using the 360AI in LobeChat
+# Using 360 Zhinao in LobeHub
-
+
-The [360AI](https://ai.360.com/) is a cognitive general model independently developed by 360 Company, aimed at providing powerful natural language processing capabilities for enterprises and developers. This model has been upgraded to version 4.0 and supports various application scenarios, including conversational services, image generation, vector database services, and more.
+[360 Zhinao](https://ai.360.com/) is a general-purpose cognitive large language model developed by Qihoo 360. It is designed to provide powerful natural language processing capabilities for businesses and developers. Now upgraded to version 4.0, it supports a wide range of applications including conversational services, image generation, vector database services, and more.
-This article will guide you on how to use the 360AI in LobeChat.
+This guide will walk you through how to use 360 Zhinao within LobeHub.
- ### Step 1: Obtain the 360AI API Key
+ ### Step 1: Obtain a 360 Zhinao API Key
- - Register and log in to the [360AI API Open Platform](https://ai.360.com/platform/keys)
+ - Register and log in to the [360 Zhinao API Platform](https://ai.360.com/platform/keys)
- Click on the `API Keys` menu on the left
- - Create an API key and copy it
+ - Create a new API key and copy it
-
+
- ### Step 2: Configure 360AI in LobeChat
+ ### Step 2: Configure 360 Zhinao in LobeHub
- - Access the `Settings` interface in LobeChat
- - Under `AI Service Provider`, find the option for `360`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `360`
-
+
- - Enter the API key you obtained
- - Choose a 360AI model for your AI assistant to start chatting
+ - Paste the API key you obtained
+ - Choose a 360 Zhinao model for your AI assistant to start chatting
-
+
- Please note that you may need to pay the API service provider during use, refer to the relevant
- pricing policy of the 360AI.
+ You may need to pay for API usage depending on your usage level. Please refer to 360 Zhinao’s pricing policy for details.
-You can now use the models provided by the 360AI for conversations in LobeChat.
+That's it! You're now ready to use 360 Zhinao's models for conversations in LobeHub.
diff --git a/docs/usage/providers/ai360.zh-CN.mdx b/docs/usage/providers/ai360.zh-CN.mdx
index 47f45df27f..48bed81f6a 100644
--- a/docs/usage/providers/ai360.zh-CN.mdx
+++ b/docs/usage/providers/ai360.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用360智脑
-description: 学习如何在 LobeChat 中配置和使用360智脑的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用360智脑
+description: 学习如何在 LobeHub 中配置和使用360智脑的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 360智脑
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 360 智脑
+# 在 LobeHub 中使用 360 智脑
-
+
[360 智脑](https://ai.360.com/)是 360 公司自主研发的认知型通用大模型,旨在为企业和开发者提供强大的自然语言处理能力。该模型已升级至 4.0 版本,能够支持多种应用场景,包括对话服务、图片生成、向量数据库服务等。
-本文将指导你如何在 LobeChat 中使用 360 智脑。
+本文将指导你如何在 LobeHub 中使用 360 智脑。
### 步骤一:获得 360 智脑的 API Key
@@ -23,23 +23,23 @@ tags:
- 点击左侧 `API Keys` 菜单
- 创建一个 API 密钥并复制
-
+
- ### 步骤二:在 LobeChat 中配置 360 智脑
+ ### 步骤二:在 LobeHub 中配置 360 智脑
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `360` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 360 智脑的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 360 智脑的相关费用政策。
-至此你已经可以在 LobeChat 中使用 360 智脑提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 360 智脑提供的模型进行对话了。
diff --git a/docs/usage/providers/aihubmix.mdx b/docs/usage/providers/aihubmix.mdx
new file mode 100644
index 0000000000..8a2e6f0ac2
--- /dev/null
+++ b/docs/usage/providers/aihubmix.mdx
@@ -0,0 +1,98 @@
+---
+title: AiHubMix Provider Configuration
+description: Learn how to configure and use the AiHubMix provider in LobeHub
+tags:
+ - AiHubMix
+ - Provider Configuration
+ - Setup Guide
+---
+
+# AiHubMix Provider Configuration
+
+AiHubMix is an AI model aggregation platform that offers access to a variety of AI models through a unified OpenAI-compatible API. This guide will help you set up the AiHubMix provider in LobeHub.
+
+## Prerequisites
+
+Before using the AiHubMix API, you need to:
+
+1. **Create an AiHubMix Account**
+ - Visit [AiHubMix](https://lobe.li/MZmv94N)
+ - Sign up for an account
+
+2. **Obtain an API Key**
+ - Log in to your AiHubMix dashboard
+ - Navigate to the API settings
+ - Generate an API key for use with LobeHub
+
+## Configuration
+
+### Environment Variables
+
+Add the following environment variable to your `.env` file:
+
+```bash
+# AiHubMix API Key (required)
+AIHUBMIX_API_KEY=your_aihubmix_api_key
+```
+
+### Available Models
+
+AiHubMix provides access to a variety of popular AI models, including:
+
+- **GPT-4o Mini** – A cost-effective lightweight model from OpenAI
+- **GPT-4o** – OpenAI’s flagship multimodal model
+- **Claude 3.5 Sonnet** – Anthropic’s advanced reasoning model
+- **Claude 3.5 Haiku** – A fast and efficient Claude model
+- **Gemini Pro 1.5** – Google’s model with long-context support
+- **DeepSeek V3** – A model with strong reasoning capabilities
+
+## How to Use
+
+1. **Configure the API Key**
+ - Set your AiHubMix API key in the environment variables
+ - Restart your LobeHub instance
+
+2. **Select a Model**
+ - Go to LobeHub settings
+ - Navigate to the Language Model section
+ - Choose AiHubMix as your provider
+ - Select a model from the available options
+
+3. **Start a Conversation**
+ - Create a new conversation
+ - Select an AiHubMix model
+ - Begin chatting
+
+## Features
+
+- **Multi-Model Access**: Access a variety of AI models through a single API
+- **OpenAI-Compatible**: Uses the standard OpenAI API format
+- **Function Calling**: Supports function calling for compatible models
+- **Vision Capabilities**: Some models support image analysis
+- **Model Discovery**: Automatically fetches the list of available models
+
+## Troubleshooting
+
+### Common Issues
+
+1. **401 Unauthorized Error**
+ - Verify that your API key is correct
+ - Ensure the API key has the necessary permissions
+ - Check if your account has sufficient credits
+
+2. **Model Unavailable**
+ - Some models may have usage restrictions
+ - Refer to the AiHubMix documentation for model availability
+ - Confirm that your account tier supports the requested model
+
+3. **Rate Limiting**
+ - AiHubMix may enforce rate limits based on your subscription plan
+ - Consider upgrading your plan for higher limits
+
+## Support
+
+For additional help:
+
+- Visit the [AiHubMix Documentation](https://docs.aihubmix.com/)
+- Browse the [Model List](https://aihubmix.com/models)
+- Contact the AiHubMix support team for API-related issues
diff --git a/docs/usage/providers/aihubmix.zh-CN.mdx b/docs/usage/providers/aihubmix.zh-CN.mdx
index 586c082b2c..6817ee90ee 100644
--- a/docs/usage/providers/aihubmix.zh-CN.mdx
+++ b/docs/usage/providers/aihubmix.zh-CN.mdx
@@ -1,6 +1,6 @@
---
title: AiHubMix 提供商配置
-description: 学习如何在 LobeChat 中配置和使用 AiHubMix 提供商
+description: 学习如何在 LobeHub 中配置和使用 AiHubMix 提供商
tags:
- AiHubMix
- 提供商配置
@@ -9,7 +9,7 @@ tags:
# AiHubMix 提供商配置
-AiHubMix 是一个 AI 模型聚合平台,通过统一的 OpenAI 兼容 API 接口提供多种 AI 模型的访问服务。本指南将帮助您在 LobeChat 中设置 AiHubMix 提供商。
+AiHubMix 是一个 AI 模型聚合平台,通过统一的 OpenAI 兼容 API 接口提供多种 AI 模型的访问服务。本指南将帮助您在 LobeHub 中设置 AiHubMix 提供商。
## 前置条件
@@ -22,7 +22,7 @@ AiHubMix 是一个 AI 模型聚合平台,通过统一的 OpenAI 兼容 API 接
2. **获取 API 密钥**
- 登录您的 AiHubMix 控制台
- 导航到 API 设置
- - 生成用于 LobeChat 的 API 密钥
+ - 生成用于 LobeHub 的 API 密钥
## 配置
@@ -50,10 +50,10 @@ AiHubMix 提供多种热门 AI 模型的访问,包括:
1. **配置 API 密钥**
- 在环境变量中设置您的 AiHubMix API 密钥
- - 重启您的 LobeChat 实例
+ - 重启您的 LobeHub 实例
2. **选择模型**
- - 进入 LobeChat 设置
+ - 进入 LobeHub 设置
- 导航到语言模型
- 选择 AiHubMix 作为您的提供商
- 从可用模型中选择
diff --git a/docs/usage/providers/anthropic.mdx b/docs/usage/providers/anthropic.mdx
index 49a403e9d9..2472cc6f40 100644
--- a/docs/usage/providers/anthropic.mdx
+++ b/docs/usage/providers/anthropic.mdx
@@ -1,55 +1,52 @@
---
-title: Using Anthropic Claude API Key in LobeChat
+title: Using the Anthropic Claude API Key in LobeHub
description: >-
- Learn how to integrate Anthropic Claude API in LobeChat to enhance your AI assistant capabilities. Support Claude 3.5 sonnet / Claude 3 Opus / Claude 3 haiku
-
+ Learn how to configure and use the Anthropic Claude API in LobeHub, including
+ Claude 3.5 Sonnet, Claude 3 Opus, and Claude 3 Haiku.
tags:
- Anthropic Claude
- - API Key
- - AI assistant
- - Web UI
+ - API
+ - WebUI
+ - AI Assistant
---
-# Using Anthropic Claude in LobeChat
+# Using Anthropic Claude in LobeHub
-
+
-The Anthropic Claude API is now available for everyone to use. This document will guide you on how to use [Anthropic Claude](https://www.anthropic.com/api) in LobeChat:
+The Anthropic Claude API is now publicly available. This guide will walk you through how to use [Anthropic Claude](https://www.anthropic.com/api) in LobeHub:
- ### Step 1: Obtain Anthropic Claude API Key
+ ### Step 1: Get Your Anthropic Claude API Key
- - Create an [Anthropic Claude API](https://www.anthropic.com/api) account.
- - Get your [API key](https://console.anthropic.com/settings/keys).
+ - Create an [Anthropic Claude API](https://www.anthropic.com/api) account
+ - Retrieve your [API key](https://console.anthropic.com/settings/keys)
-
+
- The Claude API currently offers $5 of free credits, but it is only available in certain specific
- countries/regions. You can go to Dashboard > Claim to see if it is applicable to your
- country/region.
+ Claude API currently offers $5 in free credits. However, this is only available in select countries/regions. You can check your eligibility by visiting Dashboard > Claim.
- - Set up your billing for the API key to work on [https://console.anthropic.com/settings/plans](https://console.anthropic.com/settings/plans) (choose the "Build" plan so you can add credits and only pay for usage).
+ - Set up your billing to activate the API key at [https://console.anthropic.com/settings/plans](https://console.anthropic.com/settings/plans) (Choose the "Build" plan to add credits and pay only for what you use)
-
+
- ### Step 2: Configure Anthropic Claude in LobeChat
+ ### Step 2: Configure Anthropic Claude in LobeHub
- - Access the `Settings` interface in LobeChat.
- - Find the setting for `Anthropic Claude` under `AI Service Provider`.
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `Anthropic Claude` configuration section
-
+
- - Enter the obtained API key.
- - Choose an Anthropic Claude model for your AI assistant to start the conversation.
+ - Paste your API key into the input field
+ - Choose one of the available Anthropic Claude models for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to Anthropic Claude's
- relevant pricing policies.
+ You may incur charges from the API provider during usage. Please refer to Anthropic Claude’s pricing policy for more details.
-You can now engage in conversations using the models provided by Anthropic Claude in LobeChat.
+That's it! You’re now ready to use Anthropic Claude models in LobeHub for conversations.
diff --git a/docs/usage/providers/anthropic.zh-CN.mdx b/docs/usage/providers/anthropic.zh-CN.mdx
index 34abda173f..34d817f20c 100644
--- a/docs/usage/providers/anthropic.zh-CN.mdx
+++ b/docs/usage/providers/anthropic.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用 Anthropic Claude API Key
+title: 在 LobeHub 中使用 Anthropic Claude API Key
description: >-
- 学习如何在 LobeChat 中配置和使用 Anthropic Claude API, Claude 3.5 sonnet / Claude 3 Opus / Claude 3 haiku
-
+ 学习如何在 LobeHub 中配置和使用 Anthropic Claude API, Claude 3.5 sonnet / Claude 3 Opus /
+ Claude 3 haiku
tags:
- Anthropic Claude
- API
@@ -10,11 +10,11 @@ tags:
- AI助手
---
-# 在 LobeChat 中使用 Anthropic Claude
+# 在 LobeHub 中使用 Anthropic Claude
-
+
-Anthropic Claude API 现在可供所有人使用,本文档将指导你如何在 LobeChat 中使用 [Anthropic Claude](https://www.anthropic.com/api):
+Anthropic Claude API 现在可供所有人使用,本文档将指导你如何在 LobeHub 中使用 [Anthropic Claude](https://www.anthropic.com/api):
### 步骤一:获取 Anthropic Claude API 密钥
@@ -22,7 +22,7 @@ Anthropic Claude API 现在可供所有人使用,本文档将指导你如何
- 创建一个 [Anthropic Claude API](https://www.anthropic.com/api) 帐户
- 获取您的 [API 密钥](https://console.anthropic.com/settings/keys)
-
+
Claude API 现在提供 5 美元的免费积分,但是,它仅适用于某些特定国家 / 地区,您可以转到 Dashboard >
@@ -31,23 +31,23 @@ Anthropic Claude API 现在可供所有人使用,本文档将指导你如何
- 设置您的账单,让 API 密钥在 [https://console.anthropic.com/settings/plans](https://console.anthropic.com/settings/plans) 上工作(选择 “生成” 计划,以便您可以添加积分并仅为使用量付费)
-
+
- ### 步骤二:在 LobeChat 中配置 Anthropic Claude
+ ### 步骤二:在 LobeHub 中配置 Anthropic Claude
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Anthropic Claude`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Anthropic Claude 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Anthropic Claude 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Anthropic Claude 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Anthropic Claude 提供的模型进行对话了。
diff --git a/docs/usage/providers/azure.mdx b/docs/usage/providers/azure.mdx
index 57a7316103..8091ccb099 100644
--- a/docs/usage/providers/azure.mdx
+++ b/docs/usage/providers/azure.mdx
@@ -1,57 +1,52 @@
---
-title: Using Azure OpenAI API Key in LobeChat
+title: Using Azure OpenAI API Key in LobeHub
description: >-
- Learn how to integrate and configure Azure OpenAI in LobeChat to enhance your AI assistant capabilities. Follow these steps to obtain the API key, configure the settings, and start engaging in conversations.
-
+ Learn how to configure and use Azure OpenAI models in LobeHub, including how
+ to obtain your API key and select a model.
tags:
- Azure OpenAI
- - AI assistant
- - API key
- - Configuration
- - Conversation models
+ - API Key
+ - Web UI
---
-# Using Azure OpenAI in LobeChat
+# Using Azure OpenAI in LobeHub
-
+
-This document will guide you on how to use [Azure OpenAI](https://oai.azure.com/) in LobeChat:
+This guide will walk you through how to use [Azure OpenAI](https://oai.azure.com/) in LobeHub:
- ### Step 1: Obtain Azure OpenAI API Key
+ ### Step 1: Obtain Your Azure OpenAI API Key
- - If you haven't registered yet, you need to create an [Azure OpenAI account](https://oai.azure.com/).
+ - If you haven’t already, you’ll need to sign up for an [Azure OpenAI account](https://oai.azure.com/).
-
+
- - After registration, go to the `Deployments` page and create a new deployment with your selected model.
+ - Once registered, go to the `Deployments` page and create a new deployment using the model of your choice.
- 
+
-
+ - Navigate to the `Chat` page and click on `View Code` to retrieve your endpoint and API key.
- - Navigate to the `Chat` page and click on `View Code` to obtain your endpoint and key.
+
-
+
-
+ ### Step 2: Configure Azure OpenAI in LobeHub
- ### Step 2: Configure Azure OpenAI in LobeChat
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `Azure OpenAI` configuration section
- - Access the `Settings` interface in LobeChat.
- - Find the setting for `Azure OpenAI` under `AI Service Provider`.
+
-
+ - Paste in the API key you obtained earlier
+ - Select an Azure OpenAI model for your AI assistant to start chatting
- - Enter the API key you obtained.
- - Choose an Azure OpenAI model for your AI assistant to start the conversation.
-
-
+
- During usage, you may need to pay the API service provider. Please refer to Azure OpenAI's
- relevant pricing policies.
+ You may incur charges from the API provider while using the service. Please refer to Azure OpenAI’s pricing policy for more details.
-Now you can engage in conversations using the models provided by Azure OpenAI in LobeChat.
+And that’s it! You’re now ready to start chatting with models powered by Azure OpenAI in LobeHub.
diff --git a/docs/usage/providers/azure.zh-CN.mdx b/docs/usage/providers/azure.zh-CN.mdx
index bbb65cd1b3..fc4d8cc5dd 100644
--- a/docs/usage/providers/azure.zh-CN.mdx
+++ b/docs/usage/providers/azure.zh-CN.mdx
@@ -1,50 +1,50 @@
---
-title: 在 LobeChat 中使用 Azure OpenAI API Key
-description: 学习如何在 LobeChat 中配置和使用 Azure OpenAI 模型进行对话,包括获取 API 密钥和选择模型。
+title: 在 LobeHub 中使用 Azure OpenAI API Key
+description: 学习如何在 LobeHub 中配置和使用 Azure OpenAI 模型进行对话,包括获取 API 密钥和选择模型。
tags:
- Azure OpenAI
- API Key
- Web UI
---
-# 在 LobeChat 中使用 Azure OpenAI
+# 在 LobeHub 中使用 Azure OpenAI
-
+
-本文档将指导你如何在 LobeChat 中使用 [Azure OpenAI](https://oai.azure.com/):
+本文档将指导你如何在 LobeHub 中使用 [Azure OpenAI](https://oai.azure.com/):
### 步骤一:获取 Azure OpenAI API 密钥
- 如果尚未注册,则必须注册 [Azure OpenAI 帐户](https://oai.azure.com/)。
-
+
- 注册完毕后,转到 `Deployments` 页面,然后使用您选择的模型创建新部署。
-
+
- 转到 `Chat` 页面,然后单击 `View Code` 以获取您的终结点和密钥。
-
+
-
+
- ### 步骤二:在 LobeChat 中配置 Azure OpenAI
+ ### 步骤二:在 LobeHub 中配置 Azure OpenAI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Azure OpenAI`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Azure OpenAI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Azure OpenAI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Azure OpenAI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Azure OpenAI 提供的模型进行对话了。
diff --git a/docs/usage/providers/azureai.mdx b/docs/usage/providers/azureai.mdx
index 68df568071..c099d7d92b 100644
--- a/docs/usage/providers/azureai.mdx
+++ b/docs/usage/providers/azureai.mdx
@@ -1,71 +1,71 @@
---
-title: Using Azure AI API Key in LobeChat
-description: Learn how to configure and use Azure AI models in LobeChat, get the API key, and start a conversation.
+title: Using Azure AI API Key in LobeHub
+description: >-
+ Learn how to configure and use Azure AI models in LobeHub, obtain your API
+ key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Azure AI
- API Key
- Web UI
---
-# Using Azure AI in LobeChat
+# Using Azure AI in LobeHub
-
+
-[Azure AI](https://azure.microsoft.com) is an open artificial intelligence technology platform based on the Microsoft Azure cloud platform. It provides various AI functionalities, including natural language processing, machine learning, and computer vision, helping businesses easily develop and deploy AI applications.
+[Azure AI](https://azure.microsoft.com) is an open AI technology platform built on Microsoft Azure's cloud infrastructure. It offers a wide range of AI capabilities including natural language processing, machine learning, and computer vision, enabling businesses to easily develop and deploy AI-powered applications.
-This document will guide you on how to integrate Azure AI models into LobeChat:
+This guide will walk you through how to integrate Azure AI models into LobeHub:
- ### Step 1: Deploy Azure AI Project and Model
+ ### Step 1: Deploy an Azure AI Project and Model
- - First, visit [Azure AI Foundry](https://ai.azure.com/) and complete the registration and login process.
- - After logging in, select `Browse models` on the homepage.
+ - First, visit [Azure AI Foundry](https://ai.azure.com/) and sign up or log in.
+ - Once logged in, go to the homepage and select `Browse Models`.
-
+
- - Choose the model you want in the model marketplace.
- - Enter the model details and click the `Deploy` button.
+ - In the model marketplace, choose the model you want to use.
+ - On the model details page, click the `Deploy` button.
-
+
- In the pop-up dialog, create a new project.
-
+
- For detailed configuration of Azure AI Foundry, please refer to the [official
- documentation](https://learn.microsoft.com/azure/ai-foundry/model-inference/).
+ For detailed configuration of Azure AI Foundry, please refer to the [official documentation](https://learn.microsoft.com/azure/ai-foundry/model-inference/).
- ### Step 2: Obtain the Model's API Key and Endpoint
+ ### Step 2: Retrieve the Model's API Key and Endpoint
- - In the details of the deployed model, you can find the Endpoint and API Key information.
- - Copy and save the obtained information.
+ - In the deployed model's details page, you can find the Endpoint and API Key.
+ - Copy and securely save this information.
-
+
- ### Step 3: Configure Azure AI in LobeChat
+ ### Step 3: Configure Azure AI in LobeHub
- - Visit the `App Settings` and `AI Service Provider` interface in LobeChat.
- - Find the settings for `Azure AI` in the list of providers.
+ - Go to the `App Settings` section in LobeHub and navigate to `AI Service Providers`.
+ - Find the `Azure AI` option in the list of providers.
-
+
- - Enable the Azure AI service provider and fill in the obtained Endpoint and API Key.
+ - Open the Azure AI provider settings and enter the Endpoint and API Key you obtained.
- For the Endpoint, you only need to fill in the first part:
- `https://xxxxxx.services.ai.azure.com/models`.
+ Only enter the base part of the Endpoint, e.g., `https://xxxxxx.services.ai.azure.com/models`.
- - Choose an Azure AI model for your assistant and start the conversation.
+ - Select an Azure AI model for your assistant to start chatting.
-
+
- You may need to pay the API service provider for usage. Please refer to Azure AI's relevant pricing policies.
+ You may incur charges from the API service provider during usage. Please refer to Azure AI's pricing policy for more details.
-Now you can use the models provided by Azure AI in LobeChat for conversations.
+That's it! You're now ready to use Azure AI models for conversations in LobeHub.
diff --git a/docs/usage/providers/azureai.zh-CN.mdx b/docs/usage/providers/azureai.zh-CN.mdx
index e8ae2b9dee..52258cd059 100644
--- a/docs/usage/providers/azureai.zh-CN.mdx
+++ b/docs/usage/providers/azureai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Azure AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Azure AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 Azure AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Azure AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- Azure AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Azure AI
+# 在 LobeHub 中使用 Azure AI
-
+
[Azure AI](https://azure.microsoft.com) 是一个基于 Microsoft Azure 云平台的开放式人工智能技术平台,提供包括自然语言处理、机器学习、计算机视觉等多种 AI 功能,帮助企业轻松开发和部署 AI 应用。
-本文档将指导你如何在 LobeChat 中接入 Azure AI 的模型:
+本文档将指导你如何在 LobeHub 中接入 Azure AI 的模型:
### 步骤一:部署 Azure AI 项目以及模型
@@ -22,16 +22,16 @@ tags:
- 首先,访问[Azure AI Foundry](https://ai.azure.com/)并完成注册登录
- 登录后在首页选择`浏览模型`
-
+
- 在模型广场中选择你想要模型
- 进入模型详情,点击`部署`按钮
-
+
- 在弹出的对话框中创建一个新的项目
-
+
Azure AI Foundry
@@ -43,14 +43,14 @@ tags:
- 在已部署的模型详情里,可以查询到 Endpoint 以及 API Key 信息
- 复制并保存好获取的信息
-
+
- ### 步骤三:在 LobeChat 中配置 Azure AI
+ ### 步骤三:在 LobeHub 中配置 Azure AI
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 `Azure AI` 的设置项
-
+
- 打开 Azure AI 服务商并填入获取的 Endpoint 以及 API 密钥
@@ -60,11 +60,11 @@ tags:
- 为你的助手选择一个 Azure AI 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Azure AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Azure AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Azure AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/baichuan.mdx b/docs/usage/providers/baichuan.mdx
index 2988404faa..20774d3801 100644
--- a/docs/usage/providers/baichuan.mdx
+++ b/docs/usage/providers/baichuan.mdx
@@ -1,45 +1,45 @@
---
-title: Using Baichuan API Key in LobeChat
+title: Using Baichuan API Key in LobeHub
description: >-
- Learn how to integrate Baichuan AI into LobeChat for enhanced conversational experiences. Follow the steps to configure Baichuan AI and start using its models.
-
+ Learn how to configure and use Baichuan's API key in LobeHub to start chatting
+ and interacting.
tags:
- - LobeChat
+ - LobeHub
- Baichuan
+ - Baichuan AI
- API Key
- Web UI
---
-# Using Baichuan in LobeChat
+# Using Baichuan in LobeHub
-
+
-This article will guide you on how to use Baichuan in LobeChat:
+This guide will walk you through how to use Baichuan in LobeHub:
- ### Step 1: Obtain Baichuan Intelligent API Key
+ ### Step 1: Obtain a Baichuan AI API Key
- - Create a [Baichuan Intelligent](https://platform.baichuan-ai.com/homePage) account
- - Create and obtain an [API key](https://platform.baichuan-ai.com/console/apikey)
+ - Create a [Baichuan AI](https://platform.baichuan-ai.com/homePage) account
+ - Generate and retrieve your [API Key](https://platform.baichuan-ai.com/console/apikey)
-
+
- ### Step 2: Configure Baichuan in LobeChat
+ ### Step 2: Configure Baichuan in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `Baichuan` under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the `Baichuan` configuration section
-
+
- - Enter the obtained API key
- - Choose a Baichuan model for your AI assistant to start the conversation
+ - Paste your API key into the input field
+ - Choose a Baichuan model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Baichuan's relevant
- pricing policies.
+ You may need to pay for API usage depending on your usage. Please refer to Baichuan's pricing policy for more details.
-You can now use the models provided by Baichuan for conversation in LobeChat.
+That's it! You're now ready to use Baichuan-powered models in LobeHub for conversations.
diff --git a/docs/usage/providers/baichuan.zh-CN.mdx b/docs/usage/providers/baichuan.zh-CN.mdx
index 5cb0c99646..abe1e8df8b 100644
--- a/docs/usage/providers/baichuan.zh-CN.mdx
+++ b/docs/usage/providers/baichuan.zh-CN.mdx
@@ -1,19 +1,19 @@
---
-title: 在 LobeChat 中使用百川 API Key
-description: 学习如何在 LobeChat 中配置和使用百川的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用百川 API Key
+description: 学习如何在 LobeHub 中配置和使用百川的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 百川
- 百川智能
- API密钥
- Web UI
---
-# 在 LobeChat 中使用百川
+# 在 LobeHub 中使用百川
-
+
-本文将指导你如何在 LobeChat 中使用百川:
+本文将指导你如何在 LobeHub 中使用百川:
### 步骤一:获取百川智能 API 密钥
@@ -21,23 +21,23 @@ tags:
- 创建一个[百川智能](https://platform.baichuan-ai.com/homePage)账户
- 创建并获取 [API 密钥](https://platform.baichuan-ai.com/console/apikey)
-
+
- ### 步骤二:在 LobeChat 中配置百川
+ ### 步骤二:在 LobeHub 中配置百川
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`百川`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个百川的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考百川的相关费用政策。
-至此你已经可以在 LobeChat 中使用百川提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用百川提供的模型进行对话了。
diff --git a/docs/usage/providers/bedrock.mdx b/docs/usage/providers/bedrock.mdx
index f782149c5b..8643daf7fe 100644
--- a/docs/usage/providers/bedrock.mdx
+++ b/docs/usage/providers/bedrock.mdx
@@ -1,89 +1,89 @@
---
-title: Using Amazon Bedrock API Key in LobeChat
+title: Using Amazon Bedrock API Key in LobeHub
description: >-
- Learn how to integrate Amazon Bedrock models into LobeChat for AI-powered conversations. Follow these steps to grant access, obtain API keys, and configure Amazon Bedrock.
-
+ Learn how to configure and use Amazon Bedrock, a fully managed foundation
+ model API service, in LobeHub to start chatting.
tags:
- Amazon Bedrock
- - Claude 3.5 sonnect
+ - Claude 3.5 Sonnet
- API keys
- Claude 3 Opus
- Web UI
---
-# Using Amazon Bedrock in LobeChat
+# Using Amazon Bedrock in LobeHub
-
+
-Amazon Bedrock is a fully managed foundational model API service that allows users to access models from leading AI companies (such as AI21 Labs, Anthropic, Cohere, Meta, Stability AI) and Amazon's own foundational models.
+Amazon Bedrock is a fully managed foundation model API service that allows users to access models from leading AI companies (such as AI21 Labs, Anthropic, Cohere, Meta, Stability AI) and Amazon itself via API.
-This document will guide you on how to use Amazon Bedrock in LobeChat:
+This guide will walk you through how to use Amazon Bedrock in LobeHub:
- ### Step 1: Grant Access to Amazon Bedrock Models in AWS
+ ### Step 1: Enable Access to Amazon Bedrock Models in AWS
- - Access and log in to the [AWS Console](https://console.aws.amazon.com/)
- - Search for `bedrock` and enter the `Amazon Bedrock` service
+ - Visit and log in to the [AWS Console](https://console.aws.amazon.com/)
+ - Search for "Bedrock" and navigate to the `Amazon Bedrock` service
-
+
- - Select `Models access` from the left menu
+ - In the left-hand menu, select `Model access`
-
+
- - Open model access permissions based on your needs
+ - Enable access to the models you want to use
-
+
- Some models may require additional information from you
+
+ Some models may require you to provide additional information.
+
### Step 2: Obtain API Access Keys
- - Continue searching for IAM in the AWS console and enter the IAM service
+ - In the AWS Console, search for IAM and go to the IAM service
-
+
- - In the `Users` menu, create a new IAM user
+ - Under the `Users` section, create a new IAM user
-
+
- - Enter the user name in the pop-up dialog box
+ - In the pop-up dialog, enter a username
-
+
- - Add permissions for this user or join an existing user group to ensure access to Amazon Bedrock
+ - Assign permissions to the user or add them to an existing group that has access to Amazon Bedrock
-
+
- - Create an access key for the added user
+ - Create access keys for the newly added user
-
+
- - Copy and securely store the access key and secret access key, as they will be needed later
+ - Copy and securely store the Access Key ID and Secret Access Key — you’ll need them later
-
+
- Please securely store the keys as they will only be shown once. If you lose them accidentally, you
- will need to create a new access key.
+ Store your keys securely, as they will only be shown once. If you lose them, you’ll need to generate new access keys.
- ### Step 3: Configure Amazon Bedrock in LobeChat
+ ### Step 3: Configure Amazon Bedrock in LobeHub
- - Access the `Settings` interface in LobeChat
- - Find the setting for `Amazon Bedrock` under `AI Service Provider` and open it
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, find and enable the `Amazon Bedrock` option
-
+
- - Open Amazon Bedrock and enter the obtained access key and secret access key
- - Choose an Amazon Bedrock model for your assistant to start the conversation
+ - Enter your Access Key ID and Secret Access Key
+ - Choose an Amazon Bedrock model for your assistant to start chatting
-
+
- You may incur charges while using the API service, please refer to Amazon Bedrock's pricing
- policy.
+ You may incur charges while using the API. Please refer to Amazon Bedrock’s pricing policy for details.
-You can now engage in conversations using the models provided by Amazon Bedrock in LobeChat.
+You’re all set! You can now start chatting in LobeHub using models provided by Amazon Bedrock.
diff --git a/docs/usage/providers/bedrock.zh-CN.mdx b/docs/usage/providers/bedrock.zh-CN.mdx
index 3ea7a4df52..4b7b8c98f9 100644
--- a/docs/usage/providers/bedrock.zh-CN.mdx
+++ b/docs/usage/providers/bedrock.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 Amazon Bedrock API Key
-description: 学习如何在 LobeChat 中配置和使用 Amazon Bedrock,一个完全托管的基础模型API服务,以便开始对话。
+title: 在 LobeHub 中使用 Amazon Bedrock API Key
+description: 学习如何在 LobeHub 中配置和使用 Amazon Bedrock,一个完全托管的基础模型API服务,以便开始对话。
tags:
- Amazon Bedrock
- Claude 3.5 sonnect
@@ -9,13 +9,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Amazon Bedrock
+# 在 LobeHub 中使用 Amazon Bedrock
-
+
Amazon Bedrock 是一个完全托管的基础模型 API 服务,允许用户通过 API 访问来自领先 AI 公司 (如 AI21 Labs、Anthropic、Cohere、Meta、Stability AI) 和 Amazon 自家的基础模型。
-本文档将指导你如何在 LobeChat 中使用 Amazon Bedrock:
+本文档将指导你如何在 LobeHub 中使用 Amazon Bedrock:
### 步骤一:在 AWS 中打开 Amazon Bedrock 模型的访问权限
@@ -23,15 +23,15 @@ Amazon Bedrock 是一个完全托管的基础模型 API 服务,允许用户通
- 访问并登录 [AWS Console](https://console.aws.amazon.com/)
- 搜索 beckrock 并进入 `Amazon Bedrock` 服务
-
+
- 在左侧菜单中选择 `Models acess`
-
+
- 根据你所需要的模型,打开模型访问权限
-
+ 某些模型可能需要你提供额外的信息
@@ -39,47 +39,47 @@ Amazon Bedrock 是一个完全托管的基础模型 API 服务,允许用户通
- 继续在 AWS console 中搜索 IAM,进入 IAM 服务
-
+
- 在 `用户` 菜单中,创建一个新的 IAM 用户
-
+
- 在弹出的对话框中,输入用户名称
-
+
- 为这个用户添加权限,或者加入一个已有的用户组,确保用户拥有 Amazon Bedrock 的访问权限
-
+
- 为已添加的用户创建访问密钥
-
+
- 复制并妥善保存访问密钥以及秘密访问密钥,后续将会用到
-
+
请安全地存储密钥,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新访问密钥。
- ### 步骤三:在 LobeChat 中配置 Amazon Bedrock
+ ### 步骤三:在 LobeHub 中配置 Amazon Bedrock
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Amazon Bedrock`的设置项并打开
-
+
- 打开 Amazon Bedrock 并填入获得的访问密钥与秘密访问密钥
- 为你的助手选择一个 Amazone Bedrock 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Amazon Bedrock 的费用政策。
-至此你已经可以在 LobeChat 中使用 Amazone Bedrock 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Amazon Bedrock 提供的模型进行对话了。
diff --git a/docs/usage/providers/bfl.mdx b/docs/usage/providers/bfl.mdx
index 1ecc1c417e..3f71a0dcb0 100644
--- a/docs/usage/providers/bfl.mdx
+++ b/docs/usage/providers/bfl.mdx
@@ -1,8 +1,8 @@
---
-title: Using Black Forest Labs API Key in LobeChat
+title: Using Black Forest Labs API Key in LobeHub
description: >-
- Learn how to integrate Black Forest Labs API Key in LobeChat for AI image generation using advanced models and high-quality output.
-
+ Learn how to configure and use the Black Forest Labs API Key in LobeHub to
+ generate high-quality AI images with advanced models.
tags:
- Black Forest Labs
- Image Generation
@@ -10,21 +10,21 @@ tags:
- Web UI
---
-# Using Black Forest Labs in LobeChat
+# Using Black Forest Labs in LobeHub
-
+
-[Black Forest Labs](https://bfl.ai/) is currently the world's top-tier AI image generation research lab, having developed the FLUX series of high-quality image generation models and the FLUX Kontext series of image editing models. This document will guide you on how to use Black Forest Labs in LobeChat:
+[Black Forest Labs](https://bfl.ai/) is one of the world’s leading AI image generation labs, known for developing the FLUX series of high-quality image generation models and the FLUX Kontext series for image editing. This guide will walk you through how to use Black Forest Labs in LobeHub:
- ### Step 1: Obtain Black Forest Labs API Key
+ ### Step 1: Get Your Black Forest Labs API Key
- - Register for a [Black Forest Labs account](https://auth.bfl.ai/).
- - Navigate to the [API Keys dashboard](https://dashboard.bfl.ai/api/keys) and click **Add Key** to generate a new API key.
- - Copy the generated API key and keep it secure; it will only be shown once.
+ - Sign up for a [Black Forest Labs](https://auth.bfl.ai/) account.
+ - Go to the [API Keys Console](https://dashboard.bfl.ai/api/keys) and click **Add Key** to create a new API key.
+ - Copy and securely save the generated API key — it will only be shown once.
- ### Step 2: Configure Black Forest Labs in LobeChat
+ ### Step 2: Configure Black Forest Labs in LobeHub
- - Visit the `Settings` page in LobeChat.
- - Under **AI Service Provider**, locate the **Black Forest Labs** configuration section.
+ - Open the `Settings` page in LobeHub;
+ - Under `AI Providers`, locate the configuration section for `Black Forest Labs`;
-
+
- - Paste the API key you obtained.
- - Choose a Black Forest Labs model for image generation.
+ - Paste the API key you obtained.
+ - Select a Black Forest Labs model for image generation.
-
+
- During usage, you may incur charges according to Black Forest Labs's pricing policy. Please review Black Forest Labs's
- official pricing before heavy usage.
+ Please note that usage may incur charges from Black Forest Labs. Be sure to review their official pricing policy before making extensive API calls.
-You can now use Black Forest Labs's advanced image generation models directly within LobeChat to create stunning visual content.
+And that’s it! You’re now ready to create stunning visual content in LobeHub using the advanced image generation models provided by Black Forest Labs.
diff --git a/docs/usage/providers/bfl.zh-CN.mdx b/docs/usage/providers/bfl.zh-CN.mdx
index dd4b2398fc..73c70564a4 100644
--- a/docs/usage/providers/bfl.zh-CN.mdx
+++ b/docs/usage/providers/bfl.zh-CN.mdx
@@ -1,8 +1,6 @@
---
-title: 在 LobeChat 中使用 Black Forest Labs API Key
-description: >-
- 学习如何在 LobeChat 中配置和使用 Black Forest Labs API Key,使用先进模型进行高质量 AI 图像生成。
-
+title: 在 LobeHub 中使用 Black Forest Labs API Key
+description: 学习如何在 LobeHub 中配置和使用 Black Forest Labs API Key,使用先进模型进行高质量 AI 图像生成。
tags:
- Black Forest Labs
- 图像生成
@@ -10,11 +8,11 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Black Forest Labs
+# 在 LobeHub 中使用 Black Forest Labs
-
+
-[Black Forest Labs](https://bfl.ai/) 是当前世界最顶级的 AI 图像生成实验室团队,研发了 FLUX 系列高质量图像生成模型,FLUX Kontext 系列图像编辑模型。本文将指导你如何在 LobeChat 中使用 Black Forest Labs:
+[Black Forest Labs](https://bfl.ai/) 是当前世界最顶级的 AI 图像生成实验室团队,研发了 FLUX 系列高质量图像生成模型,FLUX Kontext 系列图像编辑模型。本文将指导你如何在 LobeHub 中使用 Black Forest Labs:
### 步骤一:获取 Black Forest Labs API Key
@@ -47,9 +45,9 @@ tags:
}
/>
- ### 步骤二:在 LobeChat 中配置 Black Forest Labs
+ ### 步骤二:在 LobeHub 中配置 Black Forest Labs
- - 访问 LobeChat 的 `设置` 页面;
+ - 访问 LobeHub 的 `设置` 页面;
- 在 `AI服务商` 下找到 `Black Forest Labs` 的设置项;
@@ -64,4 +62,4 @@ tags:
-至此,你已经可以在 LobeChat 中使用 Black Forest Labs 提供的先进图像生成模型来创作精美的视觉内容了。
+至此,你已经可以在 LobeHub 中使用 Black Forest Labs 提供的先进图像生成模型来创作精美的视觉内容了。
diff --git a/docs/usage/providers/cloudflare.mdx b/docs/usage/providers/cloudflare.mdx
index 7085c0e7f1..d7d4a79884 100644
--- a/docs/usage/providers/cloudflare.mdx
+++ b/docs/usage/providers/cloudflare.mdx
@@ -1,8 +1,10 @@
---
-title: Using Cloudflare Workers AI in LobeChat
-description: Learn how to integrate and utilize Cloudflare Workers AI Models in LobeChat.
+title: Using Cloudflare Workers AI in LobeHub
+description: >-
+ Learn how to configure and use the Cloudflare Workers AI API Key in LobeHub to
+ start chatting and interacting.
tags:
- - LobeChat
+ - LobeHub
- Cloudflare
- Workers AI
- Provider
@@ -10,51 +12,49 @@ tags:
- Web UI
---
-# Using Cloudflare Workers AI in LobeChat
+# Using Cloudflare Workers AI in LobeHub
-
+
-[Cloudflare Workers AI](https://www.cloudflare.com/developer-platform/products/workers-ai/) is a service that integrates AI capabilities into the Cloudflare Workers serverless computing platform. Its core functionality lies in delivering fast, scalable computing power through Cloudflare's global network, thereby reducing operational overhead.
+[Cloudflare Workers AI](https://www.cloudflare.com/developer-platform/products/workers-ai/) is a service that integrates AI capabilities into the Cloudflare Workers serverless computing platform. Its core advantage lies in delivering fast and scalable compute power through Cloudflare’s global network, significantly reducing operational overhead.
-This document will guide you on how to use Cloudflare Workers AI in LobeChat:
+This guide will walk you through how to use Cloudflare Workers AI in LobeHub:
- ### Step 1: Obtain Your Cloudflare Workers AI API Key
+ ### Step 1: Obtain a Cloudflare Workers AI API Key
- Visit the [Cloudflare website](https://www.cloudflare.com/) and sign up for an account.
- - Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/).
- - In the left-hand menu, locate the `AI` > `Workers AI` option.
+ - Log in to the [Cloudflare Dashboard](https://dash.cloudflare.com/).
+ - In the left-hand menu, navigate to `AI` > `Workers AI`.
-
+
- - In the `Using REST API` section, click the `Create Workers AI API Token` button.
- - In the drawer dialog, copy and save your `API token`.
- - Also, copy and save your `Account ID`.
+ - Under the "Use REST API" section, click the `Create Workers AI API Token` button.
+ - In the sidebar that appears, copy and save your `API Token`.
+ - Also copy and save your `Account ID`.
-
+
- - Please store your API token securely, as it will only be displayed once. If you accidentally
- lose it, you will need to create a new token.
+ - Be sure to store your API token securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure Cloudflare Workers AI in LobeChat
+ ### Step 2: Configure Cloudflare Workers AI in LobeHub
- - Go to the `Settings` interface in LobeChat.
- - Under `AI Service Provider`, find the `Cloudflare` settings.
+ - Open the `Settings` panel in LobeHub.
+ - Under `AI Providers`, locate the `Cloudflare` configuration section.
-
+
- Enter the `API Token` you obtained.
- - Input your `Account ID`.
- - Choose a Cloudflare Workers AI model for your AI assistant to start the conversation.
+ - Enter your `Account ID`.
+ - Choose a Cloudflare Workers AI model for your AI assistant to start chatting.
-
+
- You may incur charges while using the API service, please refer to Cloudflare's pricing policy for
- details.
+ You may incur charges from the API provider during usage. Please refer to Cloudflare’s pricing policy for details.
-At this point, you can start conversing with the model provided by Cloudflare Workers AI in LobeChat.
+You’re all set! You can now start using Cloudflare Workers AI models for conversations in LobeHub.
diff --git a/docs/usage/providers/cloudflare.zh-CN.mdx b/docs/usage/providers/cloudflare.zh-CN.mdx
index 73073a7cf6..c49ef36198 100644
--- a/docs/usage/providers/cloudflare.zh-CN.mdx
+++ b/docs/usage/providers/cloudflare.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用 Cloudflare Workers AI
-description: 学习如何在 LobeChat 中配置和使用 Cloudflare Workers AI 的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 Cloudflare Workers AI
+description: 学习如何在 LobeHub 中配置和使用 Cloudflare Workers AI 的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- Cloudflare
- Workers AI
- 供应商
@@ -10,13 +10,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Cloudflare Workers AI
+# 在 LobeHub 中使用 Cloudflare Workers AI
-
+
[Cloudflare Workers AI](https://www.cloudflare.com/developer-platform/products/workers-ai/) 是一种将人工智能能力集成到 Cloudflare Workers 无服务器计算平台的服务。其核心功能在于通过 Cloudflare 的全球网络提供快速、可扩展的计算能力,降低运维开销。
-本文档将指导你如何在 LobeChat 中使用 Cloudflare Workers AI:
+本文档将指导你如何在 LobeHub 中使用 Cloudflare Workers AI:
### 步骤一:获取 Cloudflare Workers AI 的 API Key
@@ -25,34 +25,34 @@ tags:
- 登录 [Cloudflare 控制台](https://dash.cloudflare.com/).
- 在左侧的菜单中找到 `AI` > `Workers AI` 选项。
-
+
- 在 `使用 REST API` 中点击 `创建 Workers AI API 令牌` 按钮
- 在弹出的侧边栏中复制并保存你的 `API 令牌`
- 同时也复制并保存你的 `账户ID`
-
+
- 请安全地存储 API 令牌,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新令牌。
- ### 步骤二:在 LobeChat 中配置 Cloudflare Workers AI
+ ### 步骤二:在 LobeHub 中配置 Cloudflare Workers AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `Cloudflare` 的设置项
-
+
- 填入获得的 `API 令牌`
- 填入你的`账户ID`
- 为你的 AI 助手选择一个 Cloudflare Workers AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Cloudflare 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Cloudflare Workers AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Cloudflare Workers AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/comfyui.mdx b/docs/usage/providers/comfyui.mdx
index 1825c25e85..309d156365 100644
--- a/docs/usage/providers/comfyui.mdx
+++ b/docs/usage/providers/comfyui.mdx
@@ -1,6 +1,20 @@
---
-title: Using ComfyUI for Image Generation in LobeChat
-description: Learn how to configure and use ComfyUI service in LobeChat, supporting FLUX series models for high-quality image generation and editing features
+title: Using ComfyUI in LobeHub for Image Generation
+description: >-
+ Learn to configure ComfyUI in LobeHub for high-quality image generation and
+ editing.
+tags:
+ - ComfyUI
+ - FLUX
+ - Image Generation
+ - AI Image Editing
+ - Text-to-Image
+---
+
+````markdown
+---
+title: Using ComfyUI in LobeHub for Image Generation
+description: Learn how to configure and use the ComfyUI service in LobeHub to support high-quality image generation and editing with the FLUX model series.
tags:
- ComfyUI
- FLUX
@@ -9,54 +23,54 @@ tags:
- AI Image Generation
---
-# Using ComfyUI in LobeChat
+# Using ComfyUI in LobeHub
-
+
-This documentation will guide you on how to use [ComfyUI](https://github.com/comfyanonymous/ComfyUI) in LobeChat for high-quality AI image generation and editing.
+This guide will walk you through how to use [ComfyUI](https://github.com/comfyanonymous/ComfyUI) in LobeHub for high-quality AI image generation and editing.
-## ComfyUI Overview
+## Introduction to ComfyUI
-ComfyUI is a powerful stable diffusion and flow diffusion GUI that provides a node-based workflow interface. LobeChat integrates with ComfyUI, supporting complete FLUX series models, including text-to-image generation and image editing capabilities.
+ComfyUI is a powerful GUI for Stable Diffusion and Flow Diffusion, offering a node-based workflow interface. LobeHub integrates ComfyUI with full support for the FLUX model series, enabling both text-to-image generation and image editing.
### Key Features
-- **Extensive Model Support**: Supports 223 models, including FLUX series (130) and SD series (93)
-- **Configuration-Driven Architecture**: Registry system provides intelligent model selection
-- **Multi-Format Support**: Supports .safetensors and .gguf formats with various quantization levels
-- **Dynamic Precision Selection**: Supports default, fp8\_e4m3fn, fp8\_e5m2, fp8\_e4m3fn\_fast precision
-- **Multiple Authentication Methods**: Supports no authentication, basic authentication, Bearer Token, and custom authentication
-- **Intelligent Component Selection**: Automatically selects optimal T5, CLIP, VAE encoder combinations
-- **Enterprise-Grade Optimization**: Includes NF4, SVDQuant, TorchAO, MFLUX optimization variants
+- **Extensive Model Support**: Supports 223 models, including 130 FLUX and 93 SD models
+- **Config-Driven Architecture**: Registry system enables intelligent model selection
+- **Multi-Format Support**: Compatible with .safetensors and .gguf formats, with various quantization levels
+- **Dynamic Precision Options**: Supports default, fp8_e4m3fn, fp8_e5m2, and fp8_e4m3fn_fast
+- **Multiple Authentication Methods**: Supports None, Basic Auth, Bearer Token, and Custom Headers
+- **Smart Component Selection**: Automatically selects optimal T5, CLIP, and VAE encoders
+- **Enterprise-Grade Optimizations**: Includes NF4, SVDQuant, TorchAO, MFLUX variants
## Quick Start
-### Step 1: Configure ComfyUI in LobeChat
+### Step 1: Configure ComfyUI in LobeHub
-#### 1. Open Settings Interface
+#### 1. Open Settings
-- Access LobeChat's `Settings` interface
-- Find the `ComfyUI` setting item under `AI Providers`
+- Go to the `Settings` panel in LobeHub
+- Under `AI Providers`, find the `ComfyUI` section
-
+
-#### 2. Configure Connection Parameters
+#### 2. Set Connection Parameters
**Basic Configuration**:
-- **Server Address**: Enter ComfyUI server address, e.g., `http://localhost:8188`
-- **Authentication Type**: Select appropriate authentication method (default: no authentication)
+- **Server Address**: Enter your ComfyUI server address, e.g., `http://localhost:8000`
+- **Authentication Type**: Choose the appropriate method (default is None)
-### Step 2: Select Model and Start Generating Images
+### Step 2: Select a Model and Generate Images
-#### 1. Select FLUX Model
+#### 1. Choose a FLUX Model
-In the conversation interface:
+In the chat interface:
- Click the model selection button
-- Select the desired FLUX model from the ComfyUI category
+- Choose your desired FLUX model from the ComfyUI category
-
+
#### 2. Text-to-Image Generation
@@ -64,9 +78,9 @@ In the conversation interface:
```plaintext
Generate an image: A cute orange cat sitting on a sunny windowsill, warm lighting, detailed fur texture
-```
+````
-**Using FLUX Dev (High Quality Generation)**:
+**Using FLUX Dev (High-Quality Generation)**:
```plaintext
Generate high quality image: City skyline at sunset, cyberpunk style, neon lights, 4K high resolution, detailed architecture
@@ -74,7 +88,7 @@ Generate high quality image: City skyline at sunset, cyberpunk style, neon light
#### 3. Image Editing
-**Using FLUX Kontext-dev for Image Editing**:
+**Using FLUX Kontext-dev for Editing**:
```plaintext
Edit this image: Change the background to a starry night sky, keep the main subject, cosmic atmosphere
@@ -83,38 +97,38 @@ Edit this image: Change the background to a starry night sky, keep the main subj
Then upload the original image you want to edit.
- Image editing functionality requires uploading the original image first, then describing the modifications you want to make.
+ Image editing requires uploading the original image first, followed by a description of the desired changes.
## Authentication Configuration Guide
-ComfyUI supports four authentication methods. Choose the appropriate method based on your server configuration and security requirements:
+ComfyUI supports four authentication methods. Choose the one that best fits your server setup and security needs:
### No Authentication (none)
-**Use Cases**:
+**Best for**:
-- Local development environment (localhost)
-- Internal network with trusted users
-- Personal single-machine deployment
+- Local development (localhost)
+- Trusted internal networks
+- Personal single-machine setups
**Configuration**:
```yaml
Authentication Type: None
-Server Address: http://localhost:8188
+Server Address: http://localhost:8000
```
### Basic Authentication (basic)
-**Use Cases**:
+**Best for**:
-- Deployments using Nginx reverse proxy
-- Team internal use requiring basic access control
+- Deployments behind Nginx reverse proxy
+- Internal team use with basic access control
-**Configuration**:
+**Setup**:
-1. **Create User Password**:
+1. **Create Username and Password**:
```bash
# Install apache2-utils
@@ -124,21 +138,21 @@ sudo apt-get install apache2-utils
sudo htpasswd -c /etc/nginx/.htpasswd admin
```
-2. **LobeChat Configuration**:
+2. **LobeHub Configuration**:
```yaml
-Authentication Type: Basic Authentication
-Server Address: http://your-domain.com
+Authentication Type: Basic
+Server Address: https://your-domain.com
Username: admin
Password: your_secure_password
```
### Bearer Token (bearer)
-**Use Cases**:
+**Best for**:
-- API-driven application integration
-- Enterprise environments requiring Token authentication
+- API-driven integrations
+- Enterprise environments requiring token-based auth
**Generate Token**:
@@ -156,41 +170,41 @@ token = jwt.encode(payload, secret_key, algorithm='HS256')
print(f"Bearer Token: {token}")
```
-**LobeChat Configuration**:
+**LobeHub Configuration**:
```yaml
Authentication Type: Bearer Token
-Server Address: http://your-server:8188
-API Key: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
+Server Address: https://your-domain.com
+API Key: example-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
```
### Custom Authentication (custom)
-**Use Cases**:
+**Best for**:
-- Integration with existing enterprise authentication systems
-- Systems requiring multiple authentication headers
+- Integrating with existing enterprise auth systems
+- Systems requiring multiple custom headers
-**LobeChat Configuration**:
+**LobeHub Configuration**:
```yaml
Authentication Type: Custom
-Server Address: http://your-server:8188
+Server Address: https://your-domain.com
Custom Headers:
{
"X-API-Key": "your_api_key",
- "X-Client-ID": "lobechat"
+ "X-Client-ID": "LobeHub"
}
```
-## Common Issues Resolution
+## Troubleshooting
### 1. How to Install Comfy-Manager
-Comfy-Manager is ComfyUI's extension manager that allows you to easily install and manage various nodes, models, and extensions.
+Comfy-Manager is an extension manager for ComfyUI that simplifies installing and managing nodes, models, and extensions.
- 📦 Install Comfy-Manager Steps
+ 📦 Steps to Install Comfy-Manager
#### Method 1: Manual Installation (Recommended)
@@ -198,619 +212,45 @@ Comfy-Manager is ComfyUI's extension manager that allows you to easily install a
# Navigate to ComfyUI's custom_nodes directory
cd ComfyUI/custom_nodes
- # Clone Comfy-Manager repository
+ # Clone the Comfy-Manager repository
git clone https://github.com/ltdrdata/ComfyUI-Manager.git
- # Restart ComfyUI server
- # After restart, you'll see the Manager button in the UI
+ # Restart the ComfyUI server
+ # After restarting, you should see a "Manager" button in the UI
```
- #### Method 2: One-Click Installation Script
+ #### Method 2: One-Click Install Script
```bash
- # Execute in ComfyUI root directory
+ # Run from the ComfyUI root directory
curl -fsSL https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/install.sh | bash
```
#### Verify Installation
- 1. Restart ComfyUI server
- 2. Visit `http://localhost:8188`
- 3. You should see the "Manager" button in the bottom-right corner
+ 1. Restart the ComfyUI server
+ 2. Visit `http://localhost:8000`
+ 3. You should see a "Manager" button in the bottom-right corner
#### Using Comfy-Manager
**Install Models**:
- 1. Click "Manager" button
+ 1. Click the "Manager" button
2. Select "Install Models"
- 3. Search for needed models (e.g., FLUX, SD3.5)
- 4. Click "Install" to automatically download to correct directory
+ 3. Search for the desired model (e.g., FLUX, SD3.5)
+ 4. Click "Install" to download automatically
**Install Node Extensions**:
- 1. Click "Manager" button
+ 1. Click the "Manager" button
2. Select "Install Custom Nodes"
- 3. Search for needed nodes (e.g., ControlNet, AnimateDiff)
- 4. Click "Install" and restart server
+ 3. Search for nodes (e.g., ControlNet, AnimateDiff)
+ 4. Click "Install" and restart the server
**Manage Installed Content**:
- 1. Click "Manager" button
+ 1. Click the "Manager" button
2. Select "Installed" to view installed extensions
- 3. Update, disable, or uninstall extensions
+ 3. You can update, disable, or uninstall extensions
-
-### 2. How to Handle "Model not found" Errors
-
-When you see errors like `Model not found: flux1-dev.safetensors, flux1-krea-dev.safetensors, flux1-schnell.safetensors`, it means the required model files are missing from the server.
-
-
- 🔧 Resolve Model not found Errors
-
- #### Error Example
-
- ```plaintext
- Model not found: flux1-dev.safetensors, flux1-krea-dev.safetensors, flux1-schnell.safetensors
- ```
-
- This error indicates the system expects to find these model files but couldn't locate them on the server.
-
- #### Resolution Methods
-
- **Method 1: Download using Comfy-Manager (Recommended)**
-
- 1. Open ComfyUI interface
- 2. Click "Manager" → "Install Models"
- 3. Search for the model name from the error (e.g., "flux1-dev")
- 4. Click "Install" to automatically download
-
- **Method 2: Manual Model Download**
-
- 1. **Download Model Files**:
- - Visit [Hugging Face](https://huggingface.co/black-forest-labs/FLUX.1-dev) or other model sources
- - Download the files mentioned in the error (e.g., `flux1-dev.safetensors`)
-
- 2. **Place in Correct Directory**:
- ```bash
- # FLUX and SD3.5 main models go to
- ComfyUI/models/diffusion_models/flux1-dev.safetensors
-
- # SD1.5 and SDXL models go to
- ComfyUI/models/checkpoints/
- ```
-
- 3. **Verify Files**:
- ```bash
- # Check if file exists
- ls -la ComfyUI/models/diffusion_models/flux1-dev.safetensors
-
- # Check file integrity (optional)
- sha256sum flux1-dev.safetensors
- ```
-
- 4. **Restart ComfyUI Server**
-
- **Method 3: Direct Download with wget/curl**
-
- ```bash
- # Navigate to models directory
- cd ComfyUI/models/diffusion_models/
-
- # Download using wget (replace with actual download link)
- wget https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors
-
- # Or use curl
- curl -L -o flux1-dev.safetensors https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors
- ```
-
- #### Common Model Download Sources
-
- - **Hugging Face**: [https://huggingface.co/models](https://huggingface.co/models)
- - **Civitai**: [https://civitai.com/models](https://civitai.com/models)
- - **Official Sources**:
- - FLUX: [https://huggingface.co/black-forest-labs](https://huggingface.co/black-forest-labs)
- - SD3.5: [https://huggingface.co/stabilityai](https://huggingface.co/stabilityai)
-
- #### Prevention Measures
-
- 1. **Basic Model Package**: Download at least one base model
- - FLUX: `flux1-schnell.safetensors` (fast) or `flux1-dev.safetensors` (high quality)
- - SD3.5: `sd3.5_large.safetensors`
-
- 2. **Check Disk Space**:
- ```bash
- # Check available space
- df -h ComfyUI/models/
- ```
-
- 3. **Set Model Path** (optional):
- If your models are stored elsewhere, create symbolic links:
- ```bash
- ln -s /path/to/your/models ComfyUI/models/diffusion_models/
- ```
-
-
-### 3. How to Handle Missing System Component Errors
-
-When you see errors like `Missing VAE encoder: ae.safetensors` or other component files missing, you need to download the corresponding system components.
-
-
- 🛠️ Resolve Missing System Component Errors
-
- #### Common Component Errors
-
- ```plaintext
- Missing VAE encoder: ae.safetensors. Please download and place it in the models/vae folder.
- Missing CLIP encoder: clip_l.safetensors. Please download and place it in the models/clip folder.
- Missing T5 encoder: t5xxl_fp16.safetensors. Please download and place it in the models/clip folder.
- ```
-
- #### Component Types Description
-
- | Component Type | Example Filename | Purpose | Storage Directory |
- | -------------- | ------------------------------ | ----------------------- | ------------------ |
- | **VAE** | ae.safetensors | Image encoding/decoding | models/vae/ |
- | **CLIP** | clip\_l.safetensors | Text encoding (CLIP) | models/clip/ |
- | **T5** | t5xxl\_fp16.safetensors | Text encoding (T5) | models/clip/ |
- | **ControlNet** | flux-controlnet-\*.safetensors | Control networks | models/controlnet/ |
-
- #### Resolution Methods
-
- **Method 1: Use Comfy-Manager (Recommended)**
-
- 1. Click "Manager" → "Install Models"
- 2. Select component type in "Filter" (VAE/CLIP/T5)
- 3. Download corresponding component files
-
- **Method 2: Manual Component Download**
-
- ##### FLUX Required Components
-
- ```bash
- # 1. VAE Encoder
- cd ComfyUI/models/vae/
- wget https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/ae.safetensors
-
- # 2. CLIP-L Encoder
- cd ComfyUI/models/clip/
- wget https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors
-
- # 3. T5-XXL Encoder (choose different precisions)
- # FP16 version (recommended, balanced performance)
- wget https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors
-
- # Or FP8 version (saves VRAM)
- wget https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn.safetensors
- ```
-
- ##### SD3.5 Required Components
-
- ```bash
- # SD3.5 uses different encoders
- cd ComfyUI/models/clip/
-
- # CLIP-G Encoder
- wget https://huggingface.co/stabilityai/stable-diffusion-3.5-large/resolve/main/text_encoders/clip_g.safetensors
-
- # CLIP-L Encoder
- wget https://huggingface.co/stabilityai/stable-diffusion-3.5-large/resolve/main/text_encoders/clip_l.safetensors
-
- # T5-XXL Encoder
- wget https://huggingface.co/stabilityai/stable-diffusion-3.5-large/resolve/main/text_encoders/t5xxl_fp16.safetensors
- ```
-
- ##### SDXL Required Components
-
- ```bash
- # SDXL VAE
- cd ComfyUI/models/vae/
- wget https://huggingface.co/stabilityai/sdxl-vae/resolve/main/sdxl_vae.safetensors
-
- # SDXL uses built-in CLIP encoders, usually no separate download needed
- ```
-
- #### Component Compatibility Matrix
-
- | Model Series | Required VAE | Required CLIP | Required T5 | Optional Components |
- | ------------ | -------------- | ------------------- | ----------------------- | ------------------- |
- | **FLUX** | ae.safetensors | clip\_l.safetensors | t5xxl\_fp16.safetensors | ControlNet |
- | **SD3.5** | Built-in | clip\_g + clip\_l | t5xxl\_fp16 | - |
- | **SDXL** | sdxl\_vae | Built-in | - | Refiner |
- | **SD1.5** | vae-ft-mse | Built-in | - | ControlNet |
-
- #### Precision Selection Recommendations
-
- **T5 Encoder Precision Selection**:
-
- | VRAM Capacity | Recommended Version | Filename |
- | ------------- | ------------------- | ------------------------------ |
- | \< 12GB | FP8 Quantized | t5xxl\_fp8\_e4m3fn.safetensors |
- | 12-16GB | FP16 | t5xxl\_fp16.safetensors |
- | > 16GB | FP32 | t5xxl.safetensors |
-
- #### Verify Component Installation
-
- ```bash
- # Check all required components
- echo "=== VAE Components ==="
- ls -la ComfyUI/models/vae/
-
- echo "=== CLIP/T5 Components ==="
- ls -la ComfyUI/models/clip/
-
- echo "=== ControlNet Components ==="
- ls -la ComfyUI/models/controlnet/
- ```
-
- #### Troubleshooting
-
- **Issue: Still getting errors after download**
-
- 1. **Check File Permissions**:
- ```bash
- chmod 644 ComfyUI/models/vae/*.safetensors
- chmod 644 ComfyUI/models/clip/*.safetensors
- ```
-
- 2. **Clear Cache**:
- ```bash
- # Clear ComfyUI cache
- rm -rf ComfyUI/temp/*
- rm -rf ComfyUI/__pycache__/*
- ```
-
- 3. **Restart Server**:
- ```bash
- # Fully restart ComfyUI
- pkill -f "python main.py"
- python main.py --listen 0.0.0.0 --port 8188
- ```
-
- **Issue: Insufficient VRAM**
-
- Use quantized component versions:
-
- - T5: Use `t5xxl_fp8_e4m3fn.safetensors` instead of FP16/FP32
- - VAE: Some models support FP16 VAE versions
-
- **Issue: Slow Downloads**
-
- 1. Use mirror sources (if applicable)
- 2. Use download tools (like aria2c) with resume support:
- ```bash
- aria2c -x 16 -s 16 -k 1M [download_link]
- ```
-
-
-## ComfyUI Server Installation
-
-
- 🚀 Install and Configure ComfyUI Server
-
- ### 1. Install ComfyUI
-
- ```bash
- # Clone ComfyUI repository
- git clone https://github.com/comfyanonymous/ComfyUI.git
- cd ComfyUI
-
- # Install dependencies
- pip install -r requirements.txt
-
- # Optional: Install JWT support (for Token authentication)
- pip install PyJWT
-
- # Start ComfyUI server
- python main.py --listen 0.0.0.0 --port 8188
- ```
-
- ### 2. Download Model Files
-
- **Recommended Basic Configuration** (Minimal installation):
-
- **Main Models** (place in `models/diffusion_models/` directory):
-
- - `flux1-schnell.safetensors` - Fast generation (4 steps)
- - `flux1-dev.safetensors` - High-quality creation (20 steps)
-
- **Required Components** (place in respective directories):
-
- - `models/vae/ae.safetensors` - VAE encoder
- - `models/clip/clip_l.safetensors` - CLIP text encoder
- - `models/clip/t5xxl_fp16.safetensors` - T5 text encoder
-
- ### 3. Verify Server Running
-
- Visit `http://localhost:8188` to confirm ComfyUI interface loads properly.
-
-
- **Smart Model Selection**: LobeChat will automatically select the best model based on available model files on the server. You don't need to download all models; the system will automatically choose from available models by priority (Official > Enterprise > Community).
-
-
-
-## Supported Models
-
-LobeChat's ComfyUI integration uses a configuration-driven architecture, supporting **223 models**, providing complete coverage from official models to community-optimized versions.
-
-### FLUX Series Recommended Parameters
-
-| Model Type | Recommended Steps | CFG Scale | Resolution Range |
-| ----------- | ----------------- | --------- | -------------------- |
-| **Schnell** | 4 steps | - | 512×512 to 1536×1536 |
-| **Dev** | 20 steps | 3.5 | 512×512 to 2048×2048 |
-| **Kontext** | 20 steps | 3.5 | 512×512 to 2048×2048 |
-| **Krea** | 20 steps | 4.5 | 512×512 to 2048×2048 |
-
-### SD3.5 Series Parameters
-
-| Model Type | Recommended Steps | CFG Scale | Resolution Range |
-| --------------- | ----------------- | --------- | -------------------- |
-| **Large** | 25 steps | 7.0 | 512×512 to 2048×2048 |
-| **Large Turbo** | 8 steps | 3.5 | 512×512 to 1536×1536 |
-| **Medium** | 20 steps | 6.0 | 512×512 to 1536×1536 |
-
-
- 📋 Complete Supported Model List
-
- ### Model Classification System
-
- #### Priority 1: Official Core Models
-
- **FLUX.1 Official Series**:
-
- - `flux1-dev.safetensors` - High-quality creation model
- - `flux1-schnell.safetensors` - Fast generation model
- - `flux1-kontext-dev.safetensors` - Image editing model
- - `flux1-krea-dev.safetensors` - Safety-enhanced model
-
- **SD3.5 Official Series**:
-
- - `sd3.5_large.safetensors` - SD3.5 large base model
- - `sd3.5_large_turbo.safetensors` - Fast generation version
- - `sd3.5_medium.safetensors` - Medium-scale model
-
- #### Priority 2: Enterprise Optimized Models (106 FLUX)
-
- **Quantization Optimization Series**:
-
- - **GGUF Quantization**: Each variant supports 11 quantization levels (F16, Q8\_0, Q6\_K, Q5\_K\_M, Q5\_K\_S, Q4\_K\_M, Q4\_K\_S, Q4\_0, Q3\_K\_M, Q3\_K\_S, Q2\_K)
- - **FP8 Precision**: fp8\_e4m3fn, fp8\_e5m2 optimized versions
- - **Enterprise Lightweight**: FLUX.1-lite-8B series
- - **Technical Experiments**: NF4, SVDQuant, TorchAO, optimum-quanto, MFLUX optimized versions
-
- #### Priority 3: Community Fine-tuned Models (48 FLUX)
-
- **Community Optimization Series**:
-
- - **Jib Mix Flux** Series: High-quality mixed models
- - **Real Dream FLUX** Series: Realism style
- - **Vision Realistic** Series: Visual realism
- - **PixelWave FLUX** Series: Pixel art optimization
- - **Fluxmania** Series: Diverse style support
-
- ### SD Series Model Support (93 models)
-
- **SD3.5 Series**: 5 models
- **SD1.5 Series**: 37 models (including official, quantized, and community versions)
- **SDXL Series**: 50 models (including base, Refiner, and Playground models)
-
- ### Workflow Support
-
- System supports **6 workflows**:
-
- - **flux-dev**: High-quality creation workflow
- - **flux-schnell**: Fast generation workflow
- - **flux-kontext**: Image editing workflow
- - **sd35**: SD3.5 dedicated workflow
- - **simple-sd**: Simple SD workflow
- - **index**: Workflow entry point
-
-
-## Performance Optimization Recommendations
-
-### Hardware Requirements
-
-**Minimum Configuration** (GGUF quantized models):
-
-- GPU: 6GB VRAM (using Q4 quantization)
-- RAM: 12GB
-- Storage: 30GB available space
-
-**Recommended Configuration** (standard models):
-
-- GPU: 12GB+ VRAM (RTX 4070 Ti or higher)
-- RAM: 24GB+
-- Storage: SSD 100GB+ available space
-
-### VRAM Optimization Strategy
-
-| VRAM Capacity | Recommended Quantization | Model Example | Performance Characteristics |
-| ------------- | ------------------------ | ---------------------------------- | --------------------------- |
-| **6-8GB** | Q4\_0, Q4\_K\_S | `flux1-dev-Q4_0.gguf` | Minimal VRAM usage |
-| **10-12GB** | Q6\_K, Q8\_0 | `flux1-dev-Q6_K.gguf` | Balance performance/quality |
-| **16GB+** | FP8, FP16 | `flux1-dev-fp8-e4m3fn.safetensors` | Near-original quality |
-| **24GB+** | Full model | `flux1-dev.safetensors` | Best quality |
-
-## Custom Model Usage
-
-
- 🎨 Configure Custom SD Models
-
- LobeChat supports using custom Stable Diffusion models. The system uses fixed filenames to identify custom models.
-
- ### 1. Model File Preparation
-
- **Required Files**:
-
- - **Main Model File**: `custom_sd_lobe.safetensors`
- - **VAE File (Optional)**: `custom_sd_vae_lobe.safetensors`
-
- ### 2. Add Custom Model
-
- **Method 1: Rename Existing Model**
-
- ```bash
- # Rename your model to fixed filename
- mv your_custom_model.safetensors custom_sd_lobe.safetensors
-
- # Move to correct directory
- mv custom_sd_lobe.safetensors ComfyUI/models/diffusion_models/
- ```
-
- **Method 2: Create Symbolic Link (Recommended)**
-
- ```bash
- # Create soft link for easy model switching
- ln -s /path/to/your_model.safetensors ComfyUI/models/diffusion_models/custom_sd_lobe.safetensors
- ```
-
- ### 3. Use Custom Model
-
- In LobeChat, custom models will appear as:
-
- - **stable-diffusion-custom**: Standard custom model
- - **stable-diffusion-custom-refiner**: Refiner custom model
-
- ### Custom Model Parameter Recommendations
-
- | Parameter | SD 1.5 Models | SDXL Models |
- | ---------- | ------------- | ----------- |
- | **steps** | 20-30 | 25-40 |
- | **cfg** | 7.0 | 6.0-8.0 |
- | **width** | 512 | 1024 |
- | **height** | 512 | 1024 |
-
-
-## Troubleshooting
-
-### Smart Error Diagnosis System
-
-LobeChat integrates a smart error handling system that can automatically diagnose and provide targeted solutions.
-
-#### Error Types and Solutions
-
-| Error Type | User Prompt | Automatic Diagnosis |
-| ------------------ | ---------------------------------- | --------------------------------------------------- |
-| **Connection** | "Cannot connect to ComfyUI server" | Auto-detect server status and connectivity |
-| **Authentication** | "API key invalid or expired" | Auto-verify authentication credentials |
-| **Permissions** | "Access permissions insufficient" | Auto-check user permissions and file access |
-| **Model Issues** | "Cannot find specified model file" | Auto-scan available models and suggest alternatives |
-| **Configuration** | "Configuration file error" | Auto-verify config completeness and syntax |
-
-
- 🔍 Traditional Troubleshooting Methods
-
- #### 1. Connection Failure
-
- **Issue**: Cannot connect to ComfyUI server
-
- **Solution**:
-
- ```bash
- # Confirm server running
- curl http://localhost:8188/system_stats
-
- # Check port
- netstat -tulpn | grep 8188
- ```
-
- #### 2. Out of Memory
-
- **Issue**: Memory errors during generation
-
- **Solution**:
-
- - Lower image resolution
- - Reduce generation steps
- - Use quantized models
-
- #### 3. Authentication Failure
-
- **Issue**: 401 or 403 errors
-
- **Solution**:
-
- - Verify authentication configuration
- - Check if Token is expired
- - Confirm user permissions
-
-
-## Best Practices
-
-### Prompt Writing
-
-1. **Detailed Description**: Provide clear, detailed image descriptions
-2. **Style Specification**: Clearly specify artistic style, color style, etc.
-3. **Quality Keywords**: Add "4K", "high quality", "detailed" keywords
-4. **Avoid Contradictions**: Ensure description content is logically consistent
-
-**Example**:
-
-```plaintext
-A young woman with flowing long hair, wearing an elegant blue dress, standing in a cherry blossom park,
-sunlight filtering through leaves, warm atmosphere, cinematic lighting, 4K high resolution, detailed, photorealistic
-```
-
-### Parameter Optimization
-
-1. **FLUX Schnell**: Suitable for quick previews, use 4-step generation
-2. **FLUX Dev**: Balance quality and speed, CFG 3.5, 20 steps
-3. **FLUX Krea-dev**: Safe creation, CFG 4.5, note content filtering
-4. **FLUX Kontext-dev**: Image editing, strength 0.6-0.9
-
-
- Please note during use:
-
- - FLUX Dev, Krea-dev, Kontext-dev models are for non-commercial use only
- - Generated content must comply with relevant laws and platform policies
- - Large model generation may take considerable time, please be patient
-
-
-## API Reference
-
-
- 📚 API Documentation
-
- ### Request Format
-
- ```typescript
- interface ComfyUIRequest {
- model: string; // Model ID, e.g., 'flux-schnell'
- prompt: string; // Text prompt
- width: number; // Image width
- height: number; // Image height
- steps: number; // Generation steps
- seed: number; // Random seed
- cfg?: number; // CFG Scale (Dev/Krea/Kontext specific)
- strength?: number; // Edit strength (Kontext specific)
- imageUrl?: string; // Input image (Kontext specific)
- }
- ```
-
- ### Response Format
-
- ```typescript
- interface ComfyUIResponse {
- images: Array<{
- url: string; // Generated image URL
- filename: string; // Filename
- subfolder: string; // Subdirectory
- type: string; // File type
- }>;
- prompt_id: string; // Prompt ID
- }
- ```
-
- ### Error Codes
-
- | Error Code | Description | Resolution Suggestions |
- | ---------- | ------------------------ | -------------------------------- |
- | `400` | Invalid parameters | Check parameter format and range |
- | `401` | Authentication failed | Verify API key and auth config |
- | `403` | Insufficient permissions | Check user permissions |
- | `404` | Model not found | Confirm model file exists |
- | `500` | Server error | Check ComfyUI logs |
-
-
-You can now use ComfyUI in LobeChat for high-quality AI image generation and editing. If you encounter issues, please refer to the troubleshooting section or consult the [ComfyUI official documentation](https://github.com/comfyanonymous/ComfyUI).
diff --git a/docs/usage/providers/comfyui.zh-CN.mdx b/docs/usage/providers/comfyui.zh-CN.mdx
index 069bb5169c..c68b98d6ac 100644
--- a/docs/usage/providers/comfyui.zh-CN.mdx
+++ b/docs/usage/providers/comfyui.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 ComfyUI 生成图像
-description: 学习如何在 LobeChat 中配置和使用 ComfyUI 服务,支持 FLUX 系列模型的高质量图像生成和编辑功能
+title: 在 LobeHub 中使用 ComfyUI 生成图像
+description: 学习如何在 LobeHub 中配置和使用 ComfyUI 服务,支持 FLUX 系列模型的高质量图像生成和编辑功能
tags:
- ComfyUI
- FLUX
@@ -9,15 +9,15 @@ tags:
- AI 图像生成
---
-# 在 LobeChat 中使用 ComfyUI
+# 在 LobeHub 中使用 ComfyUI
-
+
-本文档将指导你如何在 LobeChat 中使用 [ComfyUI](https://github.com/comfyanonymous/ComfyUI) 进行高质量的 AI 图像生成和编辑。
+本文档将指导你如何在 LobeHub 中使用 [ComfyUI](https://github.com/comfyanonymous/ComfyUI) 进行高质量的 AI 图像生成和编辑。
## ComfyUI 简介
-ComfyUI 是一个功能强大的稳定扩散和流扩散 GUI,提供基于节点的工作流界面。LobeChat 集成了 ComfyUI,支持完整的 FLUX 系列模型,包括文本生成图像和图像编辑功能。
+ComfyUI 是一个功能强大的稳定扩散和流扩散 GUI,提供基于节点的工作流界面。LobeHub 集成了 ComfyUI,支持完整的 FLUX 系列模型,包括文本生成图像和图像编辑功能。
### 主要特性
@@ -31,14 +31,14 @@ ComfyUI 是一个功能强大的稳定扩散和流扩散 GUI,提供基于节
## 快速开始
-### 步骤一:在 LobeChat 中配置 ComfyUI
+### 步骤一:在 LobeHub 中配置 ComfyUI
#### 1. 打开设置界面
-- 访问 LobeChat 的 `设置` 界面
+- 访问 LobeHub 的 `设置` 界面
- 在 `AI 服务商` 下找到 `ComfyUI` 的设置项
-
+
#### 2. 配置连接参数
@@ -56,7 +56,7 @@ ComfyUI 是一个功能强大的稳定扩散和流扩散 GUI,提供基于节
- 点击模型选择按钮
- 从 ComfyUI 分类中选择所需的 FLUX 模型
-
+
#### 2. 文本生成图像
@@ -124,7 +124,7 @@ sudo apt-get install apache2-utils
sudo htpasswd -c /etc/nginx/.htpasswd admin
```
-2. **LobeChat 配置**:
+2. **LobeHub 配置**:
```yaml
认证类型:基本认证
@@ -156,7 +156,7 @@ token = jwt.encode(payload, secret_key, algorithm='HS256')
print(f"Bearer Token: {token}")
```
-**LobeChat 配置**:
+**LobeHub 配置**:
```yaml
认证类型:Bearer Token
@@ -171,7 +171,7 @@ API 密钥:example-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
- 集成现有企业认证系统
- 需要多重认证头的系统
-**LobeChat 配置**:
+**LobeHub 配置**:
```yaml
认证类型:自定义
@@ -179,7 +179,7 @@ API 密钥:example-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
自定义请求头:
{
"X-API-Key": "your_api_key",
- "X-Client-ID": "lobechat"
+ "X-Client-ID": "LobeHub"
}
```
@@ -525,13 +525,13 @@ Comfy-Manager 是 ComfyUI 的扩展管理器,让你能够轻松安装和管理
访问 `http://localhost:8000` 确认 ComfyUI 界面正常加载。
- **智能模型选择**:LobeChat 会根据服务器上可用的模型文件自动选择最佳模型。您无需下载所有模型,系统会在可用模型中按优先级(官方 > 企业 > 社区)自动选择。
+ **智能模型选择**:LobeHub 会根据服务器上可用的模型文件自动选择最佳模型。您无需下载所有模型,系统会在可用模型中按优先级(官方 > 企业 > 社区)自动选择。
## 支持的模型
-LobeChat ComfyUI 集成采用配置驱动的架构,支持 **223 个模型**,提供从官方模型到社区优化版本的全覆盖。
+LobeHub ComfyUI 集成采用配置驱动的架构,支持 **223 个模型**,提供从官方模型到社区优化版本的全覆盖。
### FLUX 系列推荐参数
@@ -637,7 +637,7 @@ LobeChat ComfyUI 集成采用配置驱动的架构,支持 **223 个模型**,
🎨 配置自定义 SD 模型
- LobeChat 支持使用自定义的 Stable Diffusion 模型。系统使用固定的文件名来识别自定义模型。
+ LobeHub 支持使用自定义的 Stable Diffusion 模型。系统使用固定的文件名来识别自定义模型。
### 1. 模型文件准备
@@ -667,7 +667,7 @@ LobeChat ComfyUI 集成采用配置驱动的架构,支持 **223 个模型**,
### 3. 使用自定义模型
- 在 LobeChat 中,自定义模型会显示为:
+ 在 LobeHub 中,自定义模型会显示为:
- **stable-diffusion-custom**:标准自定义模型
- **stable-diffusion-custom-refiner**:Refiner 自定义模型
@@ -686,7 +686,7 @@ LobeChat ComfyUI 集成采用配置驱动的架构,支持 **223 个模型**,
### 智能错误诊断系统
-LobeChat 集成了智能错误处理系统,能够自动诊断并提供针对性的解决方案。
+LobeHub 集成了智能错误处理系统,能够自动诊断并提供针对性的解决方案。
#### 错误类型与解决方案
@@ -813,4 +813,4 @@ sunlight filtering through leaves, warm atmosphere, cinematic lighting, 4K high
| `500` | 服务器错误 | 检查 ComfyUI 日志 |
-至此你已经可以在 LobeChat 中使用 ComfyUI 进行高质量的 AI 图像生成和编辑了。如果遇到问题,请参考故障排除部分或查阅 [ComfyUI 官方文档](https://github.com/comfyanonymous/ComfyUI)。
+至此你已经可以在 LobeHub 中使用 ComfyUI 进行高质量的 AI 图像生成和编辑了。如果遇到问题,请参考故障排除部分或查阅 [ComfyUI 官方文档](https://github.com/comfyanonymous/ComfyUI)。
diff --git a/docs/usage/providers/deepseek.mdx b/docs/usage/providers/deepseek.mdx
index 24f5f7f3b0..be33e6f7b8 100644
--- a/docs/usage/providers/deepseek.mdx
+++ b/docs/usage/providers/deepseek.mdx
@@ -1,64 +1,62 @@
---
-title: Using DeepSeek API Key in LobeChat
+title: Using DeepSeek API Key in LobeHub
description: >-
- Learn how to use DeepSeek-V2 in LobeChat, obtain API keys. Get started with DeepSeek integration now!
-
+ Learn how to configure and use the DeepSeek language model in LobeHub, obtain
+ your API key, and start chatting.
tags:
+ - LobeHub
- DeepSeek
- - LobeChat
- - DeepSeek-V2
+ - DeepSeek R1
- API Key
- Web UI
---
-# Using DeepSeek in LobeChat
+# Using DeepSeek in LobeHub
-
+
-[DeepSeek](https://www.deepseek.com/) represents a cutting-edge open-source large language model. The latest versions, DeepSeek-V3 and DeepSeek-R1, have undergone substantial improvements in both architecture and performance, particularly shining in their inference capabilities. By leveraging innovative training methodologies and reinforcement learning, the model has effectively boosted its inference prowess, now nearly matching the pinnacle performance of OpenAI.
+[DeepSeek](https://www.deepseek.com/) is a cutting-edge open-source large language model (LLM). The latest versions, DeepSeek-V3 and DeepSeek-R1, feature significant architectural and performance improvements, particularly in reasoning capabilities. Through innovative training methods and reinforcement learning techniques, DeepSeek has achieved near state-of-the-art performance, rivaling top-tier models from OpenAI.
-This document will guide you on how to use DeepSeek in LobeChat:
+This guide will walk you through how to use DeepSeek in LobeHub:
- ### Step 1: Obtain DeepSeek API Key
+ ### Step 1: Get Your DeepSeek API Key
- - First, you need to register and log in to the [DeepSeek](https://platform.deepseek.com/) open platform.
+ - First, sign up and log in to the [DeepSeek Open Platform](https://platform.deepseek.com/)
- New users will receive a free quota of 500M Tokens
+ New users currently receive 500M tokens for free.
- - Go to the `API keys` menu and click on `Create API Key`.
+ - Navigate to the `API keys` section and click `Create API Key`
-
+
- - Enter the API key name in the pop-up dialog box.
+ - Enter a name for your API key in the pop-up dialog
-
+
- - Copy the generated API key and save it securely.
+ - Copy the generated API key and store it securely
-
+
- Please store the key securely as it will only appear once. If you accidentally lose it, you will
- need to create a new key.
+ Make sure to store your key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure DeepSeek in LobeChat
+ ### Step 2: Configure DeepSeek in LobeHub
- - Access the `App Settings` interface in LobeChat.
- - Find the setting for `DeepSeek` under `AI Service Provider`.
+ - Go to the `App Settings` page in LobeHub
+ - Under `AI Providers`, find the `DeepSeek` configuration section
-
+
- - Open DeepSeek and enter the obtained API key.
- - Choose a DeepSeek model for your assistant to start the conversation.
+ - Enable DeepSeek and paste in your API key
+ - Choose a DeepSeek model for your assistant to start chatting
-
+
- You may need to pay the API service provider during usage, please refer to DeepSeek's relevant
- pricing policies.
+ You may incur charges from the API provider during usage. Please refer to DeepSeek’s pricing policy for details.
-You can now engage in conversations using the models provided by Deepseek in LobeChat.
+And that’s it! You’re now ready to start chatting with DeepSeek-powered models in LobeHub.
diff --git a/docs/usage/providers/deepseek.zh-CN.mdx b/docs/usage/providers/deepseek.zh-CN.mdx
index 85f59698b7..653dc38fc1 100644
--- a/docs/usage/providers/deepseek.zh-CN.mdx
+++ b/docs/usage/providers/deepseek.zh-CN.mdx
@@ -1,21 +1,21 @@
---
-title: 在 LobeChat 中使用 DeepSeek API Key
-description: 学习如何在 LobeChat 中配置和使用 DeepSeek 语言模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 DeepSeek API Key
+description: 学习如何在 LobeHub 中配置和使用 DeepSeek 语言模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- DeepSeek
- DeepSeek R1
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 DeepSeek
+# 在 LobeHub 中使用 DeepSeek
-
+
[DeepSeek](https://www.deepseek.com/) 是一款先进的开源大型语言模型(LLM)。最新的 DeepSeek-V3 和 DeepSeek-R1 在架构和性能上进行了显著优化,特别是在推理能力方面表现出色。它通过创新性的训练方法和强化学习技术,成功地提升了模型的推理能力,并且其性能已逼近 OpenAI 的顶尖水平。
-本文档将指导你如何在 LobeChat 中使用 DeepSeek:
+本文档将指导你如何在 LobeHub 中使用 DeepSeek:
### 步骤一:获取 DeepSeek API 密钥
@@ -26,35 +26,35 @@ tags:
- 进入 `API keys` 菜单,并点击 `创建 API Key`
-
+
- 在弹出的对话框中输入 API 密钥名称
-
+
- 复制得到的 API 密钥并妥善保存
-
+
请安全地存储密钥,因为它只会出现一次。如果你意外丢失它,您将需要创建一个新密钥。
- ### 步骤二:在 LobeChat 中配置 DeepSeek
+ ### 步骤二:在 LobeHub 中配置 DeepSeek
- - 访问 LobeChat 的 `应用设置`界面
+ - 访问 LobeHub 的 `应用设置`界面
- 在 `AI 服务商` 下找到 `DeepSeek` 的设置项
-
+
- 打开 DeepSeek 并填入获取的 API 密钥
- 为你的助手选择一个 DeepSeek 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 DeepSeek 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Deepseek 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 DeepSeek 提供的模型进行对话了。
diff --git a/docs/usage/providers/fal.mdx b/docs/usage/providers/fal.mdx
index 4087851bee..bd0893e8af 100644
--- a/docs/usage/providers/fal.mdx
+++ b/docs/usage/providers/fal.mdx
@@ -1,31 +1,31 @@
---
-title: Using Fal API Key in LobeChat
+title: Using Fal API Key in LobeHub
description: >-
- Learn how to integrate Fal API Key in LobeChat for AI image and video generation using cutting-edge models like FLUX, Kling, and more.
-
+ Learn how to configure and use the Fal API Key in LobeHub to generate
+ AI-powered images and videos with cutting-edge models like FLUX and Kling.
tags:
- - Fal AI
+ - Fal
- Image Generation
- Video Generation
- API Key
- Web UI
---
-# Using Fal in LobeChat
+# Using Fal in LobeHub
-
+
-[Fal.ai](https://fal.ai/) is a lightning-fast inference platform specialized in AI media generation, hosting state-of-the-art models for image and video creation including FLUX, Kling, HiDream, and other cutting-edge generative models. This document will guide you on how to use Fal in LobeChat:
+[Fal.ai](https://fal.ai/) is a high-performance inference platform specializing in AI media generation. It offers state-of-the-art image and video generation models such as FLUX, Kling, and HiDream. This guide will walk you through how to use Fal within LobeHub:
- ### Step 1: Obtain Fal API Key
+ ### Step 1: Obtain Your Fal API Key
- - Register for a [Fal.ai account](https://fal.ai/).
- - Navigate to [API Keys dashboard](https://fal.ai/dashboard/keys) and click **Add key** to create a new API key.
- - Copy the generated API key and keep it secure; it will only be shown once.
+ - Sign up for a [Fal.ai](https://fal.ai/) account;
+ - Go to the [API Keys Dashboard](https://fal.ai/dashboard/keys) and click **Add key** to create a new API key;
+ - Copy the generated API key and store it securely — it will only be shown once.
- ### Step 2: Configure Fal in LobeChat
+ ### Step 2: Configure Fal in LobeHub
- - Visit the `Settings` page in LobeChat.
- - Under **AI Service Provider**, locate the **Fal** configuration section.
+ - Navigate to the `Settings` page in LobeHub;
+ - Under `AI Providers`, locate the configuration section for `Fal`;
-
+
- - Paste the API key you obtained.
- - Choose a Fal model (e.g. `Flux.1 Schnell`, `Flux.1 Kontext Dev`) for image or video generation.
+ - Paste the API key you obtained earlier;
+ - Choose a Fal model (e.g., `Flux.1 Schnell`, `Flux.1 Kontext Dev`) for image or video generation.
-
+
- During usage, you may incur charges according to Fal's pricing policy. Please review Fal's
- official pricing before heavy usage.
+ Please note that using Fal may incur charges. Be sure to review Fal’s official pricing policy before making extensive API calls.
-You can now use Fal's advanced image and video generation models directly within LobeChat to create stunning visual content.
+And that’s it! You’re now ready to create stunning visual content in LobeHub using Fal’s advanced image and video generation models.
diff --git a/docs/usage/providers/fal.zh-CN.mdx b/docs/usage/providers/fal.zh-CN.mdx
index 05155c42d2..ba4cea51cb 100644
--- a/docs/usage/providers/fal.zh-CN.mdx
+++ b/docs/usage/providers/fal.zh-CN.mdx
@@ -1,8 +1,6 @@
---
-title: 在 LobeChat 中使用 Fal API Key
-description: >-
- 学习如何在 LobeChat 中配置和使用 Fal API Key,使用 FLUX、Kling 等尖端模型进行 AI 图像和视频生成。
-
+title: 在 LobeHub 中使用 Fal API Key
+description: 学习如何在 LobeHub 中配置和使用 Fal API Key,使用 FLUX、Kling 等尖端模型进行 AI 图像和视频生成。
tags:
- Fal
- 图像生成
@@ -11,11 +9,11 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Fal
+# 在 LobeHub 中使用 Fal
-
+
-[Fal.ai](https://fal.ai/) 是一个专门从事 AI 媒体生成的快速推理平台,提供包括 FLUX、Kling、HiDream 等在内的最先进图像和视频生成模型。本文将指导你如何在 LobeChat 中使用 Fal:
+[Fal.ai](https://fal.ai/) 是一个专门从事 AI 媒体生成的快速推理平台,提供包括 FLUX、Kling、HiDream 等在内的最先进图像和视频生成模型。本文将指导你如何在 LobeHub 中使用 Fal:
### 步骤一:获取 Fal API Key
@@ -48,9 +46,9 @@ tags:
}
/>
- ### 步骤二:在 LobeChat 中配置 Fal
+ ### 步骤二:在 LobeHub 中配置 Fal
- - 访问 LobeChat 的 `设置` 页面;
+ - 访问 LobeHub 的 `设置` 页面;
- 在 `AI服务商` 下找到 `Fal` 的设置项;
@@ -65,4 +63,4 @@ tags:
-至此,你已经可以在 LobeChat 中使用 Fal 提供的先进图像和视频生成模型来创作精美的视觉内容了。
+至此,你已经可以在 LobeHub 中使用 Fal 提供的先进图像和视频生成模型来创作精美的视觉内容了。
diff --git a/docs/usage/providers/fireworksai.mdx b/docs/usage/providers/fireworksai.mdx
index 565fc25279..6ef8ac18fe 100644
--- a/docs/usage/providers/fireworksai.mdx
+++ b/docs/usage/providers/fireworksai.mdx
@@ -1,57 +1,55 @@
---
-title: Using Fireworks AI in LobeChat
+title: Using Fireworks AI in LobeHub
description: >-
- Learn how to integrate and utilize Fireworks AI's language model APIs in LobeChat.
-
+ Learn how to configure and use Fireworks AI's API Key in LobeHub to start
+ chatting and interacting.
tags:
- - LobeChat
+ - LobeHub
- Fireworks AI
- API Key
- Web UI
---
-# Using Fireworks AI in LobeChat
+# Using Fireworks AI in LobeHub
-
+
-[Fireworks.ai](https://fireworks.ai/) is a high-performance generative AI model inference platform that allows users to access and utilize various models through its API. The platform supports multiple modalities, including text and visual language models, and offers features like function calls and JSON schemas to enhance the flexibility of application development.
+[Fireworks.ai](https://fireworks.ai/) is a high-performance generative AI model inference platform that allows users to access and utilize a variety of models via its API. The platform supports multiple modalities, including text and vision-language models, and offers features like function calling and JSON mode to enhance application development flexibility.
-This article will guide you on how to use Fireworks AI in LobeChat.
+This guide will walk you through how to use Fireworks AI within LobeHub.
- ### Step 1: Obtain an API Key for Fireworks AI
+ ### Step 1: Obtain Your Fireworks AI API Key
- Log in to the [Fireworks.ai Console](https://fireworks.ai/account/api-keys)
- Navigate to the `User` page and click on `API Keys`
- Create a new API key
-
+
- Copy and securely save the generated API key
-
+
- Please store the key securely, as it will appear only once. If you accidentally lose it, you will
- need to create a new key.
+ Make sure to store your API key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure Fireworks AI in LobeChat
+ ### Step 2: Configure Fireworks AI in LobeHub
- - Access the `Settings` interface in LobeChat
- - Under `AI Service Provider`, locate the settings for `Fireworks AI`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the settings for `Fireworks AI`
-
+
- - Enter the obtained API key
- - Select a Fireworks AI model for your AI assistant to start a conversation
+ - Paste the API key you obtained earlier
+ - Choose a Fireworks AI model for your AI assistant to start chatting
-
+
- Please note that you may need to pay fees to the API service provider during use; refer to
- Fireworks AI's pricing policy for details.
+ Please note that usage may incur charges from the API provider. Refer to Fireworks AI’s pricing policy for more details.
-You are now ready to use the models provided by Fireworks AI for conversations in LobeChat.
+And that’s it! You’re now ready to start using Fireworks AI models in LobeHub for conversations and interactions.
diff --git a/docs/usage/providers/fireworksai.zh-CN.mdx b/docs/usage/providers/fireworksai.zh-CN.mdx
index 647a51f0e9..e31a67104f 100644
--- a/docs/usage/providers/fireworksai.zh-CN.mdx
+++ b/docs/usage/providers/fireworksai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Fireworks AI
-description: 学习如何在 LobeChat 中配置和使用 Fireworks AI 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 Fireworks AI
+description: 学习如何在 LobeHub 中配置和使用 Fireworks AI 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- Fireworks AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Fireworks AI
+# 在 LobeHub 中使用 Fireworks AI
-
+
[Fireworks.ai](https://fireworks.ai/) 是一个高性能的生成式 AI 模型推理平台,允许用户通过其 API 访问和使用各种模型。该平台支持多种模态,包括文本和视觉语言模型,并提供函数调用和 JSON 模式等功能,以增强应用开发的灵活性。
-本文将指导你如何在 LobeChat 中使用 Fireworks AI。
+本文将指导你如何在 LobeHub 中使用 Fireworks AI。
### 步骤一:获得 Fireworks AI 的 API Key
@@ -23,31 +23,31 @@ tags:
- 进入 `User` 页面,点击 `API Keys`
- 创建一个新的 API 密钥
-
+
- 复制并保存生成的 API 密钥
-
+
请安全地存储密钥,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新密钥。
- ### 步骤二:在 LobeChat 中配置 Fireworks AI
+ ### 步骤二:在 LobeHub 中配置 Fireworks AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `Fireworks AI` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Fireworks AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Fireworks AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Fireworks AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Fireworks AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/giteeai.mdx b/docs/usage/providers/giteeai.mdx
index 4d48cef369..ba5b14e103 100644
--- a/docs/usage/providers/giteeai.mdx
+++ b/docs/usage/providers/giteeai.mdx
@@ -1,58 +1,56 @@
---
-title: Using Gitee AI in LobeChat
+title: Using Gitee AI in LobeHub
description: >-
- Learn how to configure and use Gitee AI's API Key in LobeChat to start conversations and interactions.
-
+ Learn how to configure and use Gitee AI's API Key in LobeHub to start chatting
+ and interacting.
tags:
- - LobeChat
+ - LobeHub
- Gitee AI
- API Key
- Web UI
---
-# Using Gitee AI in LobeChat
+# Using Gitee AI in LobeHub
-
+
-[Gitee AI](https://ai.gitee.com/) is an open-source platform based on Git code hosting technology, specifically designed for AI application scenarios. It aims to provide developers and businesses with a one-stop solution for AI application development services, including model experience, inference, fine-tuning, and deployment.
+[Gitee AI](https://ai.gitee.com/) is an open-source platform built on Git-based code hosting technology, designed specifically for artificial intelligence (AI) applications. It aims to provide developers and enterprises with an all-in-one AI development service, including model testing, inference, fine-tuning, and deployment.
-This article will guide you on how to use Gitee AI in LobeChat.
+This guide will walk you through how to use Gitee AI within LobeHub.
- ### Step 1: Obtain the Gitee AI API Key
+ ### Step 1: Obtain Your Gitee AI API Key
- Register and log in to the [Gitee AI official website](https://ai.gitee.com/)
- - Purchase and recharge `Serverless API` from your dashboard
+ - In the dashboard, purchase and top up the `Serverless API`
-
+
- - In `Settings`, click on the `Access Tokens` section
+ - Go to the `Settings` section and click on `Access Tokens`
- Create a new access token
- - Save the access token in the pop-up window
+ - Save the token from the pop-up window
-
+
- Please keep the access token safe as it will only appear once. If you accidentally lose it, you
- will need to create a new one.
+ Make sure to save the access token shown in the pop-up window. It will only be displayed once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure Gitee AI in LobeChat
+ ### Step 2: Configure Gitee AI in LobeHub
- - Access the `Settings` page in LobeChat
- - Under `AI Service Provider`, find the settings for `Gitee AI`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `Gitee AI`
-
+
- - Enter the obtained API key
- - Select a Gitee AI model for your AI assistant to begin chatting
+ - Paste the API key you obtained earlier
+ - Choose a Gitee AI model for your AI assistant to start chatting
-
+
- During usage, you may need to make payments to the API service provider; please refer to Gitee
- AI's relevant pricing policy.
+ You may need to pay for API usage depending on your usage. Please refer to Gitee AI’s pricing policy for more details.
-Now you can start having conversations using the models provided by Gitee AI in LobeChat!
+And that’s it! You’re now ready to use Gitee AI models for conversations in LobeHub.
diff --git a/docs/usage/providers/giteeai.zh-CN.mdx b/docs/usage/providers/giteeai.zh-CN.mdx
index bf7d84fc70..5612358878 100644
--- a/docs/usage/providers/giteeai.zh-CN.mdx
+++ b/docs/usage/providers/giteeai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Gitee AI
-description: 学习如何在 LobeChat 中配置和使用 Gitee AI 的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 Gitee AI
+description: 学习如何在 LobeHub 中配置和使用 Gitee AI 的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- Gitee AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Gitee AI
+# 在 LobeHub 中使用 Gitee AI
-
+
[Gitee AI](https://ai.gitee.com/) 是一个基于 Git 代码托管技术的开源平台,专为人工智能(AI)应用场景设计。它旨在为开发者和企业提供一站式的 AI 应用开发服务,包括模型体验、推理、微调和部署等功能。
-本文将指导你如何在 LobeChat 中使用 Gitee AI。
+本文将指导你如何在 LobeHub 中使用 Gitee AI。
### 步骤一:获取 Gitee AI 的 API 密钥
@@ -22,33 +22,33 @@ tags:
- 注册并登录 [Gitee AI 官网](https://ai.gitee.com/)
- 在工作台中购买并充值 `Serverless API`
-
+
- 在 `设置` 中点击 `访问令牌` 界面
- 创建一个新的访问令牌
- 在弹出窗口中保存访问令牌
-
+
妥善保存弹窗中的访问令牌,它只会出现一次,如果不小心丢失了,你需要重新创建一个访问令牌。
- ### 步骤二:在 LobeChat 中配置 Gitee AI
+ ### 步骤二:在 LobeHub 中配置 Gitee AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `Gitee AI` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Gitee AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Gitee AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Gitee AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Gitee AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/github.mdx b/docs/usage/providers/github.mdx
index 80877584a0..cf706be5b6 100644
--- a/docs/usage/providers/github.mdx
+++ b/docs/usage/providers/github.mdx
@@ -1,67 +1,69 @@
---
-title: Using GitHub Models in LobeChat
-description: Learn how to integrate and utilize GitHub Models in LobeChat.
+title: Using GitHub Models in LobeHub
+description: >-
+ Learn how to configure and use GitHub's API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
+ - LobeHub
- GitHub
- GitHub Models
- API Key
- Web UI
---
-# Using GitHub Models in LobeChat
+# Using GitHub Models in LobeHub
-
+
-[GitHub Models](https://github.com/marketplace/models) is a new feature recently launched by GitHub, designed to provide developers with a free platform to access and experiment with various AI models. GitHub Models offers an interactive sandbox environment where users can test different model parameters and prompts, and observe the responses of the models. The platform supports advanced language models, including OpenAI's GPT-4o, Meta's Llama 3.1, and Mistral's Large 2, covering a wide range of applications from large-scale language models to task-specific models.
+[GitHub Models](https://github.com/marketplace/models) is a new feature recently launched by GitHub, designed to provide developers with a free platform to access and experiment with various AI models. GitHub Models offers an interactive sandbox environment where users can test different model parameters and prompts to observe the model's responses. The platform supports a range of advanced language models, including OpenAI's GPT-4o, Meta's Llama 3.1, and Mistral's Large 2, covering a wide spectrum of use cases from large language models to task-specific models.
-This article will guide you on how to use GitHub Models in LobeChat.
+This guide will walk you through how to use GitHub Models within LobeHub.
-## Rate Limits for GitHub Models
+## GitHub Models Rate Limits
-Currently, the usage of the Playground and free API is subject to limits on the number of requests per minute, the number of requests per day, the number of tokens per request, and the number of concurrent requests. If you hit the rate limit, you will need to wait for the limit to reset before making further requests. The rate limits vary for different models (low, high, and embedding models). For model type information, please refer to the GitHub Marketplace.
+Currently, usage of the Playground and free API is subject to limits on requests per minute, daily requests, tokens per request, and concurrent requests. If you hit a rate limit, you’ll need to wait for it to reset before making additional requests. Rate limits vary depending on the model type (low, high, or embedding models). For details on model types, refer to the GitHub Marketplace.
-
+
- These limits are subject to change at any time. For specific information, please refer to the
- [GitHub Official
- Documentation](https://docs.github.com/en/github-models/prototyping-with-ai-models#rate-limits).
+ These limits are subject to change. For the most up-to-date information, please refer to the [official GitHub documentation](https://docs.github.com/en/github-models/prototyping-with-ai-models#rate-limits).
---
-## Configuration Guide for GitHub Models
+## GitHub Models Configuration Guide
### Step 1: Obtain a GitHub Access Token
- - Log in to GitHub and open the [Access Tokens](https://github.com/settings/tokens) page.
+ - Log in to GitHub and navigate to the [Personal Access Tokens](https://github.com/settings/tokens) page.
- Create and configure a new access token.
-
+
- - Copy and save the generated token from the results returned.
+ - Copy and securely save the generated token from the result page.
-
+
- - During the testing phase of GitHub Models, users must apply to join the [waitlist](https://github.com/marketplace/models/waitlist/join) in order to gain access.
+ - During the GitHub Models testing phase, you must apply to join the [waitlist](https://github.com/marketplace/models/waitlist/join) to gain access.
- - Please store the access token securely, as it will only be displayed once. If you accidentally lose it, you will need to create a new token.
+
+ - Be sure to store your access token securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
+
- ### Step 2: Configure GitHub Models in LobeChat
+ ### Step 2: Configure GitHub Models in LobeHub
- - Navigate to the `Settings` interface in LobeChat.
- - Under `AI Service Provider`, find the GitHub settings.
+ - Open the `Settings` panel in LobeHub.
+ - Under `AI Providers`, locate the `GitHub` configuration section.
-
+
- - Enter the access token you obtained.
- - Select a GitHub model for your AI assistant to start the conversation.
+ - Paste the access token you obtained earlier.
+ - Choose a GitHub model for your AI assistant to start chatting.
-
+
-You are now ready to use the models provided by GitHub for conversations within LobeChat.
+And that’s it! You’re now ready to start using GitHub-provided models in LobeHub for conversations and interactions.
diff --git a/docs/usage/providers/github.zh-CN.mdx b/docs/usage/providers/github.zh-CN.mdx
index 689a978007..04a11b9f14 100644
--- a/docs/usage/providers/github.zh-CN.mdx
+++ b/docs/usage/providers/github.zh-CN.mdx
@@ -1,27 +1,27 @@
---
-title: 在 LobeChat 中使用 GitHub Models
-description: 学习如何在 LobeChat 中配置和使用 GitHub 的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 GitHub Models
+description: 学习如何在 LobeHub 中配置和使用 GitHub 的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- GitHub
- GitHub Models
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 GitHub Models
+# 在 LobeHub 中使用 GitHub Models
-
+
[GitHub Models](https://github.com/marketplace/models) 是 GitHub 最近推出的一项新功能,旨在为开发者提供一个免费的平台来访问和实验多种 AI 模型。GitHub Models 提供了一个互动沙盒环境,用户可以在此测试不同的模型参数和提示语,观察模型的响应。该平台支持多种先进的语言模型,包括 OpenAI 的 GPT-4o、Meta 的 Llama 3.1 和 Mistral 的 Large 2 等,覆盖了从大规模语言模型到特定任务模型的广泛应用。
-本文将指导你如何在 LobeChat 中使用 GitHub Models。
+本文将指导你如何在 LobeHub 中使用 GitHub Models。
## GitHub Models 速率限制
当前 Playground 和免费 API 的使用受到每分钟请求数、每日请求数、每个请求的令牌数以及并发请求数的限制。若达到速率限制,则需等待限制重置后方可继续发出请求。不同模型(低、高及嵌入模型)的速率限制有所不同。 模型类型信息请参阅 GitHub Marketplace。
-
+
这些限制可能随时更改,具体信息请参考 [GitHub
@@ -38,11 +38,11 @@ tags:
- 登录 GitHub 并打开 [访问令牌](https://github.com/settings/tokens) 页面
- 创建并设置一个新的访问令牌
-
+
- 在返回的结果中复制并保存生成的令牌
-
+
- GitHub Models 测试期间,要使用 GitHub Models,用户需要申请加入[等待名单(waitlist)](https://github.com/marketplace/models/waitlist/join) 通过后才能获得访问权限。
@@ -50,17 +50,17 @@ tags:
- 请安全地存储访问令牌,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新令牌。
- ### 步骤二:在 LobeChat 中配置 GitHub Models
+ ### 步骤二:在 LobeHub 中配置 GitHub Models
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `GitHub` 的设置项
-
+
- 填入获得的访问令牌
- 为你的 AI 助手选择一个 GitHub 的模型即可开始对话
-
+
-至此你已经可以在 LobeChat 中使用 GitHub 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 GitHub 提供的模型进行对话了。
diff --git a/docs/usage/providers/google.mdx b/docs/usage/providers/google.mdx
index 10d9769c73..457dc38dcb 100644
--- a/docs/usage/providers/google.mdx
+++ b/docs/usage/providers/google.mdx
@@ -1,55 +1,54 @@
---
-title: Using Google Gemini API Key in LobeChat
+title: Using Google Gemini API Key in LobeHub
description: >-
- Learn how to integrate and utilize Google Gemini AI in LobeChat to enhance your conversational experience. Follow these steps to configure Google Gemini and start leveraging its powerful capabilities.
-
+ This guide will walk you through how to configure and use Google Gemini, a
+ powerful language model developed by Google AI, within LobeHub.
tags:
- Google Gemini
- - AI integration
- - Google AI Studio
+ - Google AI
+ - API Key
- Web UI
---
-# Using Google Gemini in LobeChat
+# Using Google Gemini in LobeHub
-
+
-Gemini AI is a set of large language models (LLMs) created by Google AI, known for its cutting-edge advancements in multimodal understanding and processing. It is essentially a powerful artificial intelligence tool capable of handling various tasks involving different types of data, not just text.
+Gemini AI is a suite of large language models (LLMs) developed by Google AI, renowned for its cutting-edge capabilities in multimodal understanding and processing. It is a powerful AI tool designed to handle a wide range of tasks involving various types of data—not just text.
-This document will guide you on how to use Google Gemini in LobeChat:
+This guide will show you how to use Google Gemini in LobeHub:
- ### Step 1: Obtain Google API Key
+ ### Step 1: Obtain a Google API Key
- - Visit and log in to [Google AI Studio](https://aistudio.google.com/)
- - Navigate to `Get API Key` in the menu and click on `Create API Key`
+ - Visit and sign in to [Google AI Studio](https://aistudio.google.com/)
+ - In the "Get API Key" menu, click on "Create API Key"
-
+
- - Select a project and create an API key, or create one in a new project
+ - Choose an existing project or create a new one to generate your API key
-
+
- Copy the API key from the pop-up dialog
-
+
- ### Step 2: Configure OpenAI in LobeChat
+ ### Step 2: Configure Google Gemini in LobeHub
- - Go to the `Settings` interface in LobeChat
- - Find the setting for `Google Gemini` under `AI Service Provider`
+ - Go to the Settings page in LobeHub
+ - Under "AI Providers", locate the "Google Gemini" section
-
+
- - Enable Google Gemini and enter the obtained API key
- - Choose a Gemini model for your assistant to start the conversation
+ - Enable Google Gemini and paste in your API key
+ - Choose a Gemini model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Google Gemini's
- pricing policy.
+ You may incur charges from the API provider while using the service. Please refer to Google Gemini’s pricing policy for details.
-Congratulations! You can now use Google Gemini in LobeChat.
+And that’s it — you’re now ready to use Google Gemini in LobeHub!
diff --git a/docs/usage/providers/google.zh-CN.mdx b/docs/usage/providers/google.zh-CN.mdx
index bc38a3785a..1c57361c18 100644
--- a/docs/usage/providers/google.zh-CN.mdx
+++ b/docs/usage/providers/google.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 Google Gemini API Key
-description: 本文将指导你如何在 LobeChat 中配置并使用 Google Gemini,一个由 Google AI 创建的强大语言模型。
+title: 在 LobeHub 中使用 Google Gemini API Key
+description: 本文将指导你如何在 LobeHub 中配置并使用 Google Gemini,一个由 Google AI 创建的强大语言模型。
tags:
- Google Gemini
- Google AI
@@ -8,13 +8,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Google Gemini
+# 在 LobeHub 中使用 Google Gemini
-
+
Gemini AI 是由 Google AI 创建的一组大型语言模型(LLM),以其在多模式理解和处理方面的尖端进步而闻名。它本质上是一个强大的人工智能工具,可以处理涉及不同类型数据的各种任务,而不仅仅是文本。
-本文档将指导你如何在 LobeChat 中使用 Google Gemini:
+本文档将指导你如何在 LobeHub 中使用 Google Gemini:
### 步骤一:获取 Google 的 API 密钥
@@ -22,31 +22,31 @@ Gemini AI 是由 Google AI 创建的一组大型语言模型(LLM),以其
- 访问并登录 [Google AI Studio](https://aistudio.google.com/)
- 在 `获取 API 密钥` 菜单中 `创建 API 密钥`
-
+
- 选择一个项目并创建 API 密钥,或者在新项目中创建 API 密钥
-
+
- 在弹出的对话框中复制 API 密钥
-
+
- ### 步骤二:在 LobeChat 中配置 OpenAI
+ ### 步骤二:在 LobeHub 中配置 Google Gemini
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Google Gemini`的设置项
-
+
- 打开 Google Gemini 并填入获得的 API 密钥
- 为你的助手选择一个 Gemini 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Google Gemini 的费用政策。
-至此,你已经可以在 LobeChat 中使用 Google Gemini 啦。
+至此,你已经可以在 LobeHub 中使用 Google Gemini 啦。
diff --git a/docs/usage/providers/groq.mdx b/docs/usage/providers/groq.mdx
index add4015330..63e84663dd 100644
--- a/docs/usage/providers/groq.mdx
+++ b/docs/usage/providers/groq.mdx
@@ -1,55 +1,50 @@
---
-title: Using Groq API Key in LobeChat
+title: Using the Groq API Key in LobeHub
description: >-
- Learn how to obtain GroqCloud API keys and configure Groq in LobeChat for optimal performance.
-
+ Learn how to obtain a GroqCloud API Key and configure Groq in LobeHub to
+ experience its powerful performance.
tags:
- - LPU Inference Engine
- - GroqCloud
- LLAMA3
- Qwen2
- API keys
- Web UI
+ - API Key
---
-# Using Groq in LobeChat
+# Using Groq in LobeHub
-
+
-Groq's [LPU Inference Engine](https://wow.groq.com/news_press/groq-lpu-inference-engine-leads-in-first-independent-llm-benchmark/) has excelled in the latest independent Large Language Model (LLM) benchmark, redefining the standard for AI solutions with its remarkable speed and efficiency. By integrating LobeChat with Groq Cloud, you can now easily leverage Groq's technology to accelerate the operation of large language models in LobeChat.
+Groq’s [LPU Inference Engine](https://wow.groq.com/news_press/groq-lpu-inference-engine-leads-in-first-independent-llm-benchmark/) has demonstrated outstanding performance in the latest independent large language model (LLM) benchmarks, redefining the standards for AI solutions with its incredible speed and efficiency. With the integration of Groq Cloud into LobeHub, you can now easily harness Groq’s technology to accelerate LLM performance within LobeHub.
- Groq's LPU Inference Engine achieved a sustained speed of 300 tokens per second in internal
- benchmark tests, and according to benchmark tests by ArtificialAnalysis.ai, Groq outperformed
- other providers in terms of throughput (241 tokens per second) and total time to receive 100
- output tokens (0.8 seconds).
+ In internal benchmarks, Groq’s LPU Inference Engine consistently achieved speeds of 300 tokens per second. According to ArtificialAnalysis.ai, Groq outperforms other providers in both throughput (241 tokens per second) and total time to receive 100 output tokens (0.8 seconds).
-This document will guide you on how to use Groq in LobeChat:
+This guide will walk you through how to use Groq in LobeHub:
- ### Obtaining GroqCloud API Keys
+ ### Step 1: Get Your GroqCloud API Key
- First, you need to obtain an API Key from the [GroqCloud Console](https://console.groq.com/).
+ First, visit the [GroqCloud Console](https://console.groq.com/) to obtain your API Key.
-
+
- Create an API Key in the `API Keys` menu of the console.
+ In the console, navigate to the `API Keys` section and create a new API Key.
-
+
- Safely store the key from the pop-up as it will only appear once. If you accidentally lose it, you
- will need to create a new key.
+ Make sure to save the key shown in the popup — it will only be displayed once. If you lose it, you’ll need to generate a new one.
- ### Configure Groq in LobeChat
+ ### Step 2: Configure Groq in LobeHub
- You can find the Groq configuration option in `Settings` -> `AI Service Provider`, where you can input the API Key you just obtained.
+ Go to `Settings` -> `AI Providers` in LobeHub, and find the configuration section for Groq. Paste the API Key you just obtained.
-
+
-Next, select a Groq-supported model in the assistant's model options, and you can experience the powerful performance of Groq in LobeChat.
+Next, in the assistant’s model selection menu, choose a model supported by Groq to start experiencing Groq’s powerful performance in LobeHub.
-
+
diff --git a/docs/usage/providers/groq.zh-CN.mdx b/docs/usage/providers/groq.zh-CN.mdx
index ad2bf43a05..09259776bf 100644
--- a/docs/usage/providers/groq.zh-CN.mdx
+++ b/docs/usage/providers/groq.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 Groq API Key
-description: 了解如何获取 GroqCloud API Key,并在 LobeChat 中配置 Groq,体验 Groq 强大的性能。
+title: 在 LobeHub 中使用 Groq API Key
+description: 了解如何获取 GroqCloud API Key,并在 LobeHub 中配置 Groq,体验 Groq 强大的性能。
tags:
- LLAMA3
- Qwen2
@@ -9,11 +9,11 @@ tags:
- API Key
---
-# 在 LobeChat 中使用 Groq
+# 在 LobeHub 中使用 Groq
-
+
-Groq 的 [LPU 推理引擎](https://wow.groq.com/news_press/groq-lpu-inference-engine-leads-in-first-independent-llm-benchmark/) 在最新的独立大语言模型(LLM)基准测试中表现卓越,以其惊人的速度和效率重新定义了 AI 解决方案的标准。通过 LobeChat 与 Groq Cloud 的集成,你现在可以轻松地利用 Groq 的技术,在 LobeChat 中加速大语言模型的运行。
+Groq 的 [LPU 推理引擎](https://wow.groq.com/news_press/groq-lpu-inference-engine-leads-in-first-independent-llm-benchmark/) 在最新的独立大语言模型(LLM)基准测试中表现卓越,以其惊人的速度和效率重新定义了 AI 解决方案的标准。通过 LobeHub 与 Groq Cloud 的集成,你现在可以轻松地利用 Groq 的技术,在 LobeHub 中加速大语言模型的运行。
Groq LPU 推理引擎在内部基准测试中连续达到每秒 300 个令牌的速度,据 ArtificialAnalysis.ai
@@ -21,30 +21,30 @@ Groq 的 [LPU 推理引擎](https://wow.groq.com/news_press/groq-lpu-inference-e
秒)方面优于其他提供商。
-本文档将指导你如何在 LobeChat 中使用 Groq:
+本文档将指导你如何在 LobeHub 中使用 Groq:
### 获取 GroqCloud API Key
首先,你需要到 [GroqCloud Console](https://console.groq.com/) 中获取一个 API Key。
-
+
在控制台的 `API Keys` 菜单中创建一个 API Key。
-
+
妥善保存弹窗中的 key,它只会出现一次,如果不小心丢失了,你需要重新创建一个 key。
- ### 在 LobeChat 中配置 Groq
+ ### 在 LobeHub 中配置 Groq
你可以在 `设置` -> `AI 服务商` 中找到 Groq 的配置选项,将刚才获取的 API Key 填入。
-
+
-接下来,在助手的模型选项中,选中一个 Groq 支持的模型,就可以在 LobeChat 中体验 Groq 强大的性能了。
+接下来,在助手的模型选项中,选中一个 Groq 支持的模型,就可以在 LobeHub 中体验 Groq 强大的性能了。
-
+
diff --git a/docs/usage/providers/hunyuan.mdx b/docs/usage/providers/hunyuan.mdx
index 2922be251a..6130af23b9 100644
--- a/docs/usage/providers/hunyuan.mdx
+++ b/docs/usage/providers/hunyuan.mdx
@@ -1,52 +1,51 @@
---
-title: Using Tencent Hunyuan in LobeChat
+title: Using Tencent Hunyuan in LobeHub
description: >-
- Learn how to integrate and utilize Tencent Hunyuan's language model APIs in LobeChat.
-
+ Learn how to configure and use Tencent Hunyuan's API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
+ - LobeHub
- Tencent Hunyuan
- API Key
- Web UI
---
-# Using Tencent Hunyuan in LobeChat
+# Using Tencent Hunyuan in LobeHub
-
+
-[Tencent Hunyuan](https://hunyuan.tencent.com/) is a large model launched by Tencent, designed to provide users with intelligent assistant services. It utilizes natural language processing technology to help users solve problems, offer suggestions, and generate content. By conversing with the model, users can quickly access the information they need, thereby enhancing work efficiency.
+[Tencent Hunyuan](https://hunyuan.tencent.com/) is a large language model developed by Tencent, designed to provide intelligent assistant services. Leveraging natural language processing technology, it helps users solve problems, offer suggestions, and generate content. By interacting with the model through conversation, users can quickly access the information they need, boosting productivity.
-This article will guide you on how to use Tencent Hunyuan in LobeChat.
+This guide will walk you through how to use Tencent Hunyuan in LobeHub.
- ### Step 1: Obtain the Tencent Hunyuan API Key
+ ### Step 1: Obtain a Tencent Hunyuan API Key
- Register and log in to the [Tencent Cloud Console](https://console.cloud.tencent.com/hunyuan/api-key)
- Navigate to `Hunyuan Large Model` and click on `API KEY Management`
- - Create an API key
+ - Create a new API Key
-
+
- - Click `View`, and copy the API key from the pop-up panel, ensuring you save it securely
+ - Click `View`, then copy the API Key from the pop-up panel and store it securely
-
+
- ### Step 2: Configure Tencent Hunyuan in LobeChat
+ ### Step 2: Configure Tencent Hunyuan in LobeHub
- - Go to the `Settings` page in LobeChat
- - Find the `Tencent Hunyuan` settings under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `Tencent Hunyuan`
-
+
- - Enter the API key you obtained
- - Select a Tencent Hunyuan model for your AI assistant to start the conversation
+ - Paste the API Key you obtained earlier
+ - Choose a Tencent Hunyuan model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Tencent Hunyuan's
- relevant pricing policy.
+ You may incur charges from the API service provider during usage. Please refer to Tencent Hunyuan’s pricing policy for details.
-You can now engage in conversations using the models provided by Tencent Hunyuan in LobeChat.
+And that’s it! You’re now ready to use Tencent Hunyuan’s models for conversations in LobeHub.
diff --git a/docs/usage/providers/hunyuan.zh-CN.mdx b/docs/usage/providers/hunyuan.zh-CN.mdx
index 3b7fac59b3..3adadc80be 100644
--- a/docs/usage/providers/hunyuan.zh-CN.mdx
+++ b/docs/usage/providers/hunyuan.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用腾讯混元
-description: 学习如何在 LobeChat 中配置和使用腾讯混元的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用腾讯混元
+description: 学习如何在 LobeHub 中配置和使用腾讯混元的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 腾讯混元
- API密钥
- Web UI
---
-# 在 LobeChat 中使用腾讯混元
+# 在 LobeHub 中使用腾讯混元
-
+
[腾讯混元](https://hunyuan.tencent.com/)是由腾讯推出的一款大模型,旨在为用户提供智能助手服务。它能够通过自然语言处理技术,帮助用户解决问题、提供建议以及进行内容生成等任务。用户可以通过与模型的对话,快速获取所需信息,从而提高工作效率。
-本文将指导你如何在 LobeChat 中使用腾讯混元。
+本文将指导你如何在 LobeHub 中使用腾讯混元。
### 步骤一:获得腾讯混元的 API Key
@@ -23,27 +23,27 @@ tags:
- 进入 `混元大模型` 并点击 `API KEY 管理`
- 创建一个 API 密钥
-
+
- 点击`查看`,在弹出面板中复制 API 密钥,并妥善保存
-
+
- ### 步骤二:在 LobeChat 中配置腾讯混元
+ ### 步骤二:在 LobeHub 中配置腾讯混元
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `腾讯混元` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个腾讯混元的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考腾讯混元的相关费用政策。
-至此你已经可以在 LobeChat 中使用腾讯混元提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用腾讯混元提供的模型进行对话了。
diff --git a/docs/usage/providers/infiniai.mdx b/docs/usage/providers/infiniai.mdx
index d83f1c3996..b4700ac131 100644
--- a/docs/usage/providers/infiniai.mdx
+++ b/docs/usage/providers/infiniai.mdx
@@ -1,29 +1,36 @@
---
-title: Using Infini-AI in LobeChat
-description: Learn how to configure and utilize Infini-AI's model services in LobeChat.
+title: Using Infini-AI in LobeHub
+description: >-
+ Learn how to configure and use Infini-AI's API Key in LobeHub to enable
+ AI-powered conversations.
tags:
- - LobeChat
+ - LobeHub
- Infini-AI
- API Key
- - LLM Deployment
+ - Large Model Deployment
---
-# Using Infini-AI in LobeChat
+# Using Infini-AI in LobeHub
-[Infini-AI](https://cloud.infini-ai.com/) is a large model service platform optimized for multiple chip architectures, providing efficient and unified AGI infrastructure solutions.
+[Infini-AI](https://cloud.infini-ai.com/) is a large model service platform optimized for diverse chip architectures, offering a high-performance and unified AGI infrastructure solution.
-This guide will help you quickly integrate Infini-AI's AI capabilities into LobeChat.
+This guide will walk you through the steps to quickly integrate Infini-AI's capabilities into LobeHub.
+
+
+ Infini-AI enforces a whitelist mechanism for image URLs. Currently, image links from services like Alibaba Cloud OSS and AWS S3 are supported.\
+ If you encounter a 400 error when using image-based conversations, try [uploading images using base64 encoding](/docs/self-hosting/environment-variables/s3#llm-vision-image-use-base-64).
+
- ### Step 1: Obtain Infini-AI API Key
+ ### Step 1: Obtain an Infini-AI API Key
- Log in to the [Large Model Service Platform](https://cloud.infini-ai.com/genstudio/model)
- - Select "API KEY Management" in the left navigation bar
- - In the newly opened page, click the "Create API KEY" button, enter a name, and click "Create"
+ - In the left-hand navigation menu, select "API KEY Management"
+ - On the newly opened page, click the "Create API KEY" button, enter a name, and click "Create"
- ### Step 2: Configure LobeChat Model Service
+ ### Step 2: Configure the Model Service in LobeHub
- - Open LobeChat and go to the "Settings" interface
- - Select "Infini-AI" in the "Language Model" module
- - Paste the API key you obtained
+ - Open LobeHub and go to the "Settings" page
+ - In the "Language Model" section, select "Infini-AI"
+ - Paste the API Key you obtained earlier
diff --git a/docs/usage/providers/infiniai.zh-CN.mdx b/docs/usage/providers/infiniai.zh-CN.mdx
index e2a0ee90a1..c22c9cdf39 100644
--- a/docs/usage/providers/infiniai.zh-CN.mdx
+++ b/docs/usage/providers/infiniai.zh-CN.mdx
@@ -1,18 +1,18 @@
---
-title: 在 LobeChat 中使用无问芯穹
-description: 学习如何在 LobeChat 中配置和使用无问芯穹的 API Key,实现 AI 对话交互。
+title: 在 LobeHub 中使用无问芯穹
+description: 学习如何在 LobeHub 中配置和使用无问芯穹的 API Key,实现 AI 对话交互。
tags:
- - LobeChat
+ - LobeHub
- 无问芯穹
- API密钥
- 大模型部署
---
-# 在 LobeChat 中使用无问芯穹
+# 在 LobeHub 中使用无问芯穹
[无问芯穹](https://cloud.infini-ai.com/)是基于多元芯片优化的大模型服务平台,提供高效统一的 AGI 基础设施解决方案。
-本文将指导你如何在 LobeChat 中快速接入无问芯穹的 AI 能力。
+本文将指导你如何在 LobeHub 中快速接入无问芯穹的 AI 能力。
无问芯穹的图片链接输入有白名单机制,目前已知支持阿里云 OSS / AWS S3
@@ -27,9 +27,9 @@ tags:
- 在左侧导航栏选择「API KEY 管理」
- 在新打开的页面中,点击「创建 API KEY」按钮,填入名称,点击「创建」
- ### 步骤二:配置 LobeChat 模型服务
+ ### 步骤二:配置 LobeHub 模型服务
- - 打开 LobeChat 进入「设置」界面
+ - 打开 LobeHub 进入「设置」界面
- 在「语言模型」模块选择「Infini-AI」
- 粘贴已获取的 API 密钥
diff --git a/docs/usage/providers/internlm.mdx b/docs/usage/providers/internlm.mdx
index 8e2bb77a7d..ffef312e13 100644
--- a/docs/usage/providers/internlm.mdx
+++ b/docs/usage/providers/internlm.mdx
@@ -1,53 +1,51 @@
---
-title: Using InternLM in LobeChat
+title: Using InternLM in LobeHub
description: >-
- Learn how to configure and use SenseNova's API Key in LobeChat to start conversations and interactions.
-
+ Learn how to configure and use your InternLM API Key in LobeHub to start
+ chatting and interacting.
tags:
- - LobeChat
+ - LobeHub
- InternLM
- API Key
- Web UI
---
-# Using InternLM in LobeChat
+# Using InternLM in LobeHub
-
+
-[InternLM](https://internlm.intern-ai.org.cn/) is a large pre-trained language model jointly launched by the Shanghai Artificial Intelligence Laboratory and Shusheng Group. This model focuses on natural language processing, aimed at understanding and generating human language, boasting powerful semantic comprehension and text generation capabilities.
+[InternLM](https://internlm.intern-ai.org.cn/) is a large-scale pre-trained language model jointly developed by Shanghai AI Laboratory and Intern Studio. Designed for natural language processing tasks, InternLM excels at understanding and generating human language, offering powerful semantic comprehension and text generation capabilities.
-This article will guide you on how to use InternLM in LobeChat.
+This guide will walk you through how to use InternLM within LobeHub.
- ### Step 1: Obtain the InternLM API Key
+ ### Step 1: Obtain Your InternLM API Key
- - Register and log in to [InternLM API](https://InternLM.intern-ai.org.cn/api/tokens)
- - Create an API token
- - Save the API token in the pop-up window
+ - Register and log in to the [InternLM API Portal](https://internlm.intern-ai.org.cn/api/tokens)
+ - Create a new API token
+ - Save the token from the pop-up window
-
+
- Please store the API token shown in the pop-up securely; it will only appear once. If you lose it,
- you will need to create a new API token.
+ Make sure to save the API token shown in the pop-up window. It will only be displayed once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure InternLM in LobeChat
+ ### Step 2: Configure InternLM in LobeHub
- - Go to the `Settings` interface in LobeChat
- - Find the settings option for `InternLM` under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the configuration section for `InternLM`
-
+
- - Enter the obtained API Key
- - Choose a InternLM model for your AI assistant to start a conversation
+ - Paste your API Key into the input field
+ - Choose an InternLM model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider; please refer to the pricing policy
- regarding InternLM.
+ Please note that usage may incur charges depending on the API provider’s pricing policy. Refer to InternLM’s official documentation for details.
-You are now ready to engage in conversations using the models provided by InternLM in LobeChat.
+You’re all set! You can now start using InternLM-powered models in LobeHub for conversations and interactions.
diff --git a/docs/usage/providers/internlm.zh-CN.mdx b/docs/usage/providers/internlm.zh-CN.mdx
index eba76cae46..9446f36ee4 100644
--- a/docs/usage/providers/internlm.zh-CN.mdx
+++ b/docs/usage/providers/internlm.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用书生浦语
-description: 学习如何在 LobeChat 中配置和使用书生浦语的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用书生浦语
+description: 学习如何在 LobeHub 中配置和使用书生浦语的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 书生浦语
- API密钥
- Web UI
---
-# 在 LobeChat 中使用书生浦语
+# 在 LobeHub 中使用书生浦语
-
+
[书生浦语(InternLM)](https://internlm.intern-ai.org.cn/) 是由上海人工智能实验室与书生集团联合推出的一款大型预训练语言模型。该模型专注于自然语言处理,旨在理解和生成自然语言,具备强大的语义理解和文本生成能力。
-本文将指导你如何在 LobeChat 中使用书生浦语。
+本文将指导你如何在 LobeHub 中使用书生浦语。
### 步骤一:获取书生浦语的 API 密钥
@@ -23,27 +23,27 @@ tags:
- 创建一个 API 令牌
- 在弹出窗口中保存 API 令牌
-
+
妥善保存弹窗中的 API 令牌,它只会出现一次,如果不小心丢失了,你需要重新创建一个 API 令牌。
- ### 步骤二:在 LobeChat 中配置书生浦语
+ ### 步骤二:在 LobeHub 中配置书生浦语
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `书生` 的设置项
-
+
- 填入获得的 API Key
- 为你的 AI 助手选择一个书生浦语的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考书生浦语的相关费用政策。
-至此你已经可以在 LobeChat 中使用书生浦语提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用书生浦语提供的模型进行对话了。
diff --git a/docs/usage/providers/jina.mdx b/docs/usage/providers/jina.mdx
index a39c62fe89..0e9de26d95 100644
--- a/docs/usage/providers/jina.mdx
+++ b/docs/usage/providers/jina.mdx
@@ -1,52 +1,53 @@
---
-title: Using Jina AI API Key in LobeChat
-description: Learn how to configure and use Jina AI models in LobeChat, obtain an API key, and start conversations.
+title: Using Jina AI API Key in LobeHub
+description: >-
+ Learn how to configure and use Jina AI models in LobeHub, obtain your API key,
+ and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Jina AI
- API Key
- Web UI
---
-# Using Jina AI in LobeChat
+# Using Jina AI in LobeHub
-
+
-[Jina AI](https://jina.ai/) is an open-source neural search company founded in 2020. It focuses on using deep learning technology to process multimodal data, providing efficient information retrieval solutions and supporting search for various data types such as text, images, and videos.
+[Jina AI](https://jina.ai/) is an open-source neural search company founded in 2020. It specializes in leveraging deep learning technologies to process multimodal data and offers efficient information retrieval solutions that support text, image, video, and other data types.
-This document will guide you on how to use Jina AI in LobeChat:
+This guide will walk you through how to use Jina AI in LobeHub:
- ### Step 1: Obtain a Jina AI API Key
+ ### Step 1: Get Your Jina AI API Key
- - Visit the [Jina AI official website](https://jina.ai/) and click the `API` button on the homepage.
+ - Visit the [Jina AI official website](https://jina.ai/) and click the `API` button on the homepage
-
+
- - Find the API Key generated for you in the `API Key` menu below.
- - Copy and save the generated API Key.
+ - In the `API Key` section, you’ll find an API key automatically generated for you
+ - Copy and save the generated API key
- * Jina AI provides each user with 1M free API Tokens, and the API can be used without
- registration. \* If you need to manage the API Key or recharge the API, you need to register and
- log in to the [Jina AI Console](https://jina.ai/api-dashboard/).
+ \* Jina AI provides each user with 1M free API tokens—no registration required to start using the API.\
+ \* To manage your API key or purchase additional tokens, you’ll need to sign up and log in to the [Jina AI Dashboard](https://jina.ai/api-dashboard/).
- ### Step 2: Configure Jina AI in LobeChat
+ ### Step 2: Configure Jina AI in LobeHub
- - Visit LobeChat's `Application Settings` interface.
- - Find the `Jina AI` setting under `AI Service Provider`.
+ - Go to the `App Settings` page in LobeHub
+ - Under `AI Providers`, locate the `Jina AI` configuration section
-
+
- - Enable Jina AI and fill in the obtained API Key.
- - Select a Jina AI model for your assistant and start the conversation.
+ - Enable Jina AI and paste in your API key
+ - Choose a Jina AI model for your assistant to start chatting
-
+
- You may need to pay the API service provider during use. Please refer to Jina AI's relevant fee policy.
+ You may incur charges when using the API. Please refer to Jina AI’s pricing policy for more details.
-Now you can use the models provided by Jina AI in LobeChat to have conversations.
+And that’s it! You’re now ready to start chatting with models powered by Jina AI in LobeHub.
diff --git a/docs/usage/providers/jina.zh-CN.mdx b/docs/usage/providers/jina.zh-CN.mdx
index 8aa2529e58..3f6a29f071 100644
--- a/docs/usage/providers/jina.zh-CN.mdx
+++ b/docs/usage/providers/jina.zh-CN.mdx
@@ -1,27 +1,27 @@
---
-title: 在 LobeChat 中使用 Jina AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Jina AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 Jina AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Jina AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- Jina AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Jina AI
+# 在 LobeHub 中使用 Jina AI
-
+
[Jina AI](https://jina.ai/) 是一家成立于 2020 年的开源神经搜索公司,专注于利用深度学习技术处理多模态数据,提供高效的信息检索解决方案,支持文本、图像、视频等多种数据类型的搜索。
-本文档将指导你如何在 LobeChat 中使用 Jina AI:
+本文档将指导你如何在 LobeHub 中使用 Jina AI:
### 步骤一:获取 Jina AI API 密钥
- 访问 [Jina AI 官方网站](https://jina.ai/),点击首页的 `API` 按钮
-
+
- 在下方的 `API Key` 菜单中找到系统为你生成的 API Key
- 复制并保存生成的 API Key
@@ -31,21 +31,21 @@ tags:
API 充值,你需要注册并登录 [Jina AI 控制台](https://jina.ai/api-dashboard/)
- ### 步骤二:在 LobeChat 中配置 Jina AI
+ ### 步骤二:在 LobeHub 中配置 Jina AI
- - 访问 LobeChat 的 `应用设置`界面
+ - 访问 LobeHub 的 `应用设置`界面
- 在 `AI 服务商` 下找到 `Jina AI` 的设置项
-
+
- 打开 Jina AI 并填入获取的 API 密钥
- 为你的助手选择一个 Jina AI 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Jina AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Jina AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Jina AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/lmstudio.mdx b/docs/usage/providers/lmstudio.mdx
index 488e010ea0..abb7b802a8 100644
--- a/docs/usage/providers/lmstudio.mdx
+++ b/docs/usage/providers/lmstudio.mdx
@@ -1,75 +1,77 @@
---
-title: Using LM Studio in LobeChat
-description: Learn how to configure and use LM Studio, and run AI models for conversations in LobeChat through LM Studio.
+title: Using LM Studio in LobeHub
+description: >-
+ Learn how to configure and use LM Studio to run AI models for conversations
+ within LobeHub.
tags:
- - LobeChat
+ - LobeHub
- LM Studio
- - Open Source Model
+ - Open Source Models
- Web UI
---
-# Using LM Studio in LobeChat
+# Using LM Studio in LobeHub
-
+
-[LM Studio](https://lmstudio.ai/) is a platform for testing and running large language models (LLMs), providing an intuitive and easy-to-use interface suitable for developers and AI enthusiasts. It supports deploying and running various open-source LLM models, such as Deepseek or Qwen, on local computers, enabling offline AI chatbot functionality, thereby protecting user privacy and providing greater flexibility.
+[LM Studio](https://lmstudio.ai/) is a platform designed for testing and running large language models (LLMs). It offers an intuitive and user-friendly interface, making it ideal for developers and AI enthusiasts. LM Studio supports deploying and running various open-source LLMs locally—such as Deepseek or Qwen—enabling offline AI chatbot functionality that enhances privacy and flexibility.
-This document will guide you on how to use LM Studio in LobeChat:
+This guide will walk you through how to use LM Studio within LobeHub:
- ### Step 1: Obtain and Install LM Studio
+ ### Step 1: Download and Install LM Studio
- - Go to the [LM Studio official website](https://lmstudio.ai/)
- - Choose your platform and download the installation package. LM Studio currently supports MacOS, Windows, and Linux platforms.
- - Follow the prompts to complete the installation and run LM Studio.
+ - Visit the [official LM Studio website](https://lmstudio.ai/)
+ - Choose your operating system and download the installer. LM Studio currently supports macOS, Windows, and Linux
+ - Follow the installation instructions and launch LM Studio
-
+
- ### Step 2: Search and Download Models
+ ### Step 2: Search and Download a Model
- - Open the `Discover` menu on the left, search for and download the model you want to use.
- - Find a suitable model (such as Deepseek R1) and click download.
- - The download may take some time, please wait patiently for it to complete.
+ - Open the `Discover` tab on the left sidebar to search for models
+ - Find a model you’d like to use (e.g., Deepseek R1) and click to download
+ - The download may take some time—please be patient
-
+
- ### Step 3: Deploy and Run Models
+ ### Step 3: Deploy and Run the Model
- - Select the downloaded model in the top model selection bar and load the model.
- - Configure the model runtime parameters in the pop-up panel. Refer to the [LM Studio official documentation](https://lmstudio.ai/docs) for detailed parameter settings.
+ - Use the model selector at the top to choose the downloaded model and load it
+ - In the pop-up panel, configure the model’s runtime parameters. For detailed settings, refer to the [LM Studio documentation](https://lmstudio.ai/docs)
-
+
- - Click the `Load Model` button and wait for the model to finish loading and running.
- - Once the model is loaded, you can use it in the chat interface for conversations.
+ - Click the `Load Model` button and wait for the model to fully load and start
+ - Once loaded, you can begin chatting with the model in the built-in interface
- ### Step 4: Enable Local Service
+ ### Step 4: Enable Local API Service
- - If you want to use the model through other programs, you need to start a local API service. Start the service through the `Developer` panel or the software menu. The LM Studio service starts on port `1234` on your local machine by default.
+ - To use the model with other applications, you’ll need to start a local API service. This can be done via the `Developer` panel or from the app menu. By default, LM Studio runs the service on port `1234`
-
+
- - After the local service is started, you also need to enable the `CORS (Cross-Origin Resource Sharing)` option in the service settings so that the model can be used in other programs.
+ - After starting the service, make sure to enable the `CORS (Cross-Origin Resource Sharing)` option in the service settings. This is required for external applications to access the model
-
+
- ### Step 5: Use LM Studio in LobeChat
+ ### Step 5: Connect LM Studio to LobeHub
- - Visit the `AI Service Provider` interface in LobeChat's `Application Settings`.
- - Find the settings for `LM Studio` in the list of providers.
+ - Go to the `App Settings` in LobeHub and open the `AI Service Providers` section
+ - Find and select the `LM Studio` provider from the list
-
+
- - Open the LM Studio service provider and fill in the API service address.
+ - Enable the LM Studio provider and enter the API service address
- If your LM Studio is running locally, make sure to turn on `Client Request Mode`.
+ If LM Studio is running locally, make sure to enable `Client Request Mode`.
- - Add the model you are running in the model list below.
- - Select a Volcano Engine model for your assistant to start the conversation.
+ - Add the model you’re running to the model list below
+ - Choose a model for your assistant and start chatting
-
+
-Now you can use the model running in LM Studio in LobeChat for conversations.
+And that’s it! You’re now ready to use models running in LM Studio directly within LobeHub.
diff --git a/docs/usage/providers/lmstudio.zh-CN.mdx b/docs/usage/providers/lmstudio.zh-CN.mdx
index 23ac7af1a0..394f5488a7 100644
--- a/docs/usage/providers/lmstudio.zh-CN.mdx
+++ b/docs/usage/providers/lmstudio.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 LM Studio
-description: 学习如何配置和使用 LM Studio,并在 LobeChat 中 通过 LM Studio 运行 AI 模型进行对话。
+title: 在 LobeHub 中使用 LM Studio
+description: 学习如何配置和使用 LM Studio,并在 LobeHub 中 通过 LM Studio 运行 AI 模型进行对话。
tags:
- - LobeChat
+ - LobeHub
- LM Studio
- 开源模型
- Web UI
---
-# 在 LobeChat 中使用 LM Studio
+# 在 LobeHub 中使用 LM Studio
-
+
[LM Studio](https://lmstudio.ai/) 是一个用于测试和运行大型语言模型(LLM)的平台,提供了直观易用的界面,适合开发人员和 AI 爱好者使用。它支持在本地电脑上部署和运行各种开源 LLM 模型,例如 Deepseek 或 Qwen,实现离线 AI 聊天机器人的功能,从而保护用户隐私并提供更大的灵活性。
-本文档将指导你如何在 LobeChat 中使用 LM Studio:
+本文档将指导你如何在 LobeHub 中使用 LM Studio:
### 步骤一:获取并安装 LM Studio
@@ -23,7 +23,7 @@ tags:
- 选择你的平台并下载安装包,LM Studio 目前支持 MacOS、Windows 和 Linux 平台
- 按照提示完成安装,运行 LM Studio
-
+
### 步骤二:搜索并下载模型
@@ -31,14 +31,14 @@ tags:
- 找到合适的模型(如 Deepseek R1),点击下载
- 下载可能需要一些时间,耐心等待完成
-
+
### 步骤三:部署并运行模型
- 在顶部的模型选择栏中选择下载好的模型,并加载模型
- 在弹出的面板中配置模型运行参数,详细的参数设置请参考 [LM Studio 官方文档](https://lmstudio.ai/docs)
-
+
- 点击 `加载模型` 按钮,等待模型完成加载并运行
- 模型加载完成后,你可以在聊天界面中使用该模型进行对话
@@ -47,18 +47,18 @@ tags:
- 如果你希望通过其它程序使用该模型,需要启动一个本地 API 服务,通过 `Developer` 面板或软件菜单启动服务,LM Studio 服务默认启动在本机的 `1234` 端口
-
+
- 本地服务启动后,你还需要在服务设置中开启 `CORS(跨域资源共享)`选项,这样才能在其它程序中使用该模型
-
+
- ### 步骤五:在 LobeChat 中使用 LM Studio
+ ### 步骤五:在 LobeHub 中使用 LM Studio
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 `LM Studio` 的设置项
-
+
- 打开 LM Studio 服务商并填入 API 服务地址
@@ -67,7 +67,7 @@ tags:
- 在下方的模型列表中添加你运行的模型
- 为你的助手选择一个火山引擎模型即可开始对话
-
+
-至此你已经可以在 LobeChat 中使用 LM Studio 运行的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 LM Studio 运行的模型进行对话了。
diff --git a/docs/usage/providers/minimax.mdx b/docs/usage/providers/minimax.mdx
index 34902e2d26..ed1dba583f 100644
--- a/docs/usage/providers/minimax.mdx
+++ b/docs/usage/providers/minimax.mdx
@@ -1,60 +1,59 @@
---
-title: Using Minimax API Key in LobeChat
+title: Using the Minimax API Key in LobeHub
description: >-
- Learn how to use MiniMax in LobeChat to enhance AI conversations. Obtain MiniMax API key, configure MiniMax in LobeChat settings, and select a model for your AI assistant.
-
+ Learn how to configure and use the MiniMax AI model in LobeHub. This guide
+ walks you through obtaining a MiniMax API key, detailed setup steps, and
+ starting conversations with the MiniMax model.
tags:
+ - LobeHub
- MiniMax
- - Web UI
- API Key
- - MiniMax Models
+ - Web UI
---
-# Using Minimax in LobeChat
+# Using Minimax in LobeHub
-
+
-[MiniMax](https://www.minimaxi.com/) is a general artificial intelligence technology company founded in 2021, dedicated to co-creating intelligence with users. MiniMax has independently developed universal large models of different modalities, including trillion-parameter MoE text large models, speech large models, and image large models. They have also launched applications like Hai Luo AI.
+[MiniMax](https://www.minimaxi.com/) is a general artificial intelligence company founded in 2021, dedicated to co-creating intelligence with users. MiniMax has independently developed multi-modal general-purpose large models, including trillion-parameter MoE text models, speech models, and image models. It has also launched applications such as Conch AI.
-This document will guide you on how to use Minimax in LobeChat:
+This guide will walk you through how to use MiniMax in LobeHub:
- ### Step 1: Obtain MiniMax API Key
+ ### Step 1: Obtain a MiniMax API Key
- - Register and log in to the [MiniMax Open Platform](https://www.minimaxi.com/platform)
- - In `Account Management`, locate the `API Key` menu and create a new key
+ - Register and log in to the [MiniMax Developer Platform](https://www.minimaxi.com/platform)
+ - Navigate to the `Account Management` section and find the `API Keys` menu to create a new key
-
+
- - Enter a name for the API key and create it
+ - Enter a name for your API key and create it
-
+
- - Copy the API key from the pop-up dialog box and save it securely
+ - Copy the API key from the pop-up dialog and store it securely
-
+
- Please store the key securely as it will only appear once. If you accidentally lose it, you will
- need to create a new key.
+ Make sure to store your API key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure MiniMax in LobeChat
+ ### Step 2: Configure MiniMax in LobeHub
- - Go to the `Settings` interface of LobeChat
- - Find the setting for `MiniMax` under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the `MiniMax` configuration section
-
+
- - Open Minimax and enter the obtained API key
- - Choose a MiniMax model for your AI assistant to start the conversation
+ - Enable MiniMax and paste in your API key
+ - Choose a MiniMax model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to MiniMax's relevant
- pricing policies.
+ Please note that usage may incur charges from the API provider. Refer to MiniMax’s pricing policy for more details.
-You can now use the models provided by MiniMax to have conversations in LobeChat.
+You're all set! You can now start chatting with models powered by MiniMax in LobeHub.
diff --git a/docs/usage/providers/minimax.zh-CN.mdx b/docs/usage/providers/minimax.zh-CN.mdx
index dea7a06f01..88749741c3 100644
--- a/docs/usage/providers/minimax.zh-CN.mdx
+++ b/docs/usage/providers/minimax.zh-CN.mdx
@@ -1,22 +1,22 @@
---
-title: 在 LobeChat 中使用 Minimax API Key
+title: 在 LobeHub 中使用 Minimax API Key
description: >-
- 学习如何在 LobeChat 中配置并使用 MiniMax 智能模型进行对话。获取 MiniMax API 密钥、配置步骤详解,开始与 MiniMax 模型交互。
-
+ 学习如何在 LobeHub 中配置并使用 MiniMax 智能模型进行对话。获取 MiniMax API 密钥、配置步骤详解,开始与 MiniMax
+ 模型交互。
tags:
- - LobeChat
+ - LobeHub
- MiniMax
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Minimax
+# 在 LobeHub 中使用 Minimax
-
+
[MiniMax](https://www.minimaxi.com/) 是 2021 年成立的通用人工智能科技公司,致力于与用户共创智能。MiniMax 自主研发了不同模态的通用大模型,其中包括万亿参数的 MoE 文本大模型、语音大模型以及图像大模型。并推出了海螺 AI 等应用。
-本文档将指导你如何在 LobeChat 中使用 Minimax:
+本文档将指导你如何在 LobeHub 中使用 Minimax:
### 步骤一:获取 MiniMax API 密钥
@@ -24,35 +24,35 @@ tags:
- 注册并登录 [MiniMax 开放平台](https://www.minimaxi.com/platform)
- 在 `账户管理` 中找到 `接口密钥` 菜单,并创建新的密钥
-
+
- 填写一个 API 密钥的名称并创建
-
+
- 在弹出的对话框中复制 API 密钥,并妥善保存
-
+
请安全地存储密钥,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新密钥。
- ### 步骤二:在 LobeChat 中配置 MiniMax
+ ### 步骤二:在 LobeHub 中配置 MiniMax
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`MiniMax`的设置项
-
+
- 打开 Minimax 并填入获得的 API 密钥
- 为你的 AI 助手选择一个 MiniMax 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 MiniMax 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 MiniMax 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 MiniMax 提供的模型进行对话了。
diff --git a/docs/usage/providers/mistral.mdx b/docs/usage/providers/mistral.mdx
index ab4ba1a30a..b46dd90940 100644
--- a/docs/usage/providers/mistral.mdx
+++ b/docs/usage/providers/mistral.mdx
@@ -1,50 +1,47 @@
---
-title: Using Mistral AI API Key in LobeChat
+title: Using Mistral AI API Key in LobeHub
description: >-
- Learn how to integrate Mistral AI into LobeChat for enhanced conversational experiences. Follow the steps to configure Mistral AI and start using its models.
-
+ Learn how to configure and use Mistral AI in LobeHub, including how to obtain
+ an API key and select the appropriate AI model for conversations.
tags:
- - Mistral AI
- Web UI
- - API key
+ - Mistral AI
+ - API Key
---
-# Using Mistral AI in LobeChat
+# Using Mistral AI in LobeHub
-
+
-The Mistral AI API is now available for everyone to use. This document will guide you on how to use [Mistral AI](https://mistral.ai/) in LobeChat:
+The Mistral AI API is now publicly available. This guide will walk you through how to use [Mistral AI](https://mistral.ai/) in LobeHub:
- ### Step 1: Obtain Mistral AI API Key
+ ### Step 1: Get Your Mistral AI API Key
- Create a [Mistral AI](https://mistral.ai/) account
- - Obtain your [API key](https://console.mistral.ai/user/api-keys/)
+ - Retrieve your [API key](https://console.mistral.ai/user/api-keys/)
-
+
- ### Step 2: Configure Mistral AI in LobeChat
+ ### Step 2: Configure Mistral AI in LobeHub
- - Go to the `Settings` interface in LobeChat
- - Find the setting for `Mistral AI` under `AI Service Provider`
+ - Go to the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `Mistral AI` configuration section
-
+
- If you are using mistral.ai, your account must have a valid subscription for the API key to work
- properly. Newly created API keys may take 2-3 minutes to become active. If the "Test" button
- fails, please retry after 2-3 minutes.
+ If you're using mistral.ai, your account must have an active subscription for the API key to function properly. Newly created API keys may take 2–3 minutes to become active. If clicking the "Test" button fails, please try again after a few minutes.
- - Enter the obtained API key
- - Choose a Mistral AI model for your AI assistant to start the conversation
+ - Paste your API key into the input field
+ - Choose a Mistral AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Mistral AI's relevant
- pricing policies.
+ You may incur charges from the API provider during usage. Please refer to Mistral AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Mistral AI in LobeChat.
+You're all set! You can now start chatting with models powered by Mistral AI in LobeHub.
diff --git a/docs/usage/providers/mistral.zh-CN.mdx b/docs/usage/providers/mistral.zh-CN.mdx
index 799637a8d8..605f6cb80f 100644
--- a/docs/usage/providers/mistral.zh-CN.mdx
+++ b/docs/usage/providers/mistral.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: 在 LobeChat 中使用 Mistral AI API Key
-description: 学习如何在 LobeChat 中配置并使用 Mistral AI,包括获取 API 密钥和选择适合的 AI 模型进行对话。
+title: 在 LobeHub 中使用 Mistral AI API Key
+description: 学习如何在 LobeHub 中配置并使用 Mistral AI,包括获取 API 密钥和选择适合的 AI 模型进行对话。
tags:
- Web UI
- Mistral AI
- API Key
---
-# 在 LobeChat 中使用 Mistral AI
+# 在 LobeHub 中使用 Mistral AI
-
+
-Mistral AI API 现在可供所有人使用,本文档将指导你如何在 LobeChat 中使用 [Mistral AI](https://mistral.ai/):
+Mistral AI API 现在可供所有人使用,本文档将指导你如何在 LobeHub 中使用 [Mistral AI](https://mistral.ai/):
### 步骤一:获取 Mistral AI API 密钥
@@ -19,14 +19,14 @@ Mistral AI API 现在可供所有人使用,本文档将指导你如何在 Lobe
- 创建一个 [Mistral AI](https://mistral.ai/) 帐户
- 获取您的 [API 密钥](https://console.mistral.ai/user/api-keys/)
-
+
- ### 步骤二:在 LobeChat 中配置 Mistral AI
+ ### 步骤二:在 LobeHub 中配置 Mistral AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Mistral AI`的设置项
-
+
如果您使用的是 mistral.ai,则您的帐户必须具有有效的订阅才能使 API 密钥正常工作。新创建的 API
@@ -36,11 +36,11 @@ Mistral AI API 现在可供所有人使用,本文档将指导你如何在 Lobe
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Mistral AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Mistral AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Mistral AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Mistral AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/modelscope.mdx b/docs/usage/providers/modelscope.mdx
index 665d0b5bd9..4cac01ee0b 100644
--- a/docs/usage/providers/modelscope.mdx
+++ b/docs/usage/providers/modelscope.mdx
@@ -1,33 +1,33 @@
---
-title: ModelScope Provider Setup
-description: Learn how to configure and use ModelScope provider in LobeChat
+title: ModelScope Provider Configuration
+description: Learn how to configure and use the ModelScope provider in LobeHub
tags:
- ModelScope
---
-# ModelScope Provider Setup
+# ModelScope Provider Configuration
-ModelScope (魔塔社区) is Alibaba's open-source model community that provides access to various AI models. This guide will help you set up the ModelScope provider in LobeChat.
+ModelScope is an open-source AI model community by Alibaba, offering access to a wide range of AI models. This guide will walk you through setting up the ModelScope provider in LobeHub.
## Prerequisites
-Before using ModelScope API, you need to:
+Before using the ModelScope API, you need to:
1. **Create a ModelScope Account**
- Visit [ModelScope](https://www.modelscope.cn/)
- - Register for an account
+ - Sign up for an account
-2. **Bind Alibaba Cloud Account**
+2. **Link Your Alibaba Cloud Account**
- - **Important**: ModelScope API requires binding with an Alibaba Cloud account
- - Visit your [ModelScope Access Token page](https://www.modelscope.cn/my/myaccesstoken)
- - Follow the instructions to bind your Alibaba Cloud account
+ - **Important**: Accessing the ModelScope API requires linking your Alibaba Cloud account
+ - Go to your [ModelScope Access Token Page](https://www.modelscope.cn/my/myaccesstoken)
+ - Follow the instructions to link your Alibaba Cloud account
- This step is mandatory for API access
-3. **Get API Token**
- - After binding your Alibaba Cloud account, generate an API token
- - Copy the token for use in LobeChat
+3. **Obtain an API Token**
+ - After linking your Alibaba Cloud account, generate an API token
+ - Copy the token for use in LobeHub
## Configuration
@@ -45,13 +45,13 @@ MODELSCOPE_API_KEY=your_modelscope_api_token
# Optional: Custom model list (comma-separated)
MODELSCOPE_MODEL_LIST=deepseek-ai/DeepSeek-V3-0324,Qwen/Qwen3-235B-A22B
-# Optional: Proxy URL if needed
+# Optional: Proxy URL (if needed)
MODELSCOPE_PROXY_URL=https://your-proxy-url
```
### Docker Configuration
-If using Docker, add the ModelScope environment variables to your `docker-compose.yml`:
+If you're using Docker, add the ModelScope environment variables to your `docker-compose.yml`:
```yaml
environment:
@@ -62,7 +62,7 @@ environment:
## Available Models
-ModelScope provides access to various models including:
+ModelScope provides access to a variety of models, including:
- **DeepSeek Models**: DeepSeek-V3, DeepSeek-R1 series
- **Qwen Models**: Qwen3 series, Qwen2.5 series
@@ -73,25 +73,25 @@ ModelScope provides access to various models including:
### Common Issues
-1. **"Please bind your Alibaba Cloud account before use" Error**
+1. **"Please bind your Alibaba Cloud account before use" error**
- - This means you haven't bound your Alibaba Cloud account to ModelScope
- - Visit [ModelScope Access Token page](https://www.modelscope.cn/my/myaccesstoken)
- - Complete the Alibaba Cloud account binding process
+ - This means your Alibaba Cloud account is not yet linked to ModelScope
+ - Visit the [ModelScope Access Token Page](https://www.modelscope.cn/my/myaccesstoken)
+ - Complete the account linking process
-2. **401 Authentication Error**
+2. **401 Unauthorized Error**
- Check if your API token is correct
- - Ensure the token hasn't expired
- - Verify that your Alibaba Cloud account is properly bound
+ - Ensure the token has not expired
+ - Verify that your Alibaba Cloud account is properly linked
-3. **Model Not Available**
+3. **Model Unavailable**
- Some models may require additional permissions
- Check the model's page on ModelScope for access requirements
### Debug Mode
-Enable debug mode to see detailed logs:
+Enable debug mode to view detailed logs:
```bash
DEBUG_MODELSCOPE_CHAT_COMPLETION=1
@@ -99,19 +99,39 @@ DEBUG_MODELSCOPE_CHAT_COMPLETION=1
## Notes
-- ModelScope API is compatible with OpenAI API format
-- The service is primarily designed for users in China
-- Some models may have usage limitations or require additional verification
-- API responses are in Chinese by default for some models
+- The ModelScope API is compatible with the OpenAI API format
+- This service is primarily designed for users in China
+- Some models may have usage restrictions or require additional verification
+- API responses for certain models may default to Chinese
## Support
For ModelScope-specific issues:
-- Visit [ModelScope Documentation](https://www.modelscope.cn/docs)
-- Check [ModelScope Community](https://www.modelscope.cn/community)
+- Visit the [ModelScope Documentation](https://www.modelscope.cn/docs)
+- Check out the [ModelScope Community](https://www.modelscope.cn/community)
-For LobeChat integration issues:
+For LobeHub integration issues:
-- Check our [GitHub Issues](https://github.com/lobehub/lobe-chat/issues)
+- Browse our [GitHub Issues](https://github.com/lobehub/lobe-chat/issues)
- Join our community discussions
+
+## Model ID Format
+
+ModelScope uses a namespace-prefixed format for model IDs, for example:
+
+```
+deepseek-ai/DeepSeek-V3-0324
+deepseek-ai/DeepSeek-R1-0528
+Qwen/Qwen3-235B-A22B
+Qwen/Qwen3-32B
+```
+
+When configuring the model list, be sure to use the full model ID format.
+
+## API Limitations
+
+- The ModelScope API is subject to rate limits
+- Some models may require special permissions
+- It's recommended to monitor API usage in production environments
+- Certain advanced models may require a paid subscription to access
diff --git a/docs/usage/providers/modelscope.zh-CN.mdx b/docs/usage/providers/modelscope.zh-CN.mdx
index e508f9195e..b55f3fc037 100644
--- a/docs/usage/providers/modelscope.zh-CN.mdx
+++ b/docs/usage/providers/modelscope.zh-CN.mdx
@@ -1,13 +1,13 @@
---
title: ModelScope 提供商配置
-description: 学习如何在 LobeChat 中配置和使用 ModelScope 提供商
+description: 学习如何在 LobeHub 中配置和使用 ModelScope 提供商
tags:
- ModelScope
---
# ModelScope 提供商配置
-ModelScope(魔塔社区)是阿里巴巴的开源模型社区,提供各种 AI 模型的访问服务。本指南将帮助您在 LobeChat 中设置 ModelScope 提供商。
+ModelScope(魔搭社区)是阿里巴巴的开源模型社区,提供各种 AI 模型的访问服务。本指南将帮助您在 LobeHub 中设置 ModelScope 提供商。
## 前置条件
@@ -27,7 +27,7 @@ ModelScope(魔塔社区)是阿里巴巴的开源模型社区,提供各种
3. **获取 API 令牌**
- 绑定阿里云账户后,生成 API 令牌
- - 复制令牌以在 LobeChat 中使用
+ - 复制令牌以在 LobeHub 中使用
## 配置
@@ -111,7 +111,7 @@ DEBUG_MODELSCOPE_CHAT_COMPLETION=1
- 访问 [ModelScope 文档](https://www.modelscope.cn/docs)
- 查看 [ModelScope 社区](https://www.modelscope.cn/community)
-对于 LobeChat 集成问题:
+对于 LobeHub 集成问题:
- 查看我们的 [GitHub Issues](https://github.com/lobehub/lobe-chat/issues)
- 加入我们的社区讨论
diff --git a/docs/usage/providers/moonshot.mdx b/docs/usage/providers/moonshot.mdx
index d9f95ff924..deda8c178c 100644
--- a/docs/usage/providers/moonshot.mdx
+++ b/docs/usage/providers/moonshot.mdx
@@ -1,49 +1,46 @@
---
-title: Using Moonshot AI API Key in LobeChat
+title: Using Moonshot AI API Key in LobeHub
description: >-
- Learn how to integrate Moonshot AI into LobeChat for AI-powered conversations. Follow the steps to get the API key, configure Moonshot AI, and start engaging with AI models.
-
+ Learn how to configure and use Moonshot AI in LobeHub, including how to obtain
+ an API key and select the appropriate AI model for conversations.
tags:
- Moonshot AI
- Web UI
- API Key
---
-# Using Moonshot AI in LobeChat
+# Using Moonshot AI in LobeHub
-
+
-The Moonshot AI API is now available for everyone to use. This document will guide you on how to use [Moonshot AI](https://www.moonshot.cn/) in LobeChat:
+The Moonshot AI API is now available to everyone. This guide will walk you through how to use [Moonshot AI](https://www.moonshot.cn/) in LobeHub:
- ### Step 1: Get Moonshot AI API Key
+ ### Step 1: Get Your Moonshot AI API Key
- - Apply for your [API key](https://platform.moonshot.cn/console/api-keys)
+ - Apply for your [API Key](https://platform.moonshot.cn/console/api-keys)
-
+
- ### Step 2: Configure Moonshot AI in LobeChat
+ ### Step 2: Configure Moonshot AI in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `Moonshot AI` under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `Moonshot AI`
-
+
- If you are using mistral.ai, your account must have a valid subscription for the API key to work
- properly. Newly created API keys may take 2-3 minutes to become active. If the "Test" button
- fails, please retry after 2-3 minutes.
+  If you're using Moonshot AI, your account must have an active subscription for the API key to work properly. Newly created API keys may take 2–3 minutes to become active. If clicking the "Test" button fails, please try again after a few minutes.
- - Enter the API key you obtained
- - Choose a Moonshot AI model for your AI assistant to start the conversation
+ - Paste your API key into the input field
+ - Choose a Moonshot AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider according to Moonshot AI's related
- pricing policies.
+ You may be required to pay for API usage. Please refer to Moonshot AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Moonshot AI in LobeChat.
+That's it! You're now ready to start chatting with models powered by Moonshot AI in LobeHub.
diff --git a/docs/usage/providers/moonshot.zh-CN.mdx b/docs/usage/providers/moonshot.zh-CN.mdx
index 30f3027634..8d1366be41 100644
--- a/docs/usage/providers/moonshot.zh-CN.mdx
+++ b/docs/usage/providers/moonshot.zh-CN.mdx
@@ -1,31 +1,31 @@
---
-title: 在 LobeChat 中使用 Moonshot(月之暗面) AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Moonshot AI,包括获取 API 密钥和选择适合的 AI 模型进行对话。
+title: 在 LobeHub 中使用 Moonshot(月之暗面) AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Moonshot AI,包括获取 API 密钥和选择适合的 AI 模型进行对话。
tags:
- Moonshot AI
- Web UI
- API Key
---
-# 在 LobeChat 中使用 Moonshot AI
+# 在 LobeHub 中使用 Moonshot AI
-
+
-Moonshot AI API 现在可供所有人使用,本文档将指导你如何在 LobeChat 中使用 [Moonshot AI](https://www.moonshot.cn/):
+Moonshot AI API 现在可供所有人使用,本文档将指导你如何在 LobeHub 中使用 [Moonshot AI](https://www.moonshot.cn/):
### 步骤一:获取 Moonshot AI API 密钥
- 申请您的 [API 密钥](https://platform.moonshot.cn/console/api-keys)
-
+
- ### 步骤二:在 LobeChat 中配置 Moonshot AI
+ ### 步骤二:在 LobeHub 中配置 Moonshot AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Moonshot AI`的设置项
-
+
如果您使用的是 mistral.ai,则您的帐户必须具有有效的订阅才能使 API 密钥正常工作。新创建的 API
@@ -35,11 +35,11 @@ Moonshot AI API 现在可供所有人使用,本文档将指导你如何在 Lob
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Moonshot AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Moonshot AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Moonshot AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Moonshot AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/novita.mdx b/docs/usage/providers/novita.mdx
index e8b3fbca47..6229fc1df5 100644
--- a/docs/usage/providers/novita.mdx
+++ b/docs/usage/providers/novita.mdx
@@ -1,8 +1,10 @@
---
-title: Using Novita AI API Key in LobeChat
+title: Using Novita AI API Key in LobeHub
description: >-
- Learn how to integrate Novita AI's language model APIs into LobeChat. Follow the steps to register, create an Novita AI API key, configure settings, and chat with our various AI models.
-
+ Learn how to integrate Novita AI's large language model API into LobeHub.
+ Follow the steps to register a Novita AI account, create an API key, add
+ credits, and configure it in LobeHub. Start chatting with a variety of AI
+ models.
tags:
- Novita AI
- Llama3
@@ -12,45 +14,44 @@ tags:
- Web UI
---
-# Using Novita AI in LobeChat
+# Using Novita AI in LobeHub
-
+
-[Novita AI](https://novita.ai/) is an AI API platform that provides a variety of LLM and image generation APIs, supporting Llama3 (8B, 70B), Mistral, and many other cutting-edge models. We offer a variety of censored and uncensored models to meet your different needs.
+[Novita AI](https://novita.ai/) is an AI API platform offering a variety of large language models and AI image generation services. It supports models like Llama3 (8B, 70B), Mistral, and other cutting-edge models.
-This document will guide you on how to integrate Novita AI in LobeChat:
+This guide will walk you through how to use Novita AI in LobeHub:
- ### Step 1: Register and Log in to Novita AI
+ ### Step 1: Sign Up and Log In to Novita AI
- Visit [Novita.ai](https://novita.ai/) and create an account
- - You can log in with your Google or Github account
- - Upon registration, Novita AI will provide a $0.5 credit.
+ - You can sign in using your Google or GitHub account
+ - Upon registration, Novita AI grants you $0.50 in free credits
-
+
- ### Step 2: Obtain the API Key
+ ### Step 2: Create an API Key
- - Visit Novita AI's [key management page](https://novita.ai/dashboard/key), create and copy an API Key.
+ - Go to Novita AI’s [API Key Management page](https://novita.ai/dashboard/key), then create and copy your API key
-
+
- ### Step 3: Configure Novita AI in LobeChat
+ ### Step 3: Configure Novita AI in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `novita.ai` under `AI Service Provider`
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, find the `novita.ai` section
+ - Enable novita.ai and paste in your API key
-
+
- - Open novita.ai and enter the obtained API key
- - Choose a Novita AI model for your assistant to start the conversation
+ - Choose a Novita AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Novita AI's pricing
- policy.
+ You may need to pay for API usage. Please refer to Novita AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Novita AI in LobeChat.
+That’s it! You’re now ready to chat with Novita AI models directly in LobeHub.
diff --git a/docs/usage/providers/novita.zh-CN.mdx b/docs/usage/providers/novita.zh-CN.mdx
index 1112894d83..328602898c 100644
--- a/docs/usage/providers/novita.zh-CN.mdx
+++ b/docs/usage/providers/novita.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用 Novita AI API Key
+title: 在 LobeHub 中使用 Novita AI API Key
description: >-
- 学习如何将 Novita AI 的大语言模型 API 集成到 LobeChat 中。跟随以下步骤注册 Novita AI 账号、创建 API Key、充值信用额度并在 LobeChat 中进行设置。并与我们的多种 AI 模型交谈。
-
+ 学习如何将 Novita AI 的大语言模型 API 集成到 LobeHub 中。跟随以下步骤注册 Novita AI 账号、创建 API
+ Key、充值信用额度并在 LobeHub 中进行设置。并与我们的多种 AI 模型交谈。
tags:
- Novita AI
- Llama3
@@ -12,13 +12,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 Novita AI
+# 在 LobeHub 中使用 Novita AI
-
+
[Novita AI](https://novita.ai/) 是一个 AI API 平台,它提供多种大语言模型与 AI 图像生成的 API 服务。支持 Llama3 (8B, 70B),Mistral 和其他最新的模型。
-本文档将指导你如何在 LobeChat 中使用 Novita AI:
+本文档将指导你如何在 LobeHub 中使用 Novita AI:
### 步骤一:注册 Novita AI 账号并登录
@@ -27,29 +27,29 @@ tags:
- 你可以使用 Google 或者 Github 账号登录
- 注册后,Novita AI 会赠送 0.5 美元的使用额度
-
+
### 步骤二:创建 API 密钥
- 访问 Novita AI 的 [密钥管理页面](https://novita.ai/dashboard/key) ,创建并且复制一个 API 密钥.
-
+
- ### 步骤三:在 LobeChat 中配置 Novita AI
+ ### 步骤三:在 LobeHub 中配置 Novita AI
- - 访问 LobeChat 的 `设置` 界面
+ - 访问 LobeHub 的 `设置` 界面
- 在 `AI 服务商` 下找到 `novita.ai` 的设置项
- 打开 novita.ai 并填入获得的 API 密钥
-
+
- 为你的助手选择一个 Novita AI 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Novita AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Novita AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Novita AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/nvidia.mdx b/docs/usage/providers/nvidia.mdx
index 06721b49ad..c92aa4cabc 100644
--- a/docs/usage/providers/nvidia.mdx
+++ b/docs/usage/providers/nvidia.mdx
@@ -1,56 +1,57 @@
---
-title: Using Nvidia NIM API Key in LobeChat
-description: Learn how to configure and use Nvidia NIM AI models in LobeChat, obtain an API key, and start a conversation.
+title: Using Nvidia NIM API Key in LobeHub
+description: >-
+ Learn how to configure and use Nvidia NIM AI models in LobeHub, obtain your
+ API key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Nvidia NIM
- API Key
- Web UI
---
-# Using Nvidia NIM in LobeChat
+# Using Nvidia NIM in LobeHub
-
+
-[NVIDIA NIM](https://developer.nvidia.com/nim) is part of NVIDIA AI Enterprise and is designed to accelerate the deployment of generative AI applications through microservices. It provides a set of easy-to-use inference microservices that can run on any cloud, data center, or workstation, supporting NVIDIA GPU acceleration.
+[NVIDIA NIM](https://developer.nvidia.com/nim) is part of NVIDIA AI Enterprise, designed to accelerate the deployment of generative AI applications through microservices. It offers a set of easy-to-use inference microservices that can run on any cloud, data center, or workstation, with support for NVIDIA GPU acceleration.
-This document will guide you on how to access and use AI models provided by Nvidia NIM in LobeChat:
+This guide will walk you through how to integrate and use AI models provided by Nvidia NIM in LobeHub:
- ### Step 1: Obtain Nvidia NIM API Key
+ ### Step 1: Obtain Your Nvidia NIM API Key
- - First, visit the [Nvidia NIM console](https://build.nvidia.com/explore/discover) and complete the registration and login.
- - On the `Models` page, select the model you need, such as Deepseek-R1.
+ - First, visit the [Nvidia NIM Console](https://build.nvidia.com/explore/discover) and complete the registration and login process.
+ - On the `Models` page, select the model you want to use, such as Deepseek-R1.
-
+
- - On the model details page, click "Build with this NIM".
+ - On the model details page, click on `Build with this NIM`.
- In the pop-up dialog, click the `Generate API Key` button.
-
+
- - Copy and save the created API Key.
+ - Copy and securely save the generated API key.
- Please store the key securely as it will only appear once. If you accidentally lose it, you will
- need to create a new key.
+ Make sure to store your API key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure Nvidia NIM in LobeChat
+ ### Step 2: Configure Nvidia NIM in LobeHub
- - Visit the `Application Settings` -> `AI Service Provider` interface in LobeChat.
- - Find the settings item for `Nvidia NIM` in the list of providers.
+ - Go to the `App Settings` section in LobeHub and navigate to `AI Service Providers`.
+ - Find the `Nvidia NIM` option in the list of providers.
-
+
- - Enable the Nvidia NIM service provider and fill in the obtained API key.
- - Select an Nvidia NIM model for your assistant and start the conversation.
+ - Enable the Nvidia NIM provider and paste in your API key.
+ - Choose a Nvidia NIM model for your assistant to start chatting.
-
+
- You may need to pay the API service provider during use, please refer to Nvidia NIM's related fee policies.
+ You may incur charges from the API service provider during usage. Please refer to Nvidia NIM’s pricing policy for details.
-Now you can use the models provided by Nvidia NIM to have conversations in LobeChat.
+That’s it! You’re now ready to use Nvidia NIM-powered models in LobeHub for your conversations.
diff --git a/docs/usage/providers/nvidia.zh-CN.mdx b/docs/usage/providers/nvidia.zh-CN.mdx
index ae6ab12caf..3de83fc828 100644
--- a/docs/usage/providers/nvidia.zh-CN.mdx
+++ b/docs/usage/providers/nvidia.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Nvidia NIM API Key
-description: 学习如何在 LobeChat 中配置和使用 Nvidia NIM AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 Nvidia NIM API Key
+description: 学习如何在 LobeHub 中配置和使用 Nvidia NIM AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- Nvidia NIM
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Nvidia NIM
+# 在 LobeHub 中使用 Nvidia NIM
-
+
[NVIDIA NIM](https://developer.nvidia.com/nim) 是 NVIDIA AI Enterprise 的一部分,旨在通过微服务加速生成式 AI 应用的部署。它提供了一组易于使用的推理微服务,可以在任何云、数据中心或工作站上运行,支持 NVIDIA GPU 加速。
-本文档将指导你如何在 LobeChat 中接入并使用 Nvidia NIM 提供的 AI 模型:
+本文档将指导你如何在 LobeHub 中接入并使用 Nvidia NIM 提供的 AI 模型:
### 步骤一:获取 Nvidia NIM API 密钥
@@ -22,12 +22,12 @@ tags:
- 首先,访问[Nvidia NIM 控制台](https://build.nvidia.com/explore/discover)并完成注册登录
- 在 `Models` 页面选择你需要的模型,例如 Deepseek-R1
-
+
- 在模型详情页点击`使用此NIM构建`
- 在弹出的对话框中点击`生成 API Key` 按钮
-
+
- 复制并保存创建好的 API Key
@@ -35,21 +35,21 @@ tags:
请安全地存储密钥,因为它只会出现一次。如果你意外丢失它,您将需要创建一个新密钥。
- ### 步骤二:在 LobeChat 中配置 Nvidia NIM
+ ### 步骤二:在 LobeHub 中配置 Nvidia NIM
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 ` Nvidia NIM` 的设置项
-
+
- 打开 Nvidia NIM 服务商并填入获取的 API 密钥
- 为你的助手选择一个 Nvidia NIM 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Nvidia NIM 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Nvidia NIM 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Nvidia NIM 提供的模型进行对话了。
diff --git a/docs/usage/providers/ollama.mdx b/docs/usage/providers/ollama.mdx
index a1c9233388..5250c77e2d 100644
--- a/docs/usage/providers/ollama.mdx
+++ b/docs/usage/providers/ollama.mdx
@@ -1,98 +1,98 @@
---
-title: Using Ollama in LobeChat
+title: Using Ollama in LobeHub
description: >-
- Learn how to use Ollama in LobeChat, run LLM locally, and experience cutting-edge AI usage.
-
+ Learn how to use Ollama in LobeHub to run large language models locally and
+ experience cutting-edge AI capabilities.
tags:
- Ollama
- - Local LLM
- - Ollama WebUI
- Web UI
- API Key
+ - Local LLM
+ - Ollama WebUI
---
-# Using Ollama in LobeChat
+# Using Ollama in LobeHub
-
+
-Ollama is a powerful framework for running large language models (LLMs) locally, supporting various language models including Llama 2, Mistral, and more. Now, LobeChat supports integration with Ollama, meaning you can easily enhance your application by using the language models provided by Ollama in LobeChat.
+Ollama is a powerful framework for running large language models (LLMs) locally. It supports a variety of models, including Llama 2, Mistral, and more. LobeHub now integrates seamlessly with Ollama, allowing you to leverage these models directly within your chat interface.
-This document will guide you on how to use Ollama in LobeChat:
+This guide will walk you through how to use Ollama in LobeHub:
-
+
## Using Ollama on macOS
- ### Local Installation of Ollama
+ ### Install Ollama Locally
- [Download Ollama for macOS](https://ollama.com/download?utm_source=lobehub\&utm_medium=docs\&utm_campaign=download-macos) and unzip/install it.
+ [Download Ollama for macOS](https://ollama.com/download?utm_source=lobehub\&utm_medium=docs\&utm_campaign=download-macos), then unzip and install it.
### Configure Ollama for Cross-Origin Access
- Due to Ollama's default configuration, which restricts access to local only, additional environment variable setting `OLLAMA_ORIGINS` is required for cross-origin access and port listening. Use `launchctl` to set the environment variable:
+ By default, Ollama only allows local access. To enable cross-origin access and port listening, set the `OLLAMA_ORIGINS` environment variable using `launchctl`:
```bash
launchctl setenv OLLAMA_ORIGINS "*"
```
- After setting up, restart the Ollama application.
+ After setting the variable, restart the Ollama application.
- ### Conversing with Local Large Models in LobeChat
+ ### Chat with Local LLMs in LobeHub
- Now, you can start conversing with the local LLM in LobeChat.
+ You can now start chatting with local LLMs in LobeHub.
-
+
## Using Ollama on Windows
- ### Local Installation of Ollama
+ ### Install Ollama Locally
[Download Ollama for Windows](https://ollama.com/download?utm_source=lobehub\&utm_medium=docs\&utm_campaign=download-windows) and install it.
### Configure Ollama for Cross-Origin Access
- Since Ollama's default configuration allows local access only, additional environment variable setting `OLLAMA_ORIGINS` is needed for cross-origin access and port listening.
+ By default, Ollama only allows local access. To enable cross-origin access and port listening, set the `OLLAMA_ORIGINS` environment variable.
- On Windows, Ollama inherits your user and system environment variables.
+ On Windows, Ollama inherits your user and system environment variables:
- 1. First, exit the Ollama program by clicking on it in the Windows taskbar.
- 2. Edit system environment variables from the Control Panel.
- 3. Edit or create the Ollama environment variable `OLLAMA_ORIGINS` for your user account, setting the value to `*`.
- 4. Click `OK/Apply` to save and restart the system.
- 5. Run `Ollama` again.
+ 1. Exit Ollama from the system tray.
+ 2. Open the Control Panel and edit system environment variables.
+ 3. Add or edit the `OLLAMA_ORIGINS` variable for your user account and set its value to `*`.
+ 4. Click `OK/Apply` and restart your system.
+ 5. Relaunch Ollama.
- ### Conversing with Local Large Models in LobeChat
+ ### Chat with Local LLMs in LobeHub
- Now, you can start conversing with the local LLM in LobeChat.
+ You can now start chatting with local LLMs in LobeHub.
## Using Ollama on Linux
- ### Local Installation of Ollama
+ ### Install Ollama Locally
- Install using the following command:
+ Run the following command to install:
```bash
curl -fsSL https://ollama.com/install.sh | sh
```
- Alternatively, you can refer to the [Linux manual installation guide](https://github.com/ollama/ollama/blob/main/docs/linux.md).
+ Alternatively, refer to the [manual installation guide for Linux](https://github.com/ollama/ollama/blob/main/docs/linux.md).
### Configure Ollama for Cross-Origin Access
- Due to Ollama's default configuration, which allows local access only, additional environment variable setting `OLLAMA_ORIGINS` is required for cross-origin access and port listening. If Ollama runs as a systemd service, use `systemctl` to set the environment variable:
+ By default, Ollama only allows local access. To enable cross-origin access and port listening, set the `OLLAMA_ORIGINS` environment variable. If Ollama is running as a systemd service, use `systemctl` to configure it:
- 1. Edit the systemd service by calling `sudo systemctl edit ollama.service`:
+ 1. Edit the systemd service with:
```bash
sudo systemctl edit ollama.service
```
- 2. Add `Environment` under `[Service]` for each environment variable:
+ 2. Add the following under the `[Service]` section:
```bash
[Service]
@@ -101,24 +101,24 @@ This document will guide you on how to use Ollama in LobeChat:
```
3. Save and exit.
- 4. Reload `systemd` and restart Ollama:
+ 4. Reload systemd and restart Ollama:
```bash
sudo systemctl daemon-reload
sudo systemctl restart ollama
```
- ### Conversing with Local Large Models in LobeChat
+ ### Chat with Local LLMs in LobeHub
- Now, you can start conversing with the local LLM in LobeChat.
+ You can now start chatting with local LLMs in LobeHub.
-## Deploying Ollama using Docker
+## Using Ollama with Docker
- ### Pulling Ollama Image
+ ### Pull the Ollama Docker Image
- If you prefer using Docker, Ollama provides an official Docker image that you can pull using the following command:
+ If you prefer using Docker, Ollama provides an official image. Pull it with:
```bash
docker pull ollama/ollama
@@ -126,48 +126,48 @@ This document will guide you on how to use Ollama in LobeChat:
### Configure Ollama for Cross-Origin Access
- Since Ollama's default configuration allows local access only, additional environment variable setting `OLLAMA_ORIGINS` is needed for cross-origin access and port listening.
-
- If Ollama runs as a Docker container, you can add the environment variable to the `docker run` command.
+ By default, Ollama only allows local access. To enable cross-origin access and port listening, set the `OLLAMA_ORIGINS` environment variable in your `docker run` command:
```bash
docker run -d --gpus=all -v ollama:/root/.ollama -e OLLAMA_ORIGINS="*" -p 11434:11434 --name ollama ollama/ollama
```
- ### Conversing with Local Large Models in LobeChat
+ ### Chat with Local LLMs in LobeHub
- Now, you can start conversing with the local LLM in LobeChat.
+ You can now start chatting with local LLMs in LobeHub.
## Installing Ollama Models
-Ollama supports various models, which you can view in the [Ollama Library](https://ollama.com/library) and choose the appropriate model based on your needs.
+Ollama supports a wide range of models. You can browse the available models in the [Ollama Library](https://ollama.com/library) and choose the ones that best suit your needs.
-### Installation in LobeChat
+### Install via LobeHub
-In LobeChat, we have enabled some common large language models by default, such as llama3, Gemma, Mistral, etc. When you select a model for conversation, we will prompt you to download that model.
+LobeHub comes pre-configured with popular LLMs like llama3, Gemma, and Mistral. When you select a model for the first time, LobeHub will prompt you to download it.
-
+
-Once downloaded, you can start conversing.
+Once the download is complete, you can start chatting.
-### Pulling Models to Local with Ollama
+### Pull Models via Ollama CLI
-Alternatively, you can install models by executing the following command in the terminal, using llama3 as an example:
+Alternatively, you can install models directly via the terminal. For example, to install llama3:
```bash
ollama pull llama3
```
-
+
## Custom Configuration
-You can find Ollama's configuration options in `Settings` -> `AI Service Provider`, where you can configure Ollama's proxy, model names, etc.
+You can configure Ollama settings in LobeHub under `Settings` -> `AI Providers`. Here, you can set the proxy, model name, and more.
-
+
- Visit [Integrating with Ollama](/docs/self-hosting/examples/ollama) to learn how to deploy
- LobeChat to meet integration needs with Ollama.
+ To learn how to deploy LobeHub with Ollama integration, visit [Integrating with Ollama](/en/docs/self-hosting/examples/ollama).
+
diff --git a/docs/usage/providers/ollama.zh-CN.mdx b/docs/usage/providers/ollama.zh-CN.mdx
index 3d560d00c2..3adf514b0b 100644
--- a/docs/usage/providers/ollama.zh-CN.mdx
+++ b/docs/usage/providers/ollama.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 Ollama
-description: 了解如何在 LobeChat 中使用 Ollama ,在你的本地运行大型语言模型,获得最前沿的 AI 使用体验。
+title: 在 LobeHub 中使用 Ollama
+description: 了解如何在 LobeHub 中使用 Ollama ,在你的本地运行大型语言模型,获得最前沿的 AI 使用体验。
tags:
- Ollama
- Web UI
@@ -9,15 +9,15 @@ tags:
- Ollama WebUI
---
-# 在 LobeChat 中使用 Ollama
+# 在 LobeHub 中使用 Ollama
-
+
-Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支持多种语言模型,包括 Llama 2, Mistral 等。现在,LobeChat 已经支持与 Ollama 的集成,这意味着你可以在 LobeChat 中轻松使用 Ollama 提供的语言模型来增强你的应用。
+Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支持多种语言模型,包括 Llama 2, Mistral 等。现在,LobeHub 已经支持与 Ollama 的集成,这意味着你可以在 LobeHub 中轻松使用 Ollama 提供的语言模型来增强你的应用。
-本文档将指导你如何在 LobeChat 中使用 Ollama:
+本文档将指导你如何在 LobeHub 中使用 Ollama:
-
+
## 在 macOS 下使用 Ollama
@@ -36,11 +36,11 @@ Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支
完成设置后,需要重启 Ollama 应用程序。
- ### 在 LobeChat 中与本地大模型对话
+ ### 在 LobeHub 中与本地大模型对话
- 接下来,你就可以使用 LobeChat 与本地 LLM 对话了。
+ 接下来,你就可以使用 LobeHub 与本地 LLM 对话了。
-
+
## 在 windows 下使用 Ollama
@@ -62,9 +62,9 @@ Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支
4. 点击`OK/应用`保存后重启系统。
5. 重新运行`Ollama`。
- ### 在 LobeChat 中与本地大模型对话
+ ### 在 LobeHub 中与本地大模型对话
- 接下来,你就可以使用 LobeChat 与本地 LLM 对话了。
+ 接下来,你就可以使用 LobeHub 与本地 LLM 对话了。
## 在 linux 下使用 Ollama
@@ -106,9 +106,9 @@ Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支
sudo systemctl restart ollama
```
- ### 在 LobeChat 中与本地大模型对话
+ ### 在 LobeHub 中与本地大模型对话
- 接下来,你就可以使用 LobeChat 与本地 LLM 对话了。
+ 接下来,你就可以使用 LobeHub 与本地 LLM 对话了。
## 使用 docker 部署使用 Ollama
@@ -132,20 +132,20 @@ Ollama 是一款强大的本地运行大型语言模型(LLM)的框架,支
docker run -d --gpus=all -v ollama:/root/.ollama -e OLLAMA_ORIGINS="*" -p 11434:11434 --name ollama ollama/ollama
```
- ### 在 LobeChat 中与本地大模型对话
+ ### 在 LobeHub 中与本地大模型对话
- 接下来,你就可以使用 LobeChat 与本地 LLM 对话了。
+ 接下来,你就可以使用 LobeHub 与本地 LLM 对话了。
## 安装 Ollama 模型
Ollama 支持多种模型,你可以在 [Ollama Library](https://ollama.com/library) 中查看可用的模型列表,并根据需求选择合适的模型。
-### LobeChat 中安装
+### LobeHub 中安装
-在 LobeChat 中,我们默认开启了一些常用的大语言模型,例如 llama3、 Gemma 、 Mistral 等。当你选中模型进行对话时,我们会提示你需要下载该模型。
+在 LobeHub 中,我们默认开启了一些常用的大语言模型,例如 llama3、 Gemma 、 Mistral 等。当你选中模型进行对话时,我们会提示你需要下载该模型。
-
+
下载完成后即可开始对话。
@@ -157,15 +157,15 @@ Ollama 支持多种模型,你可以在 [Ollama Library](https://ollama.com/lib
ollama pull llama3
```
-
+
## 自定义配置
你可以在 `设置` -> `AI 服务商` 中找到 Ollama 的配置选项,你可以在这里配置 Ollama 的代理、模型名称等。
-
+
- 你可以前往 [与 Ollama 集成](/zh/docs/self-hosting/examples/ollama) 了解如何部署 LobeChat
+ 你可以前往 [与 Ollama 集成](/zh/docs/self-hosting/examples/ollama) 了解如何部署 LobeHub
,以满足与 Ollama 的集成需求。
diff --git a/docs/usage/providers/ollama/gemma.mdx b/docs/usage/providers/ollama/gemma.mdx
index 79efe20ee2..650790620f 100644
--- a/docs/usage/providers/ollama/gemma.mdx
+++ b/docs/usage/providers/ollama/gemma.mdx
@@ -1,50 +1,49 @@
---
-title: Using Google Gemma Model in LobeChat
+title: Using the Google Gemma Model in LobeHub
description: >-
- Learn how to integrate and utilize Google Gemma in LobeChat, an open-source large language model, in LobeChat with the help of Ollama. Follow these steps to pull and select the Gemma model for natural language processing tasks.
-
+ Easily perform natural language processing tasks with the Google Gemma model
+ through LobeHub's integration with Ollama. Install Ollama, pull the Gemma
+ model, select it from the model panel, and start chatting.
tags:
- Google Gemma
- - LobeChat
+ - LobeHub
- Ollama
- Natural Language Processing
- - Language Model
+ - Model Selection
---
-# Using Google Gemma Model
+# Using the Google Gemma Model
-
+
-[Gemma](https://blog.google/technology/developers/gemma-open-models/) is an open-source large language model (LLM) from Google, designed to provide a more general and flexible model for various natural language processing tasks. Now, with the integration of LobeChat and [Ollama](https://ollama.com/), you can easily use Google Gemma in LobeChat.
+[Gemma](https://blog.google/technology/developers/gemma-open-models/) is an open-source large language model (LLM) developed by Google. It is designed to be a general-purpose and flexible model for a wide range of natural language processing (NLP) tasks. Now, thanks to LobeHub’s integration with [Ollama](https://ollama.com/), you can easily use Google Gemma directly within LobeHub.
-This document will guide you on how to use Google Gemma in LobeChat:
+This guide will walk you through how to use the Google Gemma model in LobeHub:
- ### Install Ollama locally
+ ### Install Ollama Locally
- First, you need to install Ollama. For the installation process, please refer to the [Ollama usage documentation](/docs/usage/providers/ollama).
+ First, you’ll need to install Ollama. For installation instructions, refer to the [Ollama usage guide](/en/docs/usage/providers/ollama).
- ### Pull Google Gemma model to local using Ollama
+ ### Pull the Google Gemma Model Using Ollama
- After installing Ollama, you can install the Google Gemma model using the following command, using the 7b model as an example:
+ Once Ollama is installed, you can pull the Google Gemma model locally. For example, to pull the 7b model, run the following command:
```bash
ollama pull gemma
```
-
+
- ### Select Gemma model
+ ### Select the Gemma Model
- In the session page, open the model panel and then select the Gemma model.
+ In the chat interface, open the model selection panel and choose the Gemma model.
-
+
- If you do not see the Ollama provider in the model selection panel, please refer to [Integrating
- with Ollama](/docs/self-hosting/examples/ollama) to learn how to enable the Ollama provider in
- LobeChat.
+ If you don’t see the Ollama provider in the model selection panel, refer to the [Ollama Integration Guide](/en/docs/self-hosting/examples/ollama) to learn how to enable the Ollama provider in LobeHub.
-Now, you can start conversing with the local Gemma model using LobeChat.
+You’re all set! You can now start chatting with the local Gemma model directly in LobeHub.
diff --git a/docs/usage/providers/ollama/gemma.zh-CN.mdx b/docs/usage/providers/ollama/gemma.zh-CN.mdx
index 9fe774c0a9..e2d7ecb148 100644
--- a/docs/usage/providers/ollama/gemma.zh-CN.mdx
+++ b/docs/usage/providers/ollama/gemma.zh-CN.mdx
@@ -1,11 +1,11 @@
---
-title: 在 LobeChat 中使用 Google Gemma 模型
+title: 在 LobeHub 中使用 Google Gemma 模型
description: >-
- 通过 LobeChat 与 Ollama 的集成,轻松使用 Google Gemma 模型进行自然语言处理任务。安装 Ollama,拉取 Gemma 模型,选择模型面板中的 Gemma 模型,开始对话。
-
+ 通过 LobeHub 与 Ollama 的集成,轻松使用 Google Gemma 模型进行自然语言处理任务。安装 Ollama,拉取 Gemma
+ 模型,选择模型面板中的 Gemma 模型,开始对话。
tags:
- Google Gemma
- - LobeChat
+ - LobeHub
- Ollama
- 自然语言处理
- 模型选择
@@ -13,11 +13,11 @@ tags:
# 使用 Google Gemma 模型
-
+
-[Gemma](https://blog.google/technology/developers/gemma-open-models/) 是 Google 开源的一款大语言模型(LLM),旨在提供一个更加通用、灵活的模型用于各种自然语言处理任务。现在,通过 LobeChat 与 [Ollama](https://ollama.com/) 的集成,你可以轻松地在 LobeChat 中使用 Google Gemma。
+[Gemma](https://blog.google/technology/developers/gemma-open-models/) 是 Google 开源的一款大语言模型(LLM),旨在提供一个更加通用、灵活的模型用于各种自然语言处理任务。现在,通过 LobeHub 与 [Ollama](https://ollama.com/) 的集成,你可以轻松地在 LobeHub 中使用 Google Gemma。
-本文档将指导你如何在 LobeChat 中使用 Google Gemma:
+本文档将指导你如何在 LobeHub 中使用 Google Gemma:
### 本地安装 Ollama
@@ -32,18 +32,18 @@ tags:
ollama pull gemma
```
-
+
### 选择 Gemma 模型
在会话页面中,选择模型面板打开,然后选择 Gemma 模型。
-
+
如果你没有在模型选择面板中看到 Ollama 服务商,请查阅 [与 Ollama
- 集成](/zh/docs/self-hosting/examples/ollama) 了解如何在 LobeChat 中开启 Ollama 服务商。
+ 集成](/zh/docs/self-hosting/examples/ollama) 了解如何在 LobeHub 中开启 Ollama 服务商。
-接下来,你就可以使用 LobeChat 与本地 Gemma 模型对话了。
+接下来,你就可以使用 LobeHub 与本地 Gemma 模型对话了。
diff --git a/docs/usage/providers/ollama/qwen.mdx b/docs/usage/providers/ollama/qwen.mdx
index 12df710767..44324e8893 100644
--- a/docs/usage/providers/ollama/qwen.mdx
+++ b/docs/usage/providers/ollama/qwen.mdx
@@ -1,54 +1,52 @@
---
-title: Using Local Qwen Model in LobeChat
-description: 通过LobeChat和Ollama的集成,您可以轻松在LobeChat中使用Qwen。本文将指导您如何在LobeChat中使用本地部署版本的Qwen。
+title: Using the Local Qwen Model in LobeHub
+description: >-
+ Easily chat with the locally deployed Qwen model through LobeHub's integration
+ with Ollama. Learn how to install and select the Qwen model.
tags:
- Qwen
- - LobeChat
+ - Qwen Model
+ - LobeHub Integration
- Ollama
- - 本地部署
- - AI大模型
+ - Local Deployment
---
# Using the Local Qwen Model
-
+
-[Qwen](https://github.com/QwenLM/Qwen1.5) is a large language model (LLM) open-sourced by Alibaba Cloud. It is officially defined as a constantly evolving AI large model, and it achieves more accurate Chinese recognition capabilities through more training set content.
+[Qwen](https://github.com/QwenLM/Qwen1.5) is an open-source large language model (LLM) developed by Alibaba Cloud. It is officially described as an evolving AI model that achieves more accurate Chinese language understanding through extensive training data.
-
+
-Now, through the integration of LobeChat and [Ollama](https://ollama.com/), you can easily use Qwen in LobeChat. This document will guide you on how to use the local deployment version of Qwen in LobeChat:
+Now, thanks to LobeHub’s integration with [Ollama](https://ollama.com/), you can easily use the Qwen model locally within LobeHub.
+
+This guide will walk you through how to use the locally deployed Qwen model in LobeHub:
- ## Local Installation of Ollama
+ ### Install Ollama Locally
- First, you need to install Ollama. For the installation process, please refer to the [Ollama Usage Document](/docs/usage/providers/ollama).
+ First, you’ll need to install Ollama. For installation instructions, refer to the [Ollama usage guide](/en/docs/usage/providers/ollama).
- ## Pull the Qwen Model to Local with Ollama
+ ### Pull the Qwen Model Using Ollama
- After installing Ollama, you can install the Qwen model with the following command, taking the 14b model as an example:
+ Once Ollama is installed, you can pull the Qwen model locally. For example, to pull the 14b version of the model, run:
```bash
ollama pull qwen:14b
```
-
- The local version of Qwen provides different model sizes to choose from. Please refer to the
- [Qwen's Ollama integration page](https://ollama.com/library/qwen) to understand how to choose the
- model size.
-
-
-
+
### Select the Qwen Model
- In the LobeChat conversation page, open the model selection panel, and then select the Qwen model.
+ In the chat interface, open the model selection panel and choose the Qwen model.
-
+
- If you do not see the Ollama provider in the model selection panel, please refer to [Integration with Ollama](/docs/self-hosting/examples/ollama) to learn how to enable the Ollama provider in LobeChat.
+ If you don’t see the Ollama provider in the model selection panel, refer to the [Ollama Integration Guide](/en/docs/self-hosting/examples/ollama) to learn how to enable the Ollama provider in LobeHub.
-Next, you can have a conversation with the local Qwen model in LobeChat.
+You’re now ready to start chatting with the local Qwen model in LobeHub.
diff --git a/docs/usage/providers/ollama/qwen.zh-CN.mdx b/docs/usage/providers/ollama/qwen.zh-CN.mdx
index da13ad753c..149cb3b331 100644
--- a/docs/usage/providers/ollama/qwen.zh-CN.mdx
+++ b/docs/usage/providers/ollama/qwen.zh-CN.mdx
@@ -1,25 +1,25 @@
---
-title: 在 LobeChat 中使用本地通义千问 Qwen 模型
-description: 通过 LobeChat 与 Ollama 的集成,轻松在本地部署的通义千问 Qwen 模型中进行对话。学习如何安装和选择 Qwen 模型。
+title: 在 LobeHub 中使用本地通义千问 Qwen 模型
+description: 通过 LobeHub 与 Ollama 的集成,轻松在本地部署的通义千问 Qwen 模型中进行对话。学习如何安装和选择 Qwen 模型。
tags:
- 通义千问
- Qwen模型
- - LobeChat集成
+ - LobeHub集成
- Ollama
- 本地部署
---
# 使用本地通义千问 Qwen 模型
-
+
[通义千问](https://github.com/QwenLM/Qwen1.5) 是阿里云开源的一款大语言模型(LLM),官方定义是一个不断进化的 AI 大模型,并通过更多的训练集内容达到更精准的中文识别能力。
-
+
-现在,通过 LobeChat 与 [Ollama](https://ollama.com/) 的集成,你可以轻松地在 LobeChat 中使用 通义千问。
+现在,通过 LobeHub 与 [Ollama](https://ollama.com/) 的集成,你可以轻松地在 LobeHub 中使用 通义千问。
-本文档将指导你如何在 LobeChat 中使用通义千问本地部署版:
+本文档将指导你如何在 LobeHub 中使用通义千问本地部署版:
### 本地安装 Ollama
@@ -34,18 +34,18 @@ tags:
ollama pull qwen:14b
```
-
+
### 选择 Qwen 模型
在会话页面中,选择模型面板打开,然后选择 Qwen 模型。
-
+
如果你没有在模型选择面板中看到 Ollama 服务商,请查阅 [与 Ollama
- 集成](/zh/docs/self-hosting/examples/ollama) 了解如何在 LobeChat 中开启 Ollama 服务商。
+ 集成](/zh/docs/self-hosting/examples/ollama) 了解如何在 LobeHub 中开启 Ollama 服务商。
-接下来,你就可以使用 LobeChat 与本地 Qwen 模型对话了。
+接下来,你就可以使用 LobeHub 与本地 Qwen 模型对话了。
diff --git a/docs/usage/providers/openai.mdx b/docs/usage/providers/openai.mdx
index 4bd8204644..6c457646e9 100644
--- a/docs/usage/providers/openai.mdx
+++ b/docs/usage/providers/openai.mdx
@@ -1,10 +1,9 @@
---
-title: Using OpenAI API Key in LobeChat
+title: Using OpenAI API Key in LobeHub
description: >-
- Learn how to integrate OpenAI API Key in LobeChat. Support GPT-4o / GPT-4-turbo / GPT-4-vision
-
+ Learn how to configure and use your OpenAI API Key in LobeHub, supporting
+ GPT-4o / GPT-4-turbo / GPT-4-vision
tags:
- - OpenAI
- ChatGPT
- GPT-4
- GPT-4o
@@ -12,43 +11,43 @@ tags:
- Web UI
---
-# Using OpenAI in LobeChat
+# Using OpenAI in LobeHub
-
+
-This document will guide you on how to use [OpenAI](https://openai.com/) in LobeChat:
+This guide will walk you through how to use [OpenAI](https://openai.com/) in LobeHub:
- ### Step 1: Obtain OpenAI API Key
+ ### Step 1: Get Your OpenAI API Key
- - Register for an [OpenAI account](https://platform.openai.com/signup). You will need to register using an international phone number and a non-mainland email address.
+ - Sign up for an [OpenAI account](https://platform.openai.com/signup). You’ll need an international phone number and a non-mainland China email address.
- - After registration, go to the [API Keys](https://platform.openai.com/api-keys) page and click on `Create new secret key` to generate a new API Key.
+ - After signing up, go to the [API Keys](https://platform.openai.com/api-keys) page and click `Create new secret key` to generate a new API key:
- - Open the creation window
+ - Open the key creation window
- - Create API Key
+ - Create your API key
- - Retrieve API Key
+ - Copy and save your API key
- After registering, you generally have a free credit of $5, but it is only valid for three months.
+ New accounts typically receive $5 in free credits, valid for three months.
- ### Step 2: Configure OpenAI in LobeChat
+ ### Step 2: Configure OpenAI in LobeHub
- - Visit the `Settings` page in LobeChat
- - Find the setting for `OpenAI` under `AI Service Provider`
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `OpenAI` section
-
+
- - Enter the obtained API Key
- - Choose an OpenAI model for your AI assistant to start the conversation
+ - Paste your API key into the input field
+ - Choose an OpenAI model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to OpenAI's relevant
- pricing policies.
+ You may incur charges while using the API. Please refer to OpenAI’s pricing policy for details.
-You can now engage in conversations using the models provided by OpenAI in LobeChat.
+That’s it! You’re now ready to use OpenAI models in LobeHub for conversations.
diff --git a/docs/usage/providers/openai.zh-CN.mdx b/docs/usage/providers/openai.zh-CN.mdx
index 1ffd2ce189..dfa7d3628a 100644
--- a/docs/usage/providers/openai.zh-CN.mdx
+++ b/docs/usage/providers/openai.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用 OpenAI API Key
-description: 学习如何在 LobeChat 中配置和使用 OpenAI API Key,支持 GPT-4o / GPT-4-turbo / GPT-4-vision
+title: 在 LobeHub 中使用 OpenAI API Key
+description: 学习如何在 LobeHub 中配置和使用 OpenAI API Key,支持 GPT-4o / GPT-4-turbo / GPT-4-vision
tags:
- ChatGPT
- GPT-4
@@ -9,11 +9,11 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 OpenAI
+# 在 LobeHub 中使用 OpenAI
-
+
-本文档将指导你如何在 LobeChat 中使用 [OpenAI](https://openai.com/):
+本文档将指导你如何在 LobeHub 中使用 [OpenAI](https://openai.com/):
### 步骤一:获取 OpenAI API 密钥
@@ -54,21 +54,21 @@ tags:
账户注册后,一般有 5 美元的免费额度,但有效期只有三个月。
- ### 步骤二:在 LobeChat 中配置 OpenAI
+ ### 步骤二:在 LobeHub 中配置 OpenAI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`OpenAI`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 OpenAI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 OpenAI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 OpenAI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 OpenAI 提供的模型进行对话了。
diff --git a/docs/usage/providers/openrouter.mdx b/docs/usage/providers/openrouter.mdx
index 15e1e79963..d0c6ddb8c0 100644
--- a/docs/usage/providers/openrouter.mdx
+++ b/docs/usage/providers/openrouter.mdx
@@ -1,73 +1,70 @@
---
-title: Using OpenRouter API Key in LobeChat
+title: Using OpenRouter API Key in LobeHub
description: >-
- Learn how to integrate and utilize OpenRouter's language model APIs in LobeChat. Follow these steps to register, create an API key, recharge credit, and configure OpenRouter for seamless conversations.
-
+ Learn how to register, create an API key, add credit, and configure OpenRouter
+ in LobeHub to start using a variety of powerful large language model APIs.
tags:
- OpenRouter
- - LobeChat
- API Key
- Web UI
---
-# Using OpenRouter in LobeChat
+# Using OpenRouter in LobeHub
-
+
-[OpenRouter](https://openrouter.ai/) is a service that provides a variety of excellent large language model APIs, supporting models such as OpenAI (including GPT-3.5/4), Anthropic (Claude2, Instant), LLaMA 2, and PaLM Bison.
+[OpenRouter](https://openrouter.ai/) is a platform that provides access to a wide range of high-quality large language model APIs. It supports models from OpenAI (including GPT-3.5/4), Anthropic (Claude2, Instant), LLaMA 2, PaLM Bison, and more.
-This document will guide you on how to use OpenRouter in LobeChat:
+This guide will walk you through how to use OpenRouter in LobeHub:
- ### Step 1: Register and Log in to OpenRouter
+ ### Step 1: Register and Log In to OpenRouter
- - Visit [OpenRouter.ai](https://openrouter.ai/) and create an account
- - You can log in using your Google account or MetaMask wallet
+ - Visit [OpenRouter.ai](https://openrouter.ai/) and create an account.
+ - You can log in using your Google account or MetaMask wallet.
-
+
### Step 2: Create an API Key
- - Go to the `Keys` menu or visit [OpenRouter Keys](https://openrouter.ai/keys) directly
- - Click on `Create Key` to start the creation process
- - Name your API key in the pop-up dialog, for example, "LobeChat Key"
- - Leave the `Credit limit` blank to indicate no amount limit
+ - Go to the `Keys` section or visit [OpenRouter Keys](https://openrouter.ai/keys) directly.
+ - Click `Create Key` to begin.
+ - In the pop-up dialog, give your API key a name, such as "LobeHub Key".
+ - Leave the `Credit limit` field blank if you don’t want to set a spending cap.
-
+
- - Copy the API key from the pop-up dialog and save it securely
+ - Copy the API key from the dialog and store it securely.
-
+
- Please store the key securely as it will only appear once. If you lose it accidentally, you will
- need to create a new key.
+ Make sure to store your API key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 3: Recharge Credit
+ ### Step 3: Add Credit to Your Account
- - Go to the `Credit` menu or visit [OpenRouter Credit](https://openrouter.ai/credits) directly
- - Click on `Manage Credits` to recharge your credit, you can check model prices at [https://openrouter.ai/models](https://openrouter.ai/models)
- - OpenRouter provides some free models that can be used without recharging
+ - Go to the `Credit` section or visit [OpenRouter Credit](https://openrouter.ai/credits).
+ - Click `Manage Credits` to add funds. You can view model pricing at [https://openrouter.ai/models](https://openrouter.ai/models).
+ - OpenRouter also offers some free models that can be used without adding credit.
-
+
- ### Step 4: Configure OpenRouter in LobeChat
+ ### Step 4: Configure OpenRouter in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `OpenRouter` under `AI Service Provider`
- - Enable OpenRouter and enter the API key you obtained
+ - Open the `Settings` panel in LobeHub.
+ - Under `AI Providers`, find the `OpenRouter` section.
+ - Enable OpenRouter and paste in your API key.
-
+
- - Choose an OpenRouter model for your assistant to start the conversation
+ - Choose an OpenRouter model for your assistant to start chatting.
-
+
- You may need to pay the API service provider during usage, please refer to OpenRouter's relevant
- fee policies.
+ You may incur charges when using API services. Please refer to OpenRouter’s pricing policy for more details.
-You can now engage in conversations using the models provided by OpenRouter in LobeChat.
+You’re now ready to start using OpenRouter’s models in LobeHub for conversations.
diff --git a/docs/usage/providers/openrouter.zh-CN.mdx b/docs/usage/providers/openrouter.zh-CN.mdx
index bb0b368c1f..daadd5bbbe 100644
--- a/docs/usage/providers/openrouter.zh-CN.mdx
+++ b/docs/usage/providers/openrouter.zh-CN.mdx
@@ -1,19 +1,19 @@
---
-title: 在 LobeChat 中使用 OpenRouter API Key
-description: 学习如何在 LobeChat 中注册、创建 API Key、充值信用额度并配置 OpenRouter,以便开始使用多种优秀大语言模型 API。
+title: 在 LobeHub 中使用 OpenRouter API Key
+description: 学习如何在 LobeHub 中注册、创建 API Key、充值信用额度并配置 OpenRouter,以便开始使用多种优秀大语言模型 API。
tags:
- OpenRouter
- API Key
- Web UI
---
-# 在 LobeChat 中使用 OpenRouter
+# 在 LobeHub 中使用 OpenRouter
-
+
[OpenRouter](https://openrouter.ai/) 是一个提供多种优秀大语言模型 API 的服务,它支持 OpenAI (包括 GPT-3.5/4)、Anthropic (Claude2、Instant)、LLaMA 2 和 PaLM Bison 等众多模型。
-本文档将指导你如何在 LobeChat 中使用 OpenRouter:
+本文档将指导你如何在 LobeHub 中使用 OpenRouter:
### 步骤一:注册 OpenRouter 账号并登录
@@ -21,20 +21,20 @@ tags:
- 访问 [OpenRouter.ai](https://openrouter.ai/) 并创建一个账号
- 你可以用 Google 账号或 MetaMask 钱包登录
-
+
### 步骤二:创建 API 密钥
- 进入 `Keys` 菜单或直接访问 [OpenRouter Keys](https://openrouter.ai/keys)
- 点击 `Create Key` 开始创建
- - 在弹出对话框中为 API 密钥取一个名字,例如 "LobeChat Key"
+ - 在弹出对话框中为 API 密钥取一个名字,例如 "LobeHub Key"
- 留空 `Credit limit` 表示不设置金额限制
-
+
- 在弹出的对话框中复制 API 密钥,并妥善保存
-
+
请安全地存储密钥,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新密钥。
@@ -46,23 +46,23 @@ tags:
- 点击 `Manage Credits` 充值信用额度,在 [https://openrouter.ai/models](https://openrouter.ai/models) 中可以查看模型价格
- OpenRouter 提供了一些免费模型,未充值的情况下可以使用
-
+
- ### 步骤四:在 LobeChat 中配置 OpenRouter
+ ### 步骤四:在 LobeHub 中配置 OpenRouter
- - 访问 LobeChat 的 `设置` 界面
+ - 访问 LobeHub 的 `设置` 界面
- 在 `AI 服务商` 下找到 `OpenRouter` 的设置项
- 打开 OpenRouter 并填入获得的 API 密钥
-
+
- 为你的助手选择一个 OpenRouter 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 OpenRouter 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 OpenRouter 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 OpenRouter 提供的模型进行对话了。
diff --git a/docs/usage/providers/perplexity.mdx b/docs/usage/providers/perplexity.mdx
index c4953135e1..1c31a382dd 100644
--- a/docs/usage/providers/perplexity.mdx
+++ b/docs/usage/providers/perplexity.mdx
@@ -1,44 +1,43 @@
---
-title: Using Perplexity AI API Key in LobeChat
+title: Using the Perplexity AI API Key in LobeHub
description: >-
- Learn how to integrate and use Perplexity AI in LobeChat to enhance your AI assistant's capabilities.
-
+ Learn how to configure and use Perplexity AI in LobeHub, obtain your API key,
+ and choose the right language model to start chatting.
tags:
- Perplexity AI
- API key
- Web UI
---
-# Using Perplexity AI in LobeChat
+# Using Perplexity AI in LobeHub
-
+
-The Perplexity AI API is now available for everyone to use. This document will guide you on how to use [Perplexity AI](https://www.perplexity.ai/) in LobeChat:
+The Perplexity AI API is now available to everyone. This guide will walk you through how to use [Perplexity AI](https://www.perplexity.ai/) within LobeHub:
- ### Step 1: Obtain Perplexity AI API Key
+ ### Step 1: Get Your Perplexity AI API Key
- Create a [Perplexity AI](https://www.perplexity.ai/) account
- - Obtain your [API key](https://www.perplexity.ai/settings/api)
+ - Retrieve your [API key](https://www.perplexity.ai/settings/api)
-
+
- ### Step 2: Configure Perplexity AI in LobeChat
+ ### Step 2: Configure Perplexity AI in LobeHub
- - Go to the `Settings` interface in LobeChat
- - Find the setting for `Perplexity AI` under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the `Perplexity AI` configuration section
-
+
- - Enter the API key you obtained
- - Choose a Perplexity AI model for your AI assistant to start the conversation
+ - Paste your API key into the input field
+ - Choose a Perplexity AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to Perplexity AI's
- relevant pricing policies.
+ You may incur charges from the API provider while using the service. Please refer to Perplexity AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Perplexity AI in LobeChat.
+And that’s it! You’re now ready to start chatting with models powered by Perplexity AI in LobeHub.
diff --git a/docs/usage/providers/perplexity.zh-CN.mdx b/docs/usage/providers/perplexity.zh-CN.mdx
index f14e4cf4c7..ac0faead63 100644
--- a/docs/usage/providers/perplexity.zh-CN.mdx
+++ b/docs/usage/providers/perplexity.zh-CN.mdx
@@ -1,17 +1,17 @@
---
-title: 在 LobeChat 中使用 Perplexity AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Perplexity AI,获取 API 密钥并选择适合的语言模型开始对话。
+title: 在 LobeHub 中使用 Perplexity AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Perplexity AI,获取 API 密钥并选择适合的语言模型开始对话。
tags:
- Perplexity AI
- API key
- Web UI
---
-# 在 LobeChat 中使用 Perplexity AI
+# 在 LobeHub 中使用 Perplexity AI
-
+
-Perplexity AI API 现在可供所有人使用,本文档将指导你如何在 LobeChat 中使用 [Perplexity AI](https://www.perplexity.ai/):
+Perplexity AI API 现在可供所有人使用,本文档将指导你如何在 LobeHub 中使用 [Perplexity AI](https://www.perplexity.ai/):
### 步骤一:获取 Perplexity AI API 密钥
@@ -19,23 +19,23 @@ Perplexity AI API 现在可供所有人使用,本文档将指导你如何在 L
- 创建一个 [Perplexity AI](https://www.perplexity.ai/) 帐户
- 获取您的 [API 密钥](https://www.perplexity.ai/settings/api)
-
+
- ### 步骤二:在 LobeChat 中配置 Perplexity AI
+ ### 步骤二:在 LobeHub 中配置 Perplexity AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`Perplexity AI`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Perplexity AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Perplexity AI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Perplexity AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Perplexity AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/ppio.mdx b/docs/usage/providers/ppio.mdx
index d2929b8cea..64e0b4e482 100644
--- a/docs/usage/providers/ppio.mdx
+++ b/docs/usage/providers/ppio.mdx
@@ -1,56 +1,55 @@
---
-title: Using PPIO API Key in LobeChat
+title: Using PPIO API Key in LobeHub
description: >-
- Learn how to integrate PPIO's language model APIs into LobeChat. Follow the steps to register, create an PPIO API key, configure settings, and chat with our various AI models.
-
+ Learn how to integrate PPIO's LLM API into LobeHub. Follow these steps to
+ register a PPIO account, create an API key, and configure it in LobeHub.
tags:
- PPIO
+ - PPInfra
- DeepSeek
- - Llama
- Qwen
- - uncensored
+ - Llama3
- API key
- Web UI
---
-# Using PPIO in LobeChat
+# Using PPIO in LobeHub
-
+
-[PPIO](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) supports stable and cost-efficient open-source LLM APIs, such as DeepSeek, Llama, Qwen etc.
+[PPIO](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) offers stable and cost-effective API services for open-source large language models, supporting industry-leading models such as DeepSeek, Llama, and Qwen.
-This document will guide you on how to integrate PPIO in LobeChat:
+This guide will walk you through how to use PPIO in LobeHub:
- ### Step 1: Register and Log in to PPIO
+ ### Step 1: Register and Log In to PPIO
- - Visit [PPIO](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) and create an account
- - Upon registration, PPIO will provide a ¥5 credit (about 5M tokens).
+ - Visit [PPIO](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) and sign up for an account.
+ - Upon registration, PPIO will grant you a free credit of ¥5 (approximately 5 million tokens).
-
+
- ### Step 2: Obtain the API Key
+ ### Step 2: Create an API Key
- - Visit PPIO's [key management page](https://ppinfra.com/settings/key-management), create and copy an API Key.
+ - Go to PPIO’s [API Key Management page](https://ppinfra.com/settings/key-management), create a new API key, and copy it.
-
+
- ### Step 3: Configure PPIO in LobeChat
+ ### Step 3: Configure PPIO in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `PPIO` under `AI Service Provider`
+ - Open the `Settings` panel in LobeHub.
+ - Under the `AI Providers` section, find and select `PPIO`.
+ - Enable PPIO and paste in your API key.
-
+
- - Open PPIO and enter the obtained API key
- - Choose a PPIO model for your assistant to start the conversation
+ - Choose a PPIO model for your assistant to start chatting.
-
+
- During usage, you may need to pay the API service provider, please refer to PPIO's [pricing
- policy](https://ppinfra.com/llm-api?utm_source=github_lobe-chat\&utm_medium=github_readme\&utm_campaign=link).
+ You may incur charges when using the API services. For pricing details, refer to the official [PPIO pricing page](https://ppinfra.com/llm-api?utm_source=github_lobe-chat\&utm_medium=github_readme\&utm_campaign=link).
-You can now engage in conversations using the models provided by PPIO in LobeChat.
+That's it! You're now ready to use PPIO's models for conversations in LobeHub.
diff --git a/docs/usage/providers/ppio.zh-CN.mdx b/docs/usage/providers/ppio.zh-CN.mdx
index bafc0c2127..c248f86f61 100644
--- a/docs/usage/providers/ppio.zh-CN.mdx
+++ b/docs/usage/providers/ppio.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用 PPIO 派欧云 API Key
+title: 在 LobeHub 中使用 PPIO 派欧云 API Key
description: >-
- 学习如何将 PPIO 派欧云的 LLM API 集成到 LobeChat 中。跟随以下步骤注册 PPIO 账号、创建 API Key、并在 LobeChat 中进行设置。
-
+ 学习如何将 PPIO 派欧云的 LLM API 集成到 LobeHub 中。跟随以下步骤注册 PPIO 账号、创建 API Key、并在 LobeHub
+ 中进行设置。
tags:
- PPIO
- PPInfra
@@ -13,43 +13,43 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用 PPIO 派欧云
+# 在 LobeHub 中使用 PPIO 派欧云
-
+
-[PPIO 派欧云](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat)提供稳定、高性价比的开源模型 API 服务,支持 DeepSeek 全系列、Llama、Qwen 等行业领先大模型。
+[PPIO 派欧云](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat)提供稳定、高性价比的开源模型 API 服务,支持 DeepSeek 全系列、Llama、Qwen 等行业领先大模型。
-本文档将指导你如何在 LobeChat 中使用 PPIO:
+本文档将指导你如何在 LobeHub 中使用 PPIO:
### 步骤一:注册 PPIO 派欧云账号并登录
- - 访问 [PPIO 派欧云](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) 并注册账号
+ - 访问 [PPIO 派欧云](https://ppinfra.com/user/register?invited_by=RQIMOC\&utm_source=github_lobechat) 并注册账号
- 注册后,PPIO 会赠送 5 元(约 500 万 tokens)的使用额度
-
+
### 步骤二:创建 API 密钥
- 访问 PPIO 派欧云的 [密钥管理页面](https://ppinfra.com/settings/key-management) ,创建并且复制一个 API 密钥.
-
+
- ### 步骤三:在 LobeChat 中配置 PPIO 派欧云
+ ### 步骤三:在 LobeHub 中配置 PPIO 派欧云
- - 访问 LobeChat 的 `设置` 界面
+ - 访问 LobeHub 的 `设置` 界面
- 在 `AI 服务商` 下找到 `PPIO` 的设置项
- 打开 PPIO 并填入获得的 API 密钥
-
+
- 为你的助手选择一个 PPIO 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,PPIO 的 API 费用参考[这里](https://ppinfra.com/llm-api?utm_source=github_lobe-chat\&utm_medium=github_readme\&utm_campaign=link)。
-至此你已经可以在 LobeChat 中使用 PPIO 派欧云提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 PPIO 派欧云提供的模型进行对话了。
diff --git a/docs/usage/providers/qiniu.mdx b/docs/usage/providers/qiniu.mdx
index 37f29a625d..c2e8591457 100644
--- a/docs/usage/providers/qiniu.mdx
+++ b/docs/usage/providers/qiniu.mdx
@@ -1,58 +1,57 @@
---
-title: Using Qiniu API Key in LobeChat
+title: Using Qiniu Cloud's Large Language Model API Key in LobeHub
description: >-
- Learn how to integrate and utilize powerful language models developed by Qiniu into LobeChat for various tasks. Follow the steps to obtain an API key and configure it for seamless interaction.
-
+ Learn how to configure and use Qiniu Cloud's large language models in LobeHub
+ to enable powerful natural language understanding and generation.
tags:
- API key
- Web UI
- - 七牛
- - 七牛云
- - 七牛智能
- Qiniu
+ - Qiniu Cloud
+ - Qiniu AI
- DeepSeek
---
-# Using Qiniu's AI Models in LobeChat
+# Using Qiniu Cloud's Large Language Model in LobeHub
-
+
-[Qiniu](https://www.qiniu.com), as a long-established cloud service provider, delivers cost-effective and reliable AI inference services for both real-time and batch processing, with a simple and user-friendly experience.
+[Qiniu Cloud](https://www.qiniu.com), a well-established cloud service provider, offers cost-effective and reliable real-time and batch AI inference services that are easy to use.
-This document will guide you on how to use Qiniu's AI Models in LobeChat:
+This guide will walk you through how to use Qiniu Cloud's large language models in LobeHub:
- ### Step 1: [Obtain AI Model API Key](https://developer.qiniu.com/aitokenapi/12884/how-to-get-api-key)
+ ### Step 1: [Obtain Your AI Model API Key](https://developer.qiniu.com/aitokenapi/12884/how-to-get-api-key)
- - Method 1: Using Console
+ - Method 1: Get it via the Console
1. [Register a Qiniu account](https://s.qiniu.com/umqq6n?ref=developer.qiniu.com\&s_path=%2Faitokenapi%2F12884%2Fhow-to-get-api-key)
- 2. [Go to the console to obtain your API Key](https://portal.qiniu.com/ai-inference/api-key)
-
+ 2. [Go to the Console to retrieve your API Key](https://portal.qiniu.com/ai-inference/api-key)
+
- - Method 2: Using Mini Program
- 1. Open the Qiniu mini program
- 2. Quick login to your account
- 3. Click the \[Me] tab in the bottom navigation bar
- 4. Click \[My Console]
- 5. Navigate to \[AI Inference]
+ - Method 2: Get it via the Mini Program
+ 1. Open the Qiniu Mini Program
+ 2. Log in quickly with your account
+ 3. Tap on "My" in the bottom navigation bar
+ 4. Tap on "My Console"
+ 5. Navigate to "AI Inference"
6. View and copy your API key
- ### Step 2: Configure Qiniu's AI Model Service in LobeChat
+ ### Step 2: Configure Qiniu Cloud LLM Service in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `Qiniu` under `AI Service Provider`
+ - Open the Settings panel in LobeHub
+ - Under the "AI Providers" section, find the "Qiniu Cloud" configuration
-
+
- - Open Qiniu and enter the obtained API key.
- - Choose a Qiniu's model for your AI assistant to start the conversation.
+ - Enable Qiniu Cloud and paste in your API key
+ - Choose a Qiniu Cloud large language model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to [Qiniu's relevant pricing policies](https://developer.qiniu.com/aitokenapi/12898/ai-token-api-pricing).
+ You may incur charges from the API service provider during usage. Please refer to [Qiniu Cloud's pricing policy](https://developer.qiniu.com/aitokenapi/12898/ai-token-api-pricing) for details.
-You can now engage in conversations using the models provided by Qiniu in LobeChat.
+That's it! You're now ready to use Qiniu Cloud's large language models in LobeHub for intelligent conversations.
diff --git a/docs/usage/providers/qiniu.zh-CN.mdx b/docs/usage/providers/qiniu.zh-CN.mdx
index 2bba25631f..04bd9d50c7 100644
--- a/docs/usage/providers/qiniu.zh-CN.mdx
+++ b/docs/usage/providers/qiniu.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用七牛云大模型 API Key
-description: 学习如何在 LobeChat 中配置和使用七牛云的大模型,提供强大的自然语言理解和生成能力。
+title: 在 LobeHub 中使用七牛云大模型 API Key
+description: 学习如何在 LobeHub 中配置和使用七牛云的大模型,提供强大的自然语言理解和生成能力。
tags:
- API key
- Web UI
@@ -11,13 +11,13 @@ tags:
- DeepSeek
---
-# 在 LobeChat 中使用七牛云大模型
+# 在 LobeHub 中使用七牛云大模型
-
+
[七牛云](https://www.qiniu.com)作为老牌云服务厂商,提供高性价比稳定的实时、批量 AI 推理服务,简单易用。
-本文档将指导你如何在 LobeChat 中使用七牛云大模型:
+本文档将指导你如何在 LobeHub 中使用七牛云大模型:
### 步骤一:[获取 AI 大模型 API 密钥](https://developer.qiniu.com/aitokenapi/12884/how-to-get-api-key)
@@ -36,9 +36,9 @@ tags:
5. 进入【AI 推理】
6. 查看和复制你的 API 密钥
- ### 步骤二:在 LobeChat 中配置七牛云大模型服务
+ ### 步骤二:在 LobeHub 中配置七牛云大模型服务
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`七牛云`的设置项
@@ -53,4 +53,4 @@ tags:
-至此你已经可以在 LobeChat 中使用七牛云提供的大模型进行对话了。
+至此你已经可以在 LobeHub 中使用七牛云提供的大模型进行对话了。
diff --git a/docs/usage/providers/qwen.mdx b/docs/usage/providers/qwen.mdx
index 6f946df8c5..2fffefa1ce 100644
--- a/docs/usage/providers/qwen.mdx
+++ b/docs/usage/providers/qwen.mdx
@@ -1,64 +1,63 @@
---
-title: Using Qwen2 API Key in LobeChat
+title: Using Tongyi Qianwen API Key in LobeHub
description: >-
- Learn how to integrate and utilize Tongyi Qianwen, a powerful language model by Alibaba Cloud, in LobeChat for various tasks. Follow the steps to activate the service, obtain the API key, and configure Tongyi Qianwen for seamless interaction.
-
+ Learn how to configure and use Alibaba Cloud's Tongyi Qianwen model in
+ LobeHub, offering powerful natural language understanding and generation
+ capabilities.
tags:
+ - LobeHub
- Tongyi Qianwen
- - Alibaba Cloud
- DashScope
- API key
- Web UI
---
-# Using Tongyi Qianwen in LobeChat
+# Using Tongyi Qianwen in LobeHub
-
+
-[Tongyi Qianwen](https://tongyi.aliyun.com/) is a large-scale language model independently developed by Alibaba Cloud, with powerful natural language understanding and generation capabilities. It can answer various questions, create text content, express opinions, write code, and play a role in multiple fields.
+[Tongyi Qianwen](https://tongyi.aliyun.com/) is a large-scale language model developed by Alibaba Cloud, known for its powerful natural language understanding and generation capabilities. It can answer questions, generate content, express opinions, write code, and more—making it useful across a wide range of applications.
-This document will guide you on how to use Tongyi Qianwen in LobeChat:
+This guide will walk you through how to use Tongyi Qianwen in LobeHub:
- ### Step 1: Activate DashScope Model Service
+ ### Step 1: Enable the DashScope Model Service
- - Visit and log in to Alibaba Cloud's [DashScope](https://dashscope.console.aliyun.com/) platform.
- - If it is your first time, you need to activate the DashScope service.
- - If you have already activated it, you can skip this step.
+ - Visit and log in to the [DashScope](https://dashscope.console.aliyun.com/) platform by Alibaba Cloud.
+ - If this is your first time, you’ll need to activate the DashScope service.
+ - If you’ve already enabled it, you can skip this step.
-
+
- ### Step 2: Obtain DashScope API Key
+ ### Step 2: Obtain a DashScope API Key
- - Go to the `API-KEY` interface and create an API key.
+ - Navigate to the API-KEY section and create a new API key.
-
+
- - Copy the API key from the pop-up dialog box and save it securely.
+ - Copy the API key from the pop-up dialog and store it securely.
-
+
- Please store the key securely as it will only appear once. If you accidentally lose it, you will
- need to create a new key.
+ Make sure to store your API key securely, as it will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 3: Configure Tongyi Qianwen in LobeChat
+ ### Step 3: Configure Tongyi Qianwen in LobeHub
- - Visit the `Settings` interface in LobeChat.
- - Find the setting for `Tongyi Qianwen` under `AI Service Provider`.
+ - Open the Settings panel in LobeHub.
+ - Under AI Providers, locate the configuration section for Tongyi Qianwen.
-
+
- - Open Tongyi Qianwen and enter the obtained API key.
- - Choose a Qwen model for your AI assistant to start the conversation.
+ - Enable Tongyi Qianwen and paste in your API key.
+ - Choose a Qwen model for your AI assistant to start chatting.
-
+
- During usage, you may need to pay the API service provider. Please refer to Tongyi Qianwen's
- relevant pricing policies.
+ Please note that usage may incur charges from the API provider. Refer to Tongyi Qianwen’s pricing policy for details.
-You can now engage in conversations using the models provided by Tongyi Qianwen in LobeChat.
+You’re all set! You can now start using Tongyi Qianwen’s models in LobeHub for intelligent conversations.
diff --git a/docs/usage/providers/qwen.zh-CN.mdx b/docs/usage/providers/qwen.zh-CN.mdx
index 7dd95be765..f32f9fbe69 100644
--- a/docs/usage/providers/qwen.zh-CN.mdx
+++ b/docs/usage/providers/qwen.zh-CN.mdx
@@ -1,8 +1,8 @@
---
-title: 在 LobeChat 中使用通义千问 API Key
-description: 学习如何在 LobeChat 中配置和使用阿里云的通义千问模型,提供强大的自然语言理解和生成能力。
+title: 在 LobeHub 中使用通义千问 API Key
+description: 学习如何在 LobeHub 中配置和使用阿里云的通义千问模型,提供强大的自然语言理解和生成能力。
tags:
- - LobeChat
+ - LobeHub
- 通义千问
- DashScope
- DashScope
@@ -10,13 +10,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用通义千问
+# 在 LobeHub 中使用通义千问
-
+
[通义千问](https://tongyi.aliyun.com/)是阿里云自主研发的超大规模语言模型,具有强大的自然语言理解和生成能力。它可以回答各种问题、创作文字内容、表达观点看法、撰写代码等,在多个领域发挥作用。
-本文档将指导你如何在 LobeChat 中使用通义千问:
+本文档将指导你如何在 LobeHub 中使用通义千问:
### 步骤一:开通 DashScope 模型服务
@@ -25,37 +25,37 @@ tags:
- 初次进入时需要开通 DashScope 服务
- 若你已开通,可跳过该步骤
-
+
### 步骤二:获取 DashScope API 密钥
- 进入`API-KEY` 界面,并创建一个 API 密钥
-
+
- 在弹出的对话框中复制 API 密钥,并妥善保存
-
+
请安全地存储密钥,因为它只会出现一次。如果您意外丢失它,您将需要创建一个新密钥。
- ### 步骤三:在 LobeChat 中配置通义千问
+ ### 步骤三:在 LobeHub 中配置通义千问
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`通义千问`的设置项
-
+
- 打开通义千问并填入获得的 API 密钥
- 为你的 AI 助手选择一个 Qwen 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考通义千问的相关费用政策。
-至此你已经可以在 LobeChat 中使用通义千问提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用通义千问提供的模型进行对话了。
diff --git a/docs/usage/providers/sambanova.mdx b/docs/usage/providers/sambanova.mdx
index 50f2c48084..31f2479a0c 100644
--- a/docs/usage/providers/sambanova.mdx
+++ b/docs/usage/providers/sambanova.mdx
@@ -1,51 +1,52 @@
---
-title: Using SambaNova API Key in LobeChat
-description: Learn how to configure and use SambaNova models in LobeChat, obtain an API key, and start a conversation.
+title: Using the SambaNova API Key in LobeHub
+description: >-
+ Learn how to configure and use SambaNova models in LobeHub, obtain your API
+ key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- SambaNova
- API Key
- Web UI
---
-# Using SambaNova in LobeChat
+# Using SambaNova in LobeHub
-
+
-[SambaNova](https://sambanova.ai/) is a company based in Palo Alto, California, USA, focused on developing high-performance AI hardware and software solutions. It provides fast AI model training, fine-tuning, and inference capabilities, especially suitable for large-scale generative AI models.
+[SambaNova](https://sambanova.ai/) is a company based in Palo Alto, California, specializing in high-performance AI hardware and software solutions. It offers fast AI model training, fine-tuning, and inference capabilities, particularly well-suited for large-scale generative AI models.
-This document will guide you on how to use SambaNova in LobeChat:
+This guide will walk you through how to use SambaNova in LobeHub:
### Step 1: Obtain a SambaNova API Key
- - First, you need to register and log in to [SambaNova Cloud](https://cloud.sambanova.ai/)
- - Create an API key in the `APIs` page
+ - First, sign up and log in to [SambaNova Cloud](https://cloud.sambanova.ai/)
+ - Navigate to the `APIs` page and create a new API key
-
+
- - Copy the obtained API key and save it securely
+ - Copy the generated API key and store it securely
- Please save the generated API Key securely, as it will only appear once. If you accidentally lose
- it, you will need to create a new API key.
+ Make sure to save your API key securely. It will only be shown once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure SambaNova in LobeChat
+ ### Step 2: Configure SambaNova in LobeHub
- - Access the `Application Settings` interface of LobeChat
- - Find the `SambaNova` setting item under `AI Service Provider`
+ - Go to the `App Settings` section in LobeHub
+ - Under `AI Providers`, locate the `SambaNova` configuration option
-
+
- - Turn on SambaNova and fill in the obtained API key
- - Select a SambaNova model for your assistant to start the conversation
+ - Enable SambaNova and paste in your API key
+ - Choose a SambaNova model for your assistant to start chatting
-
+
- You may need to pay the API service provider during use, please refer to SambaNova's related fee policies.
+ Please note that you may incur charges from the API provider. Refer to SambaNova’s pricing policy for more details.
-Now you can use the models provided by SambaNova in LobeChat to conduct conversations.
+You're now ready to start chatting with models powered by SambaNova in LobeHub.
diff --git a/docs/usage/providers/sambanova.zh-CN.mdx b/docs/usage/providers/sambanova.zh-CN.mdx
index 0412ab0cdb..75443ce7b3 100644
--- a/docs/usage/providers/sambanova.zh-CN.mdx
+++ b/docs/usage/providers/sambanova.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 SambaNova API Key
-description: 学习如何在 LobeChat 中配置和使用 SambaNova 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 SambaNova API Key
+description: 学习如何在 LobeHub 中配置和使用 SambaNova 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- SambaNova
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 SambaNova
+# 在 LobeHub 中使用 SambaNova
-
+
[SambaNova](https://sambanova.ai/) 是一家位于美国加利福尼亚州帕洛阿尔托的公司,专注于开发高性能 AI 硬件和软件解决方案,提供快速的 AI 模型训练、微调和推理能力,尤其适用于大规模生成式 AI 模型。
-本文档将指导你如何在 LobeChat 中使用 SambaNova:
+本文档将指导你如何在 LobeHub 中使用 SambaNova:
### 步骤一:获取 SambaNova API 密钥
@@ -22,7 +22,7 @@ tags:
- 首先,你需要注册并登录 [SambaNova Cloud](https://cloud.sambanova.ai/)
- 在 `APIs` 页面中创建一个 API 密钥
-
+
- 复制得到的 API 密钥并妥善保存
@@ -30,21 +30,21 @@ tags:
请妥善保存生成的 API Key,它只会出现一次,如果不小心丢失了,你需要重新创建一个 API key
- ### 步骤二:在 LobeChat 中配置 SambaNova
+ ### 步骤二:在 LobeHub 中配置 SambaNova
- - 访问 LobeChat 的 `应用设置`界面
+ - 访问 LobeHub 的 `应用设置`界面
- 在 `AI 服务商` 下找到 `SambaNova` 的设置项
-
+
- 打开 SambaNova 并填入获取的 API 密钥
- 为你的助手选择一个 SambaNova 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 SambaNova 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 SambaNova 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 SambaNova 提供的模型进行对话了。
diff --git a/docs/usage/providers/sensenova.mdx b/docs/usage/providers/sensenova.mdx
index 22712e388e..1d09226654 100644
--- a/docs/usage/providers/sensenova.mdx
+++ b/docs/usage/providers/sensenova.mdx
@@ -1,58 +1,56 @@
---
-title: Using SenseNova in LobeChat
+title: Using SenseTime SenseNova in LobeHub
description: >-
- Learn how to configure and use SenseNova's API Key in LobeChat to start conversations and interactions.
-
+ Learn how to configure and use your SenseTime SenseNova API Key in LobeHub to
+ start chatting and interacting.
tags:
- - LobeChat
- - SenseNova
+ - LobeHub
+ - SenseTime SenseNova
- API Key
- Web UI
---
-# Using SenseNova in LobeChat
+# Using SenseTime SenseNova in LobeHub
-
+
-[SenseNova](https://platform.sensenova.cn/home) is a large model system introduced by SenseTime, aimed at promoting the rapid iteration and practical application of artificial intelligence (AI) technology.
+[SenseTime SenseNova](https://platform.sensenova.cn/home) is a large-scale AI model ecosystem developed by SenseTime, designed to accelerate the iteration and real-world application of artificial intelligence technologies.
-This article will guide you on how to use SenseNova in LobeChat.
+This guide will walk you through how to use SenseNova within LobeHub.
- ### Step 1: Obtain the API Key for SenseNova
+ ### Step 1: Obtain Your SenseNova API Key
- - Register and log in to the [SenseCore Development Platform](https://www.sensecore.cn/product/aistudio).
- - Locate the `SenseNova Large Model` in the product menu and activate the service.
+ - Register and log in to the [SenseCore AI Studio](https://www.sensecore.cn/product/aistudio)
+ - In the product menu, locate and activate the `SenseNova Large Model` service
-
+
- - Go to the [AccessKey Management](https://console.sensecore.cn/iam/Security/access-key) page.
- - Create an access key.
- - Save the Access Key ID and secret in the pop-up window.
+ - Go to the [AccessKey Management](https://console.sensecore.cn/iam/Security/access-key) page
+ - Create a new access key
+ - Save the AccessKey ID and AccessKey Secret from the pop-up window
-
+
- Please keep the access key from the pop-up window secure, as it will only appear once. If you lose
- it, you will need to create a new access key.
+ Make sure to save the access key shown in the pop-up window. It will only be displayed once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure SenseNova in LobeChat
+ ### Step 2: Configure SenseNova in LobeHub
- - Access the `Settings` interface on LobeChat.
- - Find the setting for `SenseNova` under `AI Service Provider`.
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, find the configuration section for `SenseTime SenseNova`
-
+
- - Input the obtained `Access Key ID` and `Access Key Secret`.
- - Choose a SenseNova model for your AI assistant and start the conversation.
+ - Enter your `AccessKey ID` and `AccessKey Secret`
+ - Choose a SenseNova model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to the relevant fee
- policy for SenseNova.
+ You may incur charges when using the API services. Please refer to SenseNova’s pricing policy for more details.
-You can now have conversations using the models provided by SenseNova in LobeChat.
+And that’s it! You’re now ready to use SenseTime SenseNova models in LobeHub for intelligent conversations.
diff --git a/docs/usage/providers/sensenova.zh-CN.mdx b/docs/usage/providers/sensenova.zh-CN.mdx
index 7b96ed715e..133ab1f939 100644
--- a/docs/usage/providers/sensenova.zh-CN.mdx
+++ b/docs/usage/providers/sensenova.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用商汤日日新
-description: 学习如何在 LobeChat 中配置和使用商汤日日新的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用商汤日日新
+description: 学习如何在 LobeHub 中配置和使用商汤日日新的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 商汤日日新
- API密钥
- Web UI
---
-# 在 LobeChat 中使用商汤日日新
+# 在 LobeHub 中使用商汤日日新
-
+
[商汤日日新](https://platform.sensenova.cn/home) 是商汤科技(SenseTime)推出的一个大模型体系,旨在推动人工智能(AI)技术的快速迭代和应用落地。
-本文将指导你如何在 LobeChat 中使用商汤日日新。
+本文将指导你如何在 LobeHub 中使用商汤日日新。
### 步骤一:获取商汤日日新的 API 密钥
@@ -22,33 +22,33 @@ tags:
- 注册并登录 [万象模型开发平台](https://www.sensecore.cn/product/aistudio)
- 在产品菜单中找到 `日日新大模型` 并开通服务
-
+
- 进入 [AccessKey 访问密钥](https://console.sensecore.cn/iam/Security/access-key) 页面
- 创建一个访问密钥
- 在弹出窗口中保存访问密钥 ID 和令牌
-
+
妥善保存弹窗中的访问密钥,它只会出现一次,如果不小心丢失了,你需要重新创建一个访问密钥。
- ### 步骤二:在 LobeChat 中配置商汤日日新
+ ### 步骤二:在 LobeHub 中配置商汤日日新
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `商汤日日新` 的设置项
-
+
- 填入获得的 `AccessKey ID` 和 `AccessKey Secret`
- 为你的 AI 助手选择一个商汤日日新的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考商汤日日新的相关费用政策。
-至此你已经可以在 LobeChat 中使用商汤日日新提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用商汤日日新提供的模型进行对话了。
diff --git a/docs/usage/providers/siliconcloud.mdx b/docs/usage/providers/siliconcloud.mdx
index f6a12644a3..5b62c2d50e 100644
--- a/docs/usage/providers/siliconcloud.mdx
+++ b/docs/usage/providers/siliconcloud.mdx
@@ -1,48 +1,47 @@
---
-title: Using SiliconCloud in LobeChat
+title: Using SiliconCloud in LobeHub
description: >-
- Learn how to integrate and utilize SiliconCloud's language model APIs in LobeChat.
-
+ Learn how to configure and use SiliconCloud's API Key in LobeHub to start
+ chatting and interacting.
tags:
- - LobeChat
+ - LobeHub
- SiliconCloud
- API Key
- Web UI
---
-# Using SiliconCloud in LobeChat
+# Using SiliconCloud in LobeHub
-
+
[SiliconCloud](https://siliconflow.cn/) is an AI service platform based on open-source foundational models, offering a variety of generative AI (GenAI) services.
-This article will guide you on how to use SiliconCloud in LobeChat.
+This guide will walk you through how to use SiliconCloud within LobeHub.
- ### Step 1: Obtain the API Key from SiliconCloud
+ ### Step 1: Obtain an API Key from SiliconCloud
- - Sign up and log in to [SiliconCloud](https://cloud.siliconflow.cn/account/ak)
- - Click on the `API Keys` menu on the left side
- - Create an API Key and copy it
+ - Register and log in to [SiliconCloud](https://cloud.siliconflow.cn/account/ak)
+ - Click on the `API Key` menu on the left sidebar
+ - Create a new API key and copy it
-
+
- ### Step 2: Configure SiliconCloud in LobeChat
+ ### Step 2: Configure SiliconCloud in LobeHub
- - Go to the `Settings` page in LobeChat
- - Under `AI Service Provider`, find the setting for `SiliconFlow`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `SiliconFlow`
-
+
- - Enter the API Key you obtained
- - Choose a SiliconCloud model for your AI assistant to start the conversation
+ - Paste the API key you obtained
+ - Choose a model from SiliconCloud for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, so please refer to SiliconCloud's
- relevant pricing policy.
+ You may need to pay the API service provider during usage. Please refer to SiliconCloud’s pricing policy for details.
-At this point, you can start chatting using the models provided by SiliconCloud in LobeChat.
+That's it! You're now ready to use SiliconCloud's models for conversations in LobeHub.
diff --git a/docs/usage/providers/siliconcloud.zh-CN.mdx b/docs/usage/providers/siliconcloud.zh-CN.mdx
index 270a689ac4..911caf5ed3 100644
--- a/docs/usage/providers/siliconcloud.zh-CN.mdx
+++ b/docs/usage/providers/siliconcloud.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 SiliconCloud
-description: 学习如何在 LobeChat 中配置和使用 SiliconCloud 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 SiliconCloud
+description: 学习如何在 LobeHub 中配置和使用 SiliconCloud 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- SiliconCloud
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 SiliconCloud
+# 在 LobeHub 中使用 SiliconCloud
-
+
[SiliconCloud](https://siliconflow.cn/) 是一个基于开源基础模型的人工智能服务平台,提供多种生成式 AI(GenAI)服务。
-本文将指导你如何在 LobeChat 中使用 SiliconCloud。
+本文将指导你如何在 LobeHub 中使用 SiliconCloud。
### 步骤一:获得 SiliconCloud 的 API Key
@@ -23,23 +23,23 @@ tags:
- 点击左侧 `API 密钥` 菜单
- 创建一个 API 密钥并复制
-
+
- ### 步骤二:在 LobeChat 中配置 SiliconCloud
+ ### 步骤二:在 LobeHub 中配置 SiliconCloud
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `SiliconFlow` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 SiliconCloud 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 SiliconCloud 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 SiliconCloud 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 SiliconCloud 提供的模型进行对话了。
diff --git a/docs/usage/providers/spark.mdx b/docs/usage/providers/spark.mdx
index 948da8098a..676e3eba37 100644
--- a/docs/usage/providers/spark.mdx
+++ b/docs/usage/providers/spark.mdx
@@ -1,51 +1,51 @@
---
-title: Using iFLYTEK Spark in LobeChat
-description: Learn how to integrate and utilize iFLYTEK's Spark model APIs in LobeChat.
+title: Using iFLYTEK Spark in LobeHub
+description: >-
+ Learn how to configure and use the iFLYTEK Spark API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
- - iFLYTEK
- - Spark
+ - LobeHub
+ - iFLYTEK Spark
- API Key
- Web UI
---
-# Using iFLYTEK Spark in LobeChat
+# Using iFLYTEK Spark in LobeHub
-
+
-[iFLYTEK Spark](https://xinghuo.xfyun.cn/) is a powerful AI model launched by iFLYTEK, equipped with cross-domain knowledge and language understanding capabilities, able to perform various tasks such as Q\&A, conversations, and literary creation.
+[iFLYTEK Spark](https://xinghuo.xfyun.cn/) is a powerful AI large language model developed by iFLYTEK. It features cross-domain knowledge and language understanding capabilities, and can perform a variety of tasks such as Q\&A, conversation, and creative writing.
-This guide will instruct you on how to use iFLYTEK Spark in LobeChat.
+This guide will walk you through how to use iFLYTEK Spark in LobeHub.
- ### Step 1: Obtain the iFLYTEK Spark API Key
+ ### Step 1: Obtain an API Key for iFLYTEK Spark
- Register and log in to the [iFLYTEK Open Platform](https://console.xfyun.cn/)
- - Create an application
+ - Create a new application
-
+
- - Select a large model to view details
- - Copy the `API Password` from the top right corner under the HTTP service interface authentication information
+ - Select a large model and view its details
+ - Copy the `API Password` from the top-right corner under the HTTP service authentication section
-
+
- ### Step 2: Configure iFLYTEK Spark in LobeChat
+ ### Step 2: Configure iFLYTEK Spark in LobeHub
- - Access the `Settings` menu in LobeChat
- - Find the iFLYTEK Spark settings under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `iFLYTEK Spark`
-
+
- - Input the obtained API Key
- - Choose an iFLYTEK Spark model for your AI assistant to start the conversation
+ - Paste the API Key you obtained
+ - Choose an iFLYTEK Spark model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to the relevant pricing
- policy of iFLYTEK Spark.
+ You may need to pay for API usage depending on your usage level. Please refer to iFLYTEK Spark's pricing policy for more details.
-Now you can use the models provided by iFLYTEK Spark for conversations in LobeChat.
+That's it! You're now ready to use iFLYTEK Spark's models for conversations in LobeHub.
diff --git a/docs/usage/providers/spark.zh-CN.mdx b/docs/usage/providers/spark.zh-CN.mdx
index 7498a78c59..ecd618d5e8 100644
--- a/docs/usage/providers/spark.zh-CN.mdx
+++ b/docs/usage/providers/spark.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用讯飞星火
-description: 学习如何在 LobeChat 中配置和使用讯飞星火的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用讯飞星火
+description: 学习如何在 LobeHub 中配置和使用讯飞星火的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 讯飞星火
- API密钥
- Web UI
---
-# 在 LobeChat 中使用讯飞星火
+# 在 LobeHub 中使用讯飞星火
-
+
[讯飞星火](https://xinghuo.xfyun.cn/)是科大讯飞推出的一款强大的 AI 大模型,具备跨领域的知识和语言理解能力,能够执行问答、对话和文学创作等多种任务。
-本文将指导你如何在 LobeChat 中使用讯飞星火。
+本文将指导你如何在 LobeHub 中使用讯飞星火。
### 步骤一:获得讯飞星火的 API Key
@@ -22,28 +22,28 @@ tags:
- 注册并登录 [讯飞开放平台](https://console.xfyun.cn/)
- 创建一个应用
-
+
- 选择一个大模型查看详情
- 复制右上角 http 服务接口认证信息中的 `API Password`
-
+
- ### 步骤二:在 LobeChat 中配置讯飞星火
+ ### 步骤二:在 LobeHub 中配置讯飞星火
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `讯飞星火` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个讯飞星火的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考讯飞星火的相关费用政策。
-至此你已经可以在 LobeChat 中使用讯飞星火提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用讯飞星火提供的模型进行对话了。
diff --git a/docs/usage/providers/stepfun.mdx b/docs/usage/providers/stepfun.mdx
index 83099426e2..28d18d4ebe 100644
--- a/docs/usage/providers/stepfun.mdx
+++ b/docs/usage/providers/stepfun.mdx
@@ -1,47 +1,46 @@
---
-title: Using Stepfun API Key in LobeChat
+title: Using the Stepfun API Key in LobeHub
description: >-
- Learn how to integrate Stepfun AI models into LobeChat for engaging conversations. Obtain Stepfun API key, configure Stepfun in LobeChat settings, and select a model to start chatting.
-
+ Learn how to configure and use Stepfun's AI models in LobeHub, including how
+ to obtain an API key and select a model to start chatting.
tags:
- Stepfun
- API key
- Web UI
---
-# Using Stepfun in LobeChat
+# Using Stepfun in LobeHub
-
+
-[Stepfun](https://www.stepfun.com/) is a startup focusing on the research and development of Artificial General Intelligence (AGI). They have released the Step-1 billion-parameter language model, Step-1V billion-parameter multimodal model, and the Step-2 trillion-parameter MoE language model preview.
+[Stepfun](https://www.stepfun.com/) is a startup focused on the development of Artificial General Intelligence (AGI). They have released several large-scale models, including the Step-1 language model with hundreds of billions of parameters, the Step-1V multimodal model, and a preview version of the Step-2 trillion-parameter MoE language model.
-This document will guide you on how to use Stepfun in LobeChat:
+This guide will walk you through how to use Stepfun in LobeHub:
- ### Step 1: Obtain Stepfun API Key
+ ### Step 1: Get Your Stepfun API Key
- - Visit and log in to the [Stepfun Open Platform](https://platform.stepfun.com/)
- - Go to the `API Key` menu, where the system has already created an API key for you
- - Copy the created API key
+ - Visit and log in to the [Stepfun Developer Platform](https://platform.stepfun.com/)
+ - Navigate to the `API Key` section — an API key will be automatically generated for you
+ - Copy the generated API key
-
+
- ### Step 2: Configure Stepfun in LobeChat
+ ### Step 2: Configure Stepfun in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for Stepfun under `AI Service Provider`
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `Stepfun` configuration section
-
+
- - Open Stepfun and enter the obtained API key
- - Choose a Stepfun model for your AI assistant to start the conversation
+ - Enable Stepfun and paste in your API key
+ - Choose a Stepfun model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Stepfun's relevant
- pricing policies.
+ You may need to pay for API usage depending on your usage level. Please refer to Stepfun’s pricing policy for more details.
-You can now use the models provided by Stepfun to have conversations in LobeChat.
+And that’s it! You’re now ready to start chatting with AI models powered by Stepfun in LobeHub.
diff --git a/docs/usage/providers/stepfun.zh-CN.mdx b/docs/usage/providers/stepfun.zh-CN.mdx
index ad74a5d507..fea8cca8ad 100644
--- a/docs/usage/providers/stepfun.zh-CN.mdx
+++ b/docs/usage/providers/stepfun.zh-CN.mdx
@@ -1,19 +1,19 @@
---
-title: 在 LobeChat 中使用 Stepfun 阶跃星辰 API Key
-description: 学习如何在 LobeChat 中配置和使用 Stepfun 阶跃星辰的人工智能模型,包括获取 API Key 和选择模型开始对话。
+title: 在 LobeHub 中使用 Stepfun 阶跃星辰 API Key
+description: 学习如何在 LobeHub 中配置和使用 Stepfun 阶跃星辰的人工智能模型,包括获取 API Key 和选择模型开始对话。
tags:
- Stepfun 阶跃星辰
- API key
- Web UI
---
-# 在 LobeChat 中使用 Stepfun 阶跃星辰
+# 在 LobeHub 中使用 Stepfun 阶跃星辰
-
+
[Stepfun 阶跃星辰](https://www.stepfun.com/)是一家专注于通用人工智能 (AGI) 研发的创业公司,目前已推出 Step-1 千亿参数语言大模型、Step-1V 千亿参数多模态大模型,以及 Step-2 万亿参数 MoE 语言大模型预览版。
-本文档将指导你如何在 LobeChat 中使用 Stepfun 阶跃星辰:
+本文档将指导你如何在 LobeHub 中使用 Stepfun 阶跃星辰:
### 步骤一:获取 Stepfun 阶跃星辰 API 密钥
@@ -22,23 +22,23 @@ tags:
- 进入`接口密钥`菜单,系统已为你创建好 API 密钥
- 复制已创建的 API 密钥
-
+
- ### 步骤二:在 LobeChat 中配置 Stepfun Stepfun 阶跃星辰
+ ### 步骤二:在 LobeHub 中配置 Stepfun 阶跃星辰
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到` Stepfun 阶跃星辰`的设置项
-
+
- 打开 Stepfun 阶跃星辰并填入获得的 API 密钥
- 为你的 AI 助手选择一个 Stepfun 阶跃星辰的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Stepfun 阶跃星辰的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Stepfun 阶跃星辰提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Stepfun 阶跃星辰提供的模型进行对话了。
diff --git a/docs/usage/providers/taichu.mdx b/docs/usage/providers/taichu.mdx
index a8f20b9d32..4a7db278ee 100644
--- a/docs/usage/providers/taichu.mdx
+++ b/docs/usage/providers/taichu.mdx
@@ -1,45 +1,45 @@
---
-title: Using Taichu API Key in LobeChat
+title: Using Zidong Taichu API Key in LobeHub
description: >-
- Learn how to integrate Taichu AI into LobeChat for enhanced conversational experiences. Follow the steps to configure Taichu AI and start using its models.
-
+ Learn how to configure and use the Zidong Taichu API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
+ - LobeHub
- Taichu
+ - Zidong Taichu
- API Key
- Web UI
---
-# Using Taichu in LobeChat
+# Using Zidong Taichu in LobeHub
-
+
-This article will guide you on how to use Taichu in LobeChat:
+This guide will walk you through how to use Zidong Taichu in LobeHub:
- ### Step 1: Obtain Taichu API Key
+ ### Step 1: Obtain a Zidong Taichu API Key
- - Create an account on [Taichu](https://ai-maas.wair.ac.cn/)
- - Create and obtain an [API key](https://ai-maas.wair.ac.cn/#/settlement/api/key)
+ - Create a [Zidong Taichu](https://ai-maas.wair.ac.cn/) account
+ - Generate and retrieve your [API Key](https://ai-maas.wair.ac.cn/#/settlement/api/key)
-
+
- ### Step 2: Configure Taichu in LobeChat
+ ### Step 2: Configure Zidong Taichu in LobeHub
- - Go to the `Settings` interface in LobeChat
- - Find the setting for `Taichu` under `AI Service Provider`
+ - Go to the LobeHub `Settings` page
+ - Under `AI Providers`, locate the `Zidong Taichu` configuration section
-
+
- - Enter the obtained API key
- - Choose a Purple Taichu model for your AI assistant to start the conversation
+ - Paste the API Key you obtained
+ - Choose a Zidong Taichu model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Taichu's relevant
- pricing policies.
+ You may need to pay for API usage depending on the provider’s pricing policy. Please refer to Zidong Taichu’s official pricing for details.
-Now you can start conversing with the models provided by Taichu in LobeChat.
+Now you're all set to start using Zidong Taichu models for conversations in LobeHub.
diff --git a/docs/usage/providers/taichu.zh-CN.mdx b/docs/usage/providers/taichu.zh-CN.mdx
index c7149ac10c..f00ef529ec 100644
--- a/docs/usage/providers/taichu.zh-CN.mdx
+++ b/docs/usage/providers/taichu.zh-CN.mdx
@@ -1,19 +1,19 @@
---
-title: 在 LobeChat 中使用紫东太初 API Key
-description: 学习如何在 LobeChat 中配置和使用紫东太初的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用紫东太初 API Key
+description: 学习如何在 LobeHub 中配置和使用紫东太初的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 太初
- 紫东太初
- API密钥
- Web UI
---
-# 在 LobeChat 中使用紫东太初
+# 在 LobeHub 中使用紫东太初
-
+
-本文将指导你如何在 LobeChat 中使用紫东太初:
+本文将指导你如何在 LobeHub 中使用紫东太初:
### 步骤一:获取紫东太初 API 密钥
@@ -21,23 +21,23 @@ tags:
- 创建一个[紫东太初](https://ai-maas.wair.ac.cn/)账户
- 创建并获取 [API 密钥](https://ai-maas.wair.ac.cn/#/settlement/api/key)
-
+
- ### 步骤二:在 LobeChat 中配置紫东太初
+ ### 步骤二:在 LobeHub 中配置紫东太初
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`紫东太初`的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个紫东太初的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考紫东太初的相关费用政策。
-至此你已经可以在 LobeChat 中使用紫东太初提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用紫东太初提供的模型进行对话了。
diff --git a/docs/usage/providers/tencentcloud.mdx b/docs/usage/providers/tencentcloud.mdx
index 12284f8677..cee36703ea 100644
--- a/docs/usage/providers/tencentcloud.mdx
+++ b/docs/usage/providers/tencentcloud.mdx
@@ -1,49 +1,51 @@
---
-title: Using Tencent Cloud API Key in LobeChat
-description: Learn how to configure and use Tencent Cloud AI models in LobeChat, obtain an API key, and start a conversation.
+title: Using Tencent Cloud API Key in LobeHub
+description: >-
+ Learn how to configure and use Tencent Cloud AI models in LobeHub, obtain your
+ API key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Tencent Cloud
- API Key
- Web UI
---
-# Using Tencent Cloud in LobeChat
+# Using Tencent Cloud in LobeHub
-
+
-[Tencent Cloud](https://cloud.tencent.com/) is the cloud computing service brand of Tencent, specializing in providing cloud computing services for enterprises and developers. Tencent Cloud provides a series of AI large model solutions, through which AI models can be connected stably and efficiently.
+[Tencent Cloud](https://cloud.tencent.com/) is the cloud computing service brand under Tencent, offering a wide range of cloud solutions for businesses and developers. Tencent Cloud provides a suite of AI large model services that allow for stable and efficient integration with AI models.
-This document will guide you on how to connect Tencent Cloud's AI models in LobeChat:
+This guide will walk you through how to integrate Tencent Cloud's AI models into LobeHub:
- ### Step 1: Obtain the Tencent Cloud API Key
+ ### Step 1: Obtain a Tencent Cloud API Key
- - First, visit [Tencent Cloud](https://cloud.tencent.com/) and complete the registration and login.
- - Enter the Tencent Cloud Console and navigate to [Large-scale Knowledge Engine Atomic Capability](https://console.cloud.tencent.com/lkeap).
- - Activate the Large-scale Knowledge Engine, which requires real-name authentication during the activation process.
+ - First, visit [Tencent Cloud](https://cloud.tencent.com/) and complete the registration and login process.
+ - Go to the Tencent Cloud Console and navigate to the [Atomic Capabilities of Knowledge Engine](https://console.cloud.tencent.com/lkeap).
+ - Enable the Large Model Knowledge Engine. Real-name verification is required during activation.
-
+
- - In the `Access via OpenAI SDK` option, click the `Create API Key` button to create a new API Key.
- - You can view and manage the created API Keys in `API Key Management`.
- - Copy and save the created API Key.
+ - Under the `Access via OpenAI SDK` section, click the `Create API Key` button to generate a new API key.
+ - You can view and manage your API keys in the API Key Management section.
+ - Copy and securely save your newly created API key.
- ### Step 2: Configure Tencent Cloud in LobeChat
+ ### Step 2: Configure Tencent Cloud in LobeHub
- - Visit the `Application Settings` and `AI Service Provider` interface of LobeChat.
- - Find the `Tencent Cloud` settings item in the list of providers.
+ - Open LobeHub and go to App Settings > AI Service Providers.
+ - Find the Tencent Cloud option in the list of providers.
-
+
- - Open the Tencent Cloud provider and fill in the obtained API Key.
- - Select a Tencent Cloud model for your assistant to start the conversation.
+ - Enable the Tencent Cloud provider and paste in your API key.
+ - Choose a Tencent Cloud model for your assistant to start chatting.
-
+
- You may need to pay the API service provider during use, please refer to Tencent Cloud's relevant fee policy.
+ You may incur charges when using services from the API provider. Please refer to Tencent Cloud’s pricing policy for details.
-You can now use the models provided by Tencent Cloud in LobeChat to have conversations.
+That's it! You’re now ready to use Tencent Cloud’s AI models in LobeHub for conversations.
diff --git a/docs/usage/providers/tencentcloud.zh-CN.mdx b/docs/usage/providers/tencentcloud.zh-CN.mdx
index e90d39ecdd..8bad25fd87 100644
--- a/docs/usage/providers/tencentcloud.zh-CN.mdx
+++ b/docs/usage/providers/tencentcloud.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用腾讯云 API Key
-description: 学习如何在 LobeChat 中配置和使用腾讯云 AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用腾讯云 API Key
+description: 学习如何在 LobeHub 中配置和使用腾讯云 AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- 腾讯云
- API密钥
- Web UI
---
-# 在 LobeChat 中使用腾讯云
+# 在 LobeHub 中使用腾讯云
-
+
[腾讯云(Tencent Cloud)](https://cloud.tencent.com/)是腾讯公司旗下的云计算服务品牌,专门为企业和开发者提供云计算服务。腾讯云提供了一系列 AI 大模型解决方案,通过这些工具可以稳定高效接入 AI 模型。
-本文档将指导你如何在 LobeChat 中接入腾讯云的 AI 模型:
+本文档将指导你如何在 LobeHub 中接入腾讯云的 AI 模型:
### 步骤一:获取腾讯云 API 密钥
@@ -23,27 +23,27 @@ tags:
- 进入腾讯云控制台并导航至[知识引擎原子能力](https://console.cloud.tencent.com/lkeap)
- 开通大模型知识引擎,开通过程需要实名认证
-
+
- 在`使用OpenAI SDK方式接入`选项中,点击 `创建 API Key` 按钮,创建一个新的 API Key
- 在 `API key 管理` 中可以查看和管理已创建的 API Key
- 复制并保存创建好的 API Key
- ### 步骤二:在 LobeChat 中配置腾讯云
+ ### 步骤二:在 LobeHub 中配置腾讯云
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 `腾讯云` 的设置项
-
+
- 打开腾讯云服务商并填入获取的 API 密钥
- 为你的助手选择一个腾讯云模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考腾讯云的相关费用政策。
-至此你已经可以在 LobeChat 中使用腾讯云提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用腾讯云提供的模型进行对话了。
diff --git a/docs/usage/providers/togetherai.mdx b/docs/usage/providers/togetherai.mdx
index aebca3be51..82d061f314 100644
--- a/docs/usage/providers/togetherai.mdx
+++ b/docs/usage/providers/togetherai.mdx
@@ -1,50 +1,50 @@
---
-title: Using Together AI in LobeChat API Key
+title: Using the Together AI API Key in LobeHub
description: >-
- Learn how to integrate Together AI into LobeChat, obtain the API key, configure settings, and start conversations with AI models.
-
+ Learn how to configure and use the Together AI API key in LobeHub to start
+ chatting and interacting.
tags:
+ - LobeHub
- Together AI
- - API key
+ - API Key
- Web UI
---
-# Using Together AI in LobeChat
+# Using Together AI in LobeHub
-
+
-[together.ai](https://www.together.ai/) is a platform focused on the field of Artificial Intelligence Generated Content (AIGC), founded in June 2022. It is dedicated to building a cloud platform for running, training, and fine-tuning open-source models, providing scalable computing power at prices lower than mainstream vendors.
+[together.ai](https://www.together.ai/) is a platform focused on the field of generative AI (AIGC), founded in June 2022. It aims to build a cloud platform for running, training, and fine-tuning open-source models, offering scalable compute power at prices lower than mainstream providers.
-This document will guide you on how to use Together AI in LobeChat:
+This guide will walk you through how to use Together AI in LobeHub:
- ### Step 1: Obtain the API Key for Together AI
+ ### Step 1: Get Your Together AI API Key
- Visit and log in to [Together AI API](https://api.together.ai/)
- - Upon initial login, the system will automatically create an API key for you and provide a $5.0 credit
+ - Upon your first login, the system will automatically generate an API key for you and grant you a $5.00 free credit
-
+
- - If you haven't saved it, you can also view the API key at any time in the `API Key` interface under `Settings`
+ - If you didn’t save it, you can always view it later under the API Key section in the Settings menu
-
+
- ### Step 2: Configure Together AI in LobeChat
+ ### Step 2: Configure Together AI in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Find the setting for `together.ai` under `AI Service Provider`
+ - Go to the Settings page in LobeHub
+ - Under the AI Providers section, find the settings for together.ai
-
+
- - Open together.ai and enter the obtained API key
- - Choose a Together AI model for your assistant to start the conversation
+ - Enable together.ai and paste in your API key
+ - Choose a Together AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Together AI's pricing
- policy.
+ You may need to pay for API usage. Please refer to Together AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Together AI in LobeChat.
+And that’s it! You’re now ready to use Together AI models for conversations in LobeHub.
diff --git a/docs/usage/providers/togetherai.zh-CN.mdx b/docs/usage/providers/togetherai.zh-CN.mdx
index 4396f112e0..851491346e 100644
--- a/docs/usage/providers/togetherai.zh-CN.mdx
+++ b/docs/usage/providers/togetherai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Together AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Together AI 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 Together AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Together AI 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- Together AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Together AI
+# 在 LobeHub 中使用 Together AI
-
+
[together.ai](https://www.together.ai/) 是一家专注于生成式人工智能 (AIGC) 领域的平台,成立于 2022 年 6 月。 它致力于构建用于运行、训练和微调开源模型的云平台,以低于主流供应商的价格提供可扩展的计算能力。
-本文档将指导你如何在 LobeChat 中使用 Together AI:
+本文档将指导你如何在 LobeHub 中使用 Together AI:
### 步骤一:获取 Together AI 的 API 密钥
@@ -22,27 +22,27 @@ tags:
- 访问并登录 [Together AI API](https://api.together.ai/)
- 初次登录时系统会自动为你创建好 API 密钥并赠送 $5.0 的额度
-
+
- 如果你没有保存,也可以在后续任意时间,通过 `设置` 中的 `API 密钥` 界面查看
-
+
- ### 步骤二:在 LobeChat 中配置 Together AI
+ ### 步骤二:在 LobeHub 中配置 Together AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`together.ai`的设置项
-
+
- 打开 together.ai 并填入获得的 API 密钥
- 为你的助手选择一个 Together AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Together AI 的费用政策。
-至此你已经可以在 LobeChat 中使用 Together AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Together AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/upstage.mdx b/docs/usage/providers/upstage.mdx
index 0e88f4807b..d06ec1f167 100644
--- a/docs/usage/providers/upstage.mdx
+++ b/docs/usage/providers/upstage.mdx
@@ -1,47 +1,48 @@
---
-title: Using Upstage in LobeChat
-description: Learn how to integrate and utilize Upstage's language model APIs in LobeChat.
+title: Using Upstage in LobeHub
+description: >-
+ Learn how to configure and use Upstage's API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
+ - LobeHub
- Upstage
- API Key
- Web UI
---
-# Using Upstage in LobeChat
+# Using Upstage in LobeHub
-
+
-[Upstage](https://www.upstage.ai/) is a platform that offers AI models and services, focusing on applications in natural language processing and machine learning. It allows developers to access its powerful AI capabilities through APIs, supporting various tasks such as text generation and conversational systems.
+[Upstage](https://www.upstage.ai/) is a platform that provides AI models and services, with a focus on natural language processing and machine learning applications. It allows developers to integrate powerful AI capabilities via API, supporting a variety of tasks such as text generation, conversational systems, and more.
-This article will guide you on how to use Upstage in LobeChat.
+This guide will walk you through how to use Upstage within LobeHub.
- ### Step 1: Obtain an Upstage API Key
+ ### Step 1: Obtain an API Key from Upstage
- - Register and log in to the [Upstage Console](https://console.upstage.ai/home)
- - Navigate to the `API Keys` page
+ - Sign up and log in to the [Upstage Console](https://console.upstage.ai/home)
+ - Navigate to the `API Keys` section
- Create a new API key
- - Copy and save the generated API key
+ - Copy and securely save the generated API key
-
+
- ### Step 2: Configure Upstage in LobeChat
+ ### Step 2: Configure Upstage in LobeHub
- - Access the `Settings` interface in LobeChat
- - Locate the `Upstage` settings under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the `Upstage` configuration section
-
+
- - Enter the obtained API key
- - Select an Upstage model for your AI assistant to start the conversation
+ - Paste the API key you obtained earlier
+ - Choose an Upstage model for your AI assistant to start chatting
-
+
- Please note that you may need to pay the API service provider for usage. Refer to Upstage's
- pricing policy for more information.
+ You may need to pay for API usage depending on your usage level. Please refer to Upstage’s pricing policy for more details.
-You can now use the models provided by Upstage for conversations in LobeChat.
+And that’s it! You’re now ready to use Upstage’s models for conversations in LobeHub.
diff --git a/docs/usage/providers/upstage.zh-CN.mdx b/docs/usage/providers/upstage.zh-CN.mdx
index c8a251ee71..66c03554c9 100644
--- a/docs/usage/providers/upstage.zh-CN.mdx
+++ b/docs/usage/providers/upstage.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Upstage
-description: 学习如何在 LobeChat 中配置和使用 Upstage 的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 Upstage
+description: 学习如何在 LobeHub 中配置和使用 Upstage 的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- Upstage
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Upstage
+# 在 LobeHub 中使用 Upstage
-
+
[Upstage](https://www.upstage.ai/) 是一个提供 AI 模型和服务的平台,专注于自然语言处理和机器学习应用。它允许开发者通过 API 接入其强大的 AI 功能,支持多种任务,如文本生成、对话系统等。
-本文将指导你如何在 LobeChat 中使用 Upstage。
+本文将指导你如何在 LobeHub 中使用 Upstage。
### 步骤一:获得 Upstage 的 API Key
@@ -24,23 +24,23 @@ tags:
- 创建一个新的 API 密钥
- 复制并保存生成的 API 密钥
-
+
- ### 步骤二:在 LobeChat 中配置 Upstage
+ ### 步骤二:在 LobeHub 中配置 Upstage
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `Upstage` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 Upstage 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Upstage 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Upstage 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Upstage 提供的模型进行对话了。
diff --git a/docs/usage/providers/vercel-ai-gateway.mdx b/docs/usage/providers/vercel-ai-gateway.mdx
index 0454edc4b4..98fc678d13 100644
--- a/docs/usage/providers/vercel-ai-gateway.mdx
+++ b/docs/usage/providers/vercel-ai-gateway.mdx
@@ -1,59 +1,57 @@
---
-title: Using Vercel AI Gateway in LobeChat
-description: >-
- Learn how to integrate and utilize Vercel AI Gateway's unified API in LobeChat.
-
+title: Using Vercel AI Gateway in LobeHub
+description: Learn how to integrate and use the unified API of Vercel AI Gateway in LobeHub
tags:
- - LobeChat
+ - LobeHub
- Vercel AI Gateway
- API Key
- - Web UI
+ - Web Interface
---
-# Using Vercel AI Gateway in LobeChat
+# Using Vercel AI Gateway in LobeHub
-[Vercel AI Gateway](https://vercel.com/ai-gateway) is a unified API that provides access to 100+ AI models through a single endpoint. It offers features like budget management, usage monitoring, load balancing, and fallback handling.
+[Vercel AI Gateway](https://vercel.com/ai-gateway) is a unified API that provides access to over 100 AI models through a single endpoint. It offers features such as budget management, usage monitoring, load balancing, and fallback handling.
-This article will guide you on how to use Vercel AI Gateway in LobeChat.
+This guide will walk you through how to use Vercel AI Gateway within LobeHub.
### Step 1: Create an API Key in Vercel AI Gateway
- - Go to [Vercel Dashboard](https://vercel.com/dashboard)
- - Click on the **AI Gateway** tab on the left side
- - Click on **API keys** in the left sidebar
- - Click **Create key** and then **Create key** in the dialog to complete
+ - Go to the [Vercel Dashboard](https://vercel.com/dashboard)
+ - Click on the **AI Gateway** tab on the left sidebar
+ - Select **API Keys** from the sidebar
+ - Click **Create Key**, then confirm by clicking **Create Key** in the dialog
- ### Step 2: Configure Vercel AI Gateway in LobeChat
+ ### Step 2: Configure Vercel AI Gateway in LobeHub
- - Go to the `Settings` page in LobeChat
- - Under `AI Service Provider`, find the setting for `Vercel AI Gateway`
- - Enter the API Key you obtained
- - Choose a model from Vercel AI Gateway for your AI assistant to start the conversation
+ - Navigate to the `Settings` page in LobeHub
+ - Under `AI Service Provider`, find the `Vercel AI Gateway` section
+ - Enter the API key you obtained
+ - Choose a model from Vercel AI Gateway and start chatting with the AI assistant
- During usage, you may need to pay the API service provider, so please refer to Vercel AI Gateway's
- [pricing policy](https://vercel.com/docs/ai-gateway/models).
+ You may incur charges from the API service provider during usage. Please refer to the
+ [Vercel AI Gateway pricing policy](https://vercel.com/docs/ai-gateway/models) for details.
-At this point, you can start chatting using the models provided by Vercel AI Gateway in LobeChat.
+That's it! You can now start chatting in LobeHub using models provided by Vercel AI Gateway.
## Model Selection
-Vercel AI Gateway supports various model providers including:
+Vercel AI Gateway supports a variety of model providers, including:
-- **OpenAI**: `openai/gpt-4o`, `openai/gpt-4o-mini`, `openai/o1`, etc.
-- **Anthropic**: `anthropic/claude-3-5-sonnet`, `anthropic/claude-3-opus`, etc.
-- **Google**: `google/gemini-2.5-pro`, `google/gemini-2.0-flash`, etc.
-- **DeepSeek**: `deepseek/deepseek-chat`, `deepseek/deepseek-reasoner`, etc.
-- And many more...
+- **OpenAI**: `openai/gpt-4o`, `openai/gpt-4o-mini`, `openai/o1`, and more
+- **Anthropic**: `anthropic/claude-3-5-sonnet`, `anthropic/claude-3-opus`, and more
+- **Google**: `google/gemini-2.5-pro`, `google/gemini-2.0-flash`, and more
+- **DeepSeek**: `deepseek/deepseek-chat`, `deepseek/deepseek-reasoner`, and more
+- And many others...
-For a complete list of supported models, visit [Vercel AI Gateway Models](https://vercel.com/ai-gateway/models).
+To view the full list of supported models, visit the [Vercel AI Gateway Models](https://vercel.com/ai-gateway/models) page.
## API Configuration
-Vercel AI Gateway uses OpenAI-compatible API format. The base URL is:
+Vercel AI Gateway uses an OpenAI-compatible API format. The base URL is:
```
https://ai-gateway.vercel.sh/v1
diff --git a/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx b/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx
index 9d7385fd09..752be46294 100644
--- a/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx
+++ b/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx
@@ -1,19 +1,18 @@
---
-title: 在 LobeChat 中使用 Vercel AI Gateway
-description: 了解如何在 LobeChat 中集成和使用 Vercel AI Gateway 的统一 API
-
+title: 在 LobeHub 中使用 Vercel AI Gateway
+description: 了解如何在 LobeHub 中集成和使用 Vercel AI Gateway 的统一 API
tags:
- - LobeChat
+ - LobeHub
- Vercel AI Gateway
- API 密钥
- Web 界面
---
-# 在 LobeChat 中使用 Vercel AI Gateway
+# 在 LobeHub 中使用 Vercel AI Gateway
[Vercel AI Gateway](https://vercel.com/ai-gateway) 是一个统一的 API,通过单一端点提供对 100+ AI 模型的访问。它提供预算管理、使用监控、负载均衡和回退处理等功能。
-本文将指导您如何在 LobeChat 中使用 Vercel AI Gateway。
+本文将指导您如何在 LobeHub 中使用 Vercel AI Gateway。
### 第一步:在 Vercel AI Gateway 中创建 API 密钥
@@ -23,9 +22,9 @@ tags:
- 点击左侧边栏的 **API 密钥**
- 点击 **创建密钥**,然后在对话框中点击 **创建密钥** 完成创建
- ### 第二步:在 LobeChat 中配置 Vercel AI Gateway
+ ### 第二步:在 LobeHub 中配置 Vercel AI Gateway
- - 进入 LobeChat 的 `设置` 页面
+ - 进入 LobeHub 的 `设置` 页面
- 在 `AI 服务提供商` 下,找到 `Vercel AI Gateway` 设置
- 输入您获得的 API 密钥
- 选择 Vercel AI Gateway 的模型,开始与 AI 助手对话
@@ -36,7 +35,7 @@ tags:
-至此,您可以在 LobeChat 中使用 Vercel AI Gateway 提供的模型开始聊天了。
+至此,您可以在 LobeHub 中使用 Vercel AI Gateway 提供的模型开始聊天了。
## 模型选择
diff --git a/docs/usage/providers/vertexai.mdx b/docs/usage/providers/vertexai.mdx
index f1caf944e9..c02c0372fd 100644
--- a/docs/usage/providers/vertexai.mdx
+++ b/docs/usage/providers/vertexai.mdx
@@ -1,59 +1,61 @@
---
-title: Using Vertex AI API Key in LobeChat
-description: Learn how to configure and use Vertex AI models in LobeChat, get an API key, and start a conversation.
+title: Using Vertex AI API Key in LobeHub
+description: >-
+ Learn how to configure and use Vertex AI models in LobeHub, obtain your API
+ key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Vertex AI
- API Key
- Web UI
---
-# Using Vertex AI in LobeChat
+# Using Vertex AI in LobeHub
-
+
-[Vertex AI](https://cloud.google.com/vertex-ai) is a fully managed, integrated AI development platform from Google Cloud, designed for building and deploying generative AI. It provides easy access to Vertex AI Studio, Agent Builder, and over 160 foundational models for AI development.
+[Vertex AI](https://cloud.google.com/vertex-ai) is a fully managed, integrated AI development platform from Google Cloud, designed for building and deploying generative AI applications. It provides easy access to Vertex AI Studio, Agent Builder, and over 160 foundation models for your AI development needs.
-This document will guide you on how to connect Vertex AI models in LobeChat:
+This guide will walk you through how to integrate Vertex AI models into LobeHub:
- ### Step 1: Prepare a Vertex AI Project
+ ### Step 1: Set Up Your Vertex AI Project
- - First, visit [Google Cloud](https://console.cloud.google.com/) and complete the registration and login process.
+ - First, visit the [Google Cloud Console](https://console.cloud.google.com/) and sign in or create an account.
- Create a new Google Cloud project or select an existing one.
- - Go to the [Vertex AI Console](https://console.cloud.google.com/vertex-ai).
- - Ensure that the Vertex AI API service is enabled for the project.
+ - Navigate to the [Vertex AI Console](https://console.cloud.google.com/vertex-ai).
+ - Ensure that the Vertex AI API is enabled for your project.
-
+
- ### Step 2: Set Up API Access Permissions
+ ### Step 2: Configure API Access
- - Go to the Google Cloud [IAM Management page](https://console.cloud.google.com/iam-admin/serviceaccounts) and navigate to `Service Accounts`.
- - Create a new service account and assign a role permission to it, such as `Vertex AI User`.
+ - Go to the [IAM & Admin Service Accounts page](https://console.cloud.google.com/iam-admin/serviceaccounts) in Google Cloud and navigate to the "Service Accounts" section.
+ - Create a new service account and assign it a role such as Vertex AI User.
-
+
- - On the service account management page, find the service account you just created, click `Keys`, and create a new JSON format key.
- - After successful creation, the key file will be automatically saved to your computer in JSON format. Please keep it safe.
+ - In the service account management page, find the newly created account, click on "Keys", and generate a new key in JSON format.
+ - Once created, the key file will be automatically downloaded to your computer. Be sure to store it securely.
-
+
- ### Step 3: Configure Vertex AI in LobeChat
+ ### Step 3: Configure Vertex AI in LobeHub
- - Visit the `App Settings` and then the `AI Service Provider` interface in LobeChat.
- - Find the settings item for `Vertex AI` in the list of providers.
+ - Open LobeHub and go to the App Settings > AI Service Providers section.
+ - Find the Vertex AI option in the list of providers.
-
+
- - Open the Vertex AI service provider settings.
- - Fill the entire content of the JSON format key you just obtained into the API Key field.
- - Select a Vertex AI model for your assistant to start the conversation.
+ - Open the Vertex AI provider settings.
+ - Paste the entire contents of the JSON key file into the API Key field.
+ - Choose a Vertex AI model for your assistant to start chatting.
-
+
- You may need to pay the API service provider during usage. Please refer to Google Cloud's relevant fee policies.
+ You may incur charges when using the API services. Please refer to Google Cloud’s pricing documentation for more details.
-Now you can use the models provided by Vertex AI for conversations in LobeChat.
+You're all set! You can now start using Vertex AI models in LobeHub for your conversations.
diff --git a/docs/usage/providers/vertexai.zh-CN.mdx b/docs/usage/providers/vertexai.zh-CN.mdx
index 41c8677421..1e21e656e5 100644
--- a/docs/usage/providers/vertexai.zh-CN.mdx
+++ b/docs/usage/providers/vertexai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 Vertex AI API Key
-description: 学习如何在 LobeChat 中配置和使用 Vertex AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 Vertex AI API Key
+description: 学习如何在 LobeHub 中配置和使用 Vertex AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- Vertex AI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 Vertex AI
+# 在 LobeHub 中使用 Vertex AI
-
+
[Vertex AI](https://cloud.google.com/vertex-ai) 是 Google Cloud 的一款全面托管、集成的 AI 开发平台,旨在构建与应用生成式 AI。你可轻松访问 Vertex AI Studio、Agent Builder 以及超过 160 种基础模型,进行 AI 开发。
-本文档将指导你如何在 LobeChat 中接入 Vertex AI 的模型:
+本文档将指导你如何在 LobeHub 中接入 Vertex AI 的模型:
### 步骤一:准备 Vertex AI 项目
@@ -24,36 +24,36 @@ tags:
- 进入 [Vertex AI 控制台](https://console.cloud.google.com/vertex-ai)
- 确认该项目已开通 Vertex AI API 服务
-
+
### 步骤二:设置 API 访问权限
- 进入 Google Cloud [IAM 管理页面](https://console.cloud.google.com/iam-admin/serviceaccounts),并导航至`服务账号`
- 创建一个新的服务账号,并为其分配一个角色权限,例如 `Vertex AI User`
-
+
- 在服务账号管理页面找到刚刚创建的服务账号,点击`密钥`并创建一个新的 JSON 格式密钥
- 创建成功后,密钥文件将会以 JSON 文件的格式自动保存到你的电脑上,请妥善保存
-
+
- ### 步骤三:在 LobeChat 中配置 Vertex AI
+ ### 步骤三:在 LobeHub 中配置 Vertex AI
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 `Vertex AI` 的设置项
-
+
- 打开 Vertex AI 服务供应商
- 将刚刚获取的 JSON 格式的全部内容填入 API Key 字段中
- 为你的助手选择一个 Vertex AI 模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 Google Cloud 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 Vertex AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 Vertex AI 提供的模型进行对话了。
diff --git a/docs/usage/providers/vllm.mdx b/docs/usage/providers/vllm.mdx
index 15309e6706..e3c8c827b9 100644
--- a/docs/usage/providers/vllm.mdx
+++ b/docs/usage/providers/vllm.mdx
@@ -1,25 +1,27 @@
---
-title: Using vLLM API Key in LobeChat
-description: Learn how to configure and use the vLLM language model in LobeChat, obtain an API key, and start a conversation.
+title: Using vLLM API Key in LobeHub
+description: >-
+ Learn how to configure and use vLLM language models in LobeHub, obtain an API
+ key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- vLLM
- API Key
- Web UI
---
-# Using vLLM in LobeChat
+# Using vLLM in LobeHub
-
+
-[vLLM](https://github.com/vllm-project/vllm) is an open-source local large language model (LLM) deployment tool that allows users to efficiently run LLM models on local devices and provides an OpenAI API-compatible service interface.
+[vLLM](https://github.com/vllm-project/vllm) is an open-source local deployment tool for large language models (LLMs). It allows users to efficiently run LLMs on their local machines and provides an OpenAI-compatible API interface.
-This document will guide you on how to use vLLM in LobeChat:
+This guide will walk you through how to use vLLM in LobeHub:
- ### Step 1: Preparation
+ ### Step 1: Prerequisites
- vLLM has certain requirements for hardware and software environments. Be sure to configure according to the following requirements:
+ vLLM has specific hardware and software requirements. Please ensure your environment meets the following:
| Hardware Requirements | |
| --------------------- | ----------------------------------------------------------------------- |
@@ -33,7 +35,7 @@ This document will guide you on how to use vLLM in LobeChat:
### Step 2: Install vLLM
- If you are using an NVIDIA GPU, you can directly install vLLM using `pip`. However, it is recommended to use `uv` here, which is a very fast Python environment manager, to create and manage the Python environment. Please follow the [documentation](https://docs.astral.sh/uv/#getting-started) to install uv. After installing uv, you can use the following command to create a new Python environment and install vLLM:
+ If you're using an NVIDIA GPU, you can install vLLM directly via `pip`. However, we recommend using `uv`, a fast Python environment manager, to create and manage your Python environments. Follow the [official guide](https://docs.astral.sh/uv/#getting-started) to install uv. Once installed, you can create a new Python environment and install vLLM with the following commands:
```shell
uv venv myenv --python 3.12 --seed
@@ -41,13 +43,13 @@ This document will guide you on how to use vLLM in LobeChat:
uv pip install vllm
```
- Another method is to use `uv run` with the `--with [dependency]` option, which allows you to run commands such as `vllm serve` without creating an environment:
+ Alternatively, you can use `uv run` with the `--with [dependency]` option to run commands like `vllm serve` without creating a dedicated environment:
```shell
uv run --with vllm vllm --help
```
- You can also use [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/getting-started.html) to create and manage your Python environment.
+ You can also use [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/getting-started.html) to manage your Python environment:
```shell
conda create -n myenv python=3.12 -y
@@ -56,46 +58,43 @@ This document will guide you on how to use vLLM in LobeChat:
```
- For non-CUDA platforms, please refer to the [official
- documentation](https://docs.vllm.ai/en/latest/getting_started/installation/index.html#installation-index)
- to learn how to install vLLM.
+ For non-CUDA platforms, please refer to the [official documentation](https://docs.vllm.ai/en/latest/getting_started/installation/index.html#installation-index) for installation instructions.
- ### Step 3: Start Local Service
+ ### Step 3: Start the Local Server
- vLLM can be deployed as an OpenAI API protocol-compatible server. By default, it will start the server at `http://localhost:8000`. You can specify the address using the `--host` and `--port` parameters. The server currently runs only one model at a time.
+ vLLM can be deployed as a server compatible with the OpenAI API protocol. By default, it starts at `http://localhost:8000`. You can customize the address using the `--host` and `--port` parameters. Note that the server can only run one model at a time.
- The following command will start a vLLM server and run the `Qwen2.5-1.5B-Instruct` model:
+ The following command starts a vLLM server running the `Qwen2.5-1.5B-Instruct` model:
```shell
vllm serve Qwen/Qwen2.5-1.5B-Instruct
```
- You can enable the server to check the API key in the header by passing the parameter `--api-key` or the environment variable `VLLM_API_KEY`. If not set, no API Key is required to access.
+ To enable API key authentication, you can pass the `--api-key` parameter or set the `VLLM_API_KEY` environment variable. If not set, the server will be accessible without an API key.
- For more detailed vLLM server configuration, please refer to the [official
- documentation](https://docs.vllm.ai/en/latest/).
+ For more detailed server configuration options, refer to the [official vLLM documentation](https://docs.vllm.ai/en/latest/).
- ### Step 4: Configure vLLM in LobeChat
+ ### Step 4: Configure vLLM in LobeHub
- - Access the `Application Settings` interface of LobeChat.
- - Find the `vLLM` settings item under `AI Service Provider`.
+ - Open the `App Settings` panel in LobeHub
+ - Under `AI Providers`, locate the `vLLM` configuration section
-
+
- - Open the vLLM service provider and fill in the API service address and API Key.
+ - Enable the vLLM provider and enter the API service URL and API key
- * If your vLLM is not configured with an API Key, please leave the API Key blank. \* If your vLLM
- is running locally, please make sure to turn on `Client Request Mode`.
+ \* If your vLLM server is not configured with an API key, leave the API key field blank.\
+ \* If your vLLM server is running locally, make sure to enable "Client Request Mode".
- - Add the model you are running to the model list below.
- - Select a vLLM model to run for your assistant and start the conversation.
+ - Add the model you are running to the model list below
+ - Assign the vLLM model to your assistant to start chatting
-
+
-Now you can use the models provided by vLLM in LobeChat to have conversations.
+You're now ready to use vLLM-powered models in LobeHub for conversations.
diff --git a/docs/usage/providers/vllm.zh-CN.mdx b/docs/usage/providers/vllm.zh-CN.mdx
index eebcbf3eb0..982873bc83 100644
--- a/docs/usage/providers/vllm.zh-CN.mdx
+++ b/docs/usage/providers/vllm.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 vLLM API Key
-description: 学习如何在 LobeChat 中配置和使用 vLLM 语言模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用 vLLM API Key
+description: 学习如何在 LobeHub 中配置和使用 vLLM 语言模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- vLLM
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 vLLM
+# 在 LobeHub 中使用 vLLM
-
+
[vLLM](https://github.com/vllm-project/vllm)是一个开源的本地大型语言模型(LLM)部署工具,允许用户在本地设备上高效运行 LLM 模型,并提供兼容 OpenAI API 的服务接口。
-本文档将指导你如何在 LobeChat 中使用 vLLM:
+本文档将指导你如何在 LobeHub 中使用 vLLM:
### 步骤一:准备工作
@@ -77,12 +77,12 @@ tags:
更详细的 vLLM 服务器配置,请参考[官方文档](https://docs.vllm.ai/en/latest/)
- ### 步骤四:在 LobeChat 中配置 vLLM
+ ### 步骤四:在 LobeHub 中配置 vLLM
- - 访问 LobeChat 的 `应用设置`界面
+ - 访问 LobeHub 的 `应用设置`界面
- 在 `AI 服务商` 下找到 `vLLM` 的设置项
-
+
- 打开 vLLM 服务商并填入 API 服务地址以及 API Key
@@ -94,7 +94,7 @@ tags:
- 在下方的模型列表中添加你运行的模型
- 为你的助手选择一个 vLLM 运行的模型即可开始对话
-
+
-至此你已经可以在 LobeChat 中使用 vLLM 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 vLLM 提供的模型进行对话了。
diff --git a/docs/usage/providers/volcengine.mdx b/docs/usage/providers/volcengine.mdx
index e7d61eb306..7674471fac 100644
--- a/docs/usage/providers/volcengine.mdx
+++ b/docs/usage/providers/volcengine.mdx
@@ -1,49 +1,50 @@
---
-title: Using the Volcano Engine API Key in LobeChat
-description: Learn how to configure and use the Volcano Engine AI model in LobeChat, obtain API keys, and start conversations.
+title: Using Volcengine API Key in LobeHub
+description: >-
+ Learn how to configure and use Volcengine AI models in LobeHub, obtain your
+ API key, and start chatting.
tags:
- - LobeChat
+ - LobeHub
- Volcengine
- Doubao
- API Key
- Web UI
---
-# Using Volcengine in LobeChat
+# Using Volcengine in LobeHub
-
+
-[Volcengine](https://www.volcengine.com/) is a cloud service platform under ByteDance that provides large language model (LLM) services through "Volcano Ark," supporting multiple mainstream models such as Baichuan Intelligent, Mobvoi, and more.
+[Volcengine](https://www.volcengine.com/) is a cloud service platform under ByteDance. Through its "Volcano Ark" platform, it provides large language model (LLM) services, supporting several mainstream models such as Baichuan Intelligence and Mobvoi.
-This document will guide you on how to use Volcengine in LobeChat:
+This guide will walk you through how to use Volcengine in LobeHub:
- ### Step 1: Obtain the Volcengine API Key
+ ### Step 1: Obtain a Volcengine API Key
- First, visit the [Volcengine official website](https://www.volcengine.com/) and complete the registration and login process.
- - Access the Volcengine console and navigate to [Volcano Ark](https://console.volcengine.com/ark/).
+ - Navigate to the [Volcano Ark Console](https://console.volcengine.com/ark/)
-
+
- - Go to the `API Key Management` menu and click `Create API Key`.
- - Copy and save the created API Key.
+ - Go to the `API Key Management` section and click `Create API Key`
+ - Copy and securely save your newly created API Key
- ### Step 2: Configure Volcengine in LobeChat
+ ### Step 2: Configure Volcengine in LobeHub
- - Navigate to the `Application Settings` page in LobeChat and select `AI Service Providers`.
- - Find the `Volcengine` option in the provider list.
+ - Open the `App Settings` in LobeHub and go to the `AI Service Providers` section
+ - Find the `Volcengine` option in the list of providers
-
+
- - Open the Volcengine service provider and enter the obtained API Key.
- - Choose a Volcengine model for your assistant to start the conversation.
+ - Enable the Volcengine provider and paste in your API Key
+ - Choose a Volcengine model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, so please refer to Volcengine's
- pricing policy.
+ You may incur charges when using the API services. Please refer to Volcengine’s pricing policy for details.
-You can now use the models provided by Volcengine for conversations in LobeChat.
+You're now ready to start chatting with models powered by Volcengine in LobeHub.
diff --git a/docs/usage/providers/volcengine.zh-CN.mdx b/docs/usage/providers/volcengine.zh-CN.mdx
index 9cc835ecd3..fadfc572aa 100644
--- a/docs/usage/providers/volcengine.zh-CN.mdx
+++ b/docs/usage/providers/volcengine.zh-CN.mdx
@@ -1,21 +1,21 @@
---
-title: 在 LobeChat 中使用火山引擎 API Key
-description: 学习如何在 LobeChat 中配置和使用火山引擎 AI 模型,获取 API 密钥并开始对话。
+title: 在 LobeHub 中使用火山引擎 API Key
+description: 学习如何在 LobeHub 中配置和使用火山引擎 AI 模型,获取 API 密钥并开始对话。
tags:
- - LobeChat
+ - LobeHub
- 火山引擎
- 豆包
- API密钥
- Web UI
---
-# 在 LobeChat 中使用火山引擎
+# 在 LobeHub 中使用火山引擎
-
+
[火山引擎](https://www.volcengine.com/)是字节跳动旗下的云服务平台,通过 "火山方舟" 提供大型语言模型 (LLM) 服务,支持多个主流模型如百川智能、Mobvoi 等。
-本文档将指导你如何在 LobeChat 中使用火山引擎:
+本文档将指导你如何在 LobeHub 中使用火山引擎:
### 步骤一:获取火山引擎 API 密钥
@@ -23,26 +23,26 @@ tags:
- 首先,访问[火山引擎官网](https://www.volcengine.com/)并完成注册登录
- 进入火山引擎控制台并导航至[火山方舟](https://console.volcengine.com/ark/)
-
+
- 进入 `API key 管理` 菜单,并点击 `创建 API Key`
- 复制并保存创建好的 API Key
- ### 步骤二:在 LobeChat 中配置火山引擎
+ ### 步骤二:在 LobeHub 中配置火山引擎
- - 访问 LobeChat 的 `应用设置` 的 `AI 服务供应商` 界面
+ - 访问 LobeHub 的 `应用设置` 的 `AI 服务供应商` 界面
- 在供应商列表中找到 `火山引擎` 的设置项
-
+
- 打开火山引擎服务商并填入获取的 API 密钥
- 为你的助手选择一个火山引擎模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考火山引擎的相关费用政策。
-至此你已经可以在 LobeChat 中使用火山引擎提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用火山引擎提供的模型进行对话了。
diff --git a/docs/usage/providers/wenxin.mdx b/docs/usage/providers/wenxin.mdx
index 4a461dc07b..253cb5e8ec 100644
--- a/docs/usage/providers/wenxin.mdx
+++ b/docs/usage/providers/wenxin.mdx
@@ -1,59 +1,58 @@
---
-title: Using Wenxin Qianfan in LobeChat
+title: Using Wenxin Qianfan in LobeHub
description: >-
- Learn how to integrate and utilize Wenxin Qianfan's language model APIs in LobeChat.
-
+ Learn how to configure and use the Wenxin Qianfan API Key in LobeHub to start
+ conversations and interactions.
tags:
- - LobeChat
- - 百度
- - 文心千帆
- - API密钥
+ - LobeHub
+ - Baidu
+ - Wenxin Qianfan
+ - API Key
- Web UI
---
-# Using Wenxin Qianfan in LobeChat
+# Using Wenxin Qianfan in LobeHub
-
+
-[Wenxin Qianfan](https://qianfan.cloud.baidu.com/) is an artificial intelligence large language model platform launched by Baidu, supporting a variety of application scenarios, including literary creation, commercial copywriting, and mathematical logic reasoning. The platform features deep semantic understanding and generation capabilities across modalities and languages, and it is widely utilized in fields such as search Q\&A, content creation, and smart office applications.
+[Wenxin Qianfan](https://qianfan.cloud.baidu.com/) is a large language model platform developed by Baidu. It supports a wide range of applications, including creative writing, business copy generation, and mathematical logic reasoning. The platform features deep semantic understanding and generation across modalities and languages, and is widely used in search Q\&A, content creation, and intelligent office scenarios.
-This article will guide you on how to use Wenxin Qianfan in LobeChat.
+This guide will walk you through how to use Wenxin Qianfan in LobeHub.
- ### Step 1: Obtain the Wenxin Qianfan API Key
+ ### Step 1: Obtain a Wenxin Qianfan API Key
- Register and log in to the [Baidu AI Cloud Console](https://console.bce.baidu.com/)
- Navigate to `Baidu AI Cloud Qianfan ModelBuilder`
- - Select `API Key` from the left menu
+ - In the left-hand menu, select `API Key`
-
+
- - Click `Create API Key`
- - In `Service`, select `Qianfan ModelBuilder`
- - In `Resource`, choose `All Resources`
+ - Click "Create API Key"
+ - Under `Service`, select `Qianfan ModelBuilder`
+ - Under `Resource`, choose `All Resources`
- Click the `Confirm` button
- - Copy the `API Key` and keep it safe
+ - Copy the generated `API Key` and store it securely
-
+
-
+
- ### Step 2: Configure Wenxin Qianfan in LobeChat
+ ### Step 2: Configure Wenxin Qianfan in LobeHub
- - Go to the `Settings` page of LobeChat
- - Under `AI Service Provider`, find the `Wenxin Qianfan` settings
+ - Open the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the configuration section for `Wenxin Qianfan`
-
+
- - Enter the obtained `API Key`
- - Select a Wenxin Qianfan model for your AI assistant, and you're ready to start chatting!
+ - Paste the API Key you obtained earlier
+ - Choose a Wenxin Qianfan model for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to Wenxin Qianfan's
- relevant fee policy.
+ You may incur charges from the API service provider during usage. Please refer to Wenxin Qianfan’s pricing policy for details.
-You can now use the models provided by Wenxin Qianfan for conversations in LobeChat.
+Now you're all set to start using Wenxin Qianfan models for conversations in LobeHub.
diff --git a/docs/usage/providers/wenxin.zh-CN.mdx b/docs/usage/providers/wenxin.zh-CN.mdx
index 4816474c99..7a0901c343 100644
--- a/docs/usage/providers/wenxin.zh-CN.mdx
+++ b/docs/usage/providers/wenxin.zh-CN.mdx
@@ -1,21 +1,21 @@
---
-title: 在 LobeChat 中使用文心千帆
-description: 学习如何在 LobeChat 中配置和使用文心千帆的API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用文心千帆
+description: 学习如何在 LobeHub 中配置和使用文心千帆的API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- 百度
- 文心千帆
- API密钥
- Web UI
---
-# 在 LobeChat 中使用文心千帆
+# 在 LobeHub 中使用文心千帆
-
+
[文心千帆](https://qianfan.cloud.baidu.com/)是百度推出的一个人工智能大语言模型平台,支持多种应用场景,包括文学创作、商业文案生成、数理逻辑推算等。该平台具备跨模态、跨语言的深度语义理解与生成能力,广泛应用于搜索问答、内容创作和智能办公等领域。
-本文将指导你如何在 LobeChat 中使用文心千帆。
+本文将指导你如何在 LobeHub 中使用文心千帆。
### 步骤一:获得文心千帆的 API Key
@@ -24,7 +24,7 @@ tags:
- 进入 `百度智能云千帆 ModelBuilder`
- 在左侧菜单中选择 `API Key`
-
+
- 点击创建 API Key
- 在 `服务` 中选择 `千帆ModelBuilder`
@@ -32,25 +32,25 @@ tags:
- 点击 `确定` 按钮
- 复制 `API Key` 并妥善保存
-
+
-
+
- ### 步骤二:在 LobeChat 中配置文心千帆
+ ### 步骤二:在 LobeHub 中配置文心千帆
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `文心千帆` 的设置项
-
+
- 填入获得的 `API Key`
- 为你的 AI 助手选择一个文心千帆的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考文心千帆的相关费用政策。
-至此你已经可以在 LobeChat 中使用文心千帆提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用文心千帆提供的模型进行对话了。
diff --git a/docs/usage/providers/xai.mdx b/docs/usage/providers/xai.mdx
index 6a3607d813..221b3ac419 100644
--- a/docs/usage/providers/xai.mdx
+++ b/docs/usage/providers/xai.mdx
@@ -1,53 +1,51 @@
---
-title: Using xAI in LobeChat
+title: Using xAI in LobeHub
description: >-
- Learn how to configure and use xAI's API Key in LobeChat to start conversations and interactions.
-
+ Learn how to configure and use xAI's API Key in LobeHub to start chatting and
+ interacting.
tags:
- - LobeChat
+ - LobeHub
- xAI
- API Key
- Web UI
---
-# Using xAI in LobeChat
+# Using xAI in LobeHub
-
+
-[xAI](https://x.ai/) is an artificial intelligence company founded by Elon Musk in 2023, aimed at exploring and understanding the true nature of the universe. The company's mission is to solve complex scientific and mathematical problems using AI technology and to advance the field of artificial intelligence.
+[xAI](https://x.ai/) is an artificial intelligence company founded by Elon Musk in 2023, with the mission of exploring and understanding the true nature of the universe. The company aims to solve complex scientific and mathematical problems and advance the development of AI technologies.
-This article will guide you on how to use xAI in LobeChat.
+This guide will walk you through how to use xAI within LobeHub.
- ### Step 1: Obtain an API Key from xAI
+ ### Step 1: Obtain an xAI API Key
- - Register and login to the [xAI console](https://console.x.ai/)
- - Create an API token
- - Copy and save the API token
+ - Sign up and log in to the [xAI Console](https://console.x.ai/)
+ - Create a new API Token
+ - Copy and securely save your API Token
-
+
- Make sure to securely save the API token displayed in the popup; it only appears once. If you
- accidentally lose it, you will need to create a new API token.
+ Make sure to save the API token shown in the popup — it will only be displayed once. If you lose it, you’ll need to generate a new one.
- ### Step 2: Configure xAI in LobeChat
+ ### Step 2: Configure xAI in LobeHub
- - Go to the `Settings` menu in LobeChat
- - Locate the `xAI` settings under `AI Service Provider`
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, locate the `xAI` configuration section
-
+
- - Enter the API key you obtained
- - Select an xAI model for your AI assistant to start a conversation
+ - Paste the API Key you obtained
+ - Choose an xAI model for your AI assistant to start chatting
-
+
- During use, you may need to pay the API service provider, so please refer to xAI's relevant
- pricing policies.
+ Please note that usage may incur charges from the API provider. Refer to xAI’s pricing policy for more details.
-You are now ready to engage in conversations using the models provided by xAI in LobeChat.
+You’re now ready to start using xAI-powered models in LobeHub for conversations and interactions.
diff --git a/docs/usage/providers/xai.zh-CN.mdx b/docs/usage/providers/xai.zh-CN.mdx
index 6edc911810..bfeb3c8aed 100644
--- a/docs/usage/providers/xai.zh-CN.mdx
+++ b/docs/usage/providers/xai.zh-CN.mdx
@@ -1,20 +1,20 @@
---
-title: 在 LobeChat 中使用 xAI
-description: 学习如何在 LobeChat 中配置和使用 xAI 的 API Key,以便开始对话和交互。
+title: 在 LobeHub 中使用 xAI
+description: 学习如何在 LobeHub 中配置和使用 xAI 的 API Key,以便开始对话和交互。
tags:
- - LobeChat
+ - LobeHub
- xAI
- API密钥
- Web UI
---
-# 在 LobeChat 中使用 xAI
+# 在 LobeHub 中使用 xAI
-
+
[xAI](https://x.ai/) 是由埃隆・马斯克于 2023 年成立的一家人工智能公司,旨在探索和理解宇宙的真实本质。该公司的目标是通过人工智能技术解决复杂的科学和数学问题,并推动人工智能的发展。
-本文将指导你如何在 LobeChat 中使用 xAI。
+本文将指导你如何在 LobeHub 中使用 xAI。
### 步骤一:获取 xAI 的 API 密钥
@@ -23,27 +23,27 @@ tags:
- 创建一个 API Token
- 复制并保存 API Token
-
+
妥善保存弹窗中的 API 令牌,它只会出现一次,如果不小心丢失了,你需要重新创建一个 API 令牌。
- ### 步骤二:在 LobeChat 中配置 xAI
+ ### 步骤二:在 LobeHub 中配置 xAI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到 `xAI` 的设置项
-
+
- 填入获得的 API 密钥
- 为你的 AI 助手选择一个 xAI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考 xAI 的相关费用政策。
-至此你已经可以在 LobeChat 中使用 xAI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用 xAI 提供的模型进行对话了。
diff --git a/docs/usage/providers/zeroone.mdx b/docs/usage/providers/zeroone.mdx
index 40cab37e77..deba73b081 100644
--- a/docs/usage/providers/zeroone.mdx
+++ b/docs/usage/providers/zeroone.mdx
@@ -1,57 +1,59 @@
---
-title: Using 01 AI API Key in LobeChat
+title: Using the 01.AI API Key in LobeHub
description: >-
- Learn how to integrate and use 01 AI in LobeChat with step-by-step instructions. Obtain an API key, configure 01 AI, and start conversations with AI models.
-
+ Learn how to configure and use AI models provided by 01.AI (Zero One AI) in
+ LobeHub. Get your API key, enter it in the settings, choose a model, and start
+ chatting with your AI assistant.
tags:
+ - LobeHub
- 01.AI
+ - Zero One AI
- Web UI
- - API key
- - AI models
+ - API Key
+ - Configuration Guide
---
-# Using 01 AI in LobeChat
+# Using Zero One AI in LobeHub
-
+
-[01 AI](https://www.01.ai/) is a global company dedicated to AI 2.0 large model technology and applications. Its billion-parameter Yi-Large closed-source model, when evaluated on Stanford University's English ranking AlpacaEval 2.0, is on par with GPT-4.
+[01.AI (Zero One AI)](https://www.01.ai/) is a global company focused on AI 2.0 large language models and their applications. Its proprietary Yi-Large model, with over 100 billion parameters, has achieved top rankings alongside GPT-4 on Stanford’s English benchmark AlpacaEval 2.0.
-This document will guide you on how to use 01 AI in LobeChat:
+This guide will walk you through how to use 01.AI in LobeHub:
- ### Step 1: Obtain 01 AI API Key
+ ### Step 1: Get Your 01.AI API Key
- - Register and log in to the [01 AI Large Model Open Platform](https://platform.lingyiwanwu.com/)
- - Go to the `Dashboard` and access the `API Key Management` menu
- - A system-generated API key has been created for you automatically, or you can create a new one on this interface
+ - Register and log in to the [01.AI Developer Platform](https://platform.lingyiwanwu.com/)
+ - Go to the `Dashboard` and navigate to the `API Key Management` section
+ - An API key is automatically generated for you. You can also create a new one if needed
-
+
- - Account verification is required for first-time use
+ - First-time users will need to complete account verification
-
+
- - Click on the created API key
- - Copy and save the API key in the pop-up dialog box
+ - Click on the API key you created
+ - In the pop-up dialog, copy and save your API key securely
-
+
- ### Step 2: Configure 01 AI in LobeChat
+ ### Step 2: Configure 01.AI in LobeHub
- - Access the `Settings` interface in LobeChat
- - Find the setting for `01 AI` under `AI Service Provider`
+ - Open the `Settings` panel in LobeHub
+ - Under `AI Providers`, locate the configuration section for `01.AI`
-
+
- - Open 01 AI and enter the obtained API key
- - Choose a 01.AI model for your AI assistant to start the conversation
+ - Enable 01.AI and paste in your API key
+ - Choose a model from 01.AI for your AI assistant to start chatting
-
+
- During usage, you may need to pay the API service provider. Please refer to 01 AI's relevant fee
- policies.
+ You may incur charges when using the API. Please refer to 01.AI’s pricing policy for more details.
-You can now use the models provided by 01 AI for conversations in LobeChat.
+And that’s it! You’re now ready to use 01.AI’s models in LobeHub for intelligent conversations.
diff --git a/docs/usage/providers/zeroone.zh-CN.mdx b/docs/usage/providers/zeroone.zh-CN.mdx
index 3f05c31eae..e4b132e622 100644
--- a/docs/usage/providers/zeroone.zh-CN.mdx
+++ b/docs/usage/providers/zeroone.zh-CN.mdx
@@ -1,10 +1,8 @@
---
-title: 在 LobeChat 中使用 01.AI 零一万物 API Key
-description: >-
- 学习如何在 LobeChat 中配置并使用 01.AI 零一万物提供的 AI 模型进行对话。获取 API 密钥、填入设置项、选择模型,开始与 AI 助手交流。
-
+title: 在 LobeHub 中使用 01.AI 零一万物 API Key
+description: 学习如何在 LobeHub 中配置并使用 01.AI 零一万物提供的 AI 模型进行对话。获取 API 密钥、填入设置项、选择模型,开始与 AI 助手交流。
tags:
- - LobeChat
+ - LobeHub
- 01.AI
- Zero One AI
- 零一万物
@@ -13,13 +11,13 @@ tags:
- 配置指南
---
-# 在 LobeChat 中使用零一万物
+# 在 LobeHub 中使用零一万物
-
+
[零一万物](https://www.01.ai/)是一家致力于 AI 2.0 大模型技术和应用的全球公司,其发布的千亿参数的 Yi-Large 闭源模型,在斯坦福大学的英语排行 AlpacaEval 2.0 上,与 GPT-4 互有第一。
-本文档将指导你如何在 LobeChat 中使用零一万物:
+本文档将指导你如何在 LobeHub 中使用零一万物:
### 步骤一:获取零一万物 API 密钥
@@ -28,32 +26,32 @@ tags:
- 进入`工作台`并访问`API Key管理`菜单
- 系统已为你自动创建了一个 API 密钥,你也可以在此界面创建新的 API 密钥
-
+
- 初次使用时需要完成账号认证
-
+
- 点击创建好的 API 密钥
- 在弹出的对话框中复制并保存 API 密钥
-
+
- ### 步骤二:在 LobeChat 中配置零一万物
+ ### 步骤二:在 LobeHub 中配置零一万物
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`零一万物`的设置项
-
+
- 打开零一万物并填入获得的 API 密钥
- 为你的 AI 助手选择一个 01.AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考零一万物的相关费用政策。
-至此你已经可以在 LobeChat 中使用零一万物提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用零一万物提供的模型进行对话了。
diff --git a/docs/usage/providers/zhipu.mdx b/docs/usage/providers/zhipu.mdx
index e38d3491f7..f5d0537adf 100644
--- a/docs/usage/providers/zhipu.mdx
+++ b/docs/usage/providers/zhipu.mdx
@@ -1,8 +1,8 @@
---
-title: Using Zhipu ChatGLM API Key in LobeChat
+title: Using Zhipu ChatGLM API Key in LobeHub
description: >-
- Learn how to integrate and utilize Zhipu AI models in LobeChat for enhanced conversational experiences. Obtain the API key, configure settings, and start engaging with cognitive intelligence.
-
+ Learn how to configure and use Zhipu AI's API Key in LobeHub to start chatting
+ with models provided by Zhipu AI.
tags:
- Zhipu AI
- ChatGLM
@@ -10,39 +10,38 @@ tags:
- Web UI
---
-# Using Zhipu ChatGLM in LobeChat
+# Using Zhipu ChatGLM in LobeHub
-
+
-[Zhipu AI](https://www.zhipuai.cn/) is a high-tech company originating from the Department of Computer Science at Tsinghua University. Established in 2019, the company focuses on natural language processing, machine learning, and big data analysis, dedicated to expanding the boundaries of artificial intelligence technology in the field of cognitive intelligence.
+[Zhipu AI](https://www.zhipuai.cn/) is a high-tech company founded in 2019, originating from the Department of Computer Science at Tsinghua University. It focuses on natural language processing, machine learning, and big data analytics, aiming to push the boundaries of AI technology in the field of cognitive intelligence.
-This document will guide you on how to use Zhipu AI in LobeChat:
+This guide will walk you through how to use Zhipu AI in LobeHub:
- ### Step 1: Obtain the API Key for Zhipu AI
+ ### Step 1: Get Your Zhipu AI API Key
- Visit and log in to the [Zhipu AI Open Platform](https://open.bigmodel.cn/)
- - Upon initial login, the system will automatically create an API key for you and gift you a resource package of 25M Tokens
- - Navigate to the `API Key` section at the top to view your API key
+ - Upon your first login, the system will automatically generate an API key for you and grant a free 25M token package
+ - Navigate to the top menu and click on `API Key` to view your key
-
+
- ### Step 2: Configure Zhipu AI in LobeChat
+ ### Step 2: Configure Zhipu AI in LobeHub
- - Visit the `Settings` interface in LobeChat
- - Under `AI Service Provider`, locate the settings for Zhipu AI
+ - Go to the `Settings` page in LobeHub
+ - Under `AI Providers`, find the configuration section for `Zhipu AI`
-
+
- - Open Zhipu AI and enter the obtained API key
- - Choose a Zhipu AI model for your assistant to start the conversation
+ - Enable Zhipu AI and paste in your API key
+ - Choose a Zhipu AI model for your assistant to start chatting
-
+
- During usage, you may need to pay the API service provider, please refer to Zhipu AI's pricing
- policy.
+ You may incur charges for API usage. Please refer to Zhipu AI’s pricing policy for more details.
-You can now engage in conversations using the models provided by Zhipu AI in LobeChat.
+And that’s it! You’re now ready to start chatting with models provided by Zhipu AI in LobeHub.
diff --git a/docs/usage/providers/zhipu.zh-CN.mdx b/docs/usage/providers/zhipu.zh-CN.mdx
index 8c649654e9..86f10e14ad 100644
--- a/docs/usage/providers/zhipu.zh-CN.mdx
+++ b/docs/usage/providers/zhipu.zh-CN.mdx
@@ -1,6 +1,6 @@
---
-title: 在 LobeChat 中使用智谱 ChatGLM API Key
-description: 学习如何在 LobeChat 中配置和使用智谱AI的 API Key,开始与智谱AI提供的模型进行对话。
+title: 在 LobeHub 中使用智谱 ChatGLM API Key
+description: 学习如何在 LobeHub 中配置和使用智谱AI的 API Key,开始与智谱AI提供的模型进行对话。
tags:
- 智谱AI
- ChatGLM
@@ -8,13 +8,13 @@ tags:
- Web UI
---
-# 在 LobeChat 中使用智谱 ChatGLM
+# 在 LobeHub 中使用智谱 ChatGLM
-
+
[智谱 AI](https://www.zhipuai.cn/) 是一家源自清华大学计算机系技术成果的高科技公司,成立于 2019 年,专注于自然语言处理、机器学习和大数据分析,致力于在认知智能领域拓展人工智能技术的边界。
-本文档将指导你如何在 LobeChat 中使用智谱 AI:
+本文档将指导你如何在 LobeHub 中使用智谱 AI:
### 步骤一:获取智谱 AI 的 API 密钥
@@ -23,23 +23,23 @@ tags:
- 初次登录时系统会自动为你创建好 API 密钥并赠送 25M Tokens 的资源包
- 进入顶部的 `API密钥` 可以查看你的 API
-
+
- ### 步骤二:在 LobeChat 中配置智谱 AI
+ ### 步骤二:在 LobeHub 中配置智谱 AI
- - 访问 LobeChat 的`设置`界面
+ - 访问 LobeHub 的`设置`界面
- 在`AI 服务商`下找到`智谱AI`的设置项
-
+
- 打开智谱 AI 并填入获得的 API 密钥
- 为你的助手选择一个智谱 AI 的模型即可开始对话
-
+
在使用过程中你可能需要向 API 服务提供商付费,请参考智谱 AI 的费用政策。
-至此你已经可以在 LobeChat 中使用智谱 AI 提供的模型进行对话了。
+至此你已经可以在 LobeHub 中使用智谱 AI 提供的模型进行对话了。
diff --git a/docs/usage/start.mdx b/docs/usage/start.mdx
index 8e2addf4cf..3632411b89 100644
--- a/docs/usage/start.mdx
+++ b/docs/usage/start.mdx
@@ -1,62 +1,33 @@
---
-title: Get started with LobeChat
-description: >-
- Explore the exciting features in LobeChat, including Vision Model, TTS & STT, Local LLMs, and Multi AI Providers. Discover more about Agent Market, Plugin System, and Personalization.
-
+title: Getting Started
+description: Getting started with LobeHub
tags:
- - Feature Overview
- - Vision Model
- - TTS & STT
- - Local LLMs
- - Multi AI Providers
- - Agent Market
+ - LobeHub
+ - User Guide
+ - Features
+ - Visual Recognition
+ - Voice Conversations
+ - AI Providers
+ - Assistant Marketplace
+ - Local Large Language Models
- Plugin System
---
-# ✨ Feature Overview
+# LobeHub User Guide
-## 2024 Overview
+Welcome to the official LobeHub User Guide.
-
+
+ For self-hosting assistance, please visit the [Self-Hosting Guide](/docs/self-hosting/start). For developer resources, check out the [Developer Guide](/docs/development/start).
+
-
-
+## Getting Started
-
+- [Create Your First Agent](/docs/usage/getting-started/page)
+- [Create Your First Team](/docs/usage/getting-started/lobe-ai)
+- [Explore the LobeHub Community](/docs/usage/community/agent-market)
+- [Migrate from v1.x Local Database to v2.x (Cloud / Self-hosted)](/docs/usage/migrate-from-local-database)
-
+## Help & Support
-
-
-
-
-
-
-
-
-
-## 2023 Overview
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+If you need assistance, please refer to our [Help & Support](/docs/usage/help) section.
diff --git a/docs/usage/start.zh-CN.mdx b/docs/usage/start.zh-CN.mdx
index 31771469ea..23bfc5e697 100644
--- a/docs/usage/start.zh-CN.mdx
+++ b/docs/usage/start.zh-CN.mdx
@@ -1,8 +1,9 @@
---
-title: 开始使用 LobeChat
-description: 了解 LobeChat 的功能特性,包括视觉识别、语音会话、多 AI 服务商等,体验助手市场、本地大语言模型、插件系统等功能。
+title: 开始
+description: 上手使用 LobeHub
tags:
- - LobeChat
+ - LobeHub
+ - 用户指南
- 功能特性
- 视觉识别
- 语音会话
@@ -12,50 +13,21 @@ tags:
- 插件系统
---
-# ✨ LobeChat 功能特性一览
+# LobeHub 用户指南
-## 2024 特性一览
+欢迎来到 LobeHub 官方用户指南。
-
+
+ 欲获取自部署帮助,请访问[私有化部署指南](/docs/self-hosting/start)。欲获取开发者指南,请访问[开发指南](/docs/development/start)
+
-
-
+## 开始
-
+- [创建你的第一个 Agent](/docs/usage/getting-started/page)
+- [创建你的第一个 Team](/docs/usage/getting-started/lobe-ai)
+- [探索 LobeHub 社区](/docs/usage/community/agent-market)
+- [从 v1.x 本地数据库迁移到 v2.x(云端 / 自部署)](/docs/usage/migrate-from-local-database)
-
+## 帮助与支持
-
-
-
-
-
-
-
-
-
-## 2023 特性一览
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+如果你需要帮助,可以参考 [帮助与支持](/docs/usage/help)。
diff --git a/docs/usage/tools-calling.mdx b/docs/usage/tools-calling.mdx
deleted file mode 100644
index acc025dc99..0000000000
--- a/docs/usage/tools-calling.mdx
+++ /dev/null
@@ -1,12 +0,0 @@
----
-title: Tools Calling
-description: Discover the best tools to enhance your calling experience and productivity.
-tags:
- - Calling Tools
- - Productivity
- - Communication
----
-
-# Tools Calling
-
-TODO
diff --git a/docs/usage/tools-calling.zh-CN.mdx b/docs/usage/tools-calling.zh-CN.mdx
deleted file mode 100644
index 92ef954d45..0000000000
--- a/docs/usage/tools-calling.zh-CN.mdx
+++ /dev/null
@@ -1,248 +0,0 @@
----
-title: 大模型工具调用(Tools Calling)评测
-description: 基于 LobeChat 测试主流支持工具调用(Tools Calling) 的大模型,并客观呈现评测结果
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling 评测
- - 工具调用
- - LobeChat 插件
----
-
-# 大模型工具调用(Tools Calling)评测
-
-Tools Calling 是大语言模型的高级能力。你可以通过在 API 请求中传入一组工具列表,让模型智能地选择具体使用哪个工具,并在返回请求中输出工具调用的 JSON 参数。
-
-
- 如果你之前没有了解过 Tools Calling, 可以查看 [Function Call: Chat
- 应用的插件基石与交互技术的变革黎明](https://lobehub.com/zh/blog/openai-function-call) 这篇文章。
-
-
-随着社区中越来越多的大语言模型支持了 Tools Calling 能力,同时得益于 LobeChat 的 Agent Runtime 架构,我们几乎实现了所有主流大语言模型( OpenAI 、Claude 、Gemini 等等)的 Tools Calling 调用能力。
-
-LobeChat 的插件实现基于模型的 Tools Calling 能力,模型本身的 Tools Calling 能力决定插件调用是否正常。作为上层应用,我们针对各个模型的 Tools Calling 做了较为完善的测试,以便帮助我们的用户了解现有的模型能力,更好地进行抉择。
-
-## 评测任务介绍
-
-我们基于实际真实的用户场景出发构建了两大组测试任务,第一组为简单的调用指令(天气查询),第二组为复杂调用指令(文生图)。这两组指令的系统描述如下:
-
-
-
- ```md
- ## Tools
-
- You can use these tools below:
-
- ### Realtime Weather
-
- Get realtime weather information
-
- The APIs you can use:
-
- #### `realtime-weather____fetchCurrentWeather`
-
- 获取当前天气情况
- ```
-
-
-
- ```md
- ## Tools
-
- You can use these tools below:
-
- ### DALL·E 3
-
- Whenever a description of an image is given, use lobe-image-designer to create the images and then summarize the prompts used to generate the images in plain text. If the user does not ask for a specific number of images, default to creating four captions to send to lobe-image-designer that are written to be as diverse as possible.
-
- All captions sent to lobe-image-designer must abide by the following policies:
-
- 1. If the description is not in English, then translate it.
- 2. Do not create more than 4 images, even if the user requests more.
- 3. Don't create images of politicians or other public figures. Recommend other ideas instead.
- 4. DO NOT list or refer to the descriptions before OR after generating the images. They should ONLY ever be written out ONCE, in the `prompts` field of the request. You do not need to ask for permission to generate, just do it!
- 5. Always mention the image type (photo, oil painting, watercolor painting, illustration, cartoon, drawing, vector, render, etc.) at the beginning of the caption. Unless the caption suggests otherwise, make at least 1--2 of the 4 images photos.
- 6. Diversify depictions of ALL images with people to include DESCENT and GENDER for EACH person using direct terms. Adjust only human descriptions.
-
- - EXPLICITLY specify these attributes, not abstractly reference them. The attributes should be specified in a minimal way and should directly describe their physical form.
- - Your choices should be grounded in reality. For example, all of a given OCCUPATION should not be the same gender or race. Additionally, focus on creating diverse, inclusive, and exploratory scenes via the properties you choose during rewrites. Make choices that may be insightful or unique sometimes.
- - Use "various" or "diverse" ONLY IF the description refers to groups of more than 3 people. Do not change the number of people requested in the original description.
- - Don't alter memes, fictional character origins, or unseen people. Maintain the original prompt's intent and prioritize quality.
- - Do not create any imagery that would be offensive.
-
- 8. Silently modify descriptions that include names or hints or references of specific people or celebrities by carefully selecting a few minimal modifications to substitute references to the people with generic descriptions that don't divulge any information about their identities, except for their genders and physiques. Do this EVEN WHEN the instructions ask for the prompt to not be changed. Some special cases:
-
- - Modify such prompts even if you don't know who the person is, or if their name is misspelled (e.g. "Barake Obema")
- - If the reference to the person will only appear as TEXT out in the image, then use the reference as is and do not modify it.
- - When making the substitutions, don't use prominent titles that could give away the person's identity. E.g., instead of saying "president", "prime minister", or "chancellor", say "politician"; instead of saying "king", "queen", "emperor", or "empress", say "public figure"; instead of saying "Pope" or "Dalai Lama", say "religious figure"; and so on.
- - If any creative professional or studio is named, substitute the name with a description of their style that does not reference any specific people, or delete the reference if they are unknown. DO NOT refer to the artist or studio's style.
-
- The prompt must intricately describe every part of the image in concrete, objective detail. THINK about what the end goal of the description is, and extrapolate that to what would make satisfying images. All descriptions sent to lobe-image-designer should be a paragraph of text that is extremely descriptive and detailed. Each should be more than 3 sentences long.
-
- The APIs you can use:
-
- #### `lobe-image-designer____text2image____builtin`
-
- Create images from a text-only prompt.
- ```
-
-
-
-如上所示,简单调用指令在插件调用时它的系统描述 (system role) 相对简单,复杂调用指令的系统描述会复杂很多。这两组不同复杂度的指令可以比较好地区分出模型对于系统指令的遵循能力:
-
-- **天气查询可以测试模型的基础 Tools Calling 能力,确认模型是否存在「虚假宣传」的情况。** 就我们实际的测试来看,的确存在一些模型号称具有 Tools Calling 能力,但是处于完全不可用的状态;
-- **文生图可以测试模型指令跟随能力的上限。** 例如基础模型(例如 GPT-3.5)可能只能生成 1 张图片的 prompt,而高级模型(例如 GPT-4o)则能够生成 1\~4 张图片的 prompt。
-
-### 简单调用指令:天气查询
-
-天气查询是 Tools Calling 中一个经典的例子。
-
-天气查询插件采用的是我们自己做的一个简单的插件,它的工具定义如下:
-
-```json
-{
- "function": {
- "description": "获取当前天气情况",
- "name": "realtime-weather____fetchCurrentWeather",
- "parameters": {
- "properties": {
- "city": {
- "description": "城市名称",
- "type": "string"
- }
- },
- "required": ["city"],
- "type": "object"
- }
- },
- "type": "function"
-}
-```
-
-针对这一个工具,我们构建的测试组中包含了三个指令:
-
-| 指令编号 | 指令内容 | 基础 Tools Calling 调用 | 并发调用 | 复合指令跟随 |
-| ---- | ------------------ | ------------------- | ---- | ------ |
-| 指令 ① | 告诉我杭州和北京的天气,先回答我好的 | 🟢 | 🟢 | 🟢 |
-| 指令 ② | 告诉我杭州和北京的天气 | 🟢 | 🟢 | - |
-| 指令 ③ | 告诉我杭州的天气 | 🟢 | - | - |
-
-上述三个指令的复杂度逐渐递减,我们可以通过这三个指令来测试模型对于简单指令的处理能力。
-
-- 指令 ① 测试的能力项包含 「基础 Tools Calling 调用」、「并发调用」、「复合指令跟随」三项。
-- 指令 ② 测试的能力项包含 「基础 Tools Calling 调用」、「并发调用」 两项。
-- 指令 ③ 测试的能力项仅包含「基础 Tools Calling 调用」。
-
-
- 将指令 ① 、② 、③ 按照难度递减的方式排序的目的,是为了降低测试的成本。因为当模型能通过指令 ①
- 的测试时,我们就不需要继续测试指令 ② 和指令 ③ ,必然能通过。
-
-
-测试能力项详细说明:
-
-
-
- 根据我们实际的日常使用,工具调用往往会和普通文本生成结合在一起回答。例如比较经典的 Code Interpreter 插件,ChatGPT 往往会先回复一些代码生成的思路,然后再调用 Code Interpreter 插件生成代码。
-
- 这种情况下,我们需要模型能够正确地识别出用户的意图,然后调用对应的工具。
-
- 因此, 指令 ① 中的「告诉我杭州和北京的天气,先回答我好的」就是一个复合指令跟随的例子。前半句期望模型调用天气查询工具,后半句期望模型回答「好的」。并且理想的顺序应该是先回答「好的」,然后再调用天气查询工具。
-
-
-
- 并发工具调用(Parallel function calling)是指模型能够同时调用多个工具,或同时调用一个工具多次,这在对话中可以大大降低用户等待的时间,提升用户体验。
-
- 并发工具调用能力由 OpenAI 于 2023 年 11 月率先提出,目前支持并发工具调用的模型并不算多,属于是 Tools Calling 的进阶能力。
-
- 指令 ② 中的「告诉我杭州和北京的天气」就是一个期望执行并发调用的例子。理想的情况下,单个模型的返回应该存在两个工具的调用返回。
-
-
-
- 基础工具调用不必再赘述,这是 Tools Calling 的基础能力。
-
- 指令 ③ 中的「告诉我杭州的天气」就是最基本的工具调用的例子。
-
-
-
-### 复杂调用指令:文生图
-
-文生图的 Tools Calling 基本照搬了 ChatGPT Plus 的指令,它的复杂度相对较高,可以测试模型对于复杂指令的跟随能力。工具定义如下:
-
-```json
-{
- "function": {
- "description": "Create images from a text-only prompt.",
- "name": "lobe-image-designer____text2image____builtin",
- "parameters": {
- "properties": {
- "prompts": {
- "description": "The user's original image description, potentially modified to abide by the lobe-image-designer policies. If the user does not suggest a number of captions to create, create four of them. If creating multiple captions, make them as diverse as possible. If the user requested modifications to previous images, the captions should not simply be longer, but rather it should be refactored to integrate the suggestions into each of the captions. Generate no more than 4 images, even if the user requests more.",
- "items": {
- "type": "string"
- },
- "maxItems": 4,
- "minItems": 1,
- "type": "array"
- },
- "quality": {
- "default": "standard",
- "description": "The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image.",
- "enum": ["standard", "hd"],
- "type": "string"
- },
- "seeds": {
- "description": "A list of seeds to use for each prompt. If the user asks to modify a previous image, populate this field with the seed used to generate that image from the image lobe-image-designer metadata.",
- "items": {
- "type": "integer"
- },
- "type": "array"
- },
- "size": {
- "default": "1024x1024",
- "description": "The resolution of the requested image, which can be wide, square, or tall. Use 1024x1024 (square) as the default unless the prompt suggests a wide image, 1792x1024, or a full-body portrait, in which case 1024x1792 (tall) should be used instead. Always include this parameter in the request.",
- "enum": ["1792x1024", "1024x1024", "1024x1792"],
- "type": "string"
- },
- "style": {
- "default": "vivid",
- "description": "The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images.",
- "enum": ["vivid", "natural"],
- "type": "string"
- }
- },
- "required": ["prompts"],
- "type": "object"
- }
- },
- "type": "function"
-}
-```
-
-针对这一个工具,我们构建的测试组中包含了两个指令:
-
-| 指令编号 | 指令内容 | 流式调用 | 复杂 Tools Calling 调用 | 并发调用 | 复合指令跟随 |
-| ---- | ------------------------------------------------------------------------------------------------ | ---- | ------------------- | ---- | ------ |
-| 指令 ① | 我要画 3 幅画,第一幅画的主体为一只达芬奇风格的小狗,第二幅是毕加索风格的大雁,最后一幅是莫奈风格的狮子。每一幅都需要产出 2 个 prompts。请先说明你的构思,然后开始生成相应的图片。 | 🟢 | 🟢 | 🟢 | 🟢 |
-| 指令 ② | 画一只小狗 | 🟢 | 🟢 | - | - |
-
-此外,由于文生图的 prompts 的生成时间较长,这一组指令也可以清晰地测试出模型的 API 是否支持流式 Tools Calling。
-
-## 评测结果
-
-各模型的评测细节可以点击查看:
-
-
-
-
-
-
-
-
-
-
-
-
-
-### 结果汇总
-
-TODO
diff --git a/docs/usage/tools-calling/anthropic.mdx b/docs/usage/tools-calling/anthropic.mdx
deleted file mode 100644
index 9d0410ad92..0000000000
--- a/docs/usage/tools-calling/anthropic.mdx
+++ /dev/null
@@ -1,152 +0,0 @@
----
-title: Anthropic Claude 系列 Tools Calling 评测
-description: >-
- 使用 LobeChat 测试 Anthropic Claude 系列模型(Claude 3.5 sonnet / Claude 3 Opus / Claude 3 haiku) 的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling 评测
- - 工具调用
- - 插件
----
-
-# Anthropic Claude Series Tools Calling
-
-Overview of Anthropic Claude Series model Tools Calling capabilities:
-
-| Model | Support Tools Calling | Stream | Parallel | Simple Instruction Score | Complex Instruction |
-| ----------------- | --------------------- | ------ | -------- | ------------------------ | ------------------- |
-| Claude 3.5 Sonnet | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟🌟 |
-| Claude 3 Opus | ✅ | ✅ | ❌ | 🌟 | ⛔️ |
-| Claude 3 Sonnet | ✅ | ✅ | ❌ | 🌟🌟 | ⛔️ |
-| Claude 3 Haiku | ✅ | ✅ | ❌ | 🌟🌟 | ⛔️ |
-
-## Claude 3.5 Sonnet
-
-### Simple Instruction Call: Weather Query
-
-Test Instruction: Instruction ①
-
-
-
-
-
-
- Tools Calling Raw Output:
-
- ```yml
- ```
-
-
-### Complex Instruction Call: Literary Map
-
-Test Instruction: Instruction ②
-
-
-
-From the above video:
-
-1. Sonnet 3.5 supports Stream Tools Calling and Parallel Tools Calling;
-2. In Stream Tools Calling, it is observed that creating long sentences will cause a delay (as seen in the Tools Calling raw output `[chunk 40]` and `[chunk 41]` with a delay of 6s). Therefore, there will be a relatively long waiting time at the beginning stage of Tools Calling.
-
-
-
-
- Tools Calling Raw Output:
-
- ```yml
- ```
-
-
-## Claude 3 Opus
-
-### Simple Instruction Call: Weather Query
-
-Test Instruction: Instruction ①
-
-
-
-From the above video:
-
-1. Claude 3 Opus outputs a `` tag at the beginning of Tools Calling, which is not very helpful for users and consumes more tokens;
-2. Opus triggers Tools Calling twice, indicating that it does not support Parallel Tools Calling;
-3. The raw output of Tools Calling shows that Opus also supports Stream Tools Calling.
-
-
-
-
- Tools Calling Raw Output:
-
-
-### Complex Instruction Call: Literary Map
-
-Test Instruction: Instruction ②
-
-
-
-From the above video:
-
-1. Combining with simple tasks, Opus will always output a `` tag, which significantly impacts the user experience;
-2. Opus outputs the prompts field as a string instead of an array, causing an error and preventing the plugin from being called correctly.
-
-
-
-
- Tools Calling Raw Output:
-
-
-## Claude 3 Sonnet
-
-### Simple Instruction Call: Weather Query
-
-Test Instruction: Instruction ①
-
-
-
-From the above video, it can be seen that Claude 3 Sonnet triggers Tools Calling twice, indicating that it does not support Parallel Tools Calling.
-
-
-
-
- Tools Calling Raw Output:
-
-
-### Complex Instruction Call: Literary Map
-
-Test Instruction: Instruction ②
-
-
-
-From the above video, it can be seen that Sonnet 3 fails in the complex instruction call. The error is due to prompts being expected as an array but generated as a string.
-
-
-
-
- Tools Calling Raw Output:
-
-
-## Claude 3 Haiku
-
-
-
-From the above video:
-
-1. Claude 3 Haiku triggers Tools Calling twice, indicating that it also does not support Parallel Tools Calling;
-2. Haiku does not provide a good response and directly calls the tool;
-
-
-
-### Complex Instruction Call: Literary Map
-
-Test Instruction: Instruction ②
-
-
-
-From the above video, it can be seen that Haiku 3 also fails in the complex instruction call. The error is the same as prompts generating a string instead of an array.
-
-
-
-
- Tools Calling Raw Output:
-
diff --git a/docs/usage/tools-calling/anthropic.zh-CN.mdx b/docs/usage/tools-calling/anthropic.zh-CN.mdx
deleted file mode 100644
index b8bc158b5b..0000000000
--- a/docs/usage/tools-calling/anthropic.zh-CN.mdx
+++ /dev/null
@@ -1,152 +0,0 @@
----
-title: Anthropic Claude 系列 Tools Calling 评测
-description: >-
- 使用 LobeChat 测试 Anthropic Claude 系列模型(Claude 3.5 sonnet / Claude 3 Opus / Claude 3 haiku) 的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling 评测
- - 工具调用
- - 插件
----
-
-# Anthropic Claude 系列 Tools Calling
-
-Anthropic Claude 系列模型 Tools Calling 能力一览:
-
-| 模型 | 支持 Tools Calling | 流式 (Stream) | 并发(Parallel) | 简单指令得分 | 复杂指令 |
-| ----------------- | ---------------- | ----------- | ------------ | ------ | ---- |
-| Claude 3.5 Sonnet | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟🌟 |
-| Claude 3 Opus | ✅ | ✅ | ❌ | 🌟 | ⛔️ |
-| Claude 3 Sonnet | ✅ | ✅ | ❌ | 🌟🌟 | ⛔️ |
-| Claude 3 Haiku | ✅ | ✅ | ❌ | 🌟🌟 | ⛔️ |
-
-## Claude 3.5 Sonnet
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- ```
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-从上述视频中可以看到:
-
-1. Sonnet 3.5 支持流式 Tools Calling 和 Parallel Tools Calling;
-2. 在流式 Tools Calling 时,表现出来的特征是在创建长句会等待住(详见 Tools Calling 原始输出 `[chunk 40]` 和 `[chunk 41]` 中间的耗时达到 6s)。所以相对来说会在 Tools Calling 的起始阶段有一个较长的等待时间。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- ```
-
-
-## Claude 3 Opus
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-从上述视频中看到:
-
-1. Claude 3 Opus 在调用 Tools 的起点会输出一段 `` 标签的内容,这段内容对于用户来说几乎没有什么帮助,反而带来了较多的 Token 消耗;
-2. Opus 会触发两次 Tools Calling,说明它并不支持 Parallel Tools Calling;
-3. 从 Tools Calling 的原始输出来看, Opus 也是支持流式 Tools Calling 的
-
-
-
-
- Tools Calling 原始输出:
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-从上述视频中看到:
-
-1. 结合简单任务, Opus 的工具调用一定会输出 `` 标签,这其实对体验影响非常大
-2. Opus 输出的 prompts 字段是字符串,而不是数组,导致报错,无法正常调用插件。
-
-
-
-
- Tools Calling 原始输出:
-
-
-## Claude 3 Sonnet
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-从上述视频中可以看出,Claude 3 Sonnet 会调用两次 Tools Calling,说明它并不支持 Parallel Tools Calling。
-
-
-
-
- Tools Calling 原始输出:
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-从上述视频中可以看到, Sonnet 3 在复杂指令调用下就失败了。报错原因是 prompts 原本预期为一个数组,但是生成的却是一个字符串。
-
-
-
-
- Tools Calling 原始输出:
-
-
-## Claude 3 Haiku
-
-
-
-从上述视频中可以看出:
-
-1. Claude 3 Haiku 会调用两次 Tools Calling,说明它也不支持 Parallel Tools Calling;
-2. Haiku 并没有回答好的,也是直接调用的工具;
-
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-从上述视频中可以看到, Haiku 3 在复杂指令调用下也是失败的。报错原因同样是 prompts 生成了字符串而不是数组。
-
-
-
-
- Tools Calling 原始输出:
-
diff --git a/docs/usage/tools-calling/google.mdx b/docs/usage/tools-calling/google.mdx
deleted file mode 100644
index 646131f6b8..0000000000
--- a/docs/usage/tools-calling/google.mdx
+++ /dev/null
@@ -1,109 +0,0 @@
----
-title: Google Gemini 系列 Tool Calling 评测
-description: >-
- 使用 LobeChat 测试 Google Gemini 系列模型(Gemini 1.5 Pro / Gemini 1.5 Flash) 的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling 评测
- - 工具调用
- - 插件
----
-
-# Google Gemini Series Tool Calling
-
-Overview of Google Gemini series model Tools Calling capabilities:
-
-| Model | Tools Calling Support | Streaming | Parallel | Simple Instruction Score | Complex Instruction |
-| ---------------- | --------------------- | --------- | -------- | ------------------------ | ------------------- |
-| Gemini 1.5 Pro | ✅ | ❌ | ✅ | ⛔ | ⛔ |
-| Gemini 1.5 Flash | ❌ | ❌ | ❌ | ⛔ | ⛔ |
-
-
- Based on our actual tests, we strongly recommend not enabling plugins for Gemini because as of
- July 7, 2024, its Tools Calling capability is extremely poor.
-
-
-## Gemini 1.5 Pro
-
-### Simple Instruction Call: Weather Query
-
-Test Instruction: Instruction ①
-
-
-
-In the json output from Gemini, the name is incorrect, so LobeChat cannot recognize which plugin it called. (In the input, the name of the weather plugin is `realtime-weather____fetchCurrentWeather`, while Gemini returns `weather____fetchCurrentWeather`).
-
-
-
-
- Original Tools Calling Output:
-
- ```yml
- [stream start] 2024-7-7 17:53:25.647
- [chunk 0] 2024-7-7 17:53:25.654
- {"candidates":[{"content":{"parts":[{"text":"好的"}],"role":"model"},"finishReason":"STOP","index":0}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":1,"totalTokenCount":96}}
-
- [chunk 1] 2024-7-7 17:53:26.288
- {"candidates":[{"content":{"parts":[{"text":"\n\n"}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":1,"totalTokenCount":96}}
-
- [chunk 2] 2024-7-7 17:53:26.336
- {"candidates":[{"content":{"parts":[{"functionCall":{"name":"weather____fetchCurrentWeather","args":{"city":"Hangzhou"}}},{"functionCall":{"name":"weather____fetchCurrentWeather","args":{"city":"Beijing"}}}],"role":"model"},"finishReasoSTOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":79,"totalTokenCount":174}}
-
- [stream finished] total chunks: 3
- ```
-
-
-### Complex Instruction Call: Image Generation
-
-Test Instruction: Instruction ②
-
-
-
-When testing a set of complex instructions, Google throws an error directly:
-
-```json
-{
- "message": "[400 Bad Request] Invalid JSON payload received. Unknown name \"maxItems\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"minItems\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[1].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[3].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[4].value': Cannot find field. [{\"@type\":\"type.googleapis.com/google.rpc.BadRequest\",\"fieldViolations\":[{\"field\":\"tools[0].function_declarations[0].parameters.properties[0].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"maxItems\\\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[0].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"minItems\\\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[1].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[1].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[3].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[3].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[4].value\",\"description\":\"Invalid JSON payload received. 
Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[4].value': Cannot find field.\"}]}]"
-}
-```
-
-The error above mentions that it does not support a schema containing `maxItems`, so Gemini 1.5 Pro is essentially unable to use the DallE plugin.
-
-Related issues:
-
-- [Support for minItems and maxItems for FunctionDeclarationSchemaType.ARRAY?](https://github.com/google-gemini/generative-ai-js/issues/200)
-- [Gemini Models unusable when dalle plugin is enabled](https://github.com/lobehub/lobe-chat/issues/2537)
-
-Based on the above two tests, Google's Tool Calling capability seems to be supported, but it is almost unusable in daily use. I personally think it is equivalent to false advertising.
-
-## Gemini 1.5 Flash
-
-### Simple Command: Weather Query
-
-Test Command: Command ①
-
-
-
-Gemini 1.5 Flash is more abstract, and the call ends as soon as it is made. Combining the original output below, it can be seen that Gemini 1.5 Flash does not output Tool Calling data, so it can be considered completely unusable.
-
-```yml
-stream start] 2024-7-7 19:4:50.936
-[chunk 0] 2024-7-7 19:4:50.943
-{"candidates":[{"content":{"parts":[{"text":"Okay"}],"role":"model"},"finishReason":"STOP","index":0}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":1,"totalTokenCount":97}}
-
-[chunk 1] 2024-7-7 19:4:52.209
-{"candidates":[{"content":{"parts":[{"text":", please wait, I am checking the weather information for Hangzhou and Beijing."}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":16,"totalTokenCount":112}}
-
-[chunk 2] 2024-7-7 19:4:53.288
-{"candidates":[{"content":{"parts":[{"text":"\n"}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":16,"totalTokenCount":112}}
-
-[stream finished] total chunks: 3
-```
-
-### Complex Command: Wenshengtu
-
-Test Command: Command ②
-
-This command, like the complex commands of Gemini 1.5 Pro, throws an error directly, so it will not be further elaborated.
diff --git a/docs/usage/tools-calling/google.zh-CN.mdx b/docs/usage/tools-calling/google.zh-CN.mdx
deleted file mode 100644
index 74bc9dce01..0000000000
--- a/docs/usage/tools-calling/google.zh-CN.mdx
+++ /dev/null
@@ -1,109 +0,0 @@
----
-title: Google Gemini 系列 Tools Calling 评测
-description: >-
- 使用 LobeChat 测试 Google Gemini 系列模型(Gemini 1.5 Pro / Gemini 1.5 Flash)的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling 评测
- - 工具调用
- - 插件
----
-
-# Google Gemini 系列 Tools Calling
-
-Google Gemini 系列模型 Tools Calling 能力一览:
-
-| 模型 | 支持 Tools Calling | 流式 (Stream) | 并发(Parallel) | 简单指令得分 | 复杂指令 |
-| ---------------- | ---------------- | ----------- | ------------ | ------ | ---- |
-| Gemini 1.5 Pro | ✅ | ❌ | ✅ | ⛔ | ⛔ |
-| Gemini 1.5 Flash | ❌ | ❌ | ❌ | ⛔ | ⛔ |
-
-
- 根据我们的的实际测试,强烈建议不要给 Gemini 开启插件,因为目前(截止 2024.07.07)它的 Tools
- Calling 能力实在太烂了。
-
-
-## Gemini 1.5 Pro
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-Gemini 输出的 json 中,name 是错误的,因此 LobeChat 无法识别到它调用了什么插件。(入参中,天气插件的 name 为 `realtime-weather____fetchCurrentWeather`,而 Gemini 返回的是 `weather____fetchCurrentWeather`)。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [stream start] 2024-7-7 17:53:25.647
- [chunk 0] 2024-7-7 17:53:25.654
- {"candidates":[{"content":{"parts":[{"text":"好的"}],"role":"model"},"finishReason":"STOP","index":0}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":1,"totalTokenCount":96}}
-
- [chunk 1] 2024-7-7 17:53:26.288
- {"candidates":[{"content":{"parts":[{"text":"\n\n"}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":1,"totalTokenCount":96}}
-
- [chunk 2] 2024-7-7 17:53:26.336
- {"candidates":[{"content":{"parts":[{"functionCall":{"name":"weather____fetchCurrentWeather","args":{"city":"杭州"}}},{"functionCall":{"name":"weather____fetchCurrentWeather","args":{"city":"北京"}}}],"role":"model"},"finishReasoSTOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":95,"candidatesTokenCount":79,"totalTokenCount":174}}
-
- [stream finished] total chunks: 3
- ```
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-在测试复杂指令集时,Google 直接抛错:
-
-```json
-{
- "message": "[400 Bad Request] Invalid JSON payload received. Unknown name \"maxItems\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"minItems\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[1].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[3].value': Cannot find field.\nInvalid JSON payload received. Unknown name \"default\" at 'tools[0].function_declarations[0].parameters.properties[4].value': Cannot find field. [{\"@type\":\"type.googleapis.com/google.rpc.BadRequest\",\"fieldViolations\":[{\"field\":\"tools[0].function_declarations[0].parameters.properties[0].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"maxItems\\\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[0].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"minItems\\\" at 'tools[0].function_declarations[0].parameters.properties[0].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[1].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[1].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[3].value\",\"description\":\"Invalid JSON payload received. Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[3].value': Cannot find field.\"},{\"field\":\"tools[0].function_declarations[0].parameters.properties[4].value\",\"description\":\"Invalid JSON payload received. 
Unknown name \\\"default\\\" at 'tools[0].function_declarations[0].parameters.properties[4].value': Cannot find field.\"}]}]"
-}
-```
-
-上述抛错中提到并不支持包含 `maxItems` 的 schema,因此 Gemini 1.5 Pro 相当于无法使用 DallE 插件。
-
-相关 issue:
-
-- [Support for minItems and maxItems for FunctionDeclarationSchemaType.ARRAY?](https://github.com/google-gemini/generative-ai-js/issues/200)
-- [Gemini Models unusable when dalle plugin is enabled](https://github.com/lobehub/lobe-chat/issues/2537)
-
-综合以上两个测试来看,Google 的 Tool Calling 能力似乎是支持了,但是几乎没法在日常中使用,我个人认为已经等于虚假宣传了。
-
-## Gemini 1.5 Flash
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-而 Gemini 1.5 flash 更为抽象,说完调用就结束了。结合以下原始输出可以看到,Gemini 1.5 Flash 并没有输出 Tool Calling 的数据,因此可以说是完全不可用。
-
-```yml
-stream start] 2024-7-7 19:4:50.936
-[chunk 0] 2024-7-7 19:4:50.943
-{"candidates":[{"content":{"parts":[{"text":"好的"}],"role":"model"},"finishReason":"STOP","index":0}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":1,"totalTokenCount":97}}
-
-[chunk 1] 2024-7-7 19:4:52.209
-{"candidates":[{"content":{"parts":[{"text":",请稍等,我正在查询杭州和北京的天气信息。 "}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"ATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":16,"totalTokenCount":112}}
-
-[chunk 2] 2024-7-7 19:4:53.288
-{"candidates":[{"content":{"parts":[{"text":"\n"}],"role":"model"},"finishReason":"STOP","index":0,"safetyRatings":[{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HATE_SPEECH","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_HARASSMENT","probability":"NEGLIGIBLE"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","probability":"NEGLIGIBLE"}]}],"usageMetadata":{"promptTokenCount":96,"candidatesTokenCount":16,"totalTokenCount":112}}
-
-[stream finished] total chunks: 3
-```
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-该指令和 Gemini 1.5 Pro 的复杂指令一样,直接抛错,因此不再详细展开。
diff --git a/docs/usage/tools-calling/groq.mdx b/docs/usage/tools-calling/groq.mdx
deleted file mode 100644
index 99a1d0b166..0000000000
--- a/docs/usage/tools-calling/groq.mdx
+++ /dev/null
@@ -1,10 +0,0 @@
----
-title: ''
-description: 学习如何有效管理待办事项,提高工作效率和组织能力。
-tags:
- - 待办事项
- - 工作效率
- - 时间管理
----
-
-TODO
diff --git a/docs/usage/tools-calling/groq.zh-CN.mdx b/docs/usage/tools-calling/groq.zh-CN.mdx
deleted file mode 100644
index f1a134cd5c..0000000000
--- a/docs/usage/tools-calling/groq.zh-CN.mdx
+++ /dev/null
@@ -1,163 +0,0 @@
----
-title: Groq Tools Calling
-description: >-
- 了解 Groq 平台模型 Tools Calling的能力一览,包括LLAMA3 70B、LLAMA3 8B和Mixtral-8x7B的简单和复杂指令调用情况。
-
-tags:
- - Groq 平台模型
- - Tools Calling
- - LLAMA3 70B
- - LLAMA3 8B
- - Mixtral-8x7B
----
-
-# Groq 平台模型 Tools Calling 评测(Llama 3/Mistral)
-
-由于 Groq 本身不支持 stream,因此 Tools Calling 的调用是普通请求。
-
-Groq 平台的模型 Tools Calling 能力一览:
-
-| 模型 | 支持 Tools Calling | 流式 (Stream) | 并发(Parallel) | 简单指令得分 | 复杂指令 |
-| ------------ | ---------------- | ----------- | ------------ | ------ | ---- |
-| LLAMA3 70B | ✅ | ❌ | ✅ | 🌟🌟 | 🌟🌟 |
-| LLAMA3 8B | ✅ | ❌ | ✅ | 🌟🌟 | 🌟 |
-| Mixtral-8x7B | ✅ | ❌ | ✅ | ⛔ | 🌟🌟 |
-
-## LLAMA3 70B
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-从上述视频中可以看到 LLAMA3 70B 支持并发 Tools Calling,可以同时调用多次天气查询。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [no stream response] 2024-7-8 15:50:40.166
-
- {"id":"chatcmpl-ec4b6c0b-1078-4f50-a39c-e58b3b1f9c31","object":"chat.completion","created":1720425030,"model":"llama3-70b-8192","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_v89g","type":"function","function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\":\"杭州\"}"}},{"id":"call_jxwk","type":"function","function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\":\"北京}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":969,"prompt_time":0.224209489,"completion_tokens":68,"completion_time":0.194285714,"total_tokens":1037,"total_time":0.418495203},"system_fingerprint":"fp_87cbfbbc4d","x_groq":{"id":"req_01j28n57x9e78a6bfbn9sdn139"}}
-
- ```
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [no stream response] 2024-7-8 18:0:34.811
-
- {"id":"chatcmpl-e3b59ca9-1172-4ae2-96c7-3d6997a1f8a8","object":"chat.completion","created":1720432834,"model":"llama3-70b-8192","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_azm9","type":"function","function":{"name":"lobe-image-designer____text2image____builtin","arguments":"{\"prompts\":[\"A small, fluffy, and playful golden retriever puppy with a white patch on its forehead, sitting on a green grass field with a bright blue sky in the background, photo.\",\"A cute, little, brown and white Dalmatian puppy with a red collar, running around in a park with a sunny day, illustration.\",\"A tiny, grey and white Poodle puppy with a pink ribbon, sitting on a white couch with a few toys surrounding it, watercolor painting.\",\"A sweet, small, black and white Chihuahua puppy with a pink bow, lying on a soft, white blanket with a few stuffed animals nearby, oil painting.\"],\"quality\":\"standard\",\"seeds\":[],\"size\":\"1024x1024\",\"style\":\"vivid\"}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":2305,"prompt_time":3.027052298,"completion_tokens":246,"completion_time":0.702857143,"total_tokens":2551,"total_time":3.729909441},"system_fingerprint":"fp_7ab5f7e105","x_groq":{"id":"req_01j28wk2q0efvs22qatw7rd0ds"}}
-
- POST /api/chat/groq 200 in 17462ms
- ```
-
-
-## LLAMA3-8B
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-从上述视频中可以看到 LLAMA3-8B 对于天气插件可以正常调用,并获得正确的总结结果。但是它并没有完全 follow 我们的描述指令,没有回答「好的」。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [no stream response] 2024-7-9 11:33:16.920
-
- {"id":"chatcmpl-f3672d59-e91d-4253-af1b-bfc4e0912085","object":"chat.completion","created":1720495996,"model":"llama3-8b-8192","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_rjtk","type":"function","function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\":\"杭州市\"}"}},{"id":"call_7pqh","type":"functi,"function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\":\"北京市\"}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":969,"ppt_time":0.145428625,"completion_tokens":128,"completion_time":0.101364747,"total_tokens":1097,"total_time":0.246793372},"system_fingerprint":"fp_33d61fdfc3","x_groq":{"id":"req_01j2artze1exz82nettf2h9066"}}
-
- POST /api/chat/groq 200 in 1649ms
- ```
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-LLAMA3 8B 在 DallE 的输出场景下,只会输出 1 张图片,而不是像 LLAMA3 70B 一样输出 4 张,意味着在复杂 Tools 指令层面,能力和 GPT 3.5 Turbo 接近,不如 GPT 4。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [no stream response] 2024-7-9 11:58:27.40
-
- {"id":"chatcmpl-3c38f4d2-3424-416c-9fb0-0969d2683959","object":"chat.completion","created":1720497506,"model":"llama3-8b-8192","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_k6xj","type":"function","function":{"name":"lobe-image-designer____text2image____builtin","arguments":"{\"prompts\":[\"Create a watercolor painting of a small white dog with a pink nose, wearing a red collar and sitting on a green grass. The dog's ears should be floppy and its fur should be curly.\"],\"quality\":\"standard\",\"seeds\":[],\"size\":\"1024x1024\",\"style\":\"natural\"}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":2282,"prompt_time":0.342335558,"completion_tokens":148,"completion_time":0.118023813,"total_tokens":2430,"total_time":0.460359371},"system_fingerprint":"fp_179b0f92c9","x_groq":{"id":"req_01j2at921tec8aymdq48czcw1y"}}
-
- POST /api/chat/groq 200 in 2517ms
- ```
-
-
-## Mixtral-8x7B
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-从上述视频中可以看到 Mixtral-8x7B 对于天气插件的查询输出的参数有问题,导致无法正常调用插件。
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
-
- [no stream response] 2024-7-8 22:18:19.682
-
- {"id":"chatcmpl-9f89d669-5642-48be-b5cd-7a29756800c0","object":"chat.completion","created":1720448299,"model":"mixtral-8x7b-32768","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_719t","type":"function","function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\":\"Hangzhou,Beijing\"}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":1262,"prompt_time":0.116684046,"completion_tokens":102,"completion_time":0.163113006,"total_tokens":1364,"total_time":0.279797052},"system_fingerprint":"fp_c5f20b5bb1","x_groq":{"id":"req_01j29bbc8xen2s3thp9qen5bys"}}
-
- POST /api/chat/groq 200 in 4860ms
- ```
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-非常意外地,Mixtral 8x7B 在文生图的 Tools Calling 的场景下,居然可以正常出图,而且出图的能力基本和 LLAMA3 70B 持平。
-
-
-
-
-
-
- Tools Calling 原始输出:
-
- ```yml
- [no stream response] 2024-7-9 12:6:38.11
-
- {"id":"chatcmpl-a90069f3-b119-41b9-b8d7-c7f97373529e","object":"chat.completion","created":1720497998,"model":"mixtral-8x7b-32768","choices":[{"index":0,"message":{"role":"assistant","tool_calls":[{"id":"call_hw8t","type":"function","function":{"name":"lobe-image-designer____text2image____builtin","arguments":"{\"prompts\":[\"A colorful and playful illustration of a friendly small dog, looking directly at the viewer with a wagging tail and bright, expressive eyes. The dog's fur is glossy and well-groomed, with a mix of black, white, and brown colors. The background is a lush, green park with a clear blue sky and a few fluffy clouds.\",\"An oil painting of a small, energetic dog in a sunlit park, chasing a frisbee. The dog is a mix of black and white fur, with a distinct brown spot above its left eye. The park features tall trees, providing dappled shade across the grassy landscape. The frisbee is bright orange and stands out against the natural surroundings.\",\"A realistic watercolor painting of a small, fluffy white dog curled up next to a warm fireplace during a cozy winter evening. The dog's eyes are closed in contentment, and a single red bow is tied around its neck. The background includes a plush armchair, a stack of books, and a softly lit room.\",\"A fun and engaging cartoon of a small dog sitting at a café table, enjoying a cup of coffee and a croissant. The dog has a expressive face and a blue scarf around its neck. The café has a vintage, 1920's style and a red awning, with a bustling city background.\"],\"quality\":\"standard\",\"size\":\"1024x1024\",\"style\":\"vivid\"}"}}]},"logprobs":null,"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":2920,"prompt_time":0.228639219,"completion_tokens":465,"completion_time":0.755757988,"total_tokens":3385,"total_time":0.984397207},"system_fingerprint":"fp_c5f20b5bb1","x_groq":{"id":"req_01j2atr155f0nv8rmfk448e2at"}}
-
- POST /api/chat/groq 200 in 6216ms
-
- ```
-
diff --git a/docs/usage/tools-calling/moonshot.mdx b/docs/usage/tools-calling/moonshot.mdx
deleted file mode 100644
index 422f796b92..0000000000
--- a/docs/usage/tools-calling/moonshot.mdx
+++ /dev/null
@@ -1,10 +0,0 @@
----
-title: ''
-description: 学习如何有效管理待办事项,提高工作效率和生产力。
-tags:
- - 待办事项
- - 工作效率
- - 生产力
----
-
-TODO
diff --git a/docs/usage/tools-calling/moonshot.zh-CN.mdx b/docs/usage/tools-calling/moonshot.zh-CN.mdx
deleted file mode 100644
index 7b532b736b..0000000000
--- a/docs/usage/tools-calling/moonshot.zh-CN.mdx
+++ /dev/null
@@ -1,24 +0,0 @@
----
-title: Moonshot 系列 Tools Calling 评测
-description: 使用 LobeChat 测试 Moonshot 系列模型(Moonshot-1) 的工具调用(Function Calling)能力,并展现评测结果
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling
- - 工具调用
- - 插件
----
-
-# Moonshot 系列工具调用(Tools Calling)
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-TODO
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-TODO
diff --git a/docs/usage/tools-calling/openai.mdx b/docs/usage/tools-calling/openai.mdx
deleted file mode 100644
index a8ec1cc0eb..0000000000
--- a/docs/usage/tools-calling/openai.mdx
+++ /dev/null
@@ -1,114 +0,0 @@
----
-title: OpenAI GPT 系列 Tools Calling 评测
-description: >-
- 使用 LobeChat 测试 OpenAI GPT 系列模型(GPT 3.5-turbo / GPT-4 /GPT-4o) 的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling
- - 工具调用
- - 插件
----
-
-# OpenAI GPT Series Tool Calling
-
-Overview of the Tool Calling capabilities of OpenAI GPT series models:
-
-| Model | Tool Calling Support | Streaming | Parallel | Simple Instruction Score | Complex Instruction Score |
-| ------------- | -------------------- | --------- | -------- | ------------------------ | ------------------------- |
-| GPT-3.5-turbo | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟 |
-| GPT-4-turbo | ✅ | ✅ | ✅ | 🌟🌟 | 🌟🌟 |
-| GPT-4o | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟🌟 |
-
-
- For testing instructions, see [Tools Calling - Evaluation Task
- Introduction](/docs/usage/tools-calling#evaluation-task-introduction)
-
-
-## GPT 3.5-turbo
-
-### Simple Instruction Call: Weather Inquiry
-
-Test Instruction: Instruction ①
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
-
-### Complex Instruction Call: Wenshengtu
-
-Test Instruction: Instruction ②
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
-
-## GPT-4 Turbo
-
-### Simple Instruction Call: Weather Inquiry
-
-Test Instruction: Instruction ①
-
-Unlike GPT-3.5 Turbo, GPT-4 Turbo did not respond with "okay" when calling Tool Calling, and after multiple tests, it remained the same. Therefore, in this follow-up of a compound instruction, it is not as good as GPT-3.5 Turbo, but the remaining two capabilities are still good.
-
-Of course, it is also possible that GPT-4 Turbo's model has more "autonomy" and believes that it does not need to output this "okay."
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
-
-### Complex Instruction Call: Wenshengtu
-
-Test Instruction: Instruction ②
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
-
-## GPT-4o
-
-### Simple Instruction Call: Weather Inquiry
-
-Test Instruction: Instruction ①
-
-Similar to GPT-3.5, GPT-4o performs very well in following compound instructions in simple instruction calls.
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
-
-### Complex Instruction Call: Wenshengtu
-
-Test Instruction: Instruction ②
-
-
-
-
-
-
- Streaming Tool Calling Raw Output:
-
- ```yml
- ```
-
diff --git a/docs/usage/tools-calling/openai.zh-CN.mdx b/docs/usage/tools-calling/openai.zh-CN.mdx
deleted file mode 100644
index 19e5396da3..0000000000
--- a/docs/usage/tools-calling/openai.zh-CN.mdx
+++ /dev/null
@@ -1,114 +0,0 @@
----
-title: OpenAI GPT 系列 Tools Calling 评测
-description: >-
- 使用 LobeChat 测试 OpenAI GPT 系列模型(GPT 3.5-turbo / GPT-4 /GPT-4o) 的工具调用(Function Calling)能力,并展现评测结果
-
-tags:
- - Tools Calling
- - Benchmark
- - Function Calling
- - 工具调用
- - 插件
----
-
-# OpenAI GPT 系列工具调用(Tools Calling)
-
-OpenAI GPT 系列模型 Tool Calling 能力一览:
-
-| 模型 | 支持 Tool Calling | 流式 (Stream) | 并发(Parallel) | 简单指令得分 | 复杂指令 |
-| ------------- | --------------- | ----------- | ------------ | ------ | ---- |
-| GPT-3.5-turbo | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟 |
-| GPT-4-turbo | ✅ | ✅ | ✅ | 🌟🌟 | 🌟🌟 |
-| GPT-4o | ✅ | ✅ | ✅ | 🌟🌟🌟 | 🌟🌟 |
-
-
- 关于测试指令,详见 [工具调用 Tools Calling -
- 评测任务介绍](/zh/docs/usage/tools-calling#评测任务介绍)
-
-
-## GPT 3.5-turbo
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
-
-## GPT-4 Turbo
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-GPT-4 Turbo 在调用 Tool Calling 时并没有像 GPT-3.5 Turbo 一样回复「好的」,且经过多次测试始终一样,因此在这一条复合指令的跟随中反而不如 GPT-3.5 Turbo,但剩余两项能力均不错。
-
-当然,也有可能是因为 GPT-4 Turbo 的模型更加有 “自主意识”,认为不需要输出这一句 “好的”。
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
-
-## GPT 4o
-
-### 简单调用指令:天气查询
-
-测试指令:指令 ①
-
-GPT-4o 和 3.5 一样,在简单调用指令中,能够达到非常不错的复合指令遵循能力。
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
-
-### 复杂调用指令:文生图
-
-测试指令:指令 ②
-
-
-
-
-
-
- 流式 Tool Calling 原始输出:
-
- ```yml
- ```
-
diff --git a/docs/usage/user-interface/appearance.mdx b/docs/usage/user-interface/appearance.mdx
new file mode 100644
index 0000000000..08c6e5a82c
--- /dev/null
+++ b/docs/usage/user-interface/appearance.mdx
@@ -0,0 +1,53 @@
+---
+title: Interface Appearance
+description: >-
+  LobeHub lets you customize the interface appearance, including the display
+  language, light/dark theme mode, theme colors, assistant language, code
+  highlight theme, and Mermaid diagram theme, so you can create a personalized
+  visual experience.
+tags:
+ - LobeHub
+ - LobeHub
+ - Interface Appearance
+ - Language
+ - Theme
+ - Animation Mode
+ - Context Menu Mode
+---
+
+# Interface Appearance
+
+Click on your user avatar in the top-right corner of the homepage, then select "App Settings" from the dropdown menu. Click on "Appearance" to enter the customization page. This section gathers all visual-related configuration options. \[Image]
+
+## Language
+
+You can switch the interface language in the settings.
+
+## Theme Mode
+
+LobeHub supports three theme modes:
+
+- Light Mode
+- Dark Mode
+- Follow System (default)
+
+## Theme Colors
+
+In addition to light and dark modes, you can also customize the theme color to create a personalized visual style. LobeHub offers a variety of preset theme and neutral colors. Simply click on a color swatch to apply it—buttons, highlights, and other interface elements will instantly update to reflect the selected theme color.
+
+## Assistant Language
+
+Set the default language for AI assistant responses. Once selected, the assistant will prioritize replying in this language unless you explicitly request another language during the conversation.
+
+## Code Highlight Theme
+
+When your conversation or document includes code, you can choose a syntax highlighting theme. Select a code highlight style that complements the overall theme to make your code more readable and visually appealing.
+
+## Mermaid Theme
+
+Mermaid is a tool for creating diagrams using text. When your conversation or document includes Mermaid diagrams, you can choose a theme style for them. Select a Mermaid theme that aligns with the overall interface theme to make flowcharts, sequence diagrams, and other visuals more attractive.
+
+## Mode Selection
+
+- Professional Mode
+- Simplified Mode
diff --git a/docs/usage/user-interface/appearance.zh-CN.mdx b/docs/usage/user-interface/appearance.zh-CN.mdx
new file mode 100644
index 0000000000..34844c771d
--- /dev/null
+++ b/docs/usage/user-interface/appearance.zh-CN.mdx
@@ -0,0 +1,49 @@
+---
+title: 界面外观
+description: LobeHub 支持自定义界面外观,包括界面语言、主题模式、主题色彩、助手语言、代码高亮与 Mermaid 图表主题等,打造个性化的视觉体验。
+tags:
+ - LobeHub
+ - LobeHub
+ - 界面外观
+ - 语言
+ - 主题
+ - 动画模式
+ - 上下文菜单模式
+---
+
+# 界面外观
+
+在首页右上角点击用户头像,在下拉菜单中选择「应用设置」,点击「外观」,进入自定义页面。这里汇集了所有视觉相关的配置选项。\[Image]
+
+## 语言
+
+你可以在设置中切换语言。
+
+## 主题模式
+
+LobeHub 支持三种主题模式:
+
+- 亮色模式
+- 深色模式
+- 跟随系统(默认)
+
+## 主题色彩
+
+除了明暗模式,你还可以自定义主题色彩,打造个性化的视觉风格。LobeHub 提供多种预设主题色和中性色,点击色卡即可应用,界面的按钮、高亮等元素会立即切换为选中的主题色。
+
+## 助手语言
+
+设置 AI 助手回复的默认语言。选择后,助手会优先使用该语言回答问题,除非你在对话中明确要求使用其他语言。
+
+## 代码高亮主题
+
+当对话或文稿中包含代码时,可以选择代码高亮的配色方案。选择与整体主题协调的代码高亮方案,让代码更易读、更美观。
+
+## Mermaid 主题
+
+Mermaid 是一种用文本描述图表的工具。当对话或文稿中包含 Mermaid 图表时,可以选择图表的主题样式。选择与整体主题协调的 Mermaid 主题,让流程图、时序图等图表更美观。
+
+## 模式选择
+
+- 专业模式
+- 精简模式
diff --git a/docs/usage/user-interface/command-menu.mdx b/docs/usage/user-interface/command-menu.mdx
new file mode 100644
index 0000000000..eaae83f100
--- /dev/null
+++ b/docs/usage/user-interface/command-menu.mdx
@@ -0,0 +1,56 @@
+---
+title: Command Menu
+description: >-
+ The Command Menu is the quick action center of LobeHub. Instantly accessible via keyboard shortcuts, it allows you to search and execute various actions without navigating through multiple interface layers. This makes navigation and operations faster and more efficient.
+
+
+tags:
+ - Command Menu
+ - LobeHub
+ - Quick Navigation
+ - Action Center
+ - Keyboard Shortcuts
+---
+
+# Command Menu
+
+The Command Menu is the quick action center of LobeHub. Instantly accessible via keyboard shortcuts, it allows you to search and execute various actions without navigating through multiple interface layers. This makes navigation and operations faster and more efficient.
+
+The Command Menu supports fuzzy search, so you don’t need to type the full name—just a few keywords will do. For example, typing "programming" will bring up "Python Programming Assistant." When frequently switching between assistants, using the Command Menu is much faster than browsing through the sidebar.
+
+## Open the Command Menu
+
+Use the following keyboard shortcuts to open the Command Menu:\
+macOS: Cmd + K\
+Windows/Linux: Ctrl + K\
+The Command Menu will appear as an overlay in the center of the screen.
+
+
+
+## Search and Navigate
+
+Type keywords into the Command Menu to instantly search for matching content. You can search for assistant names to quickly switch sessions, enter topic keywords to jump to past conversations, or quickly access sections like Settings, Documents, and Library. You can also search for plugins, files, and more. The Command Menu supports command search as well, allowing you to quickly perform actions like creating a new assistant or starting a conversation with AI.
+
+
+
+Use your keyboard to navigate quickly within the Command Menu. Use the ↑ and ↓ arrow keys to move through search results, press Enter to execute the selected action, and press Esc to close the menu.
+
+## Ask an Agent
+
+To chat with an assistant, press Cmd + K to open the Command Menu, type your message or topic, press Tab to select an assistant, and start the conversation instantly.
+
+To create a new assistant or assistant team, select "Create Assistant" or "Create Assistant Team" and press Enter.
+
+## Quick Navigation
+
+Select the section you want to access—such as "Documents" or "Resources"—and press Enter to jump right in.
+
+## Search Content
+
+Enter keywords to search through past messages, assistants, MCP plugins, and more. Results are categorized for easier and faster discovery.
diff --git a/docs/usage/user-interface/command-menu.zh-CN.mdx b/docs/usage/user-interface/command-menu.zh-CN.mdx
new file mode 100644
index 0000000000..e1cd7252a9
--- /dev/null
+++ b/docs/usage/user-interface/command-menu.zh-CN.mdx
@@ -0,0 +1,47 @@
+---
+title: 命令菜单
+description: 命令菜单是 LobeHub 的快捷操作中心。通过键盘快捷键快速呼出,你可以搜索和执行各种操作,无需在界面中逐层点击。这让导航和操作更快速、更高效。
+tags:
+ - Command Menu
+ - LobeHub
+ - 快速导航
+ - 操作中心
+ - 键盘快捷键
+---
+
+# 命令菜单
+
+命令菜单是 LobeHub 的快捷操作中心。通过键盘快捷键快速呼出,你可以搜索和执行各种操作,无需在界面中逐层点击。这让导航和操作更快速、更高效。
+
+命令菜单支持模糊搜索,不需要输入完整名称,输入部分关键词即可匹配。例如输入 "编程" 可以找到 "Python 编程助理"。频繁切换助理时,使用命令菜单比在侧边栏中查找更快。
+
+## 打开命令菜单
+
+使用键盘快捷键打开命令菜单:macOS 为 Cmd + K,Windows/Linux 为 Ctrl + K。命令菜单会以浮层形式出现在屏幕中央。
+
+
+
+## 搜索和导航
+
+在命令菜单中输入关键词,系统会实时搜索匹配的内容。你可以搜索助理名称快速切换会话,输入话题关键词跳转到历史对话,快捷访问设置、文稿、资源库等板块,或者搜索插件、文件等。命令菜单还支持搜索操作命令,快速执行创建助理、与 AI 对话等操作。
+
+
+
+在命令菜单中使用键盘快速导航。用 ↑ 和 ↓ 键在搜索结果中移动,按 Enter 执行选中的操作,按 Esc 关闭命令菜单。
+
+## 问 Agent
+
+与助理对话 Cmd + K 呼出命令菜单后,输入想发送的话题,按 Tab 并选择助理,可以实现快捷对话。
+
+选中「新建助理」或「新建助理团队」,按 Enter 即可创建。
+
+## 快捷导航
+
+选中想访问的板块,例如「文稿」、「资源」等,按 Enter 即可访问。
+
+## 搜索内容
+
+输入关键词,可以搜索历史消息、助理、MCP 插件等。搜索结果分类显示,便于快速查找。
diff --git a/docs/usage/user-interface/shortcuts.mdx b/docs/usage/user-interface/shortcuts.mdx
new file mode 100644
index 0000000000..2d4fcf35db
--- /dev/null
+++ b/docs/usage/user-interface/shortcuts.mdx
@@ -0,0 +1,53 @@
+---
+title: Keyboard Shortcuts
+description: >-
+ LobeHub offers a wide range of keyboard shortcuts to help you perform various
+ actions quickly and efficiently. Mastering these shortcuts can significantly
+ reduce mouse usage and streamline your workflow.
+tags:
+ - LobeHub
+ - Keyboard Shortcuts
+ - Basic Shortcuts
+ - Conversation Shortcuts
+ - Custom Shortcuts
+---
+
+# Keyboard Shortcuts
+
+LobeHub provides a comprehensive set of keyboard shortcuts that allow you to perform actions quickly and boost your productivity. By mastering these shortcuts, you can greatly reduce reliance on the mouse and make your workflow more seamless.
+
+## Accessing Settings
+
+Click on your user avatar in the top-right corner of the homepage, select "App Settings" from the dropdown menu, and then click on "Shortcuts" to view the available keyboard shortcuts.
+
+
+
+## Basic Shortcuts
+
+- Open the global command palette for quick access: ⌘ + K (Mac) or Ctrl + K (Windows/Linux)
+- Focus the main search bar on the current page: ⌘ + J (Mac) or Ctrl + J (Windows/Linux)
+- Quickly switch assistants: Hold Ctrl and press a number key (0–9) to switch between assistants pinned to the sidebar: ^ + 1–9 (Mac) or Ctrl + 1–9 (Windows/Linux)
+- Switch to the default conversation: ^ + 0 (Mac) or Ctrl + 0 (Windows/Linux)
+- Show/Hide the left panel: ⌘ + \[ (Mac) or Ctrl + \[ (Windows/Linux)
+- Show/Hide the right panel: ⌘ + ] (Mac) or Ctrl + ] (Windows/Linux)
+- Open shortcut help: ^ + ⇧ + ? (Mac) or Ctrl + Shift + ? (Windows/Linux) to view all shortcut instructions.
+- Toggle Focus Mode: ⌘ + \ (Mac) or Ctrl + \ (Windows/Linux). In Focus Mode, only the current conversation is shown, and other UI elements are hidden.
+- Save document: ⌘ + S (Mac) or Ctrl + S (Windows/Linux)
+
+## Conversation Shortcuts
+
+- Open conversation settings: ⌘ + , (Mac) or Ctrl + , (Windows/Linux)
+- Regenerate message: ⌘ + R (Mac) or Ctrl + R (Windows/Linux)
+- Delete the last message: ⌘ + D (Mac) or Ctrl + D (Windows/Linux)
+- Delete and regenerate: ⌘ + ⇧ + R (Mac) or Ctrl + Shift + R (Windows/Linux)
+- Delete the last message and regenerate: ⌘ + ⇧ + R (Mac) or Ctrl + Shift + R (Windows/Linux)
+- Save current topic and start a new one: ⌘ + N (Mac) or Ctrl + N (Windows/Linux)
+- Add a user message: ⌘ + ↵ (Mac) or Ctrl + Enter (Windows/Linux) to add the current input as a user message without triggering a response.
+- Edit a message: ⌘ + ⌥ (Mac) or Ctrl + Alt (Windows/Linux). Hold Alt and double-click a message to enter edit mode.
+- Clear all conversation messages: ⌘ + ⇧ + ⌫ (Mac) or Ctrl + Shift + Backspace (Windows/Linux)
+
+## Custom Shortcuts
+
+Most shortcuts can be customized to suit your personal workflow. Tailor the most frequently used actions to your habits for a smoother experience. In the shortcut settings panel, click on the shortcut button you want to change, then press your desired key combination to record a custom shortcut.
+
+
diff --git a/docs/usage/user-interface/shortcuts.zh-CN.mdx b/docs/usage/user-interface/shortcuts.zh-CN.mdx
new file mode 100644
index 0000000000..f185aac96a
--- /dev/null
+++ b/docs/usage/user-interface/shortcuts.zh-CN.mdx
@@ -0,0 +1,50 @@
+---
+title: 快捷键
+description: LobeHub 支持丰富的键盘快捷键,让你能快速执行各种操作,提升使用效率。熟练使用快捷键,可以大幅减少鼠标操作,让工作流更流畅。
+tags:
+ - LobeHub
+ - 快捷键
+ - 基础快捷键
+ - 会话快捷键
+ - 自定义快捷键
+---
+
+# 快捷键
+
+LobeHub 提供丰富的键盘快捷键,让你能快速执行各种操作,提升使用效率。熟练使用快捷键,可以大幅减少鼠标操作,让工作流更流畅。
+
+## 进入设置
+
+在首页右上角点击用户头像,在下拉菜单中选择「应用设置」,点击「快捷键」即可查看已有快捷键。
+
+
+
+## 基础快捷键
+
+- 打开全局命令面板快速访问功能:⌘ + K(Mac)或 Ctrl + K(Windows/Linux)
+- 唤起当前页面主要搜索框:⌘ + J(Mac)或 Ctrl + J(Windows/Linux)
+- 快捷切换助理:通过按住 Ctrl 加数字 0-9 切换固定在侧边栏的助理:^ + 1-9(Mac)或 Ctrl + 1-9(Windows/Linux)
+- 切换至默认会话:^ + 0(Mac)或 Ctrl + 0(Windows/Linux)
+- 显示 / 隐藏左侧面板:⌘ + \[(Mac)或 Ctrl + \[(Windows/Linux)
+- 显示 / 隐藏右侧面板:⌘ + ](Mac)或 Ctrl + ](Windows/Linux)
+- 打开快捷键帮助:^ + ⇧ + ?(Mac)或 Ctrl + Shift + ?(Windows/Linux)查看所有快捷键的使用说明。
+- 切换专注模式:⌘ + \(Mac)或 Ctrl + \(Windows/Linux)专注模式下,只显示当前会话,隐藏其他 UI。
+- 保存文档:⌘ + S(Mac)或 Ctrl + S(Windows/Linux)
+
+## 会话快捷键
+
+- 打开会话设置:⌘ + ,(Mac)或 Ctrl + ,(Windows/Linux)
+- 重新生成消息:⌘ + R(Mac)或 Ctrl + R(Windows/Linux)
+- 删除最后一条消息:⌘ + D(Mac)或 Ctrl + D(Windows/Linux)
+- 删除并重新生成:⌘ + ⇧ + R(Mac)或 Ctrl + Shift + R(Windows/Linux)
+- 删除最后一条消息并重新生成:⌘ + ⇧ + R(Mac)或 Ctrl + Shift + R(Windows/Linux)
+- 保存当前话题并开启新话题:⌘ + N(Mac)或 Ctrl + N(Windows/Linux)
+- 添加一条用户消息:⌘ + ↵(Mac)或 Ctrl + Enter(Windows/Linux)将当前输入内容添加为用户消息,但不触发生成。
+- 编辑消息:⌘ + ⌥(Mac)或 Ctrl + Alt(Windows/Linux)通过按住 Alt 并双击消息进入编辑模式。
+- 清空会话消息:⌘ + ⇧ + ⌫(Mac)或 Ctrl + Shift + Backspace(Windows/Linux)
+
+## 自定义快捷键
+
+大部分快捷键可以根据你的使用习惯自定义。根据自己的使用频率和习惯,自定义最常用操作的快捷键,让操作更顺手。在快捷键设置界面,找到可点击的快捷键按钮,按下按键即可录制自定义快捷键。
+
+
diff --git a/docs/usage/user-interface/stats.mdx b/docs/usage/user-interface/stats.mdx
new file mode 100644
index 0000000000..55e300e28d
--- /dev/null
+++ b/docs/usage/user-interface/stats.mdx
@@ -0,0 +1,53 @@
+---
+title: Data Analytics
+description: >-
+ LobeHub offers comprehensive data analytics features to help users understand
+ their usage patterns and performance.
+tags:
+ - LobeHub
+ - LobeHub
+ - Usage Statistics
+ - Activity Calendar
+ - Model Usage
+ - Data Sharing
+---
+
+# Data Analytics
+
+LobeHub provides powerful data analytics tools to help you gain insights into your usage. You can view metrics such as total usage days, number of assistants, conversation statistics, and activity trends over the past year, including model usage.
+
+## How to Access Data Analytics
+
+Click on your profile avatar in the top-right corner of the homepage. From the dropdown menu, select "App Settings" and then click on "Data Analytics" to view your statistics.
+
+
+
+## What’s Included
+
+### Basic Metrics
+
+- Days of Use: Total number of days since you started using the app
+- Number of Assistants: Total assistants created and used
+- Number of Topics: Total conversation topics with assistants
+- Number of Messages: Total messages sent and received
+- Total Word Count: Cumulative word count across all conversations
+
+### Activity Calendar
+
+In the middle of the page, you'll find an "activity heatmap" representing your usage over the past year. Each tile corresponds to a day, with color intensity indicating your level of activity. This calendar provides a clear visual overview of your usage habits and peak activity periods.
+
+### Usage Statistics
+
+At the bottom of the page, you'll find detailed usage breakdowns:
+
+- Model Usage Statistics: Shows how frequently and proportionally you use different AI models, helping you identify your most-used models.
+- Assistant Usage Statistics: Displays the frequency and proportion of usage for each assistant, so you can see which ones you rely on most.
+- Topic Content Volume: Visualizes the distribution of content across different topics, giving you insight into the scale and focus of your conversations.
+
+## Share Your Usage Data
+
+You can generate a shareable image of your data analytics.
+
+
+
+Click the share button in the top-right corner of the Data Analytics page. The system will generate an image containing your usage data. You can choose from several image formats: JPG, PNG, SVG, or WEBP. After selecting a format, click download to save the image and share it on other platforms.
diff --git a/docs/usage/user-interface/stats.zh-CN.mdx b/docs/usage/user-interface/stats.zh-CN.mdx
new file mode 100644
index 0000000000..ff29810565
--- /dev/null
+++ b/docs/usage/user-interface/stats.zh-CN.mdx
@@ -0,0 +1,51 @@
+---
+title: 数据统计
+description: LobeHub 提供全面的数据统计功能,帮助用户了解使用情况和性能表现。
+tags:
+ - LobeHub
+ - LobeHub
+ - 使用统计
+ - 活跃度日历
+ - 模型使用率
+ - 数据分享
+---
+
+# 数据统计
+
+LobeHub 提供数据统计功能,让你了解自己的使用情况。你可以查看使用天数、助手数量、对话统计,以及过去一年的活跃度和模型使用情况。
+
+## 查看数据统计
+
+在首页右上角点击用户头像,在下拉菜单中选择「应用设置」,点击「数据统计」即可查看。
+
+
+
+## 统计内容
+
+### 基础数据
+
+- 使用天数:从首次使用到现在的总天数
+- 助手数:创建和使用的助手总数
+- 话题数:与助手的对话话题总数
+- 消息数:发送和接收的消息总数
+- 累计字数:所有对话的累计文字数量
+
+### 活跃度日历
+
+页面中部显示过去一年的活跃度 "瓷砖墙"。每个方块代表一天,颜色深浅表示当天的活跃程度。通过活跃度日历,你可以直观地看到自己的使用习惯和活跃时段。
+
+### 使用率统计
+
+页面下方显示详细的使用率统计:
+
+- 模型使用率统计:展示你使用不同 AI 模型的频率和占比,了解自己最常用的模型。
+- 助理使用率统计:展示你使用不同助手的频率和占比,了解哪些助手最常用。
+- 话题内容量统计:展示不同话题的内容量分布,了解对话的内容规模。
+
+## 分享使用数据
+
+你可以将数据统计生成可分享的图片。
+
+
+
+点击数据统计页面右上角的分享按钮,系统会生成一张包含你使用数据的图片。你可以选择图片格式:JPG/PNG/SVG/WEBP。选择格式后点击下载,即可保存图片并分享到其他平台。
diff --git a/docs/usage/workspace/manage-quota.mdx b/docs/usage/workspace/manage-quota.mdx
new file mode 100644
index 0000000000..8fa8890917
--- /dev/null
+++ b/docs/usage/workspace/manage-quota.mdx
@@ -0,0 +1,30 @@
+---
+title: Discover Innovative AI Assistants in the LobeHub Assistant Marketplace
+description: >-
+ The LobeHub Assistant Marketplace is a vibrant and innovative community that
+ brings together a wide range of thoughtfully designed assistants to enhance
+ productivity and learning. You're welcome to submit your own assistant
+ creations and help build a more diverse, practical, and creative ecosystem.
+tags:
+ - LobeHub
+ - Assistant Marketplace
+ - Innovation Community
+ - Collaborative Space
+ - Assistant Creations
+ - Automated Internationalization
+ - Multilingual Support
+---
+
+# Assistant Marketplace
+
+
+
+In the LobeHub Assistant Marketplace, creators will find a vibrant and innovative community filled with a wide array of thoughtfully crafted assistants. These assistants not only play a vital role in professional settings but also greatly enhance the learning experience. Our marketplace is more than just a showcase—it's a collaborative space where everyone is encouraged to contribute their ideas and share their custom-built assistants.
+
+
+ By [🤖/🏪 Submitting Your Assistant](https://github.com/lobehub/lobe-chat-agents), you can easily publish your assistant to our platform. One of the standout features of LobeHub is its robust automated internationalization (i18n) workflow, which seamlessly translates your assistant into multiple languages. This ensures that users around the world can enjoy your assistant without language barriers.
+
+
+
+ We invite all users to join this ever-growing ecosystem and take part in the continuous improvement and evolution of assistants. Together, we can create more engaging, practical, and innovative assistants, enriching the diversity and utility of the marketplace.
+
diff --git a/docs/usage/workspace/manage-quota.zh-CN.mdx b/docs/usage/workspace/manage-quota.zh-CN.mdx
new file mode 100644
index 0000000000..98c6075977
--- /dev/null
+++ b/docs/usage/workspace/manage-quota.zh-CN.mdx
@@ -0,0 +1,30 @@
+---
+title: 在 LobeHub 助手市场找到创新 AI 助手
+description: >-
+ LobeHub助手市场是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
+tags:
+ - LobeHub
+ - 助手市场
+ - 创新社区
+ - 协作空间
+ - 助手作品
+ - 自动化国际化
+ - 多语言版本
+---
+
+# 助手市场
+
+
+
+在 LobeHub 的助手市场中,创作者们可以发现一个充满活力和创新的社区,它汇聚了众多精心设计的助手,这些助手不仅在工作场景中发挥着重要作用,也在学习过程中提供了极大的便利。我们的市场不仅是一个展示平台,更是一个协作的空间。在这里,每个人都可以贡献自己的智慧,分享个人开发的助手。
+
+
+ 通过 [🤖/🏪 提交助手](https://github.com/lobehub/lobe-chat-agents)
+ ,你可以轻松地将你的助手作品提交到我们的平台。我们特别强调的是,LobeHub
+ 建立了一套精密的自动化国际化(i18n)工作流程,
+ 它的强大之处在于能够无缝地将你的助手转化为多种语言版本。这意味着,不论你的用户使用何种语言,他们都能无障碍地体验到你的助手。
+
+
+
+ 我们欢迎所有用户加入这个不断成长的生态系统,共同参与到助手的迭代与优化中来。共同创造出更多有趣、实用且具有创新性的助手,进一步丰富助手的多样性和实用性。
+
diff --git a/docs/usage/workspace/manage-team.mdx b/docs/usage/workspace/manage-team.mdx
new file mode 100644
index 0000000000..1c4101b251
--- /dev/null
+++ b/docs/usage/workspace/manage-team.mdx
@@ -0,0 +1,31 @@
+---
+title: Discover Innovative AI Assistants in the LobeHub Assistant Marketplace
+description: >-
+ The LobeHub Assistant Marketplace is a vibrant and innovative community that
+ brings together a wide range of thoughtfully designed assistants to enhance
+ productivity and learning. You're welcome to submit your own assistant
+ creations and help build a more diverse, practical, and creative ecosystem.
+tags:
+ - LobeHub
+ - Assistant Marketplace
+ - Innovation Community
+ - Collaborative Space
+ - Assistant Creations
+ - Automated Internationalization
+ - Multilingual Support
+---
+
+# Assistant Marketplace
+
+
+
+In the LobeHub Assistant Marketplace, creators will find a vibrant and innovative community filled with a wide array of thoughtfully crafted assistants. These assistants not only play a vital role in professional settings but also greatly enhance the learning experience. Our marketplace is more than just a showcase—it's a collaborative space where everyone is encouraged to contribute their ideas and share their custom-built assistants.
+
+
+ Easily submit your assistant to our platform via [🤖/🏪 Submit an Assistant](https://github.com/lobehub/lobe-chat-agents).\
+ One of LobeHub’s standout features is its robust automated internationalization (i18n) workflow, which seamlessly translates your assistant into multiple languages. This ensures that users around the world can enjoy your assistant without language barriers.
+
+
+
+ We invite all users to join this ever-growing ecosystem and take part in the continuous improvement and evolution of assistants. Let’s work together to create more engaging, practical, and innovative assistants that enrich the diversity and functionality of the marketplace.
+
diff --git a/docs/usage/workspace/manage-team.zh-CN.mdx b/docs/usage/workspace/manage-team.zh-CN.mdx
new file mode 100644
index 0000000000..98c6075977
--- /dev/null
+++ b/docs/usage/workspace/manage-team.zh-CN.mdx
@@ -0,0 +1,30 @@
+---
+title: 在 LobeHub 助手市场找到创新 AI 助手
+description: >-
+ LobeHub助手市场是一个充满活力和创新的社区,汇聚了众多精心设计的助手,为工作场景和学习提供便利。欢迎提交你的助手作品,共同创造更多有趣、实用且具有创新性的助手。
+tags:
+ - LobeHub
+ - 助手市场
+ - 创新社区
+ - 协作空间
+ - 助手作品
+ - 自动化国际化
+ - 多语言版本
+---
+
+# 助手市场
+
+
+
+在 LobeHub 的助手市场中,创作者们可以发现一个充满活力和创新的社区,它汇聚了众多精心设计的助手,这些助手不仅在工作场景中发挥着重要作用,也在学习过程中提供了极大的便利。我们的市场不仅是一个展示平台,更是一个协作的空间。在这里,每个人都可以贡献自己的智慧,分享个人开发的助手。
+
+
+ 通过 [🤖/🏪 提交助手](https://github.com/lobehub/lobe-chat-agents)
+ ,你可以轻松地将你的助手作品提交到我们的平台。我们特别强调的是,LobeHub
+ 建立了一套精密的自动化国际化(i18n)工作流程,
+ 它的强大之处在于能够无缝地将你的助手转化为多种语言版本。这意味着,不论你的用户使用何种语言,他们都能无障碍地体验到你的助手。
+
+
+
+ 我们欢迎所有用户加入这个不断成长的生态系统,共同参与到助手的迭代与优化中来。共同创造出更多有趣、实用且具有创新性的助手,进一步丰富助手的多样性和实用性。
+
diff --git a/docs/wiki/HOME.zh-CN.md b/docs/wiki/HOME.zh-CN.md
new file mode 100644
index 0000000000..3b8f9c47e7
--- /dev/null
+++ b/docs/wiki/HOME.zh-CN.md
@@ -0,0 +1,11 @@
+