diff --git a/.env.example b/.env.example index ad2181518..e117db7a8 100644 --- a/.env.example +++ b/.env.example @@ -51,6 +51,7 @@ TERMINUS_PROXY_HOST_URL= TERMINUS_ADMIN_ROOT_TOKEN= # Set this env to enable vault audit logs in the sidebar TERMINUS_RETRACED_PROJECT_ID= +TERMINUS_LLM_RETRACED_PROJECT_ID= # OpenTelemetry OTEL_EXPORTER_OTLP_METRICS_ENDPOINT= @@ -115,4 +116,15 @@ ENTERPRISE_ORY_PROJECT_ID= #OPENID_REQUEST_PROFILE_SCOPE=false # Uncomment below if you wish to forward the OpenID params (https://openid.net/specs/openid-connect-core-1_0-errata2.html#AuthRequest) to the OpenID IdP -#OPENID_REQUEST_FORWARD_PARAMS=true \ No newline at end of file +#OPENID_REQUEST_FORWARD_PARAMS=true +FEATURE_LLM_VAULT= +LLM_PDF_CHAT_BASE_URL= +LLM_PDF_CHAT_ROLE_MAPPING= +LLM_PDF_CHAT_SIGNING_KEY= +LLM_PDF_CHAT_JWS_ALG= +LLM_PDF_CHAT_JWT_AUDIENCE= +LLM_PDF_CHAT_JWT_ISSUER= +TERMINUS_LLM_TENANT= +TERMINUS_LLM_PRODUCT= +TERMINUS_WRITE_API_KEY= +TERMINUS_READ_API_KEY= \ No newline at end of file diff --git a/components/Sidebar.tsx b/components/Sidebar.tsx index 9bd727c78..c7202062d 100644 --- a/components/Sidebar.tsx +++ b/components/Sidebar.tsx @@ -11,6 +11,8 @@ import DSyncLogo from '@components/logo/DSync'; import AuditLogsLogo from '@components/logo/AuditLogs'; import Vault from '@components/logo/Vault'; import Cog8ToothIcon from '@heroicons/react/24/outline/Cog8ToothIcon'; +import { useCallback, useEffect } from 'react'; +import useFeatures from '@lib/ui/hooks/useFeatures'; type SidebarProps = { isOpen: boolean; @@ -22,15 +24,33 @@ type MenuItem = { href: string; text: string; active: boolean; + onClick?: () => void; icon?: any; items?: MenuItem[]; + current?: boolean; }; export const Sidebar = ({ isOpen, setIsOpen, branding }: SidebarProps) => { const { t } = useTranslation('common'); const { asPath } = useRouter(); - const menus = [ + const closeSidebar = useCallback(() => setIsOpen(false), [setIsOpen]); + + const features = useFeatures(); + + useEffect(() => { + function 
handleEscKey(e) { + if ((e as KeyboardEvent).key === 'Escape') { + closeSidebar(); + } + } + document.addEventListener('keydown', handleEscKey); + return () => { + document.removeEventListener('keydown', handleEscKey); + }; + }, [closeSidebar]); + + const menuItems = [ { href: '/admin/dashboard', text: t('dashboard'), @@ -126,6 +146,27 @@ export const Sidebar = ({ isOpen, setIsOpen, branding }: SidebarProps) => { }, ], }, + features?.llmVault + ? { + href: '/admin/llm-vault/chat', + text: t('llm_vault'), + icon: Vault, + current: asPath.includes('llm-vault'), + active: asPath.includes('/admin/llm-vault'), + items: [ + { + href: '/admin/llm-vault/chat', + text: t('bui-chat'), + active: asPath.includes('/admin/llm-vault/chat'), + }, + { + href: '/admin/llm-vault/audit-logs', + text: t('audit_logs'), + active: asPath.includes('/admin/llm-vault/audit-logs'), + }, + ], + } + : null, { href: '/admin/settings', text: t('settings'), @@ -144,28 +185,30 @@ export const Sidebar = ({ isOpen, setIsOpen, branding }: SidebarProps) => { }, ], }, - ]; + ].filter((m): m is NonNullable => m !== null); + + const menus: MenuItem[] = menuItems; return ( <> {/* Sidebar for mobile */}
-
+
+
+
-
- -
- +
- {/* Sidebar for desktop */} -
-
-
+
+
+
+
-
+
@@ -204,11 +263,11 @@ export const Sidebar = ({ isOpen, setIsOpen, branding }: SidebarProps) => { const MenuItems = ({ menus }: { menus: MenuItem[] }) => { return ( -
); diff --git a/components/terminus/Blockly/BlocklyComponent.module.css b/components/terminus/Blockly/BlocklyComponent.module.css index dfc9bf3ed..81a41cee6 100644 --- a/components/terminus/Blockly/BlocklyComponent.module.css +++ b/components/terminus/Blockly/BlocklyComponent.module.css @@ -1,7 +1,7 @@ .blocklyDiv { height: calc(100% - 151px); - width: calc(100% - 256px); + width: calc(100%); position: absolute; top: 151px; - left: 256px; + /* left: 256px; */ } diff --git a/components/terminus/Blockly/BlocklyComponent.tsx b/components/terminus/Blockly/BlocklyComponent.tsx index b53901009..635ca7ba3 100644 --- a/components/terminus/Blockly/BlocklyComponent.tsx +++ b/components/terminus/Blockly/BlocklyComponent.tsx @@ -6,8 +6,8 @@ import { useTranslation } from 'next-i18next'; import * as Blockly from 'blockly/core'; import 'blockly/blocks'; import { maskSetup } from '@components/terminus/blocks/customblocks'; -import * as locale from 'blockly/msg/en'; -Blockly.setLocale(locale); +// import * as locale from 'blockly/msg/en'; +// Blockly.setLocale(locale); import { generateModel } from '@components/terminus/blocks/generator'; import { errorToast, successToast } from '@components/Toaster'; @@ -84,6 +84,14 @@ function BlocklyComponent(props) { const { initialXml, ...rest } = props; primaryWorkspace.current = Blockly.inject(blocklyDiv.current as any, { toolbox: toolbox.current, + zoom: { + controls: true, + wheel: true, + maxScale: 3, + minScale: 0.3, + scaleSpeed: 1.2, + startScale: 0.8, + }, readOnly: false, trashcan: true, media: '/terminus/', diff --git a/e2e/support/fixtures/setuplink-ds-page.ts b/e2e/support/fixtures/setuplink-ds-page.ts index a578cf28a..daf204a5a 100644 --- a/e2e/support/fixtures/setuplink-ds-page.ts +++ b/e2e/support/fixtures/setuplink-ds-page.ts @@ -68,7 +68,7 @@ export class SetupLinkDSPage { await expect(this.page.getByRole('table')).toBeVisible(); // Delete the created setuplink - await this.page.getByRole('button').nth(5).click(); + await 
this.page.getByRole('button').nth(6).click(); await this.page.getByRole('button', { name: 'Delete' }).click(); } } diff --git a/e2e/support/fixtures/setuplink-page.ts b/e2e/support/fixtures/setuplink-page.ts index 81cacc7ed..e783e0fb0 100644 --- a/e2e/support/fixtures/setuplink-page.ts +++ b/e2e/support/fixtures/setuplink-page.ts @@ -76,7 +76,7 @@ export class SetupLinkPage { await expect(this.page.getByRole('table')).toBeVisible(); // Delete the created setuplink - await this.page.getByRole('button').nth(5).click(); + await this.page.getByRole('button').nth(6).click(); await this.page.getByRole('button', { name: 'Delete' }).click(); } } diff --git a/e2e/ui/Directory Sync/setup_link_ds.spec.ts b/e2e/ui/Directory Sync/setup_link_ds.spec.ts index 1cebc4e61..b929004e6 100644 --- a/e2e/ui/Directory Sync/setup_link_ds.spec.ts +++ b/e2e/ui/Directory Sync/setup_link_ds.spec.ts @@ -43,7 +43,7 @@ async function deleteDirectory(setupLinkPage: Page) { await setupLinkPage.getByRole('button', { name: 'Confirm' }).click(); } -test.describe('Admin Portal Dyrectory Sync SetupLink', () => { +test.describe('Admin Portal Directory Sync SetupLink', () => { test('should be able to create setup link and directories', async ({ page, setuplinkPage }) => { // get setuplink url const linkContent = await setuplinkPage.getSetupLinkUrl(); diff --git a/ee/chat/api/[tenant]/config/[configId].ts b/ee/chat/api/[tenant]/config/[configId].ts new file mode 100644 index 000000000..315820568 --- /dev/null +++ b/ee/chat/api/[tenant]/config/[configId].ts @@ -0,0 +1,54 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import { deleteLLMConfigSchema, updateLLMConfigSchema, validateWithSchema } from '@lib/zod'; +import jackson from '@lib/jackson'; +import { LLMProvider } from '@boxyhq/saml-jackson'; +import { defaultHandler } from '@lib/api'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + DELETE: handleDELETE, + PUT: handlePUT, 
+ }); +}; + +// Delete llm config +const handleDELETE = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + + const { configId, tenant } = validateWithSchema(deleteLLMConfigSchema, req.query); + + await chatController.deleteLLMConfig({ + configId, + tenant: tenant, + }); + + res.status(204).end(); +}; + +// Update llm config +const handlePUT = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + + const providers = await chatController.getLLMProviders(req.query.tenant as string, false); + + const { configId, tenant, apiKey, models, baseURL, piiPolicy, provider } = validateWithSchema( + updateLLMConfigSchema(providers), + { + ...req.body, + ...req.query, + } + ); + + await chatController.updateLLMConfig(configId, { + tenant, + apiKey, + baseURL, + piiPolicy, + provider: provider as LLMProvider, + models, + }); + + res.status(204).end(); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/config/index.ts b/ee/chat/api/[tenant]/config/index.ts new file mode 100644 index 000000000..4568b08e4 --- /dev/null +++ b/ee/chat/api/[tenant]/config/index.ts @@ -0,0 +1,51 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { createLLMConfigSchema, validateWithSchema } from '@lib/zod'; +import { LLMProvider } from '@boxyhq/saml-jackson'; +import { defaultHandler } from '@lib/api'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + POST: handlePOST, + }); +}; + +// Get Chat Configs +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + + const configs = await chatController.getLLMConfigs(req.query.tenant as string); + + res.json({ data: configs }); +}; + +// Create Chat Config +const handlePOST = async (req: NextApiRequest, res: NextApiResponse) => { + const { 
chatController } = await jackson(); + + const providers = await chatController.getLLMProviders(req.query.tenant as string, false); + + const { provider, apiKey, models, isChatWithPDFProvider, baseURL, piiPolicy, tenant } = validateWithSchema( + createLLMConfigSchema(providers), + { ...req.body, ...req.query } + ); + + if (!apiKey && provider !== 'ollama' && !isChatWithPDFProvider) { + throw new Error('API Key is required'); + } + + const config = await chatController.createLLMConfig({ + provider: provider as LLMProvider, + models: models || [], + isChatWithPDFProvider, + apiKey, + baseURL, + piiPolicy, + tenant, + }); + + res.status(201).json({ data: { config } }); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/conversation/[conversationId].ts b/ee/chat/api/[tenant]/conversation/[conversationId].ts new file mode 100644 index 000000000..716900a96 --- /dev/null +++ b/ee/chat/api/[tenant]/conversation/[conversationId].ts @@ -0,0 +1,37 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { defaultHandler } from '@lib/api'; +import { terminusOptions } from '@lib/env'; +import { getServerSession } from 'next-auth'; +import { authOptions } from 'pages/api/auth/[...nextauth]'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + }); +}; + +// Get Chat Thread by Conversation ID +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + const { tenant } = req.query; + + let userId; + const isAdminPortalTenant = tenant === terminusOptions.llm?.tenant; + if (isAdminPortalTenant) { + const session = await getServerSession(req, res, authOptions); + if (!session) { + res.status(401).json({ error: { message: 'Unauthorized' } }); + return; + } + userId = session.user.id; + } else { + // userId = req.body.userId; + } + + const chat = await 
chatController.getChatThreadByConversationId(req.query.conversationId as string, userId); + + if (chat) res.json({ data: chat }); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/conversation/index.ts b/ee/chat/api/[tenant]/conversation/index.ts new file mode 100644 index 000000000..152839705 --- /dev/null +++ b/ee/chat/api/[tenant]/conversation/index.ts @@ -0,0 +1,40 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { defaultHandler } from '@lib/api'; +import { getServerSession } from 'next-auth'; +import { terminusOptions } from '@lib/env'; +import { authOptions } from 'pages/api/auth/[...nextauth]'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + }); +}; + +// Get Conversations +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + const { tenant } = req.query; + + let userId; + const isAdminPortalTenant = tenant === terminusOptions.llm?.tenant; + if (isAdminPortalTenant) { + const session = await getServerSession(req, res, authOptions); + if (!session) { + res.status(401).json({ error: { message: 'Unauthorized' } }); + return; + } + userId = session.user.id; + } else { + userId = req.body.userId; + } + + const conversations = await chatController.getConversationsByTenantAndUser({ + tenant: req.query.tenant as string, + userId, + }); + + res.json({ data: conversations }); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/index.ts b/ee/chat/api/[tenant]/index.ts new file mode 100644 index 000000000..7a3129edb --- /dev/null +++ b/ee/chat/api/[tenant]/index.ts @@ -0,0 +1,212 @@ +import { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { generateChatResponse } from '@lib/llm'; +import { terminusOptions } from '@lib/env'; +import { authOptions } from 'pages/api/auth/[...nextauth]'; +import 
{ getServerSession } from 'next-auth'; +import { defaultHandler } from '@lib/api'; +import { ApiError } from '@lib/error'; +import { LLMProvider } from 'npm/src'; + +/** + * If no conversationId is provided it will be treated as new conversation and will be created. + * Post api will send the conversationId and message to the LLM provider and return the response. + * If the conversationId is provided it will be treated as existing conversation and will be used to send the message. + * Post api will send the conversationId and message to the LLM provider and return the response. + */ + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + POST: handlePOST, + }); +}; + +async function handlePOST(req: NextApiRequest, res: NextApiResponse) { + const { chatController } = await jackson(); + const { tenant } = req.query; + const { messages, model, provider, isChatWithPDFProvider } = req.body; + + let userId, email; + const isAdminPortalTenant = tenant === terminusOptions.llm?.tenant; + if (isAdminPortalTenant) { + const session = await getServerSession(req, res, authOptions); + if (!session) { + res.status(401).json({ error: { message: 'Unauthorized' } }); + return; + } + userId = session.user.id; + email = session.user.email; + } else { + userId = req.body.userId; + } + let { conversationId } = req.body; + + if (!isChatWithPDFProvider) { + if (!provider) { + res.status(400).json({ error: { message: 'Provider is required' } }); + return; + } + + if (!model) { + res.status(400).json({ error: { message: 'Model is required' } }); + return; + } + } + + try { + const llmConfigs = await chatController.getLLMConfigsByTenantAndProvider( + tenant as string, + isChatWithPDFProvider ? 'openai' : provider + ); + + if (llmConfigs.length === 0) { + res.status(400).json({ + error: { + message: conversationId + ? 'The provider and model related to this conversation are no longer available.' + : 'LLM Config not found. 
Please create one before using LLM.', + }, + }); + return; + } + if (!isChatWithPDFProvider) { + const allowedModels = await chatController.getLLMModels(tenant as string, provider as LLMProvider); + // const allowedModels = getLLMModels(provider, llmConfigs); + + if (allowedModels.length > 0 && allowedModels.find((m) => m.id === model.id) === undefined) { + res.status(400).json({ + error: { + message: conversationId + ? 'The provider and model related to this conversation are no longer available.' + : 'Model not allowed', + }, + }); + return; + } + } + + let config; + if (isChatWithPDFProvider) { + config = llmConfigs.find((c) => c.isChatWithPDFProvider); + if (config === undefined) { + res.status(400).json({ + error: { message: 'No config found for chat with PDF' }, + }); + return; + } + } else { + config = llmConfigs.find((c) => c.models.includes(model.id)) || llmConfigs[0]; + } + + const configFromVault = await chatController.getLLMConfigFromVault( + tenant as string, + config.terminusToken + ); + + if (isChatWithPDFProvider) { + const jwt = await chatController.generatePDFChatJWT({ email }); + configFromVault.apiKey = jwt; + } + + if (!conversationId) { + const conversation = await chatController.createConversation({ + tenant: tenant as string, + userId, + title: messages[0].content.trim().slice(0, 50), + provider: isChatWithPDFProvider ? 'openai' : provider, + model: model?.id || '', + isChatWithPDFProvider, + }); + conversationId = conversation.id; + } else { + const conversation = await chatController.getConversationById(conversationId); + if (!conversation) { + res.status(404).json({ error: { message: 'Conversation not found' } }); + return; + } + } + + await chatController.createChat({ + conversationId, + role: 'user', + content: messages[messages.length - 1].content, + }); + + const responseMessage = await generateChatResponse( + messages.map((m) => { + return { + content: m.content, + role: m.role, + }; + }), + isChatWithPDFProvider ? 
'openai' : provider, + isChatWithPDFProvider ? 'gpt-4o' : model, + { + ...config, + ...configFromVault, + }, + true + ); + + if (!responseMessage) { + res.status(400).json({ error: 'Unable get response from LLM. Please try again.' }); + } + + if (typeof responseMessage === 'string') { + await chatController.createChat({ conversationId, role: 'assistant', content: responseMessage }); + + res.status(200).json({ message: responseMessage, conversationId }); + } else { + res.setHeader('Content-Type', 'application/x-ndjson; charset=utf-8'); + res.setHeader('Cache-Control', 'no-cache, no-transform'); + res.setHeader('Connection', 'keep-alive'); + res.flushHeaders(); + let message = ''; + for await (const chunk of responseMessage) { + if (!chunk || !chunk.choices) { + continue; + } + if (chunk.choices.length === 0) { + continue; + } + if (chunk.choices[0]?.delta?.content) { + // skip first empty line + if (!message && chunk.choices[0]?.delta?.content === '\n') { + continue; + } + message += chunk.choices[0]?.delta?.content; + if (!chunk) { + continue; + } + await res.write(JSON.stringify(chunk) + '\n'); + } + } + await res.write(JSON.stringify({ conversationId }) + '\n'); + res.end(); + + await chatController.createChat({ conversationId, role: 'assistant', content: message }); + } + } catch (error: any) { + console.error('Error in chat api', error); + const { status, message } = decodeError(provider, error); + throw new ApiError(message, status); + } +} + +const decodeError = (provider: string, error: any) => { + switch (provider) { + case 'openai': + return { + status: error.status || 400, + message: error?.code || error?.message, + }; + case 'mistral': + return { + status: (error?.message || '').indexOf('401') !== -1 ? 401 : 400, + message: (error?.message || '').indexOf('Unauthorized') !== -1 ? 
'Unauthorized' : error?.message, + }; + } + return { status: 500, message: error?.message }; +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/providers/[provider]/models.ts b/ee/chat/api/[tenant]/providers/[provider]/models.ts new file mode 100644 index 000000000..813346351 --- /dev/null +++ b/ee/chat/api/[tenant]/providers/[provider]/models.ts @@ -0,0 +1,24 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { defaultHandler } from '@lib/api'; +import { LLMProvider } from 'npm/src'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + }); +}; + +// Get Models list for dropdown +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + + const { tenant, provider, filterByTenant: filterByTenantParam } = req.query; + const filterByTenant = filterByTenantParam !== 'false'; + + const models = await chatController.getLLMModels(tenant as string, provider as LLMProvider, filterByTenant); + + res.json({ data: models }); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/providers/index.ts b/ee/chat/api/[tenant]/providers/index.ts new file mode 100644 index 000000000..8ae556cac --- /dev/null +++ b/ee/chat/api/[tenant]/providers/index.ts @@ -0,0 +1,23 @@ +import type { NextApiRequest, NextApiResponse } from 'next'; +import jackson from '@lib/jackson'; +import { defaultHandler } from '@lib/api'; + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + }); +}; + +// Get Providers list for dropdown +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + const { chatController } = await jackson(); + + const { tenant, filterByTenant: filterByTenantParam } = req.query; + const filterByTenant = filterByTenantParam !== 'false'; + + const providers = await 
chatController.getLLMProviders(tenant as string, filterByTenant); + + res.json({ data: providers }); +}; + +export default handler; diff --git a/ee/chat/api/[tenant]/upload-file.ts b/ee/chat/api/[tenant]/upload-file.ts new file mode 100644 index 000000000..ccfc9f484 --- /dev/null +++ b/ee/chat/api/[tenant]/upload-file.ts @@ -0,0 +1,121 @@ +import type { IncomingMessage } from 'http'; +import { NextApiRequest, NextApiResponse } from 'next'; +import type { Readable } from 'node:stream'; +import { defaultHandler } from '@lib/api'; +import { llmOptions, terminusOptions } from '@lib/env'; +import jackson from '@lib/jackson'; +import { getServerSession } from 'next-auth'; +import { authOptions } from 'pages/api/auth/[...nextauth]'; + +// Function to force consume the response body to avoid memory leaks +export const forceConsume = async (response) => { + try { + await response.text(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + } catch (error) { + // Do nothing + } +}; + +// Get raw body as buffer +async function getRawBody(readable: Readable): Promise { + const chunks: any[] = []; + for await (const chunk of readable) { + chunks.push(typeof chunk === 'string' ? 
Buffer.from(chunk) : chunk); + } + return Buffer.concat(chunks); +} + +// Parse multipart form data to extract file content +async function parseMultipartFormData(req: IncomingMessage, boundary: string) { + const buffer = await getRawBody(req); + const parts = buffer.toString().split(`--${boundary}`); + + let fileName = ''; + let fileBuffer: Buffer | null = null; + + parts.forEach((part) => { + if (part.includes('Content-Disposition: form-data; name="file"; filename=')) { + fileName = part.split('filename=')[1].split('"')[1]; + const start = buffer.indexOf('\r\n\r\n') + 4; + const end = buffer.lastIndexOf('\r\n--'); + fileBuffer = buffer.subarray(start, end); + } + }); + + return { fileName, fileBuffer }; +} + +const handler = async (req: NextApiRequest, res: NextApiResponse) => { + await defaultHandler(req, res, { + POST: handlePOST, + }); +}; + +export async function handlePOST(req, res) { + const { chatController } = await jackson(); + + // TODO: Upload file against tenant/user + + const { tenant } = req.query; + + let email; + const isAdminPortalTenant = tenant === terminusOptions.llm?.tenant; + if (isAdminPortalTenant) { + const session = await getServerSession(req, res, authOptions); + if (!session) { + res.status(401).json({ error: { message: 'Unauthorized' } }); + return; + } + email = session.user.email; + } else { + // email = req.body.userId; + } + + const contentType = req.headers['content-type']; + if (!contentType) { + res.status(400).json({ error: 'Content-Type header is missing' }); + return; + } + + const boundary = contentType.split('boundary=')[1]; + + const { fileName, fileBuffer } = await parseMultipartFormData(req, boundary); + + if (!fileBuffer || !fileName) { + res.status(400).json({ error: 'File not found in the request' }); + return; + } + + const formData = new FormData(); + formData.append('file', new Blob([fileBuffer]), fileName); + + const jwt = await chatController.generatePDFChatJWT({ email }); + + try { + const response = await 
fetch(`${llmOptions.pdfChat.baseUrl}/chat/upload_file`, {
      headers: {
        Authorization: `Bearer ${jwt}`,
      },
      method: 'POST',
      body: formData,
    });

    const status = response.status;
    const contentType = response.headers.get('content-type');

    if (contentType?.includes('application/json')) {
      res.status(status).json(await response.json());
    } else {
      // Consume the upstream fetch body so the underlying socket is released.
      // (Previously this passed `res` — the outgoing NextApiResponse — which
      // has no .text(), so the helper's catch swallowed the error and the
      // upstream body was never consumed.)
      forceConsume(response);
      res.status(status).end();
    }
  } catch (error: any) {
    const message = error.message || 'An error occurred. Please try again.';
    const status = error.status || 500;

    res.status(status).json({ error: { message } });
  }
}

export default handler;

diff --git a/ee/chat/pages/[[...conversationId]].tsx b/ee/chat/pages/[[...conversationId]].tsx
new file mode 100644
index 000000000..42d158077
--- /dev/null
+++ b/ee/chat/pages/[[...conversationId]].tsx
@@ -0,0 +1,25 @@
import { ChatContextProvider, ChatUI } from '@boxyhq/internal-ui';
import LicenseRequired from '@components/LicenseRequired';

const ChatPage = ({ llmTenant, hasValidLicense }: { llmTenant: string; hasValidLicense: boolean }) => {
  if (!hasValidLicense) {
    // NOTE(review): the JSX here (presumably <LicenseRequired />) was stripped
    // from this paste — restore it from the original commit.
    return;
  }

  // NOTE(review): the JSX here (presumably ChatContextProvider wrapping ChatUI,
  // per the imports above) was stripped from this paste — restore it from the
  // original commit.
  return;
};

export default ChatPage;

diff --git a/ee/terminus/pages/audit-logs.tsx b/ee/terminus/pages/audit-logs.tsx
new file mode 100644
index 000000000..e303bfd79
--- /dev/null
+++ b/ee/terminus/pages/audit-logs.tsx
@@ -0,0 +1,111 @@
import type { NextPage } from 'next';
import dynamic from 'next/dynamic';
import { useEffect, useState } from 'react';
import { useProject, useGroups } from '@lib/ui/retraced';
import { LinkBack, Loading, Error } from '@boxyhq/internal-ui';
import { Select } from 'react-daisyui';
import { useTranslation } from 'next-i18next';
import LicenseRequired from '@components/LicenseRequired';

const LogsViewer = dynamic(() => import('@components/retraced/LogsViewer'), {
  ssr: false,
});

export interface Props {
  host?: string;
  projectId: string;
  hasValidLicense: boolean;
}

const
Events: NextPage = ({ host, projectId, hasValidLicense }: Props) => { + const { t } = useTranslation('common'); + + const [environment, setEnvironment] = useState(''); + const [group, setGroup] = useState(''); + + const { project, isLoading, isError } = useProject(projectId); + const { groups } = useGroups(projectId, environment); + + // Set the environment + useEffect(() => { + if (project) { + setEnvironment(project.environments[0]?.id); + } + }, [project]); + + // Set the group + useEffect(() => { + if (groups && groups.length > 0) { + setGroup(groups[0].group_id); + } + }, [groups]); + + if (!hasValidLicense) { + return ; + } + + if (isLoading) { + return ; + } + + if (isError) { + return ; + } + + const displayLogsViewer = project && environment && group; + + return ( +
+ +
+

{project?.name}

+
+
+
+ + {project ? ( + + ) : null} +
+
+ + {groups ? ( + + ) : null} +
+
+
+ {displayLogsViewer && ( + + )} +
+
+ ); +}; + +export default Events; diff --git a/internal-ui/package-lock.json b/internal-ui/package-lock.json index a43d80618..f42c3b087 100644 --- a/internal-ui/package-lock.json +++ b/internal-ui/package-lock.json @@ -1902,9 +1902,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.43", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.43.tgz", - "integrity": "sha512-NxnmFBHDl5Sachd2P46O7UJiMaMHMLSofoIWVJq3mj8NJgG0umiSeljAVP9lGzjI0UDLJJ5jjoGjcrB8RSbjLQ==", + "version": "1.5.42", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.42.tgz", + "integrity": "sha512-gIfKavKDw1mhvic9nbzA5lZw8QSHpdMwLwXc0cWidQz9B15pDoDdDH4boIatuFfeoCatb3a/NGL6CYRVFxGZ9g==", "dev": true, "license": "ISC" }, diff --git a/internal-ui/src/chat/Chat.tsx b/internal-ui/src/chat/Chat.tsx new file mode 100644 index 000000000..4887a9ff7 --- /dev/null +++ b/internal-ui/src/chat/Chat.tsx @@ -0,0 +1,468 @@ +import { useContext, useEffect, useRef, useState } from 'react'; +import { PaperAirplaneIcon } from '@heroicons/react/24/outline'; +import { useTranslation } from 'next-i18next'; +import { useAutoResizeTextArea, useFetch } from '../hooks'; +import Message from './Message'; +import { LLMChat, LLMModel, LLMProvidersOptionsType } from './types'; +import { Loading } from '../shared'; +import { ApiSuccess } from '../types'; +import { ChatContext } from '../provider'; +import { ConversationContext } from './ChatUI'; +import { defaultHeaders } from '../utils'; + +interface ChatProps { + setShowSettings: (value: boolean) => void; + conversationId?: string; + setConversationId: (value: string) => void; +} + +const Chat = ({ setShowSettings, conversationId, setConversationId }: ChatProps) => { + const { t } = useTranslation('common'); + const [errorMessage, setErrorMessage] = useState(''); + const [message, setMessage] = useState(''); + const textAreaRef = useAutoResizeTextArea(); + const 
bottomOfChatRef = useRef(null); + + // Get the provider/model plus loading state from the context + const { provider, model, onError, urls } = useContext(ChatContext); + const selectedConversation = useContext(ConversationContext)?.selectedConversation; + let isChatWithPDFProvider = useContext(ConversationContext)?.isChatWithPDFProvider; + if (selectedConversation) { + isChatWithPDFProvider = selectedConversation.isChatWithPDFProvider; + } + const [selectedProvider, setSelectedProvider] = useState(''); + const [selectedModel, setSelectedModel] = useState(''); + + const [requestInProgress, setRequestInProgress] = useState(false); + const [isArchived, setIsArchived] = useState(false); + + // Fetch conversation thread + const { + data: conversationThreadData, + isLoading: isLoadingConversationThread, + error: errorLoadingThread, + refetch: reloadConversationThread, + } = useFetch>({ + url: conversationId ? `${urls?.conversation}/${conversationId}` : undefined, + }); + + const conversationThread = conversationThreadData?.data; + + useEffect(() => { + if (!isLoadingConversationThread && errorLoadingThread) { + onError?.(errorLoadingThread.message); + } + }, [isLoadingConversationThread, errorLoadingThread]); + + const { + data: providersData, + isLoading: isLoadingProviders, + error: errorLoadingProviders, + } = useFetch>({ + url: urls?.llmProviders, + }); + + const providers = providersData?.data; + + const showCreateLLMConfigMessage = + !isChatWithPDFProvider && Array.isArray(providers) && providers?.length === 0; + const showProviderSelection = + !isChatWithPDFProvider && + !showCreateLLMConfigMessage && + !provider && + Array.isArray(providers) && + providers?.length > 0 && + (selectedProvider === '' || selectedModel === ''); + + const { + data: modelsData, + isLoading: isLoadingModels, + error: errorLoadingModels, + } = useFetch>({ + url: selectedProvider ? 
`${urls?.llmProviders}/${selectedProvider}/models` : undefined, + }); + const models = modelsData?.data; + + useEffect(() => { + if (errorLoadingProviders || errorLoadingModels) { + onError?.(errorLoadingProviders?.message || errorLoadingModels?.message); + } + }, [errorLoadingProviders, errorLoadingModels]); + + useEffect(() => { + setSelectedProvider(selectedConversation?.provider || ''); + setSelectedModel(selectedConversation?.model || ''); + }, [selectedConversation]); + + useEffect(() => { + if (selectedConversation && !isChatWithPDFProvider) { + if ( + providers?.findIndex((p) => p.id === selectedConversation.provider) === -1 || + models?.findIndex((m) => m.id === selectedConversation.model) === -1 + ) { + setIsArchived(true); + } else { + setIsArchived(false); + } + } + }, [selectedConversation, providers, models, isChatWithPDFProvider]); + + useEffect(() => { + if (textAreaRef.current) { + textAreaRef.current.style.height = '24px'; + textAreaRef.current.style.height = `${textAreaRef.current.scrollHeight}px`; + } + }, [message, textAreaRef]); + + function isRefInView(ref) { + if (!ref.current) return false; + + const rect = ref.current.getBoundingClientRect(); + return ( + rect.top >= 0 && + rect.left >= 0 && + rect.bottom <= (window.innerHeight || document.documentElement.clientHeight) && + rect.right <= (window.innerWidth || document.documentElement.clientWidth) + ); + } + + useEffect(() => { + if (bottomOfChatRef.current) { + if (isRefInView(bottomOfChatRef)) { + bottomOfChatRef.current.scrollIntoView({ + behavior: 'auto', + }); + } + } + }, [conversationThread]); + + const [trailingThread, setTrailingThread] = useState<{ content: string | null; role: string }[]>([]); + const sendMessage = async (e: any) => { + try { + setRequestInProgress(true); + e.preventDefault(); + // const model = getProviderModel(); + const _model = models?.find((m) => m.id === (model || selectedModel)); + + if (!isChatWithPDFProvider) { + if (!provider && !selectedProvider) { + 
setErrorMessage('Please select a Provider'); + return; + } + if (!_model) { + setErrorMessage('Please select a Model'); + return; + } + } + // Don't send empty messages + if (message.length < 1) { + setErrorMessage('Please enter a message.'); + return; + } else { + setErrorMessage(''); + } + + // Add the message to the conversation + setTrailingThread([ + { content: message, role: 'user' }, + { content: null, role: 'assistant' }, + ]); + + // Clear the message & remove empty chat + setMessage(''); + + if (!urls?.chat) { + throw new Error('Missing API path for LLM chat'); + } + + const response = await fetch(urls.chat, { + method: 'POST', + headers: defaultHeaders, + body: JSON.stringify({ + messages: [...(conversationThread ?? []), { content: message, role: 'user' }], + model: _model, + provider: provider || selectedProvider, + conversationId, + isChatWithPDFProvider, + }), + }); + + if (response.ok) { + const reader = response.body?.getReader(); + const decoder = new TextDecoder('utf-8'); + let receivedData = ''; + if (reader) { + let done = false; + let value; + do { + const op = await reader.read(); + done = op.done; + value = op.value; + if (done) break; + const dt = decoder.decode(value, { stream: true }); + const jsonData = dt.split('\n').map((d) => { + if (!d) { + return { + choices: [ + { + delta: { + content: '', + }, + }, + ], + }; + } + return JSON.parse(d); + }); + for (const data of jsonData) { + if (data.conversationId) { + // last chunk + if (conversationId !== data.conversationId) { + setConversationId(data.conversationId); + } + setTrailingThread([]); + reloadConversationThread(); + } else if (data.choices) { + // new chunks get appended + if (data.choices[0]?.delta?.content) { + receivedData += data.choices[0]?.delta?.content || ''; + setTrailingThread([ + { content: message, role: 'user' }, + { content: receivedData, role: 'assistant' }, + ]); + } + } else if (data?.error?.message) { + setErrorMessage(data?.error?.message); + } + } + } while 
(!done); + } + } else { + const data = await response.json(); + setErrorMessage(data?.error?.message); + } + } catch (error: any) { + setErrorMessage(error.message); + + // setIsLoading(false); + } finally { + setRequestInProgress(false); + } + }; + + const handleKeypress = (e: any) => { + // It's triggers by pressing the enter key + if (e.keyCode == 13 && !e.shiftKey) { + sendMessage(e); + e.preventDefault(); + } + }; + + const [isUploadingFile, setIsUploadingFile] = useState(false); + const handleFileChange = async (e: React.ChangeEvent) => { + if (!urls?.fileUpload) { + throw new Error('Missing API path for file upload'); + } + const files = e.target.files; + if (files && files.length > 0) { + const formData = new FormData(); + formData.append('file', files[0]); + setIsUploadingFile(true); + const response = await fetch(urls.fileUpload, { + method: 'POST', + body: formData, + }); + setIsUploadingFile(false); + if (!response.ok) { + const json = await response.json(); + onError?.(json.error.message); + return; + } + } + }; + + const providerName = providers?.find((p) => p.id === (provider || selectedProvider))?.name; + const modelName = models?.find((m) => m.id === (model || selectedModel))?.name; + + return ( +
+
+
+ {showProviderSelection && !conversationId && ( +
+
+
+ +
+
+ {Array.isArray(models) && models.length > 0 ? ( + + ) : ( + { + setSelectedModel(e.target.value); + }} + value={selectedModel} + /> + )} +
+
+
+ )} +
+ {selectedProvider && selectedModel && ( +
+ {t('bui-chat-provider')}: {providerName} {t('bui-chat-model')}: {modelName || ''} + {isArchived && ({t('bui-chat-archived')})} +
+ )} +
+ {[...(conversationThread ?? []), ...trailingThread]?.map((message, index) => ( + + ))} +
+
+
+ + {showCreateLLMConfigMessage && ( +
+
+ {t('bui-chat-no-chat-configs-found')} +
+
+ {t('bui-chat-goto')}{' '} + { + setShowSettings(true); + }}> + {t('settings')} + {' '} + {t('bui-chat-to-create-new-config')} +
+
+ )} + {/*
*/} +
+
+
+
+
+
+ {errorMessage ? ( +
+
+ {errorMessage} +
+
+ ) : null} +
+ {isChatWithPDFProvider && ( + <> + + + + )} + + +
+
+
+
+
+
+ ); +}; + +export default Chat; diff --git a/internal-ui/src/chat/ChatDisabled.tsx b/internal-ui/src/chat/ChatDisabled.tsx new file mode 100644 index 000000000..16d44056a --- /dev/null +++ b/internal-ui/src/chat/ChatDisabled.tsx @@ -0,0 +1,20 @@ +import { Card } from '@boxyhq/internal-ui'; +// import ContactSupport from '../contactSupport'; +import { useTranslation } from 'next-i18next'; + +export default function ChatDisabled() { + const { t } = useTranslation('common'); + return ( +
+ + + + {t('bui-chat-disabled-title')} + {t('bui-chat-disabled-description')} + +
{/* */}
+
+
+
+ ); +} diff --git a/internal-ui/src/chat/ChatDrawer.tsx b/internal-ui/src/chat/ChatDrawer.tsx new file mode 100644 index 000000000..f3888b1d4 --- /dev/null +++ b/internal-ui/src/chat/ChatDrawer.tsx @@ -0,0 +1,46 @@ +import MobileSidebar from './MobileSidebar'; +import Sidebar from './Sidebar'; +import { LLMConversation } from './types'; + +type ChatDrawerProps = { + isChatDrawerVisible: boolean; + toggleChatDrawerVisibility: () => void; + setShowSettings: (value: boolean) => void; + conversations?: LLMConversation[]; + conversationId?: string; + setConversationId: (value: string) => void; +}; + +export default function ChatDrawer(props: ChatDrawerProps) { + const { + isChatDrawerVisible, + toggleChatDrawerVisibility, + setShowSettings, + conversations, + conversationId, + setConversationId, + } = props; + return ( + <> + {isChatDrawerVisible ? ( + + ) : null} +
+
+ +
+
+ + ); +} diff --git a/internal-ui/src/chat/ChatSettings.tsx b/internal-ui/src/chat/ChatSettings.tsx new file mode 100644 index 000000000..35196dc13 --- /dev/null +++ b/internal-ui/src/chat/ChatSettings.tsx @@ -0,0 +1,417 @@ +import { useTranslation } from 'next-i18next'; +import { useContext, useEffect, useState } from 'react'; +import { Button, Input } from 'react-daisyui'; +import { Table, Card, Badge, ConfirmationModal, InputWithLabel, Loading } from '../shared'; +import { LLMConfig, LLMModel, LLMProvidersOptionsType, PII_POLICY, PII_POLICY_OPTIONS } from './types'; +import { ChatContext } from '../provider'; +import { useFetch } from '../hooks'; +import { ApiSuccess } from '../types'; +import { defaultHeaders } from '../utils'; + +export default function ChatSettings() { + const { t } = useTranslation('common'); + const [selectedProvider, setSelectedProvider] = useState( + 'openai' + ); + const [selectedModel, setSelectedModel] = useState([]); + const [apiKey, setApiKey] = useState(''); + const [baseURL, setBaseURL] = useState(''); + const [piiPolicy, setPIIPolicy] = useState<(typeof PII_POLICY_OPTIONS)[number]>('none'); + const [loading, setLoading] = useState(false); + const [confirmationDialogVisible, setConfirmationDialogVisible] = useState(false); + const [selectedConfig, setSelectedConfig] = useState(null); + const [isChatWithPDFProvider, setIsChatWithPDFProvider] = useState(false); + + const [view, switchView] = useState<'list' | 'create' | 'edit'>('list'); + + const { urls, onError, onSuccess } = useContext(ChatContext); + + const { + data: llmConfigsData, + isLoading: isLoadingConfigs, + refetch: reloadConfigs, + error: errorLoadingConfigs, + } = useFetch>({ + url: urls?.llmConfig, + }); + const llmConfigs = llmConfigsData?.data || []; + + const { + data: providersData, + isLoading: isLoadingProviders, + error: errorLoadingProviders, + } = useFetch>({ + url: `${urls?.llmProviders}?filterByTenant=false`, + }); + + const providers = providersData?.data 
|| []; + + const { + data: modelsData, + isLoading: isLoadingModels, + error: errorLoadingModels, + } = useFetch>({ + url: selectedProvider + ? `${urls?.llmProviders}/${selectedProvider}/models?filterByTenant=false` + : undefined, + }); + const models = modelsData?.data || []; + + useEffect(() => { + if (errorLoadingConfigs || errorLoadingProviders || errorLoadingModels) { + onError?.( + errorLoadingConfigs?.message || errorLoadingProviders?.message || errorLoadingModels?.message + ); + } + }, [errorLoadingConfigs, errorLoadingProviders, errorLoadingModels]); + + const createLLMConfig = async (e: React.FormEvent) => { + e.preventDefault(); + + setLoading(true); + + const response = await fetch(`${urls?.llmConfig}`, { + method: 'POST', + headers: defaultHeaders, + body: JSON.stringify({ + provider: isChatWithPDFProvider ? 'openai' : selectedProvider, + models: isChatWithPDFProvider ? [] : selectedModel, + apiKey: apiKey ?? undefined, + baseURL, + piiPolicy, + isChatWithPDFProvider, + }), + }); + setLoading(false); + + const result = await response.json(); + + if (!response.ok) { + onError?.(result.error.message); + return; + } + + onSuccess?.(t('bui-chat-config-created')); + reloadConfigs(); + resetForm(); + }; + + const deleteConfig = async (config: any) => { + if (!config) { + return; + } + + const response = await fetch(`${urls?.llmConfig}/${config.id}`, { + method: 'DELETE', + headers: defaultHeaders, + }); + + if (!response.ok) { + const json = await response.json(); + setConfirmationDialogVisible(false); + onError?.(json.error.message); + return; + } + + setSelectedConfig(null); + reloadConfigs(); + setConfirmationDialogVisible(false); + onSuccess?.(t('bui-chat-config-deleted')); + }; + + const updateLLMConfig = async (e: React.FormEvent) => { + e.preventDefault(); + + setLoading(true); + + const response = await fetch(`${urls?.llmConfig}/${selectedConfig?.id}`, { + method: 'PUT', + headers: defaultHeaders, + body: JSON.stringify({ + provider: 
selectedProvider, + models: selectedModel, + apiKey: apiKey ?? undefined, + isChatWithPdfProvider: selectedConfig.isChatWithPdfProvider, + baseURL, + piiPolicy, + }), + }); + + setLoading(false); + if (!response.ok) { + const json = await response.json(); + onError?.(json.error.message); + return; + } + + onSuccess?.(t('bui-chat-config-updated')); + reloadConfigs(); + resetForm(); + }; + + const resetForm = () => { + setSelectedProvider(''); + setSelectedModel([]); + setApiKey(''); + setBaseURL(''); + setPIIPolicy('none'); + switchView('list'); + }; + + return ( + <> + + + + + {t('settings')} + + + +

{t('bui-chat-llm-providers')}

+ {view === 'list' && ( + + )} + {isLoadingConfigs && } + {view === 'list' && llmConfigs.length > 0 && ( + { + const providerName = providers.find((p) => p.id === config.provider)?.name; + return { + id: config.id, + cells: [ + { + element: config.isChatWithPDFProvider ? ( + {t('bui-chat-with-pdf-provider')} + ) : ( + {providerName} + ), + }, + { + wrap: true, + text: + config.models + // .map((a: string) => { + // // const modelName = models.find((m) => m.id === a)?.name; + // // return modelName; + // // ollama is a special case where the model is open ended + // // config.models + // }) + .join(', ') || '*', + }, + { + wrap: true, + text: new Date(config.createdAt).toDateString(), + minWidth: 160, + }, + { + buttons: [ + { + text: t('bui-shared-edit'), + onClick: () => { + setSelectedConfig(config); + // setIsEdit(true); + switchView('edit'); + setSelectedProvider(config.provider); + setSelectedModel(config.models); + // setApiKey(config.apiKey || ''); + setBaseURL(config.baseURL || ''); + setPIIPolicy(config.piiPolicy); + }, + }, + { + color: 'error', + text: t('bui-chat-remove'), + onClick: () => { + setSelectedConfig(config); + setConfirmationDialogVisible(true); + }, + }, + ], + }, + ], + }; + })}>
+ )} + {(view === 'edit' || view === 'create') && ( +
+
+ {view === 'create' && ( +
+ setIsChatWithPDFProvider(e.target.checked)} + /> + +
+ )} + {((view === 'create' && !isChatWithPDFProvider) || + (view === 'edit' && !selectedConfig.isChatWithPDFProvider)) && ( + <> +
+
+ {t('bui-chat-provider')} +
+ +
+
+
+ {t('bui-chat-model')} +
+ +
+ + )} +
+ setApiKey('')} + onChange={(e) => setApiKey(e.target.value)} + /> +
+
+ setBaseURL(e.target.value)} + /> +
+
+
+ {t('bui-chat-pii-policy')} +
+ +
+ {(view === 'edit' || view === 'create') && ( +
+ + +
+ )} +
+
+ )} +
+ {/* */} +
+ {/*
*/} +
+ setConfirmationDialogVisible(false)} + onConfirm={() => deleteConfig(selectedConfig)} + title={t('bui-chat-config-deletion-title')} + description={t('bui-chat-config-deletion-description')}> + + ); +} diff --git a/internal-ui/src/chat/ChatUI.tsx b/internal-ui/src/chat/ChatUI.tsx new file mode 100644 index 000000000..f178e0158 --- /dev/null +++ b/internal-ui/src/chat/ChatUI.tsx @@ -0,0 +1,112 @@ +import { createContext, useContext, useEffect, useState } from 'react'; +import Chat from './Chat'; +import ChatSettings from './ChatSettings'; +import ChatDrawer from './ChatDrawer'; +import { useRouter } from 'next/router'; +import { Bars4Icon, PlusIcon } from '@heroicons/react/24/outline'; +import { useTranslation } from 'next-i18next'; +import { useFetch } from '../hooks'; +import { ApiSuccess } from '../types'; +import { ChatContext } from '../provider'; +import { LLMConversation } from './types'; + +interface ConversationContextType { + selectedConversation?: LLMConversation; + isLoadingConversations: boolean; + isChatWithPDFProvider: boolean; + setIsChatWithPDFProvider: (value: boolean) => void; +} + +export const ConversationContext = createContext(null); + +export function ChatUI() { + const { t } = useTranslation('common'); + const router = useRouter(); + const conversationId = router.query.conversationId?.[0] as string; + const setConversationId = (newConversationId: string) => { + const basePath = router.pathname.split('/[[...conversationId]]')[0]; + + if (newConversationId === '') { + router.push(basePath); + } else { + router.push(`${basePath}/${newConversationId}`); + } + }; + const { urls } = useContext(ChatContext); + + const { + data: conversationsData, + isLoading: isLoadingConversations, + refetch: reloadConversations, + } = useFetch>({ url: urls?.conversation }); + const conversations = conversationsData?.data; + + useEffect(() => { + if (conversationId) { + reloadConversations(); + } + }, [conversationId, reloadConversations]); + + const 
[isChatDrawerVisible, setIsChatDrawerVisible] = useState(false); + const [showSettings, setShowSettings] = useState(false); + const [isChatWithPDFProvider, setIsChatWithPDFProvider] = useState(false); + + const toggleChatDrawerVisibility = () => { + setIsChatDrawerVisible(!isChatDrawerVisible); + }; + + const selectedConversation = conversations?.filter((c) => c.id === conversationId)[0]; + + return ( + +
+ +
+
+ +

+ {showSettings ? t('settings') : selectedConversation?.title || t('bui-chat-new-chat')} +

+ +
+ {showSettings ? ( + + ) : ( + + )} +
+
+
+ ); +} diff --git a/internal-ui/src/chat/Message.tsx b/internal-ui/src/chat/Message.tsx new file mode 100644 index 000000000..09bedf2fa --- /dev/null +++ b/internal-ui/src/chat/Message.tsx @@ -0,0 +1,50 @@ +import { SparklesIcon, UserIcon, ChatBubbleOvalLeftEllipsisIcon } from '@heroicons/react/24/outline'; + +const Message = (props: any) => { + const { message } = props; + const { role, content: text } = message; + + const isUser = role === 'user'; + + return ( +
+
+
+
+
+ {isUser ? ( + + ) : ( + + )} +
+
+ + {/* 1 / 1 */} + +
+
+
+
+
+
+ {!isUser && text === null ? ( + + ) : ( +

{text}

+ )} +
+
+
+
+
+
+
+ ); +}; + +export default Message; diff --git a/internal-ui/src/chat/MobileSidebar.tsx b/internal-ui/src/chat/MobileSidebar.tsx new file mode 100644 index 000000000..eba422e94 --- /dev/null +++ b/internal-ui/src/chat/MobileSidebar.tsx @@ -0,0 +1,51 @@ +import { useTranslation } from 'next-i18next'; +import Sidebar from './Sidebar'; +import { XMarkIcon } from '@heroicons/react/24/outline'; + +const MobileSidebar = (props: any) => { + const { toggleChatDrawerVisibility, setShowSettings, conversations, conversationId, setConversationId } = + props; + const { t } = useTranslation('common'); + + return ( +
+ +
+
+
+
+
+
+ +
+ +
+
+
+
+
+ +
+ ); +}; + +export default MobileSidebar; diff --git a/internal-ui/src/chat/Sidebar.tsx b/internal-ui/src/chat/Sidebar.tsx new file mode 100644 index 000000000..f30f6ce1e --- /dev/null +++ b/internal-ui/src/chat/Sidebar.tsx @@ -0,0 +1,144 @@ +import { useTranslation } from 'next-i18next'; +import { + PlusIcon, + ChatBubbleLeftEllipsisIcon, + CogIcon, + DocumentMagnifyingGlassIcon, +} from '@heroicons/react/24/outline'; +import { LLMConversation } from './types'; +import { ConversationContext } from './ChatUI'; +import { useContext } from 'react'; +import { Badge } from '../shared'; + +type SidebarProps = { + setShowSettings: (value: boolean) => void; + toggleChatDrawerVisibility?: () => void; + conversations?: LLMConversation[]; + conversationId?: string; + setConversationId: (value: string) => void; +}; + +const Sidebar = ({ + toggleChatDrawerVisibility, + setShowSettings, + conversations, + conversationId, + setConversationId, +}: SidebarProps) => { + const { t } = useTranslation('common'); + const setIsChatWithPDFProvider = useContext(ConversationContext)?.setIsChatWithPDFProvider; + return ( +
+ +
+ ); +}; + +function ConversationTile({ + conversation, + onClick, + conversationId, +}: { + conversation: Partial; + onClick?: (id: string) => void; + conversationId?: string; +}) { + const { title, isChatWithPDFProvider, id } = conversation; + return ( +
{ + if (typeof onClick === 'function') { + onClick(id!); + } + }}> + +
{title}
+ {isChatWithPDFProvider && PDF} +
+
+ ); +} + +export default Sidebar; diff --git a/internal-ui/src/chat/index.ts b/internal-ui/src/chat/index.ts new file mode 100644 index 000000000..8b4a5aa0d --- /dev/null +++ b/internal-ui/src/chat/index.ts @@ -0,0 +1 @@ +export { ChatUI } from './ChatUI'; diff --git a/internal-ui/src/chat/types.ts b/internal-ui/src/chat/types.ts new file mode 100644 index 000000000..0ca4ba601 --- /dev/null +++ b/internal-ui/src/chat/types.ts @@ -0,0 +1,73 @@ +export const PII_POLICY_OPTIONS = [ + 'none', + 'detect_mask', + 'detect_redact', + 'detect_report', + 'detect_block', +] as const; + +export const PII_POLICY: { + [key in (typeof PII_POLICY_OPTIONS)[number]]: string; +} = { + none: 'None', + detect_mask: 'Detect & Mask', + detect_redact: 'Detect & Redact', + detect_report: 'Detect & Report', + detect_block: 'Detect & Block', +} as const; + +type LLMProvider = + | 'openai' + | 'anthropic' + | 'mistral' + | 'groq' + | 'perplexity' + | 'google-generative-ai' + | 'ollama'; + +export type LLMProvidersOptionsType = { id: LLMProvider; name: string }[]; + +export type LLMProvidersType = { + [key in LLMProvider]: { + name: string; + models: LLMModel[]; + }; +}; + +export type LLMConfig = { + id: string; + createdAt: number; + provider: LLMProvider; + tenant: string; + models: string[]; + terminusToken: string; + apiKey?: string; + baseURL?: string; + isChatWithPDFProvider?: boolean; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; +}; + +export type LLMModel = { + id: string; + name: string; + max_tokens?: number; +}; + +export type LLMConversation = { + id: string; + tenant: string; + userId: string; + title: string; + provider: string; + model: string; + isChatWithPDFProvider?: boolean; + createdAt: number; +}; + +export type LLMChat = { + role: string; + content: string; + id: string; + conversationId: string; + createdAt: string; +}; diff --git a/internal-ui/src/hooks/index.ts b/internal-ui/src/hooks/index.ts index fe95ccd87..dbdb05658 100644 --- 
a/internal-ui/src/hooks/index.ts +++ b/internal-ui/src/hooks/index.ts @@ -2,3 +2,4 @@ export { usePaginate } from './usePaginate'; export { useDirectory } from './useDirectory'; export { useRouter } from './useRouter'; export { useFetch, parseResponseContent } from './useFetch'; +export { useAutoResizeTextArea } from './useAutoResizeTextArea'; diff --git a/internal-ui/src/hooks/useAutoResizeTextArea.tsx b/internal-ui/src/hooks/useAutoResizeTextArea.tsx new file mode 100644 index 000000000..6a43c5133 --- /dev/null +++ b/internal-ui/src/hooks/useAutoResizeTextArea.tsx @@ -0,0 +1,14 @@ +import { useRef, useEffect } from 'react'; + +export function useAutoResizeTextArea() { + const textAreaRef = useRef(null); + + useEffect(() => { + if (textAreaRef.current) { + textAreaRef.current.style.height = '24px'; + textAreaRef.current.style.height = `${textAreaRef.current.scrollHeight}px`; + } + }, [textAreaRef]); + + return textAreaRef; +} diff --git a/internal-ui/src/hooks/useFetch.ts b/internal-ui/src/hooks/useFetch.ts index d4ad32f95..684957a33 100644 --- a/internal-ui/src/hooks/useFetch.ts +++ b/internal-ui/src/hooks/useFetch.ts @@ -1,4 +1,4 @@ -import { useEffect, useState } from 'react'; +import { useCallback, useEffect, useState } from 'react'; type RefetchFunction = () => void; @@ -24,12 +24,12 @@ export function useFetch({ url }: { url?: string }): { const [error, setError] = useState(null); const [refetchIndex, setRefetchIndex] = useState(0); - const refetch = () => setRefetchIndex((prevRefetchIndex) => prevRefetchIndex + 1); + const refetch = useCallback(() => setRefetchIndex((prevRefetchIndex) => prevRefetchIndex + 1), []); useEffect(() => { - async function fetchData() { + async function fetchData(_url) { setIsLoading(true); - const res = await fetch(url!); + const res = await fetch(_url); setIsLoading(false); const resContent = await parseResponseContent(res); @@ -44,9 +44,15 @@ export function useFetch({ url }: { url?: string }): { setError(resContent.error); } } 
- if (url) { - fetchData(); + if (!url) { + // Clear states when URL is undefined + setData(undefined); + setIsLoading(false); + setError(null); + setRefetchIndex(0); + return; } + fetchData(url); }, [url, refetchIndex]); return { data, isLoading, error, refetch }; diff --git a/internal-ui/src/index.ts b/internal-ui/src/index.ts index 1e324d0ed..3248a06b3 100644 --- a/internal-ui/src/index.ts +++ b/internal-ui/src/index.ts @@ -6,3 +6,4 @@ export * from './provider'; export * from './sso-traces'; export * from './setup-link'; export * from './branding'; +export * from './chat'; diff --git a/internal-ui/src/provider.tsx b/internal-ui/src/provider.tsx index ecbafd7bb..cfd6d1626 100644 --- a/internal-ui/src/provider.tsx +++ b/internal-ui/src/provider.tsx @@ -1,6 +1,26 @@ import { createContext } from 'react'; import type { NextRouter } from 'next/router'; - +import { LLMProvider } from './types'; export const BUIContext = createContext<{ router: NextRouter | null }>({ router: null }); export const BUIProvider = BUIContext.Provider; + +export const ChatContext = createContext<{ + provider?: LLMProvider; + model?: string; + urls: + | { + chat: string; + conversation: string; + llmConfig: string; + llmProviders: string; + fileUpload: string; + } + | undefined; + onError?: (error: Error | string) => void; + onSuccess?: (success: string) => void; +}>({ + urls: undefined, +}); + +export const ChatContextProvider = ChatContext.Provider; diff --git a/internal-ui/src/shared/Card.tsx b/internal-ui/src/shared/Card.tsx index 84d5e434f..57dde9886 100644 --- a/internal-ui/src/shared/Card.tsx +++ b/internal-ui/src/shared/Card.tsx @@ -2,7 +2,8 @@ import React from 'react'; const Card = ({ children, className }: { children: React.ReactNode; className?: string }) => { return ( -
+
{children}
); @@ -21,7 +22,7 @@ const Header = ({ children }: { children: React.ReactNode }) => { }; const Body = ({ children }: { children: React.ReactNode }) => { - return
{children}
; + return
{children}
; }; const Footer = ({ children }: { children: React.ReactNode }) => { diff --git a/internal-ui/src/shared/InputWithLabel.tsx b/internal-ui/src/shared/InputWithLabel.tsx new file mode 100644 index 000000000..74ae5e782 --- /dev/null +++ b/internal-ui/src/shared/InputWithLabel.tsx @@ -0,0 +1,21 @@ +import { InputHTMLAttributes } from 'react'; + +interface InputWithLabelProps extends InputHTMLAttributes { + label: string; +} +export const InputWithLabel = (props: InputWithLabelProps) => { + return ( + + ); +}; diff --git a/internal-ui/src/shared/Loading.tsx b/internal-ui/src/shared/Loading.tsx index ff3fb8653..1d119aecf 100644 --- a/internal-ui/src/shared/Loading.tsx +++ b/internal-ui/src/shared/Loading.tsx @@ -1,8 +1,8 @@ -const Spinner = () => { +const Spinner = ({ className }: { className?: string }) => { return (
- + Loading...
diff --git a/internal-ui/src/shared/index.ts b/internal-ui/src/shared/index.ts index 39c009ccf..b706051e2 100644 --- a/internal-ui/src/shared/index.ts +++ b/internal-ui/src/shared/index.ts @@ -20,4 +20,5 @@ export { ButtonOutline } from './ButtonOutline'; export { Alert } from './Alert'; export { InputWithCopyButton, CopyToClipboardButton } from './InputWithCopyButton'; export { IconButton } from './IconButton'; +export { InputWithLabel } from './InputWithLabel'; export { PrismLoader } from './PrismLoader'; diff --git a/internal-ui/src/types.ts b/internal-ui/src/types.ts index 2cf30c423..253ccd990 100644 --- a/internal-ui/src/types.ts +++ b/internal-ui/src/types.ts @@ -173,3 +173,13 @@ export type Branding = { primaryColor: string; companyName: string; }; + +// Chat types +export type LLMProvider = + | 'openai' + | 'anthropic' + | 'mistral' + | 'groq' + | 'perplexity' + | 'google-generative-ai' + | 'ollama'; diff --git a/lib/env.ts b/lib/env.ts index 61f35d568..1d78e4400 100644 --- a/lib/env.ts +++ b/lib/env.ts @@ -30,6 +30,32 @@ const terminus = { hostUrl: process.env.TERMINUS_PROXY_HOST_URL, adminToken: process.env.TERMINUS_ADMIN_ROOT_TOKEN, retracedProjectId: process.env.TERMINUS_RETRACED_PROJECT_ID, + llmRetracedProjectId: process.env.TERMINUS_LLM_RETRACED_PROJECT_ID, + apiKey: + process.env.TERMINUS_READ_API_KEY && process.env.TERMINUS_WRITE_API_KEY + ? { + read: process.env.TERMINUS_READ_API_KEY, + write: process.env.TERMINUS_WRITE_API_KEY, + } + : undefined, + llm: + process.env.TERMINUS_LLM_TENANT && process.env.TERMINUS_LLM_PRODUCT + ? 
{ + product: process.env.TERMINUS_LLM_PRODUCT, + tenant: process.env.TERMINUS_LLM_TENANT, + } + : undefined, +}; + +// LLM Chat +const llm = { + pdfChat: { + baseUrl: process.env.LLM_PDF_CHAT_BASE_URL || '', + roleMapping: process.env.LLM_PDF_CHAT_ROLE_MAPPING || '', + jwtSigningKey: process.env.LLM_PDF_CHAT_SIGNING_KEY || '', + jwtAudience: process.env.LLM_PDF_CHAT_JWT_AUDIENCE || '', + jwtIssuer: process.env.LLM_PDF_CHAT_JWT_ISSUER || '', + }, }; export const setupLinkExpiryDays = process.env.SETUP_LINK_EXPIRY_DAYS @@ -87,6 +113,7 @@ const jacksonOptions: JacksonOption = { process.env.BOXYHQ_NO_ANALYTICS === '1' || process.env.BOXYHQ_NO_ANALYTICS === 'true', terminus, + llm, webhook: { endpoint: process.env.WEBHOOK_URL || '', secret: process.env.WEBHOOK_SECRET || '', @@ -126,8 +153,12 @@ const adminPortalSSODefaults = { defaultRedirectUrl: `${externalUrl}/admin/auth/idp-login`, }; +const features = { llmVault: process.env.FEATURE_LLM_VAULT === 'true' }; + export { adminPortalSSODefaults }; export { retraced as retracedOptions }; export { terminus as terminusOptions }; export { apiKeys }; export { jacksonOptions }; +export { llm as llmOptions }; +export { features }; diff --git a/lib/llm.ts b/lib/llm.ts new file mode 100644 index 000000000..8e73fc515 --- /dev/null +++ b/lib/llm.ts @@ -0,0 +1,389 @@ +import type { LLMConfig, LLMProvider } from '@boxyhq/saml-jackson'; +import { ChatCompletionChunk } from 'openai/resources'; +import { ApiError } from './error'; +import { terminusOptions } from './env'; +import Anthropic from '@anthropic-ai/sdk'; +import { TextBlock } from '@anthropic-ai/sdk/resources'; +import MistralClient from '@mistralai/mistralai'; +import OpenAI from 'openai'; +import Groq from 'groq-sdk'; +import { GoogleGenerativeAI } from '@google/generative-ai'; +import { Ollama } from 'ollama'; + +export const PII_POLICY_OPTIONS = [ + 'none', + 'detect_mask', + 'detect_redact', + 'detect_report', + 'detect_block', +] as const; + +export const PII_POLICY: { 
+ [key in (typeof PII_POLICY_OPTIONS)[number]]: string; +} = { + none: 'None', + detect_mask: 'Detect & Mask', + detect_redact: 'Detect & Redact', + detect_report: 'Detect & Report', + detect_block: 'Detect & Block', +} as const; + +export type LLMModel = { + id: string; + name: string; + max_tokens?: number; +}; + +type LLMConfigWithAPIKey = LLMConfig & { + apiKey: string; + baseURL: string; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; +}; + +const useTerminus = { + openai: true, + anthropic: false, + mistral: true, + groq: false, + perplexity: true, + 'google-generative-ai': false, + ollama: true, +}; + +export const LLM_HANDLERS: { + [key in LLMProvider]: ( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey + ) => Promise<{ text: string } | AsyncGenerator>; +} = { + openai: openaiHandler, + anthropic: anthropicHandler, + mistral: mistralHandler, + groq: groqHandler, + perplexity: perplexityHandler, + 'google-generative-ai': googleGenAIHandler, + ollama: ollamaHandler, +}; + +export async function generateChatResponse( + messages: any[], + provider: string, + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise> { + if (!config.isChatWithPDFProvider && !LLM_HANDLERS[provider]) { + throw new ApiError('Provider not supported', 400); + } + // Set the base URL to the terminus proxy if the provider is supported + if (useTerminus[provider]) { + config.baseURL = terminusOptions.hostUrl + `/v1/proxy/${provider}`; + } + if (isStream) { + return LLM_HANDLERS[provider](messages, model, config, isStream); + } else { + const { text } = await LLM_HANDLERS[provider](messages, model, config); + return text || ''; + } +} + +// TODO: Need to test this +export async function anthropicHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey +): Promise<{ text: string }> { + const anthropic = new Anthropic({ + apiKey: config.apiKey, + baseURL: config.baseURL, + }); + const stream = await anthropic.messages.create({ + 
model: model.id, + max_tokens: model.max_tokens || 1024, + messages, + stream: true, + }); + let text = ''; + // Iterate over the stream and print each event + for await (const messageStreamEvent of stream) { + switch (messageStreamEvent.type) { + case 'message_start': + console.log('Message start'); + for (const content of messageStreamEvent.message.content) { + if (content.type === 'text') { + text += content.text; + } else { + console.log('Unsupported content type', content.type); + } + } + break; + case 'message_stop': + console.log('Message stop'); + break; + case 'content_block_start': + console.log('Content block start'); + if (messageStreamEvent.content_block.type === 'text') { + text += (messageStreamEvent.content_block as TextBlock).text; + } else { + console.log('Unsupported content block type'); + } + break; + case 'content_block_delta': + console.log('Content block delta'); + break; + case 'content_block_stop': + console.log('Content block stop'); + break; + } + } + return { + text, + }; +} + +export async function mistralHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | AsyncGenerator> { + const mistral = new MistralClient(config.apiKey, config.baseURL || undefined); + const stream = await mistral.chatStream({ + model: model.id, + messages, + }); + + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'mistral'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const msg of stream) { + text += msg?.choices[0]?.delta?.content || ''; + } + return { + text, + }; + } +} + +export async function openaiHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | AsyncGenerator> { + const openai = new OpenAI({ + apiKey: config.apiKey, + baseURL: config.baseURL, + }); + const stream = await 
openai.chat.completions.create({ + messages, + model: model.id, + stream: true, + }); + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'openai'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const chunk of stream) { + text += chunk.choices[0]?.delta?.content || ''; + } + return { text }; + } +} + +export async function groqHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | AsyncGenerator> { + const groq = new Groq({ + apiKey: config.apiKey, + baseURL: config.baseURL, + }); + const stream = await groq.chat.completions.create({ + messages, + model: model.id, + stream: true, + }); + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'groq'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const chunk of stream) { + text += chunk.choices[0]?.delta?.content || ''; + } + return { + text, + }; + } +} + +export async function perplexityHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | AsyncGenerator> { + const openai = new OpenAI({ + apiKey: config.apiKey, + baseURL: config.baseURL || 'https://api.perplexity.ai', + }); + const stream = await openai.chat.completions.create({ + messages, + model: model.id, + stream: true, + }); + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'perplexity'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const chunk of stream) { + text += chunk.choices[0]?.delta?.content || ''; + } + return { + text, + }; + } +} + +export async function googleGenAIHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | 
AsyncGenerator> { + const googleGenAI = new GoogleGenerativeAI(config.apiKey || ''); + + const instance = googleGenAI.getGenerativeModel( + { model: model.id }, + { + baseUrl: config.baseURL || undefined, + } + ); + const { stream } = await instance.generateContentStream({ + contents: messages.map((message) => ({ + role: message.role, + parts: [ + { + text: message.content, + }, + ], + })), + }); + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'google-generative-ai'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const chunk of stream) { + text += chunk.text() || ''; + } + return { + text, + }; + } +} + +export async function ollamaHandler( + messages: any[], + model: LLMModel, + config: LLMConfigWithAPIKey, + isStream = false +): Promise<{ text: string } | AsyncGenerator> { + const ollama = new Ollama({ host: config.baseURL || undefined }); + const stream = await ollama.chat({ + model: model?.id || 'llama3', + messages, + stream: true, + }); + async function* streamGenerator() { + for await (const chunk of stream) { + yield toOpenAIChunk(chunk, 'ollama'); + } + } + if (isStream) { + return streamGenerator(); + } else { + let text = ''; + for await (const chunk of stream) { + text += chunk.message.content || ''; + } + return { text }; + } +} + +function toOpenAIChunk(chunk: any, provider: LLMProvider): ChatCompletionChunk { + switch (provider) { + case 'openai': + return chunk as ChatCompletionChunk; + case 'anthropic': + return { + choices: [ + { + delta: { + content: chunk.text, + }, + }, + ], + } as ChatCompletionChunk; + case 'mistral': + return { + choices: chunk.choices, + } as ChatCompletionChunk; + case 'groq': + return { + choices: chunk.choices, + } as ChatCompletionChunk; + case 'perplexity': + return chunk as ChatCompletionChunk; + case 'google-generative-ai': + return { + choices: [ + { + delta: { + content: chunk.text(), + }, + }, + ], + } as 
ChatCompletionChunk; + case 'ollama': + return { + choices: [ + { + delta: { + content: chunk.message.content, + }, + }, + ], + } as ChatCompletionChunk; + default: + throw new ApiError('Provider not supported', 400); + } +} diff --git a/lib/ui/hooks/useFeatures.ts b/lib/ui/hooks/useFeatures.ts new file mode 100644 index 000000000..8571b57be --- /dev/null +++ b/lib/ui/hooks/useFeatures.ts @@ -0,0 +1,22 @@ +import { useEffect, useState } from 'react'; + +function useFeatures() { + const [features, setFeatures] = useState<{ [key: string]: boolean } | null>(null); + useEffect(() => { + const fetchChatFeatureStatus = async () => { + try { + const response = await fetch('/api/admin/features'); + const { data } = await response.json(); + setFeatures(data.features); + } catch (error) { + console.error('Error fetching list of features', error); + } + }; + + fetchChatFeatureStatus(); + }, []); + + return features; +} + +export default useFeatures; diff --git a/lib/zod/index.ts b/lib/zod/index.ts new file mode 100644 index 000000000..5364737b5 --- /dev/null +++ b/lib/zod/index.ts @@ -0,0 +1,14 @@ +import { ApiError } from '@lib/error'; +import z, { ZodType } from 'zod'; + +export * from './schema'; + +export const validateWithSchema = (schema: ZSchema, data: any) => { + const result = schema.safeParse(data); + + if (!result.success) { + throw new ApiError(`Validation Error: ${result.error.errors.map((e) => e.message)[0]}`, 422); + } + + return result.data as z.infer; +}; diff --git a/lib/zod/primitives.ts b/lib/zod/primitives.ts new file mode 100644 index 000000000..d7d79ca05 --- /dev/null +++ b/lib/zod/primitives.ts @@ -0,0 +1,73 @@ +import { PII_POLICY_OPTIONS } from '@boxyhq/saml-jackson'; +import { z } from 'zod'; + +const maxLengthPolicies = { + llmBaseUrl: 2048, + llmModelName: 64, + llmApiKey: 128, + llmConfigId: 64, +}; + +export const llmPIIPolicy = z.enum(PII_POLICY_OPTIONS); + +export const llmBaseUrl = z + .string({ + required_error: 'Base URL is required', + 
invalid_type_error: 'Base URL must be a string', + }) + .url() + .max( + maxLengthPolicies.llmBaseUrl, + `Base URL should have at most ${maxLengthPolicies.llmBaseUrl} characters` + ) + .or(z.literal('')) + .optional(); + +export const llmModels = z + .array( + z + .string({ + invalid_type_error: 'Model must be a string', + required_error: 'Model is required', + }) + .max( + maxLengthPolicies.llmModelName, + `Model name should be at most ${maxLengthPolicies.llmModelName} characters` + ) + ) + .min(0) + .optional(); + +export const llmApiKey = z + .string({ + required_error: 'API key is required', + invalid_type_error: 'API key must be a string', + }) + .max(maxLengthPolicies.llmApiKey, `API key should be at most ${maxLengthPolicies.llmApiKey} characters`) + .optional(); + +export const llmProvider = (providers) => { + const LLM_PROVIDERS_KEYS = providers.map((provider) => provider.id); + const maxLengthProvider = Math.max(...LLM_PROVIDERS_KEYS.map((provider) => provider.length)); + return z + .string({ + required_error: 'Provider is required', + invalid_type_error: 'Provider must be a string', + }) + .min(1, `Provider is required`) + .max(maxLengthProvider, 'Invalid provider length') + .refine((provider) => { + return LLM_PROVIDERS_KEYS.includes(provider); + }, 'Invalid provider'); +}; + +export const llmConfigId = z + .string({ + required_error: 'Config Id is required', + invalid_type_error: 'Config Id must be a string', + }) + .min(1, `Config Id is required`) + .max( + maxLengthPolicies.llmConfigId, + `Config Id should be at most ${maxLengthPolicies.llmConfigId} characters` + ); diff --git a/lib/zod/schema.ts b/lib/zod/schema.ts new file mode 100644 index 000000000..33a81b535 --- /dev/null +++ b/lib/zod/schema.ts @@ -0,0 +1,30 @@ +import { z } from 'zod'; +import { llmApiKey, llmBaseUrl, llmConfigId, llmModels, llmPIIPolicy, llmProvider } from './primitives'; + +export const updateLLMConfigSchema = (providers) => + z.object({ + configId: llmConfigId, + tenant: 
z.string(), + provider: llmProvider(providers), + isChatWithPDFProvider: z.boolean().optional(), + apiKey: llmApiKey, + models: llmModels, + baseURL: llmBaseUrl, + piiPolicy: llmPIIPolicy, + }); + +export const deleteLLMConfigSchema = z.object({ + configId: llmConfigId, + tenant: z.string(), +}); + +export const createLLMConfigSchema = (providers) => + z.object({ + tenant: z.string(), + provider: llmProvider(providers), + isChatWithPDFProvider: z.boolean().optional(), + apiKey: llmApiKey, + models: llmModels, + baseURL: llmBaseUrl, + piiPolicy: llmPIIPolicy, + }); diff --git a/locales/en/common.json b/locales/en/common.json index e4f1b3b35..4d1fb086c 100644 --- a/locales/en/common.json +++ b/locales/en/common.json @@ -1,4 +1,5 @@ { + "llm_vault": "LLM Vault", "apps": "Apps", "error_loading_page": "Unable to load this page. Maybe you don't have enough rights.", "documentation": "Documentation", @@ -286,5 +287,32 @@ "bui-sl-link-expired": "This link has expired", "bui-sl-link-expire-on": "This link will expire on {{expiresAt}}.", "bui-sl-share-link-info": "Share this link with your customer to setup their service", - "bui-sl-webhook-url": "Webhook URL" + "bui-sl-webhook-url": "Webhook URL", + "bui-chat-api-key": "API Key", + "bui-chat-remove": "Remove", + "bui-chat-open-sidebar": "Open sidebar", + "bui-chat-close-sidebar": "Close sidebar", + "bui-chat-attach-file": "Attach file", + "bui-chat-pii-policy": "PII Policy", + "bui-chat-archived": "Archived", + "bui-chat-disabled-title": "Chat feature is not enabled", + "bui-chat-disabled-description": "Please contact support to enable this feature.", + "bui-chat-goto": "Go to", + "bui-chat-to-create-new-config": "to create new config", + "bui-chat-no-chat-configs-found": "No Chat Configs found", + "bui-chat-config-created": "Chat Config created successfully", + "bui-chat-add": "Add", + "bui-chat-base-url": "Base URL", + "bui-chat-llm-providers": "LLM Providers", + "bui-chat-config-updated": "Chat Config updated 
successfully", + "bui-chat-config-deleted": "Chat Config deleted successfully", + "bui-chat-config-deletion-description": "Removing this config will prevent users from using it. This action cannot be undone.", + "bui-chat-config-deletion-title": "Confirm deletion of the Chat Config", + "bui-chat-provider": "Provider", + "bui-chat": "Chat", + "bui-chat-clear-conversation": "Clear conversation", + "bui-chat-model": "Model", + "bui-chat-new-chat": "New chat", + "bui-chat-with-pdf-provider": "Chat with PDF Provider", + "bui-chat-with-pdf": "Chat with PDF" } diff --git a/npm/package-lock.json b/npm/package-lock.json index c1d95d7b3..ab346cad8 100644 --- a/npm/package-lock.json +++ b/npm/package-lock.json @@ -4452,6 +4452,7 @@ "version": "22.9.0", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz", "integrity": "sha512-vuyHg81vvWA1Z1ELfvLko2c8f34gyA0zaic0+Rllc5lbCnbSyuvb2Oxpm6TAUAC/2xZN3QGqxBNggD1nNR2AfQ==", + "license": "MIT", "dependencies": { "undici-types": "~6.19.8" } diff --git a/npm/src/controller/utils.ts b/npm/src/controller/utils.ts index 823779c89..dc5c63b63 100644 --- a/npm/src/controller/utils.ts +++ b/npm/src/controller/utils.ts @@ -25,6 +25,10 @@ export enum IndexNames { OIDCProviderClientID = 'OIDCProviderClientID', SSOClientID = 'SSOClientID', Product = 'product', + Tenant = 'tenant', + TenantProvider = 'tenantProvider', + TenantUser = 'tenantUser', + LLMConversation = 'llmConversation', // For Setup link Service = 'service', diff --git a/npm/src/ee/chat/index.ts b/npm/src/ee/chat/index.ts new file mode 100644 index 000000000..9a2d41aac --- /dev/null +++ b/npm/src/ee/chat/index.ts @@ -0,0 +1,422 @@ +import crypto from 'crypto'; +import axios from 'axios'; +import * as jose from 'jose'; +import type { + Storable, + JacksonOption, + LLMConfigMergedFromVault, + LLMProvidersOptionsType, + LLMProvider, + LLMModel, +} from '../../typings'; +import * as dbutils from '../../db/utils'; +import { IndexNames, loadJWSPrivateKey } from 
'../../controller/utils'; +import { throwIfInvalidLicense } from '../common/checkLicense'; +import { LLMChat, LLMConfig, LLMConfigPayload, LLMConversation, PII_POLICY_OPTIONS } from './types'; +import { JacksonError } from '../../controller/error'; +import { LLM_PROVIDERS } from './llm-providers'; + +export class ChatController { + private chatStore: Storable; + private conversationStore: Storable; + private llmConfigStore: Storable; + private opts: JacksonOption; + + constructor({ + chatStore, + conversationStore, + llmConfigStore, + opts, + }: { + chatStore: Storable; + conversationStore: Storable; + llmConfigStore: Storable; + opts: JacksonOption; + }) { + this.llmConfigStore = llmConfigStore; + this.chatStore = chatStore; + this.conversationStore = conversationStore; + this.opts = opts; + } + + private async getLLMConfigsByTenant(tenant: string): Promise { + return (await this.llmConfigStore.getByIndex({ name: IndexNames.Tenant, value: tenant })).data; + } + + public async getLLMConfigFromVault( + tenant: string, + token: string + ): Promise<{ + apiKey: string; + baseURL: string; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; + }> { + const res = await axios.get( + `${this.opts.terminus?.hostUrl}/v1/vault/${tenant}/${this.opts.terminus?.llm?.product}/data?token=${token}`, + { headers: { Authorization: `api-key ${this.opts.terminus?.apiKey?.read}` } } + ); + + if (res.data[token]) { + return JSON.parse(res.data[token]?.data); + } else { + throw new JacksonError('Config not found in Vault', 404); + } + } + + public async getLLMConfigs(tenant: string): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const configs = await this.getLLMConfigsByTenant(tenant); + for (let i = 0; i < configs.length; i++) { + const data = await this.getLLMConfigFromVault(tenant, configs[i].terminusToken); + if (data) { + configs[i] = { + ...configs[i], + baseURL: data.baseURL, + apiKey: '*'.repeat(data.apiKey.length), + piiPolicy: data.piiPolicy, + } as any; + 
} + } + return configs as LLMConfigMergedFromVault[]; + } + + public async getLLMConfigsByTenantAndProvider(tenant: string, provider: LLMProvider): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + return ( + await this.llmConfigStore.getByIndex({ + name: IndexNames.TenantProvider, + value: dbutils.keyFromParts(tenant, provider), + }) + ).data; + } + + private async storeLLMConfig(config: Omit) { + const id = crypto.randomBytes(20).toString('hex'); + const createdAt = Date.now(); + await this.llmConfigStore.put( + id, + { ...config, id, createdAt }, + // secondary index on tenant + { name: IndexNames.Tenant, value: config.tenant }, + // secondary index on tenant + provider + { name: IndexNames.TenantProvider, value: dbutils.keyFromParts(config.tenant, config.provider) } + ); + return { id, createdAt, ...config }; + } + + private async saveLLMConfigInVault({ + tenant, + provider, + apiKey, + baseURL, + piiPolicy, + }: { + tenant: string; + provider: string; + apiKey?: string; + baseURL?: string; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; + }): Promise { + const res = await axios.post( + `${this.opts.terminus?.hostUrl}/v1/vault/${tenant}/${this.opts.terminus?.llm?.product}/data/llm-config`, + { + provider, + apiKey: apiKey || '', + baseURL: baseURL || '', + piiPolicy, + }, + { headers: { Authorization: `api-key ${this.opts.terminus?.apiKey?.write}` } } + ); + + if (res.data?.token) { + return res.data.token; + } + } + + public async createLLMConfig(llmConfig: LLMConfigPayload): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const { apiKey, provider, tenant, isChatWithPDFProvider } = llmConfig; + + if (!apiKey && provider !== 'ollama' && !isChatWithPDFProvider) { + throw new Error('API Key is required'); + } + + const vaultResult = await this.saveLLMConfigInVault( + isChatWithPDFProvider ? 
{ ...llmConfig, apiKey: `chat_with_pdf_${tenant}_key` } : llmConfig + ); + const config = await this.storeLLMConfig({ + provider: llmConfig.provider, + models: llmConfig.models || [], + terminusToken: vaultResult || '', + tenant, + isChatWithPDFProvider, + }); + + return config; + } + + private async updateLLMConfigInVault({ + tenant, + provider, + token, + apiKey, + baseURL, + piiPolicy, + }: { + tenant: string; + provider: string; + token: string; + apiKey?: string; + baseURL?: string; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; + }) { + await axios.put( + `${this.opts.terminus?.hostUrl}/v1/vault/${tenant}/${this.opts.terminus?.llm?.product}/data/llm-config?token=${token}`, + { + provider, + apiKey, + baseURL, + piiPolicy, + }, + { + headers: { Authorization: `api-key ${this.opts.terminus?.apiKey?.write}` }, + } + ); + } + + public async updateLLMConfig(configId: string, llmConfig: LLMConfigPayload): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const config = await this.llmConfigStore.get(configId); + if (!config) { + throw new JacksonError('Config not found', 404); + } + + const configFromVault = await this.getLLMConfigFromVault(config.tenant, config.terminusToken); + if (!configFromVault) { + throw new JacksonError('Config not found in Vault', 404); + } + + await this.updateLLMConfigInVault({ + token: config.terminusToken, + tenant: config.tenant, + provider: llmConfig.provider, + apiKey: llmConfig.apiKey || configFromVault.apiKey, + baseURL: llmConfig.baseURL, + piiPolicy: llmConfig.piiPolicy, + }); + + await this.llmConfigStore.put(configId, { + ...config, + provider: llmConfig.provider, + models: llmConfig.models || [], + }); + } + + public async deleteLLMConfig({ configId, tenant }: { configId: string; tenant: string }): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + const config = await this.llmConfigStore.get(configId); + if (!config) { + throw new JacksonError('Config not found', 404); + } + await 
this.llmConfigStore.delete(configId); + await axios.delete( + `${this.opts.terminus?.hostUrl}/v1/vault/${tenant}/${this.opts.terminus?.llm?.product}/data/llm-config?token=${config.terminusToken}`, + { headers: { Authorization: `api-key ${this.opts.terminus?.apiKey?.write}` } } + ); + } + + public async getConversationsByTenantAndUser({ + tenant, + userId, + }: { + tenant: string; + userId: string; + }): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const _index = { name: IndexNames.TenantUser, value: dbutils.keyFromParts(tenant, userId) }; + + const conversations = (await this.conversationStore.getByIndex(_index)).data as LLMConversation[]; + + return conversations; + } + + public async getConversationById(conversationId: string): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const conversation = (await this.conversationStore.get(conversationId)) as LLMConversation; + + return conversation; + } + + public async createConversation( + conversation: Omit + ): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const conversationID = crypto.randomBytes(20).toString('hex'); + const createdAt = Date.now(); + + const _index = { + name: IndexNames.TenantUser, + value: dbutils.keyFromParts(conversation.tenant, conversation.userId), + }; + + await this.conversationStore.put( + conversationID, + { ...conversation, id: conversationID, createdAt }, + _index + ); + + return { id: conversationID, createdAt, ...conversation }; + } + + public async createChat(chat: Omit): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const chatID = crypto.randomBytes(20).toString('hex'); + + const createdAt = Date.now(); + + await this.chatStore.put( + chatID, + { ...chat, id: chatID, createdAt }, + { name: IndexNames.LLMConversation, value: chat.conversationId } + ); + + return { id: chatID, createdAt, ...chat }; + } + + public async getChatThreadByConversationId(conversationId: string, userId: 
string): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + const conversation = await this.getConversationById(conversationId); + + if (userId !== conversation.userId) { + throw new JacksonError('Forbidden', 403); + } + + if (!conversation) { + throw new JacksonError('Conversation not found', 404); + } + + const chat = ( + await this.chatStore.getByIndex( + { + name: IndexNames.LLMConversation, + value: conversationId, + }, + undefined, + undefined, + undefined, + 'ASC' + ) + ).data as LLMChat[]; + + return chat; + } + + public async getLLMProviders(tenant: string, filterByTenant?: boolean): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + if (filterByTenant) { + // Will be used for dropdown while chatting with LLM + const configs = await this.getLLMConfigsByTenant(tenant); + return Array.from( + new Set( + configs + .filter(({ isChatWithPDFProvider }) => !isChatWithPDFProvider) + .map((config) => config.provider) + ) + ) + .sort() + .map((provider) => ({ + id: provider, + name: LLM_PROVIDERS[provider].name, + })); + } + + // Will be used for dropdown while creating a new config + return Object.keys(LLM_PROVIDERS) + .sort() + .map((key) => ({ + id: key as LLMProvider, + name: LLM_PROVIDERS[key].name, + })); + } + + public async getLLMModels( + tenant: string, + provider: LLMProvider, + filterByTenant?: boolean // fetch models by saved configs + ): Promise { + await throwIfInvalidLicense(this.opts.boxyhqLicenseKey); + + if (filterByTenant) { + // Will be used for dropdown while chatting with LLM + const configs = await this.getLLMConfigsByTenantAndProvider(tenant, provider); + if (configs.length === 0) { + throw new JacksonError('Config not found', 404); + } + const modelsFromConfigs = Array.from(new Set(configs.map((c: LLMConfig) => c.models).flat())).filter( + (m) => Boolean(m) + ); + + if (modelsFromConfigs.length === 0) { + throw new JacksonError('No models found', 404); + } + + const models = modelsFromConfigs + 
.map((model: string) => LLM_PROVIDERS[provider].models.find((m) => m.id === model)!) + .filter((m) => m !== undefined); + + return models; + } + + // Will be used for dropdown while creating a new config + return LLM_PROVIDERS[provider].models; + } + + private getUserRole(email: string) { + const mappings = this.opts.llm?.pdfChat?.roleMapping.split(','); + if (!mappings) { + throw new JacksonError('Could not find role mappings on server for chatting with PDF', 500); + } + const matchedMapping = mappings.find((m) => { + const [_email] = m.split(':'); + if (email === _email) { + return true; + } + }); + + if (!matchedMapping) { + throw new JacksonError('Insufficient privileges, no role mapped for given user', 403); + } + + return matchedMapping.split(':')[1]; + } + + public async generatePDFChatJWT({ email }: { email: string }) { + if (!this.opts.llm?.pdfChat?.jwtSigningKey) { + throw new JacksonError('Could not load JWT signing keys for chatting with PDF', 500); + } + const jwsAlg = this.opts.llm?.pdfChat?.jwsAlg || 'RS256'; + const signingKey = await loadJWSPrivateKey(this.opts.llm?.pdfChat?.jwtSigningKey, jwsAlg); + + const jwt = await new jose.SignJWT({ + role: this.getUserRole(email), + tenant: this.opts.terminus?.llm?.tenant, + }) + .setProtectedHeader({ alg: jwsAlg }) + .setIssuer(this.opts.llm?.pdfChat.jwtIssuer) + .setAudience(this.opts.llm?.pdfChat.jwtAudience) + .setExpirationTime('3d') + .sign(signingKey); + + return jwt; + } +} diff --git a/npm/src/ee/chat/llm-providers.ts b/npm/src/ee/chat/llm-providers.ts new file mode 100644 index 000000000..ba6d9a8d7 --- /dev/null +++ b/npm/src/ee/chat/llm-providers.ts @@ -0,0 +1,237 @@ +import { LLMProvidersType } from './types'; + +export const LLM_PROVIDERS: LLMProvidersType = { + openai: { + name: 'OpenAI', + models: [ + { + id: 'gpt-4o', + name: 'GPT-4o', + }, + { + id: 'gpt-4o-2024-05-13', + name: 'GPT-4o 2024-05-13', + }, + { + id: 'gpt-4-turbo', + name: 'GPT-4 Turbo', + }, + { + id: 'gpt-4-turbo-2024-04-09', 
+ name: 'GPT-4 Turbo 2024-04-09', + }, + { + id: 'gpt-4-0125-preview', + name: 'GPT-4 0125 Preview', + }, + { + id: 'gpt-4-turbo-preview', + name: 'GPT-4 Turbo Preview', + }, + { + id: 'gpt-4-1106-preview', + name: 'GPT-4 1106 Preview', + }, + { + id: 'gpt-4-vision-preview', + name: 'GPT-4 Vision Preview', + }, + { + id: 'gpt-4', + name: 'GPT-4', + }, + { + id: 'gpt-4-0613', + name: 'GPT-4 0613', + }, + { + id: 'gpt-4-32k', + name: 'GPT-4 32k', + }, + { + id: 'gpt-4-32k-0314', + name: 'GPT-4 32k 0314', + }, + { + id: 'gpt-4-32k-0613', + name: 'GPT-4 32k 0613', + }, + { + id: 'gpt-3.5-turbo', + name: 'GPT-3.5 Turbo', + }, + { + id: 'gpt-3.5-turbo-16k', + name: 'GPT-3.5 Turbo 16k', + }, + { + id: 'gpt-3.5-turbo-0301', + name: 'GPT-3.5 Turbo 0301', + }, + { + id: 'gpt-3.5-turbo-0613', + name: 'GPT-3.5 Turbo 0613', + }, + { + id: 'gpt-3.5-turbo-1106', + name: 'GPT-3.5 Turbo 1106', + }, + { + id: 'gpt-3.5-turbo-0125', + name: 'GPT-3.5 Turbo 0125', + }, + { + id: 'gpt-3.5-turbo-16k-0613', + name: 'GPT-3.5 Turbo 16k 0613', + }, + ], + }, + anthropic: { + name: 'Anthropic', + models: [ + { + id: 'claude-3-5-sonnet-20240620', + name: 'Claude 3.5 Sonet', + max_tokens: 4096, + }, + { + id: 'claude-3-opus-20240229', + name: 'Claude 3 Opus', + max_tokens: 4096, + }, + { + id: 'claude-3-sonnet-20240229', + name: 'Claude 3 Sonnet', + max_tokens: 4096, + }, + { + id: 'claude-3-haiku-20240307', + name: 'Claude 3 Haiku', + max_tokens: 4096, + }, + { + id: 'claude-2.1', + name: 'Claude 2.1', + max_tokens: 4096, + }, + { + id: 'claude-2.0', + name: 'Claude 2', + max_tokens: 4096, + }, + { + id: 'claude-instant-1.2', + name: 'Claude Instant 1.2', + max_tokens: 4096, + }, + ], + }, + mistral: { + name: 'Mistral', + models: [ + { + id: 'open-mistral-7b', + name: 'Mistral 7B', + }, + { + id: 'open-mixtral-8x7b', + name: 'Mixtral 8x7B', + }, + { + id: 'open-mixtral-8x22b', + name: 'Mixtral 8x22B', + }, + { + id: 'mistral-small-latest', + name: 'Mistral Small', + }, + { + id: 
'mistral-large-latest', + name: 'Mistral Large', + }, + { + id: 'codestral-latest', + name: 'Codestral', + }, + ], + }, + groq: { + name: 'Groq', + models: [ + { + id: 'gemma-7b-it', + name: 'Gemma 7B IT', + }, + { + id: 'llama3-70b-8192', + name: 'Llama3 70B 8192', + }, + { + id: 'llama3-8b-8192', + name: 'Llama3 8B 8192', + }, + { + id: 'mixtral-8x7b-32768', + name: 'Mixtral 8x7B 32768', + }, + ], + }, + perplexity: { + name: 'Perplexity', + models: [ + { + id: 'llama-3-sonar-small-32k-chat', + name: 'Llama 3 Sonar Small 32k Chat', + }, + { + id: 'llama-3-sonar-small-32k-online', + name: 'Llama 3 Sonar Small 32k Online', + }, + { + id: 'llama-3-sonar-large-32k-chat', + name: 'Llama 3 Sonar Large 32k Chat', + }, + { + id: 'llama-3-sonar-large-32k-online', + name: 'Llama 3 Sonar Large 32k Online', + }, + { + id: 'llama-3-8b-instruct', + name: 'Llama 3 8B Instruct', + }, + { + id: 'llama-3-70b-instruct', + name: 'Llama 3 70B Instruct', + }, + { + id: 'mixtral-8x7b-instruct', + name: 'Mixtral 8x7B Instruct', + }, + ], + }, + 'google-generative-ai': { + name: 'Google Generative AI', + models: [ + { + id: 'gemini-1.5-pro', + name: 'Gemini 1.5 Pro', + }, + { + id: 'gemini-1.5-flash', + name: 'Gemini 1.5 Flash', + }, + { + id: 'gemini-1.0-pro', + name: 'Gemini 1.0 Pro', + }, + { + id: 'text-embedding-004', + name: 'Text Embedding', + }, + ], + }, + ollama: { + name: 'Ollama', + models: [], + }, +}; diff --git a/npm/src/ee/chat/types.ts b/npm/src/ee/chat/types.ts new file mode 100644 index 000000000..70a03ce1e --- /dev/null +++ b/npm/src/ee/chat/types.ts @@ -0,0 +1,83 @@ +export type LLMProvider = + | 'openai' + | 'anthropic' + | 'mistral' + | 'groq' + | 'perplexity' + | 'google-generative-ai' + | 'ollama'; + +export type LLMModel = { + id: string; + name: string; + max_tokens?: number; +}; + +export type LLMProvidersType = { + [key in LLMProvider]: { + name: string; + models: LLMModel[]; + }; +}; + +export type LLMProvidersOptionsType = { id: LLMProvider; name: string 
}[]; + +export const PII_POLICY_OPTIONS = [ + 'none', + 'detect_mask', + 'detect_redact', + 'detect_report', + 'detect_block', +] as const; + +export type LLMConversation = { + id: string; + tenant: string; + userId: string; + title: string; + provider: string; + model: string; + isChatWithPDFProvider?: boolean; + createdAt: number; +}; + +export type LLMChat = { + id: string; + createdAt: number; + conversationId: string; + content: string; + role: string; +}; + +export type LLMConfigPayload = { + provider: LLMProvider; + tenant: string; + models?: string[]; + apiKey?: string; + baseURL?: string; + isChatWithPDFProvider?: boolean; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; +}; + +export type LLMConfig = { + id: string; + createdAt: number; + provider: LLMProvider; + tenant: string; + models: string[]; + isChatWithPDFProvider?: boolean; + terminusToken: string; +}; + +export type LLMConfigMergedFromVault = { + id: string; + createdAt: number; + provider: LLMProvider; + tenant: string; + models: string[]; + terminusToken: string; + apiKey?: string; + baseURL?: string; + isChatWithPDFProvider?: boolean; + piiPolicy: (typeof PII_POLICY_OPTIONS)[number]; +}; diff --git a/npm/src/index.ts b/npm/src/index.ts index 19d490ff6..977d3e0ae 100644 --- a/npm/src/index.ts +++ b/npm/src/index.ts @@ -20,6 +20,7 @@ import SSOTraces from './sso-traces'; import EventController from './event'; import { ProductController } from './ee/product'; import { OryController } from './ee/ory/ory'; +import { ChatController } from './ee/chat'; const tracesTTL = 7 * 24 * 60 * 60; @@ -75,6 +76,7 @@ export const controllers = async ( brandingController: IBrandingController; checkLicense: () => Promise; productController: ProductController; + chatController: ChatController; close: () => Promise; }> => { opts = defaultOpts(opts); @@ -91,6 +93,9 @@ export const controllers = async ( const settingsStore = db.store('portal:settings'); const productStore = db.store('product:config'); const 
tracesStore = db.store('saml:tracer', tracesTTL); + const chatStore = db.store('llm:chat'); + const conversationStore = db.store('llm:conversation'); + const llmConfigStore = db.store('llm:config'); const ssoTraces = new SSOTraces({ tracesStore }); const eventController = new EventController({ opts }); @@ -134,6 +139,7 @@ export const controllers = async ( const oidcDiscoveryController = new OidcDiscoveryController({ opts }); const spConfig = new SPSSOConfig(opts); const directorySyncController = await initDirectorySync({ db, opts, eventController }); + const chatController = new ChatController({ chatStore, conversationStore, llmConfigStore, opts }); // write pre-loaded connections if present const preLoadedConnection = opts.preLoadedConnection; @@ -188,6 +194,7 @@ export const controllers = async ( return checkLicense(opts.boxyhqLicenseKey); }, productController, + chatController, close: async () => { await db.close(); }, diff --git a/npm/src/typings.ts b/npm/src/typings.ts index bbb01f25a..fa6a47ee3 100644 --- a/npm/src/typings.ts +++ b/npm/src/typings.ts @@ -2,6 +2,7 @@ import type { JWK } from 'jose'; import { ServerMetadata } from 'openid-client'; export * from './ee/identity-federation/types'; +export * from './ee/chat/types'; export * from './sso-traces/types'; export * from './directory-sync/types'; export * from './event/types'; @@ -480,8 +481,13 @@ export interface JacksonOption { }; noAnalytics?: boolean; terminus?: { - host?: string; + hostUrl?: string; adminToken?: string; + apiKey?: { read: string; write: string }; + llm?: { + tenant: string; + product: string; + }; }; webhook?: Webhook; dsync?: { @@ -508,6 +514,17 @@ export interface JacksonOption { projectId: string | undefined; sdkToken: string | undefined; }; + + llm?: { + pdfChat?: { + baseUrl: string; + jwtSigningKey: string; + jwsAlg?: string; + jwtAudience: string; + jwtIssuer: string; + roleMapping: string; + }; + }; } export interface SLORequestParams { diff --git a/package-lock.json 
b/package-lock.json index 752bf3303..85a31e6eb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,11 +10,14 @@ "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { + "@anthropic-ai/sdk": "0.24.3", "@boxyhq/internal-ui": "file:internal-ui", "@boxyhq/metrics": "0.2.9", "@boxyhq/react-ui": "3.3.45", "@boxyhq/saml-jackson": "file:npm", + "@google/generative-ai": "0.15.0", "@heroicons/react": "2.1.5", + "@mistralai/mistralai": "0.5.0", "@next/bundle-analyzer": "15.0.3", "@retracedhq/logs-viewer": "2.8.0", "@retracedhq/retraced": "0.7.16", @@ -27,6 +30,7 @@ "cross-env": "7.0.3", "daisyui": "4.12.14", "formik": "2.4.6", + "groq-sdk": "0.5.0", "i18next": "23.16.5", "medium-zoom": "1.1.0", "micromatch": "4.0.8", @@ -35,6 +39,8 @@ "next-i18next": "15.3.1", "next-mdx-remote": "5.0.0", "nodemailer": "6.9.16", + "ollama": "0.5.2", + "openai": "4.52.7", "prismjs": "1.29.0", "raw-body": "3.0.0", "react": "18.3.1", @@ -44,7 +50,8 @@ "react-tagsinput": "3.20.3", "remark-gfm": "4.0.0", "sharp": "0.33.5", - "swr": "2.2.5" + "swr": "2.2.5", + "zod": "3.23.8" }, "devDependencies": { "@playwright/test": "1.48.2", @@ -52,7 +59,7 @@ "@types/micromatch": "4.0.9", "@types/node": "20.12.12", "@types/prismjs": "1.26.5", - "@types/react": "18.3.2", + "@types/react": "18.3.3", "@typescript-eslint/eslint-plugin": "8.0.1", "@typescript-eslint/parser": "8.0.1", "autoprefixer": "10.4.20", @@ -356,6 +363,37 @@ "node": ">=6.0.0" } }, + "node_modules/@anthropic-ai/sdk": { + "version": "0.24.3", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.24.3.tgz", + "integrity": "sha512-916wJXO6T6k8R6BAAcLhLPv/pnLGy7YSEBZXZ1XTFbLcTZE8oTy3oDW9WJf9KKZwMvVcePIfoTSvzXHRcGxkQQ==", + "license": "MIT", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7", + "web-streams-polyfill": "^3.2.1" + } + }, 
+ "node_modules/@anthropic-ai/sdk/node_modules/@types/node": { + "version": "18.19.64", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.64.tgz", + "integrity": "sha512-955mDqvO2vFf/oL7V3WiUtiz+BugyX8uVbaT2H8oj3+8dRyH2FLiNdowe7eNqRM7IOIZvzDH76EoAT+gwm6aIQ==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@anthropic-ai/sdk/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, "node_modules/@apidevtools/json-schema-ref-parser": { "version": "9.1.2", "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.2.tgz", @@ -2550,6 +2588,15 @@ "license": "MIT", "optional": true }, + "node_modules/@google/generative-ai": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@google/generative-ai/-/generative-ai-0.15.0.tgz", + "integrity": "sha512-zs37judcTYFJf1U7tnuqnh7gdzF6dcWj9pNRxjA5JTONRoiQ0htrRdbefRFiewOIfXwhun5t9hbd2ray7812eQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@googleapis/admin": { "version": "23.0.0", "resolved": "https://registry.npmjs.org/@googleapis/admin/-/admin-23.0.0.tgz", @@ -3377,6 +3424,15 @@ "react": ">=16" } }, + "node_modules/@mistralai/mistralai": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@mistralai/mistralai/-/mistralai-0.5.0.tgz", + "integrity": "sha512-56xfoC/0CiT0RFHrRNoJYSKCNc922EyHzEPJYY6ttalQ5KZdrNVgXeOetIGX0lDx7IjbxAJrrae2MQgUIlL9+g==", + "license": "ISC", + "dependencies": { + "node-fetch": "^2.6.7" + } + }, "node_modules/@mongodb-js/saslprep": { "version": "1.1.9", "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.9.tgz", @@ -7437,9 +7493,9 @@ "license": "MIT" }, "node_modules/@types/react": { - 
"version": "18.3.2", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.2.tgz", - "integrity": "sha512-Btgg89dAnqD4vV7R3hlwOxgqobUQKgx3MmrQRi0yYbs/P0ym8XozIAlkqVilPqHQwXs4e9Tf63rrCgl58BcO4w==", + "version": "18.3.3", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.3.tgz", + "integrity": "sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw==", "license": "MIT", "dependencies": { "@types/prop-types": "*", @@ -8222,7 +8278,6 @@ "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", "license": "MIT", - "optional": true, "dependencies": { "humanize-ms": "^1.2.1" }, @@ -11977,6 +12032,34 @@ "node": ">= 6" } }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", + "license": "MIT" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "license": "MIT", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, + "node_modules/formdata-node/node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "node_modules/formik": { "version": "2.4.6", "resolved": 
"https://registry.npmjs.org/formik/-/formik-2.4.6.tgz", @@ -12541,6 +12624,37 @@ "dev": true, "license": "MIT" }, + "node_modules/groq-sdk": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.5.0.tgz", + "integrity": "sha512-RVmhW7qZ+XZoy5fIuSdx/LGQJONpL8MHgZEW7dFwTdgkzStub2XQx6OKv28CHogijdwH41J+Npj/z2jBPu3vmw==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7", + "web-streams-polyfill": "^3.2.1" + } + }, + "node_modules/groq-sdk/node_modules/@types/node": { + "version": "18.19.64", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.64.tgz", + "integrity": "sha512-955mDqvO2vFf/oL7V3WiUtiz+BugyX8uVbaT2H8oj3+8dRyH2FLiNdowe7eNqRM7IOIZvzDH76EoAT+gwm6aIQ==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/groq-sdk/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, "node_modules/gtoken": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", @@ -12895,7 +13009,6 @@ "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", "license": "MIT", - "optional": true, "dependencies": { "ms": "^2.0.0" } @@ -16952,6 +17065,25 @@ "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", "license": "MIT" }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, "node_modules/node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", @@ -17644,6 +17776,15 @@ "node": "^10.13.0 || >=12.0.0" } }, + "node_modules/ollama": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/ollama/-/ollama-0.5.2.tgz", + "integrity": "sha512-nH9WEU8lGxX2RhTH9TukjwrQBlyoprIh8wIGMfFlprgzzJgAr+MFFmHzCt7BZt4SMFMXVwM2xnKrfshfHkBLyQ==", + "license": "MIT", + "dependencies": { + "whatwg-fetch": "^3.6.20" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -17688,6 +17829,40 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/openai": { + "version": "4.52.7", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.52.7.tgz", + "integrity": "sha512-dgxA6UZHary6NXUHEDj5TWt8ogv0+ibH+b4pT5RrWMjiRZVylNwLcw/2ubDrX5n0oUmHX/ZgudMJeemxzOvz7A==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7", + "web-streams-polyfill": "^3.2.1" + }, + "bin": { + "openai": "bin/cli" + } + }, + "node_modules/openai/node_modules/@types/node": { + "version": "18.19.64", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.64.tgz", + "integrity": "sha512-955mDqvO2vFf/oL7V3WiUtiz+BugyX8uVbaT2H8oj3+8dRyH2FLiNdowe7eNqRM7IOIZvzDH76EoAT+gwm6aIQ==", + "license": "MIT", + "dependencies": { + 
"undici-types": "~5.26.4" + } + }, + "node_modules/openai/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, "node_modules/openapi-types": { "version": "12.1.3", "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", @@ -23820,6 +23995,15 @@ "defaults": "^1.0.3" } }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", @@ -23898,6 +24082,12 @@ "node": ">=18" } }, + "node_modules/whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==", + "license": "MIT" + }, "node_modules/whatwg-mimetype": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", @@ -24542,6 +24732,15 @@ "node": "^12.20.0 || >=14" } }, + "node_modules/zod": { + "version": "3.23.8", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, "node_modules/zwitch": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", diff --git a/package.json b/package.json 
index a6728faa2..ec13e16d7 100644 --- a/package.json +++ b/package.json @@ -61,11 +61,14 @@ "format": "prettier --write ." }, "dependencies": { + "@anthropic-ai/sdk": "0.24.3", "@boxyhq/internal-ui": "file:internal-ui", "@boxyhq/metrics": "0.2.9", "@boxyhq/react-ui": "3.3.45", "@boxyhq/saml-jackson": "file:npm", + "@google/generative-ai": "0.15.0", "@heroicons/react": "2.1.5", + "@mistralai/mistralai": "0.5.0", "@next/bundle-analyzer": "15.0.3", "@retracedhq/logs-viewer": "2.8.0", "@retracedhq/retraced": "0.7.16", @@ -78,6 +81,7 @@ "cross-env": "7.0.3", "daisyui": "4.12.14", "formik": "2.4.6", + "groq-sdk": "0.5.0", "i18next": "23.16.5", "medium-zoom": "1.1.0", "micromatch": "4.0.8", @@ -86,6 +90,8 @@ "next-i18next": "15.3.1", "next-mdx-remote": "5.0.0", "nodemailer": "6.9.16", + "ollama": "0.5.2", + "openai": "4.52.7", "prismjs": "1.29.0", "raw-body": "3.0.0", "react": "18.3.1", @@ -95,7 +101,8 @@ "react-tagsinput": "3.20.3", "remark-gfm": "4.0.0", "sharp": "0.33.5", - "swr": "2.2.5" + "swr": "2.2.5", + "zod": "3.23.8" }, "devDependencies": { "@playwright/test": "1.48.2", @@ -103,7 +110,7 @@ "@types/micromatch": "4.0.9", "@types/node": "20.12.12", "@types/prismjs": "1.26.5", - "@types/react": "18.3.2", + "@types/react": "18.3.3", "@typescript-eslint/eslint-plugin": "8.0.1", "@typescript-eslint/parser": "8.0.1", "autoprefixer": "10.4.20", diff --git a/pages/admin/llm-vault/audit-logs.tsx b/pages/admin/llm-vault/audit-logs.tsx new file mode 100644 index 000000000..7212d8ecd --- /dev/null +++ b/pages/admin/llm-vault/audit-logs.tsx @@ -0,0 +1,51 @@ +import { serverSideTranslations } from 'next-i18next/serverSideTranslations'; +import { retracedOptions, terminusOptions } from '@lib/env'; + +import { getToken } from '@lib/retraced'; +import type { Project } from 'types/retraced'; +import axios from 'axios'; +import jackson from '@lib/jackson'; +import { NextApiRequest, GetServerSideProps } from 'next'; + +export { default } from '@ee/terminus/pages/audit-logs'; + 
+export const getServerSideProps = (async ({ locale, req }) => { + const { checkLicense } = await jackson(); + + if (!terminusOptions.llmRetracedProjectId) { + return { + notFound: true, + }; + } else { + const token = await getToken(req as NextApiRequest); + try { + const { data } = await axios.get<{ project: Project }>( + `${retracedOptions?.hostUrl}/admin/v1/project/${terminusOptions.llmRetracedProjectId}`, + { + headers: { + Authorization: `id=${token.id} token=${token.token} admin_token=${retracedOptions.adminToken}`, + }, + } + ); + if (data.project.environments.length === 0) { + return { + notFound: true, + }; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + } catch (err) { + return { + notFound: true, + }; + } + } + + return { + props: { + ...(await serverSideTranslations(locale!, ['common'])), + host: retracedOptions.externalUrl, + projectId: terminusOptions.llmRetracedProjectId, + hasValidLicense: await checkLicense(), + }, + }; +}) satisfies GetServerSideProps; diff --git a/pages/admin/llm-vault/chat/[[...conversationId]].tsx b/pages/admin/llm-vault/chat/[[...conversationId]].tsx new file mode 100644 index 000000000..56d6b52c4 --- /dev/null +++ b/pages/admin/llm-vault/chat/[[...conversationId]].tsx @@ -0,0 +1,24 @@ +import { features, terminusOptions } from '@lib/env'; +import type { GetServerSidePropsContext } from 'next'; +import { serverSideTranslations } from 'next-i18next/serverSideTranslations'; +import jackson from '@lib/jackson'; + +export { default } from '@ee/chat/pages/[[...conversationId]]'; + +export async function getServerSideProps({ locale }: GetServerSidePropsContext) { + const { checkLicense } = await jackson(); + + if (!features.llmVault) { + return { + notFound: true, + }; + } + + return { + props: { + ...(locale ? 
await serverSideTranslations(locale, ['common']) : {}), + llmTenant: terminusOptions.llm?.tenant, + hasValidLicense: await checkLicense(), + }, + }; +} diff --git a/pages/admin/terminus/audit-logs.tsx b/pages/admin/terminus/audit-logs.tsx index ff75b0847..d20cf272e 100644 --- a/pages/admin/terminus/audit-logs.tsx +++ b/pages/admin/terminus/audit-logs.tsx @@ -1,113 +1,17 @@ -import type { NextPage, GetServerSideProps, NextApiRequest } from 'next'; -import dynamic from 'next/dynamic'; -import { useEffect, useState } from 'react'; import { serverSideTranslations } from 'next-i18next/serverSideTranslations'; -import { useProject, useGroups } from '@lib/ui/retraced'; -import { LinkBack, Loading, Error } from '@boxyhq/internal-ui'; -import { Select } from 'react-daisyui'; import { retracedOptions, terminusOptions } from '@lib/env'; -import { useTranslation } from 'next-i18next'; + import { getToken } from '@lib/retraced'; import type { Project } from 'types/retraced'; import axios from 'axios'; +import jackson from '@lib/jackson'; +import { NextApiRequest, GetServerSideProps } from 'next'; -const LogsViewer = dynamic(() => import('@components/retraced/LogsViewer'), { - ssr: false, -}); - -export interface Props { - host?: string; - projectId: string; -} - -const Events: NextPage = ({ host, projectId }: Props) => { - const { t } = useTranslation('common'); - - const [environment, setEnvironment] = useState(''); - const [group, setGroup] = useState(''); - - const { project, isLoading, isError } = useProject(projectId); - const { groups } = useGroups(projectId, environment); - - // Set the environment - useEffect(() => { - if (project) { - setEnvironment(project.environments[0]?.id); - } - }, [project]); - - // Set the group - useEffect(() => { - if (groups && groups.length > 0) { - setGroup(groups[0].group_id); - } - }, [groups]); - - if (isLoading) { - return ; - } - - if (isError) { - return ; - } - - const displayLogsViewer = project && environment && group; - - return 
( -
- -
-

{project?.name}

-
-
-
- - {project ? ( - - ) : null} -
-
- - {groups ? ( - - ) : null} -
-
-
- {displayLogsViewer && ( - - )} -
-
- ); -}; +export { default } from '@ee/terminus/pages/audit-logs'; export const getServerSideProps = (async ({ locale, req }) => { + const { checkLicense } = await jackson(); + if (!terminusOptions.retracedProjectId) { return { notFound: true, @@ -141,8 +45,7 @@ export const getServerSideProps = (async ({ locale, req }) => { ...(await serverSideTranslations(locale!, ['common'])), host: retracedOptions.externalUrl, projectId: terminusOptions.retracedProjectId, + hasValidLicense: await checkLicense(), }, }; }) satisfies GetServerSideProps; - -export default Events; diff --git a/pages/admin/terminus/index.tsx b/pages/admin/terminus/index.tsx index ea877103c..08ac4a161 100644 --- a/pages/admin/terminus/index.tsx +++ b/pages/admin/terminus/index.tsx @@ -6,12 +6,19 @@ import '@components/terminus/blocks/customblocks'; import '@components/terminus/blocks/generator'; import { EmptyState } from '@boxyhq/internal-ui'; import { terminusOptions } from '@lib/env'; +import jackson from '@lib/jackson'; +import LicenseRequired from '@components/LicenseRequired'; export interface Props { host?: string; + hasValidLicense: boolean; } -const TerminusIndexPage: NextPage = ({ host }: Props) => { +const TerminusIndexPage: NextPage = ({ host, hasValidLicense }: Props) => { + if (!hasValidLicense) { + return ; + } + if (!host) { return ( = ({ host }: Props) => { }; export async function getServerSideProps({ locale }) { + const { checkLicense } = await jackson(); + return { props: { ...(await serverSideTranslations(locale, ['common'])), host: terminusOptions.hostUrl || null, + hasValidLicense: await checkLicense(), }, }; } diff --git a/pages/api/admin/features.ts b/pages/api/admin/features.ts new file mode 100644 index 000000000..8414060ba --- /dev/null +++ b/pages/api/admin/features.ts @@ -0,0 +1,15 @@ +import { defaultHandler } from '@lib/api'; +import { features } from '@lib/env'; +import { NextApiRequest, NextApiResponse } from 'next'; + +const handler = async (req: NextApiRequest, res: 
NextApiResponse) => { + await defaultHandler(req, res, { + GET: handleGET, + }); +}; + +const handleGET = async (req: NextApiRequest, res: NextApiResponse) => { + res.json({ data: { features } }); +}; + +export default handler; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/config/[configId].ts b/pages/api/admin/llm-vault/chat/[tenant]/config/[configId].ts new file mode 100644 index 000000000..7724a0d77 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/config/[configId].ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/config/[configId]'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/config/index.ts b/pages/api/admin/llm-vault/chat/[tenant]/config/index.ts new file mode 100644 index 000000000..903950cda --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/config/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/config'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/conversation/[conversationId].ts b/pages/api/admin/llm-vault/chat/[tenant]/conversation/[conversationId].ts new file mode 100644 index 000000000..8dc35ad58 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/conversation/[conversationId].ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/conversation/[conversationId]'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/conversation/index.ts b/pages/api/admin/llm-vault/chat/[tenant]/conversation/index.ts new file mode 100644 index 000000000..6899e9260 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/conversation/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/conversation'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/index.ts b/pages/api/admin/llm-vault/chat/[tenant]/index.ts new file mode 100644 index 000000000..8c91ca24c --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]'; diff --git 
a/pages/api/admin/llm-vault/chat/[tenant]/providers/[provider]/models.ts b/pages/api/admin/llm-vault/chat/[tenant]/providers/[provider]/models.ts new file mode 100644 index 000000000..cb296fe58 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/providers/[provider]/models.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/providers/[provider]/models'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/providers/index.ts b/pages/api/admin/llm-vault/chat/[tenant]/providers/index.ts new file mode 100644 index 000000000..0b8d552f4 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/providers/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/providers'; diff --git a/pages/api/admin/llm-vault/chat/[tenant]/upload-file.ts b/pages/api/admin/llm-vault/chat/[tenant]/upload-file.ts new file mode 100644 index 000000000..d46134ab0 --- /dev/null +++ b/pages/api/admin/llm-vault/chat/[tenant]/upload-file.ts @@ -0,0 +1,7 @@ +export const config = { + api: { + bodyParser: false, + }, +}; + +export { default } from '@ee/chat/api/[tenant]/upload-file'; diff --git a/pages/api/auth/[...nextauth].ts b/pages/api/auth/[...nextauth].ts index 553448f34..267c45108 100644 --- a/pages/api/auth/[...nextauth].ts +++ b/pages/api/auth/[...nextauth].ts @@ -1,5 +1,5 @@ import Adapter from '@lib/nextAuthAdapter'; -import NextAuth from 'next-auth'; +import NextAuth, { NextAuthOptions } from 'next-auth'; import EmailProvider from 'next-auth/providers/email'; import CredentialsProvider from 'next-auth/providers/credentials'; import BoxyHQSAMLProvider from 'next-auth/providers/boxyhq-saml'; @@ -8,7 +8,7 @@ import { validateEmailWithACL } from '@lib/utils'; import { jacksonOptions as env } from '@lib/env'; import { sessionName } from '@lib/constants'; -export default NextAuth({ +export const authOptions: NextAuthOptions = { theme: { colorScheme: 'light', }, @@ -161,6 +161,12 @@ export default NextAuth({ return validateEmailWithACL(user.email); }, + 
async session({ session, token }) { + if (session && token) { + session.user.id = token.sub; + } + return session; + }, }, pages: { signIn: '/admin/auth/login', @@ -168,4 +174,6 @@ export default NextAuth({ // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore adapter: Adapter(), -}); +}; + +export default NextAuth(authOptions); diff --git a/pages/api/internals/chat/[tenant]/config/[configId].ts b/pages/api/internals/chat/[tenant]/config/[configId].ts new file mode 100644 index 000000000..7724a0d77 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/config/[configId].ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/config/[configId]'; diff --git a/pages/api/internals/chat/[tenant]/config/index.ts b/pages/api/internals/chat/[tenant]/config/index.ts new file mode 100644 index 000000000..903950cda --- /dev/null +++ b/pages/api/internals/chat/[tenant]/config/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/config'; diff --git a/pages/api/internals/chat/[tenant]/conversation/[conversationId].ts b/pages/api/internals/chat/[tenant]/conversation/[conversationId].ts new file mode 100644 index 000000000..8dc35ad58 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/conversation/[conversationId].ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/conversation/[conversationId]'; diff --git a/pages/api/internals/chat/[tenant]/conversation/index.ts b/pages/api/internals/chat/[tenant]/conversation/index.ts new file mode 100644 index 000000000..6899e9260 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/conversation/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/conversation'; diff --git a/pages/api/internals/chat/[tenant]/index.ts b/pages/api/internals/chat/[tenant]/index.ts new file mode 100644 index 000000000..8c91ca24c --- /dev/null +++ b/pages/api/internals/chat/[tenant]/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]'; diff --git 
a/pages/api/internals/chat/[tenant]/providers/[provider]/models.ts b/pages/api/internals/chat/[tenant]/providers/[provider]/models.ts new file mode 100644 index 000000000..cb296fe58 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/providers/[provider]/models.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/providers/[provider]/models'; diff --git a/pages/api/internals/chat/[tenant]/providers/index.ts b/pages/api/internals/chat/[tenant]/providers/index.ts new file mode 100644 index 000000000..0b8d552f4 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/providers/index.ts @@ -0,0 +1 @@ +export { default } from '@ee/chat/api/[tenant]/providers'; diff --git a/pages/api/internals/chat/[tenant]/upload-file.ts b/pages/api/internals/chat/[tenant]/upload-file.ts new file mode 100644 index 000000000..8deacbcb5 --- /dev/null +++ b/pages/api/internals/chat/[tenant]/upload-file.ts @@ -0,0 +1,6 @@ +export const config = { + api: { + bodyParser: false, + }, +}; +export { default } from '@ee/chat/api/[tenant]/upload-file'; diff --git a/types/next-auth.d.ts b/types/next-auth.d.ts new file mode 100644 index 000000000..b85f246a1 --- /dev/null +++ b/types/next-auth.d.ts @@ -0,0 +1,9 @@ +import 'next-auth'; + +declare module 'next-auth' { + interface Session { + user: DefaultSession['user'] & { + id: string; + }; + } +}