From b60af4b0bd2d87af86a30e1cdb12420276dab57c Mon Sep 17 00:00:00 2001 From: Ilya Bondar Date: Mon, 13 Jan 2025 17:16:44 +0100 Subject: [PATCH] feat(chat): implement new settings feature flags (Issue #2923) (#2926) --- .../src/tests/chatHeaderSettings.test.ts | 24 +-- apps/chat-e2e/src/tests/compareMode.test.ts | 51 +++--- .../src/tests/defaultModelSettings.test.ts | 24 +-- apps/chat-e2e/src/tests/modelSettings.test.ts | 25 +-- apps/chat-e2e/src/tests/replay.test.ts | 5 +- apps/chat-e2e/src/utils/modelsUtil.ts | 23 ++- apps/chat/next.config.js | 2 +- .../Chat/ChatSettings/ChatSettingsModal.tsx | 4 +- .../ChatSettings/ConversationSettings.tsx | 152 ++++++++++-------- apps/chat/src/pages/api/chat.ts | 18 ++- .../conversations/conversations.epics.ts | 19 ++- apps/chat/src/types/models.ts | 6 +- apps/chat/src/utils/app/models.ts | 24 +++ .../src/utils/server/get-sorted-entities.ts | 4 +- 14 files changed, 246 insertions(+), 135 deletions(-) create mode 100644 apps/chat/src/utils/app/models.ts diff --git a/apps/chat-e2e/src/tests/chatHeaderSettings.test.ts b/apps/chat-e2e/src/tests/chatHeaderSettings.test.ts index 1c4dfe8024..d8cf479b42 100644 --- a/apps/chat-e2e/src/tests/chatHeaderSettings.test.ts +++ b/apps/chat-e2e/src/tests/chatHeaderSettings.test.ts @@ -58,21 +58,25 @@ dialTest( 'Verify conversation settings are the same as for initial model', async () => { await chatHeader.openConversationSettingsPopup(); - if (randomModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(randomModel)) { const systemPrompt = await agentSettings.getSystemPrompt(); expect .soft(systemPrompt, ExpectedMessages.defaultSystemPromptIsEmpty) .toBe(conversation.prompt); } - const temperature = await temperatureSlider.getTemperature(); - expect - .soft(temperature, ExpectedMessages.defaultTemperatureIsOne) - .toBe(conversation.temperature.toString()); - const modelAddons = defaultModel.selectedAddons ?? []; - const selectedAddons = await addons.getSelectedAddons(); - expect - .soft(selectedAddons, ExpectedMessages.noAddonsSelected) - .toEqual(modelAddons); + if (ModelsUtil.doesModelAllowTemperature(randomModel)) { + const temperature = await temperatureSlider.getTemperature(); + expect + .soft(temperature, ExpectedMessages.defaultTemperatureIsOne) + .toBe(conversation.temperature.toString()); + } + if (ModelsUtil.doesModelAllowAddons(randomModel)) { + const modelAddons = defaultModel.selectedAddons ?? 
[]; + const selectedAddons = await addons.getSelectedAddons(); + expect + .soft(selectedAddons, ExpectedMessages.noAddonsSelected) + .toEqual(modelAddons); + } }, ); }, diff --git a/apps/chat-e2e/src/tests/compareMode.test.ts b/apps/chat-e2e/src/tests/compareMode.test.ts index 94b6e671c4..9370b98345 100644 --- a/apps/chat-e2e/src/tests/compareMode.test.ts +++ b/apps/chat-e2e/src/tests/compareMode.test.ts @@ -26,7 +26,10 @@ dialTest.beforeAll(async () => { defaultModel = ModelsUtil.getDefaultModel()!; aModel = GeneratorUtil.randomArrayElement( allModels.filter( - (m) => m.id !== defaultModel.id && m.features?.systemPrompt, + (m) => + m.id !== defaultModel.id && + ModelsUtil.doesModelAllowSystemPrompt(m) && + ModelsUtil.doesModelAllowTemperature(m), ), ); bModel = GeneratorUtil.randomArrayElement( @@ -817,23 +820,27 @@ dialTest( await leftChatHeader.openConversationSettingsPopup(); const leftEntitySettings = conversationSettingsModal.getLeftAgentSettings(); - if (firstUpdatedRandomModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(firstUpdatedRandomModel)) { await leftEntitySettings.clearAndSetSystemPrompt(firstUpdatedPrompt); } - await leftEntitySettings - .getTemperatureSlider() - .setTemperature(firstUpdatedTemp); + if (ModelsUtil.doesModelAllowTemperature(firstUpdatedRandomModel)) { + await leftEntitySettings + .getTemperatureSlider() + .setTemperature(firstUpdatedTemp); + } const rightEntitySettings = conversationSettingsModal.getRightAgentSettings(); - if (secondUpdatedRandomModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(secondUpdatedRandomModel)) { await rightEntitySettings.clearAndSetSystemPrompt( secondUpdatedPrompt, ); } - await rightEntitySettings - .getTemperatureSlider() - .setTemperature(secondUpdatedTemp); + if (ModelsUtil.doesModelAllowTemperature(secondUpdatedRandomModel)) { + await rightEntitySettings + .getTemperatureSlider() + .setTemperature(secondUpdatedTemp); + } await conversationSettingsModal.applyChangesButton.click(); }, ); @@ -874,17 +881,18 @@ dialTest( .toBe(secondUpdatedRandomModel.version); await rightChatHeader.hoverOverChatSettings(); - if (secondUpdatedRandomModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(secondUpdatedRandomModel)) { const rightPromptInfo = await chatSettingsTooltip.getPromptInfo(); expect .soft(rightPromptInfo, ExpectedMessages.chatInfoPromptIsValid) .toBe(secondUpdatedPrompt); } - - const rightTempInfo = await chatSettingsTooltip.getTemperatureInfo(); - expect - .soft(rightTempInfo, ExpectedMessages.chatInfoTemperatureIsValid) - .toBe(secondUpdatedTemp.toString()); + if (ModelsUtil.doesModelAllowTemperature(secondUpdatedRandomModel)) { + const rightTempInfo = await chatSettingsTooltip.getTemperatureInfo(); + expect + .soft(rightTempInfo, ExpectedMessages.chatInfoTemperatureIsValid) + .toBe(secondUpdatedTemp.toString()); + } await errorPopup.cancelPopup(); await leftChatHeader.hoverOverChatModel(); @@ -899,17 +907,18 @@ dialTest( .toBe(firstUpdatedRandomModel.version); await leftChatHeader.hoverOverChatSettings(); - if (firstUpdatedRandomModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(firstUpdatedRandomModel)) { const leftPromptInfo = await chatSettingsTooltip.getPromptInfo(); expect .soft(leftPromptInfo, ExpectedMessages.chatInfoPromptIsValid) .toBe(firstUpdatedPrompt); } - - const leftTempInfo = await chatSettingsTooltip.getTemperatureInfo(); - expect - .soft(leftTempInfo, ExpectedMessages.chatInfoTemperatureIsValid) - 
.toBe(firstUpdatedTemp.toString()); + if (ModelsUtil.doesModelAllowTemperature(firstUpdatedRandomModel)) { + const leftTempInfo = await chatSettingsTooltip.getTemperatureInfo(); + expect + .soft(leftTempInfo, ExpectedMessages.chatInfoTemperatureIsValid) + .toBe(firstUpdatedTemp.toString()); + } }, ); }, diff --git a/apps/chat-e2e/src/tests/defaultModelSettings.test.ts b/apps/chat-e2e/src/tests/defaultModelSettings.test.ts index eac5bf4d64..05cb1627d7 100644 --- a/apps/chat-e2e/src/tests/defaultModelSettings.test.ts +++ b/apps/chat-e2e/src/tests/defaultModelSettings.test.ts @@ -17,7 +17,6 @@ let nonDefaultModel: DialAIEntityModel; let recentAddonIds: string[]; let recentModelIds: string[]; let allEntities: DialAIEntityModel[]; -let modelsWithoutSystemPrompt: string[]; dialTest.beforeAll(async () => { defaultModel = ModelsUtil.getDefaultModel()!; @@ -27,7 +26,6 @@ dialTest.beforeAll(async () => { recentAddonIds = ModelsUtil.getRecentAddonIds(); recentModelIds = ModelsUtil.getRecentModelIds(); allEntities = ModelsUtil.getOpenAIEntities(); - modelsWithoutSystemPrompt = ModelsUtil.getModelsWithoutSystemPrompt(); }); dialTest( @@ -359,13 +357,16 @@ dialTest( await chat.configureSettingsButton.click(); const sysPrompt = 'test prompt'; const temp = 0; - const isSysPromptAllowed = !modelsWithoutSystemPrompt.includes( - randomModel.id, - ); + const isSysPromptAllowed = + ModelsUtil.doesModelAllowSystemPrompt(randomModel); if (isSysPromptAllowed) { await agentSettings.setSystemPrompt(sysPrompt); } - await temperatureSlider.setTemperature(temp); + const isTemperatureAllowed = + ModelsUtil.doesModelAllowTemperature(randomModel); + if (isTemperatureAllowed) { + await temperatureSlider.setTemperature(temp); + } await conversationSettingsModal.applyChangesButton.click(); await dialHomePage.reloadPage(); @@ -376,11 +377,12 @@ dialTest( const systemPrompt = await agentSettings.systemPrompt.getElementContent(); expect.soft(systemPrompt, ExpectedMessages.systemPromptIsValid).toBe(''); } - - const temperature = await temperatureSlider.getTemperature(); - expect - .soft(temperature, ExpectedMessages.temperatureIsValid) - .toBe(ExpectedConstants.defaultTemperature); + if (isTemperatureAllowed) { + const temperature = await temperatureSlider.getTemperature(); + expect + .soft(temperature, ExpectedMessages.temperatureIsValid) + .toBe(ExpectedConstants.defaultTemperature); + } const selectedAddons = await addons.getSelectedAddons(); expect.soft(selectedAddons, ExpectedMessages.noAddonsSelected).toEqual([]); diff --git a/apps/chat-e2e/src/tests/modelSettings.test.ts b/apps/chat-e2e/src/tests/modelSettings.test.ts index 9a1e18858a..0690a376f8 100644 --- a/apps/chat-e2e/src/tests/modelSettings.test.ts +++ b/apps/chat-e2e/src/tests/modelSettings.test.ts @@ -32,7 +32,10 @@ dialTest( setTestIds('EPMRTC-1046'); const randomModel = GeneratorUtil.randomArrayElement( models.filter( - (m) => m.id !== defaultModel.id && m.features?.systemPrompt === true, + (m) => + m.id !== defaultModel.id && + ModelsUtil.doesModelAllowSystemPrompt(m) && + ModelsUtil.doesModelAllowTemperature(m), ), ); await localStorageManager.setRecentModelsIds(defaultModel, randomModel); @@ -40,28 +43,30 @@ dialTest( await dialHomePage.waitForPageLoaded(); await chat.configureSettingsButton.click(); - if (defaultModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(defaultModel)) { await agentSettings.setSystemPrompt(sysPrompt); } - await temperatureSlider.setTemperature(temp); + if (ModelsUtil.doesModelAllowTemperature(defaultModel)) 
{ + await temperatureSlider.setTemperature(temp); + } await conversationSettingsModal.applyChangesButton.click(); await chat.changeAgentButton.click(); await talkToAgentDialog.selectAgent(randomModel, marketplacePage); await chat.configureSettingsButton.click(); - if (defaultModel.features?.systemPrompt) { + if (ModelsUtil.doesModelAllowSystemPrompt(defaultModel)) { const systemPromptVisible = await agentSettings.getSystemPrompt(); expect .soft(systemPromptVisible, ExpectedMessages.systemPromptIsValid) .toBe(sysPrompt); } - - const temperature = await temperatureSlider.getTemperature(); - expect - .soft(temperature, ExpectedMessages.temperatureIsValid) - .toBe(temp.toString()); - + if (ModelsUtil.doesModelAllowTemperature(defaultModel)) { + const temperature = await temperatureSlider.getTemperature(); + expect + .soft(temperature, ExpectedMessages.temperatureIsValid) + .toBe(temp.toString()); + } const selectedAddons = await addons.getSelectedAddons(); expect .soft(selectedAddons, ExpectedMessages.selectedAddonsValid) diff --git a/apps/chat-e2e/src/tests/replay.test.ts b/apps/chat-e2e/src/tests/replay.test.ts index eb113eacd0..acbc29fd55 100644 --- a/apps/chat-e2e/src/tests/replay.test.ts +++ b/apps/chat-e2e/src/tests/replay.test.ts @@ -268,7 +268,10 @@ dialTest( const replayPrompt = 'reply the same text'; const replayModel = GeneratorUtil.randomArrayElement( allModels.filter( - (m) => m.id !== defaultModel.id && m.features?.systemPrompt, + (m) => + m.id !== defaultModel.id && + ModelsUtil.doesModelAllowSystemPrompt(m) && + ModelsUtil.doesModelAllowTemperature(m), ), ); const conversation = diff --git a/apps/chat-e2e/src/utils/modelsUtil.ts b/apps/chat-e2e/src/utils/modelsUtil.ts index 7328023f62..7c3a6cc24e 100644 --- a/apps/chat-e2e/src/utils/modelsUtil.ts +++ b/apps/chat-e2e/src/utils/modelsUtil.ts @@ -1,4 +1,9 @@ import { DialAIEntityModel } from '@/chat/types/models'; +import { + doesModelAllowAddons, + doesModelAllowSystemPrompt, + doesModelAllowTemperature, +} from '@/chat/utils/app/models'; export class ModelsUtil { public static getOpenAIEntities() { @@ -80,10 +85,20 @@ export class ModelsUtil { return ModelsUtil.getModels().find((a) => a.isDefault); } - public static getModelsWithoutSystemPrompt() { - return ModelsUtil.getModels() - .filter((m) => m.features?.systemPrompt === false) - .map((m) => m.id); + public static doesModelAllowSystemPrompt( + model: DialAIEntityModel | undefined, + ) { + return doesModelAllowSystemPrompt(model); + } + + public static doesModelAllowTemperature( + model: DialAIEntityModel | undefined, + ) { + return doesModelAllowTemperature(model); + } + + public static doesModelAllowAddons(model: DialAIEntityModel | undefined) { + return doesModelAllowAddons(model); } public static getModelsWithoutAttachment() { diff --git a/apps/chat/next.config.js b/apps/chat/next.config.js index 270100881c..4e409217ce 100644 --- a/apps/chat/next.config.js +++ b/apps/chat/next.config.js @@ -85,7 +85,7 @@ const nextConfig = { permanent: false, }, { - source: '/models/:slug([A-Za-z0-9@.:-]+)', + source: '/models/:slug([A-Za-z0-9@.-]+)', destination: '/?isolated-model-id=:slug', permanent: false, }, diff --git a/apps/chat/src/components/Chat/ChatSettings/ChatSettingsModal.tsx b/apps/chat/src/components/Chat/ChatSettings/ChatSettingsModal.tsx index 1767cc4258..444778f9a5 100644 --- a/apps/chat/src/components/Chat/ChatSettings/ChatSettingsModal.tsx +++ b/apps/chat/src/components/Chat/ChatSettings/ChatSettingsModal.tsx @@ -5,9 +5,9 @@ import { useTranslation } from 
'next-i18next'; import classNames from 'classnames'; import { DefaultsService } from '@/src/utils/app/data/defaults-service'; +import { doesModelHaveSettings } from '@/src/utils/app/models'; import { Conversation } from '@/src/types/chat'; -import { EntityType } from '@/src/types/common'; import { ModalState } from '@/src/types/modal'; import { DialAIEntityModel } from '@/src/types/models'; import { Translation } from '@/src/types/translation'; @@ -147,7 +147,7 @@ export const ChatSettings = ({ .map((conv) => modelsMap[conv.model.id]) .filter(Boolean) as DialAIEntityModel[]; - return allowedModels.some((model) => model.type !== EntityType.Application); + return allowedModels.some((model) => doesModelHaveSettings(model)); }, [conversations, modelsMap]); return ( diff --git a/apps/chat/src/components/Chat/ChatSettings/ConversationSettings.tsx b/apps/chat/src/components/Chat/ChatSettings/ConversationSettings.tsx index ca459c287a..ae3deb4a4b 100644 --- a/apps/chat/src/components/Chat/ChatSettings/ConversationSettings.tsx +++ b/apps/chat/src/components/Chat/ChatSettings/ConversationSettings.tsx @@ -3,6 +3,12 @@ import { ReactNode } from 'react'; import { useTranslation } from 'next-i18next'; import { DefaultsService } from '@/src/utils/app/data/defaults-service'; +import { + doesModelAllowAddons, + doesModelAllowSystemPrompt, + doesModelAllowTemperature, + doesModelHaveSettings, +} from '@/src/utils/app/models'; import { Conversation } from '@/src/types/chat'; import { EntityType } from '@/src/types/common'; @@ -37,13 +43,36 @@ interface Props { onChangeAddon: (addonsId: string) => void; } -export const SettingContainer = ({ children }: SettingContainerProps) => { +export function FieldContainer({ children }: SettingContainerProps) { if (!children) { return null; } return
{children};
-};
+}
+
+export function SettingContainer({ children }: SettingContainerProps) {
+  if (!children) {
+    return ;
+  }
+
+  return (
+
+      {children}
+
+ ); +} + +function EmptySettings() { + const { t } = useTranslation(Translation.Chat); + return ( + + + {t('There are no conversation settings for this agent ')} + + + ); +} export const ConversationSettings = ({ assistantModelId, @@ -65,71 +94,64 @@ export const ConversationSettings = ({ const model = modelsMap[conversation.model.id]; const isPlayback = !!conversation.playback?.isPlayback; - const isNotAllowedModel = !modelsMap[conversation.model.id]; + if (!model) { + return {t('Agent is not available')}; + } + + if (!doesModelHaveSettings(model)) { + return ; + } return ( -
- {!isNotAllowedModel ? ( - <> - {model && model.type === EntityType.Application && ( - - {t('There are no conversation settings for this agent ')} - - )} - {model && model.type === EntityType.Assistant && ( - - - - )} - {(!model || - (model.type === EntityType.Model && - model?.features?.systemPrompt)) && ( - - - - )} - {(!model || model.type !== EntityType.Application) && ( - - - - )} - {(!model || model.type !== EntityType.Application) && ( - - - - )} - - ) : ( - {t('Agent is not available')} + + {model.type === EntityType.Assistant && ( + + + )} -
+ {model.type === EntityType.Model && doesModelAllowSystemPrompt(model) && ( + + + + )} + {doesModelAllowTemperature(model) && ( + + + + )} + {doesModelAllowAddons(model) && ( + + + + )} + ); }; diff --git a/apps/chat/src/pages/api/chat.ts b/apps/chat/src/pages/api/chat.ts index 3ea7cfb8f1..326b8e57c0 100644 --- a/apps/chat/src/pages/api/chat.ts +++ b/apps/chat/src/pages/api/chat.ts @@ -2,6 +2,11 @@ import { NextApiRequest, NextApiResponse } from 'next'; import { getToken } from 'next-auth/jwt'; import { getServerSession } from 'next-auth/next'; +import { + doesModelAllowAddons, + doesModelAllowSystemPrompt, + doesModelAllowTemperature, +} from '@/src/utils/app/models'; import { validateServerSession } from '@/src/utils/auth/session'; import { OpenAIStream } from '@/src/utils/server'; import { @@ -51,12 +56,16 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { } let promptToSend = prompt; - if (!promptToSend && model.type === EntityType.Model) { + if (!doesModelAllowSystemPrompt(model)) { + promptToSend = ''; + } else if (!promptToSend && model.type === EntityType.Model) { promptToSend = DEFAULT_SYSTEM_PROMPT; } let temperatureToUse = temperature; - if ( + if (!doesModelAllowTemperature(model)) { + temperatureToUse = 1; + } else if ( !temperatureToUse && temperatureToUse !== 0 && model.type !== EntityType.Application @@ -106,7 +115,10 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { model, temperature: temperatureToUse, messages: messagesToSend, - selectedAddonsIds: selectedAddons?.length ? selectedAddons : undefined, + selectedAddonsIds: + selectedAddons?.length && doesModelAllowAddons(model) + ? selectedAddons + : undefined, assistantModelId: assistantModel?.id, userJWT: token?.access_token as string, chatId: id, diff --git a/apps/chat/src/store/conversations/conversations.epics.ts b/apps/chat/src/store/conversations/conversations.epics.ts index be929b1ef7..5b563f3cb1 100644 --- a/apps/chat/src/store/conversations/conversations.epics.ts +++ b/apps/chat/src/store/conversations/conversations.epics.ts @@ -75,6 +75,11 @@ import { parseStreamMessages, } from '@/src/utils/app/merge-streams'; import { isMediumScreen } from '@/src/utils/app/mobile'; +import { + doesModelAllowAddons, + doesModelAllowSystemPrompt, + doesModelAllowTemperature, +} from '@/src/utils/app/models'; import { updateSystemPromptInMessages } from '@/src/utils/app/overlay'; import { getEntitiesFromTemplateMapping } from '@/src/utils/app/prompts'; import { @@ -1324,18 +1329,22 @@ const streamMessageEpic: AppEpic = (action$, state$) => if (conversationModelType === EntityType.Model) { modelAdditionalSettings = { - prompt: lastModel?.features?.systemPrompt + prompt: doesModelAllowSystemPrompt(lastModel) ? payload.conversation.prompt : undefined, - temperature: payload.conversation.temperature, - selectedAddons, + temperature: doesModelAllowTemperature(lastModel) + ? payload.conversation.temperature + : 1, + selectedAddons: doesModelAllowAddons(lastModel) ? selectedAddons : [], }; } if (conversationModelType === EntityType.Assistant && assistantModelId) { modelAdditionalSettings = { assistantModel: modelsMap[assistantModelId], - temperature: payload.conversation.temperature, - selectedAddons, + temperature: doesModelAllowTemperature(lastModel) + ? payload.conversation.temperature + : 1, + selectedAddons: doesModelAllowAddons(lastModel) ? 
selectedAddons : [], }; } diff --git a/apps/chat/src/types/models.ts b/apps/chat/src/types/models.ts index 777f62661b..91aa5a3ae8 100644 --- a/apps/chat/src/types/models.ts +++ b/apps/chat/src/types/models.ts @@ -37,6 +37,8 @@ export interface CoreAIEntity { features?: { truncate_prompt?: boolean; system_prompt?: boolean; + temperature?: boolean; + addons?: boolean; url_attachments?: boolean; folder_attachments?: boolean; allow_resume?: boolean; @@ -51,7 +53,9 @@ export interface CoreAIEntity { export interface DialAIEntityFeatures { truncatePrompt?: boolean; - systemPrompt?: boolean; + systemPrompt: boolean; + temperature: boolean; + addons: boolean; urlAttachments?: boolean; folderAttachments?: boolean; allowResume?: boolean; diff --git a/apps/chat/src/utils/app/models.ts b/apps/chat/src/utils/app/models.ts new file mode 100644 index 0000000000..61a9e6f8c5 --- /dev/null +++ b/apps/chat/src/utils/app/models.ts @@ -0,0 +1,24 @@ +import { EntityType } from '@/src/types/common'; +import { DialAIEntityModel } from '@/src/types/models'; + +export const doesModelAllowSystemPrompt = ( + model: DialAIEntityModel | undefined, +) => !!model?.features?.systemPrompt; + +export const doesModelAllowTemperature = ( + model: DialAIEntityModel | undefined, +) => !!model?.features?.temperature; + +export const doesModelAllowAddons = (model: DialAIEntityModel | undefined) => + !!model?.features?.addons; + +export const doesModelHaveSettings = (model: DialAIEntityModel | undefined) => { + return ( + model && + model.type !== EntityType.Application && // custom settings in future + (model.type === EntityType.Assistant || + doesModelAllowSystemPrompt(model) || + doesModelAllowTemperature(model) || + doesModelAllowAddons(model)) + ); +}; diff --git a/apps/chat/src/utils/server/get-sorted-entities.ts b/apps/chat/src/utils/server/get-sorted-entities.ts index 50b50f4bf0..c36d091940 100644 --- a/apps/chat/src/utils/server/get-sorted-entities.ts +++ b/apps/chat/src/utils/server/get-sorted-entities.ts @@ -172,7 +172,9 @@ export const getSortedEntities = async (token: JWT | null) => { } : undefined, features: entity.features && { - systemPrompt: entity.features.system_prompt ?? false, + systemPrompt: entity.features.system_prompt ?? true, + temperature: entity.features.temperature ?? true, + addons: entity.features.addons ?? true, truncatePrompt: entity.features.truncate_prompt ?? false, urlAttachments: entity.features.url_attachments ?? false, folderAttachments: entity.features.folder_attachments ?? false,