Yuki Takei 11 месяцев назад
Родитель
Commit
c08b5f7d7d

+ 7 - 11
apps/app/src/features/openai/server/routes/message.ts

@@ -13,7 +13,6 @@ import { apiV3FormValidator } from '~/server/middlewares/apiv3-form-validator';
 import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
 import loggerFactory from '~/utils/logger';
 
-import { shouldHideMessageKey } from '../../interfaces/message';
 import { MessageErrorCode, type StreamErrorCode } from '../../interfaces/message-error';
 import AiAssistantModel from '../models/ai-assistant';
 import ThreadRelationModel from '../models/thread-relation';
@@ -85,6 +84,7 @@ export const postMessageHandlersFactory: PostMessageHandlersFactory = (crowi) =>
       threadRelation.updateThreadExpiration();
 
       let stream: AssistantStream;
+      const isSummaryMode = req.body.summaryMode ?? false;
 
       try {
         const assistant = await getOrCreateChatAssistant();
@@ -93,18 +93,14 @@ export const postMessageHandlersFactory: PostMessageHandlersFactory = (crowi) =>
         stream = openaiClient.beta.threads.runs.stream(thread.id, {
           assistant_id: assistant.id,
           additional_messages: [
-            {
-              role: 'assistant',
-              content: req.body.summaryMode
-                ? 'Turn on summary mode: I will try to answer concisely, aiming for 1-3 sentences.'
-                : 'I will turn off summary mode and answer.',
-              metadata: {
-                [shouldHideMessageKey]: 'true',
-              },
-            },
             { role: 'user', content: req.body.userMessage },
           ],
-          additional_instructions: aiAssistant.additionalInstruction,
+          additional_instructions: [
+            aiAssistant.additionalInstruction,
+            isSummaryMode
+              ? 'Turn on summary mode: I will try to answer concisely, aiming for 1-3 sentences.'
+              : 'I will turn off summary mode and answer.',
+          ].join('\n'),
         });
 
       }

+ 6 - 11
apps/app/src/features/openai/server/services/openai.ts

@@ -35,6 +35,7 @@ import {
 } from '../../interfaces/ai-assistant';
 import type { MessageListParams } from '../../interfaces/message';
 import { ThreadType } from '../../interfaces/thread-relation';
+import type { IVectorStore } from '../../interfaces/vector-store';
 import { removeGlobPath } from '../../utils/remove-glob-path';
 import AiAssistantModel, { type AiAssistantDocument } from '../models/ai-assistant';
 import { convertMarkdownToHtml } from '../utils/convert-markdown-to-html';
@@ -131,8 +132,11 @@ class OpenaiService implements IOpenaiService {
     }
 
     try {
-      const vectorStoreRelation = aiAssistantId != null ? await this.getVectorStoreRelationByAiAssistantId(aiAssistantId) : null;
-      const thread = await this.client.createThread(vectorStoreRelation?.vectorStoreId);
+      const aiAssistant = aiAssistantId != null
+        ? await AiAssistantModel.findOne({ _id: { $eq: aiAssistantId } }).populate<{ vectorStore: IVectorStore }>('vectorStore')
+        : null;
+
+      const thread = await this.client.createThread(aiAssistant?.vectorStore?.vectorStoreId);
       const threadRelation = await ThreadRelationModel.create({
         userId,
         type,
@@ -223,15 +227,6 @@ class OpenaiService implements IOpenaiService {
   }
 
 
-  async getVectorStoreRelationByAiAssistantId(aiAssistantId: string): Promise<VectorStoreDocument> {
-    const aiAssistant = await AiAssistantModel.findOne({ _id: { $eq: aiAssistantId } }).populate('vectorStore');
-    if (aiAssistant == null) {
-      throw createError(404, 'AiAssistant document does not exist');
-    }
-
-    return aiAssistant.vectorStore as VectorStoreDocument;
-  }
-
   async getVectorStoreRelationsByPageIds(pageIds: Types.ObjectId[]): Promise<VectorStoreDocument[]> {
     const pipeline = [
       // Stage 1: Match documents with the given pageId