Yuki Takei 1 год назад
Родитель
Commit
036d342ae5

+ 3 - 3
apps/app/src/server/routes/apiv3/openai/chat.ts

@@ -3,7 +3,7 @@ import type { ValidationChain } from 'express-validator';
 import { body } from 'express-validator';
 
 import type Crowi from '~/server/crowi';
-import { openaiService } from '~/server/service/openai';
+import { openaiClient } from '~/server/service/openai';
 import loggerFactory from '~/utils/logger';
 
 import { apiV3FormValidator } from '../../../middlewares/apiv3-form-validator';
@@ -31,9 +31,9 @@ export const chatHandlersFactory: ChatHandlersFactory = (crowi) => {
     accessTokenParser, loginRequiredStrictly, validator, apiV3FormValidator,
     async(req: Req, res: ApiV3Response) => {
       try {
-        const chatCompletion = await openaiService.client.chat.completions.create({
+        const chatCompletion = await openaiClient.chat.completions.create({
           messages: [{ role: 'assistant', content: req.body.userMessage }],
-          model: 'gpt-3.5-turbo-0125',
+          model: 'gpt-4o',
         });
 
         return res.apiv3({ assistantMessage: chatCompletion.choices[0].message.content });

+ 46 - 0
apps/app/src/server/service/openai/assistant/assistant.ts

@@ -0,0 +1,46 @@
+import type OpenAI from 'openai';
+
+import { configManager } from '../../config-manager';
+import { openaiClient } from '../client';
+
+const findAssistantByName = async(assistantName: string): Promise<OpenAI.Beta.Assistant | undefined> => {
+
+  // declare finder
+  const findAssistant = async(assistants: OpenAI.Beta.Assistants.AssistantsPage): Promise<OpenAI.Beta.Assistant | undefined> => {
+    const found = assistants.data.find(assistant => assistant.name === assistantName);
+
+    if (found != null) {
+      return found;
+    }
+
+    // recursively find assistant
+    if (assistants.hasNextPage()) {
+      return findAssistant(await assistants.getNextPage());
+    }
+  };
+
+  const storedAssistants = await openaiClient.beta.assistants.list({ order: 'desc' });
+
+  return findAssistant(storedAssistants);
+};
+
+const getOrCreateAssistant = async(): Promise<OpenAI.Beta.Assistant> => {
+
+  const appSiteUrl = configManager.getConfig('crowi', 'app:siteUrl');
+  const assistantName = `GROWI OpenAI Assistant for ${appSiteUrl}`;
+
+  const assistantOnRemote = await findAssistantByName(assistantName);
+  if (assistantOnRemote != null) {
+    // store
+    return assistantOnRemote;
+  }
+
+  const newAssistant = await openaiClient.beta.assistants.create({
+    name: assistantName,
+    model: 'gpt-4o',
+  });
+
+  return newAssistant;
+};
+
+export const defaultAssistant = getOrCreateAssistant();

+ 1 - 0
apps/app/src/server/service/openai/assistant/index.ts

@@ -0,0 +1 @@
+export * from './assistant';

+ 5 - 0
apps/app/src/server/service/openai/client.ts

@@ -0,0 +1,5 @@
+import OpenAI from 'openai';
+
+export const openaiClient = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY, // This is the default and can be omitted
+});

+ 14 - 0
apps/app/src/server/service/openai/embeddings.ts

@@ -0,0 +1,14 @@
+import type { OpenAI } from 'openai';
+
+import { openaiClient } from './client';
+
+export const embed = async(username: string, input: string): Promise<OpenAI.Embedding[]> => {
+  const result = await openaiClient.embeddings.create({
+    input,
+    model: 'text-embedding-3-large',
+    dimensions: Number(process.env.OPENAI_DIMENSIONS),
+    user: username,
+  });
+
+  return result.data;
+};

+ 2 - 1
apps/app/src/server/service/openai/index.ts

@@ -1 +1,2 @@
-export * from './openai';
+export * from './embeddings';
+export * from './client';

+ 0 - 30
apps/app/src/server/service/openai/openai.ts

@@ -1,30 +0,0 @@
-import { OpenAI } from 'openai';
-
-export interface IOpenaiService {
-  embed: (username: string, input: string) => Promise<OpenAI.Embedding[]>;
-}
-
-class OpenaiService implements IOpenaiService {
-
-  client: OpenAI;
-
-  constructor() {
-    this.client = new OpenAI({
-      apiKey: process.env.OPENAI_API_KEY, // This is the default and can be omitted
-    });
-  }
-
-  async embed(username: string, input: string): Promise<OpenAI.Embedding[]> {
-    const result = await this.client.embeddings.create({
-      input,
-      model: 'text-embedding-3-large',
-      dimensions: Number(process.env.OPENAI_DIMENSIONS),
-      user: username,
-    });
-
-    return result.data;
-  }
-
-}
-
-export const openaiService = new OpenaiService();

+ 3 - 3
apps/app/src/server/service/search-delegator/elasticsearch.ts

@@ -20,7 +20,7 @@ import type { PageModel } from '../../models/page';
 import { createBatchStream } from '../../util/batch-stream';
 import { configManager } from '../config-manager';
 import type { UpdateOrInsertPagesOpts } from '../interfaces/search';
-import { openaiService } from '../openai';
+import { embed } from '../openai';
 
 import { aggregatePipelineToIndex } from './aggregate-to-index';
 import type { AggregatedPage, BulkWriteBody, BulkWriteCommand } from './bulk-write';
@@ -485,7 +485,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
       async transform(chunk: AggregatedPage[], encoding, callback) {
         // append embedding
         for await (const doc of chunk) {
-          doc.revisionBodyEmbedded = (await openaiService.embed(doc.creator.username, doc.revision.body))[0].embedding;
+          doc.revisionBodyEmbedded = (await embed(doc.creator.username, doc.revision.body))[0].embedding;
         }
 
         this.push(chunk);
@@ -848,7 +848,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   }
 
   async appendVectorScore(query, queryString: string, username: string): Promise<void> {
-    const queryVector = (await openaiService.embed(username, queryString))[0].embedding;
+    const queryVector = (await embed(username, queryString))[0].embedding;
 
     query.body.query = {
       script_score: {