From 7a7989d388b609f48cd874cb0b555aa70cb16a06 Mon Sep 17 00:00:00 2001
From: Chris Griffing
Date: Tue, 9 Apr 2024 18:22:52 -0700
Subject: [PATCH] Adds OpenAI customModel settings field and adds logic to support it

Closes #3223
---
 package.json                    | 18 +++++++++++++++---
 src/ai/aiProviderService.ts     |  2 +-
 src/ai/openaiProvider.ts        | 26 +++++++++++++++++++++-----
 src/config.ts                   |  1 +
 src/quickpicks/aiModelPicker.ts |  1 +
 5 files changed, 39 insertions(+), 9 deletions(-)

diff --git a/package.json b/package.json
index 07e19b33a966e..179ace1811d43 100644
--- a/package.json
+++ b/package.json
@@ -3321,7 +3321,8 @@
 					"gpt-3.5-turbo",
 					"gpt-3.5-turbo-16k",
 					"gpt-3.5-turbo-0613",
-					"gpt-3.5-turbo-1106"
+					"gpt-3.5-turbo-1106",
+					"custom"
 				],
 				"enumDescriptions": [
 					"GPT-4 Turbo (Latest)",
@@ -3334,7 +3335,8 @@
 					"GPT-3.5 Turbo",
 					"GPT-3.5 Turbo 16k",
 					"GPT-3.5 Turbo (June 13)",
-					"GPT-3.5 Turbo (Nov 6)"
+					"GPT-3.5 Turbo (Nov 6)",
+					"Custom"
 				],
 				"markdownDescription": "Specifies the OpenAI model to use for GitLens' experimental AI features",
 				"scope": "window",
@@ -3346,7 +3348,17 @@
 					"null"
 				],
 				"default": null,
-				"markdownDescription": "Specifies a custom URL to use for access to an OpenAI model via Azure. Azure URLs should be in the following format: https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}",
+				"markdownDescription": "Specifies a custom URL to use for access to an OpenAI-compatible API. URLs could be in any format as long as the API responds in the same way as OpenAI. eg: https://{serverHost}/v1/chat/completions \n\nAzure URLs should be in the following format: https://{your-resource-name}.openai.azure.com/openai/deployments/{deployment-id}/chat/completions?api-version={api-version}",
+				"scope": "window",
+				"order": 102
+			},
+			"gitlens.ai.experimental.openai.customModel": {
+				"type": [
+					"string",
+					"null"
+				],
+				"default": null,
+				"markdownDescription": "Specifies a custom model to use with an OpenAI-compatible API. Only used when the OpenAI model is set to 'Custom'.",
 				"scope": "window",
 				"order": 102
 			},
diff --git a/src/ai/aiProviderService.ts b/src/ai/aiProviderService.ts
index 3d35e9d398ef5..5a04721368d5a 100644
--- a/src/ai/aiProviderService.ts
+++ b/src/ai/aiProviderService.ts
@@ -218,7 +218,7 @@ async function confirmAIProviderToS(provider: AIProvider, storage: Storage): Pro
 	return false;
 }
 
-export function getMaxCharacters(model: OpenAIModels | AnthropicModels, outputLength: number): number {
+export function getMaxCharacters(model: OpenAIModels | AnthropicModels | string, outputLength: number): number {
 	const tokensPerCharacter = 3.1;
 
 	let tokens;
diff --git a/src/ai/openaiProvider.ts b/src/ai/openaiProvider.ts
index e52a162971239..2d0deac03e584 100644
--- a/src/ai/openaiProvider.ts
+++ b/src/ai/openaiProvider.ts
@@ -25,7 +25,7 @@ export class OpenAIProvider implements AIProvider<'openai'> {
 		return configuration.get('ai.experimental.openai.url') || 'https://api.openai.com/v1/chat/completions';
 	}
 
-	private async getOrChooseModel(): Promise<OpenAIModels | undefined> {
+	private async getOrChooseModel(): Promise<OpenAIModels | string | undefined> {
 		const model = this.model;
 		if (model != null) return model;
 
@@ -40,7 +40,14 @@ export class OpenAIProvider implements AIProvider<'openai'> {
 		const apiKey = await getApiKey(this.container.storage);
 		if (apiKey == null) return undefined;
 
-		const model = await this.getOrChooseModel();
+		let model = await this.getOrChooseModel();
+		if (model == null) return undefined;
+
+		if (model === 'custom') {
+			const customModel = configuration.get('ai.experimental.openai.customModel') || '';
+			model = customModel ? `${customModel}` : undefined;
+		}
+		// Might need to notify the user that they need to set a custom model name
 		if (model == null) return undefined;
 
 		let retries = 0;
@@ -88,6 +95,7 @@ Follow the user's instructions carefully, don't repeat yourself, don't include t
 		};
 
 		const rsp = await this.fetch(apiKey, request);
+
 		if (!rsp.ok) {
 			if (rsp.status === 404) {
 				throw new Error(
@@ -135,7 +143,14 @@ Follow the user's instructions carefully, don't repeat yourself, don't include t
 		const apiKey = await getApiKey(this.container.storage);
 		if (apiKey == null) return undefined;
 
-		const model = await this.getOrChooseModel();
+		let model = await this.getOrChooseModel();
+		if (model == null) return undefined;
+
+		if (model === 'custom') {
+			const customModel = configuration.get('ai.experimental.openai.customModel') || '';
+			model = customModel ? `${customModel}` : undefined;
+		}
+		// Might need to notify the user that they need to set a custom model name
 		if (model == null) return undefined;
 
 		let retries = 0;
@@ -302,10 +317,11 @@ export type OpenAIModels =
 	| 'gpt-3.5-turbo-1106'
 	| 'gpt-3.5-turbo'
 	| 'gpt-3.5-turbo-16k'
-	| 'gpt-3.5-turbo-0613';
+	| 'gpt-3.5-turbo-0613'
+	| 'custom';
 
 interface OpenAIChatCompletionRequest {
-	model: OpenAIModels;
+	model: OpenAIModels | string;
 	messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
 	temperature?: number;
 	top_p?: number;
diff --git a/src/config.ts b/src/config.ts
index 8cfbe8bdc9c1d..87bc95be81822 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -15,6 +15,7 @@ export interface Config {
 		readonly openai: {
 			readonly model: OpenAIModels | null;
 			readonly url: string | null;
+			readonly customModel: string | null;
 		};
 		readonly anthropic: {
 			readonly model: AnthropicModels | null;
diff --git a/src/quickpicks/aiModelPicker.ts b/src/quickpicks/aiModelPicker.ts
index e68e0642304fb..80b5fcbe4864c 100644
--- a/src/quickpicks/aiModelPicker.ts
+++ b/src/quickpicks/aiModelPicker.ts
@@ -22,6 +22,7 @@ export async function showAIModelPicker(provider?: AIProviders): Promise
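
Usage sketch (not part of the patch): with these changes, pointing GitLens at a self-hosted OpenAI-compatible server could look roughly like the settings.json entries below. Only the setting keys and the "custom" enum value come from the patch; the URL and model name are placeholder values for whatever server and model you actually run.

	// settings.json (JSONC) -- hypothetical example values
	{
		// "custom" is the new enum value added to the model setting
		"gitlens.ai.experimental.openai.model": "custom",
		// any endpoint that responds like OpenAI's /v1/chat/completions (placeholder host/port)
		"gitlens.ai.experimental.openai.url": "http://localhost:11434/v1/chat/completions",
		// resolved by the provider and sent as the `model` field of the chat-completions request (placeholder name)
		"gitlens.ai.experimental.openai.customModel": "llama3"
	}

If the model is set to "custom" but customModel is empty or unset, both patched code paths resolve the model to undefined and bail out, as flagged by the "Might need to notify the user" comment in the diff.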