From 8f7ee3bfe53b3d7e5f59e5193e204bb32e52c5b7 Mon Sep 17 00:00:00 2001 From: Zoheb Malik Date: Sat, 3 May 2025 13:04:54 +0100 Subject: [PATCH] OpenAI support only. Removed other platform support, as well as unneeded code. --- .changeset/popular-ravens-laugh.md | 5 + README.md | 17 +-- assets/prompt.structure.json | 50 ++------ src/commands/command.config.ts | 33 +++-- src/models/model.config.ts | 14 +-- src/models/model.request.ts | 17 +-- src/services/service.network.ts | 118 ++---------------- .../service.process/process.request.ts | 96 +------------- .../service.process/process.response.ts | 28 +---- .../service.prompts/service.system.prompt.ts | 113 +---------------- 10 files changed, 62 insertions(+), 429 deletions(-) create mode 100644 .changeset/popular-ravens-laugh.md diff --git a/.changeset/popular-ravens-laugh.md b/.changeset/popular-ravens-laugh.md new file mode 100644 index 0000000..b0c8d62 --- /dev/null +++ b/.changeset/popular-ravens-laugh.md @@ -0,0 +1,5 @@ +--- +"overide": major +--- + +This change removes all platform support besides OpenAI and includes some code refactoring. 
diff --git a/README.md b/README.md index f03ad6c..0ce9c68 100644 --- a/README.md +++ b/README.md @@ -20,11 +20,10 @@ - [Community](#community) - [License](#license) - ## Key Features - **IDE Agnostic**: Works with any IDE or text editor -- **AI-Powered Code Generation**: Uses OpenAI, DeepSeek, or Groq APIs +- **AI-Powered Code Generation**: Uses OpenAI API - **Live File Monitoring**: Continuously monitors files for code generation prompts - **Simple Prompting Syntax**: Uses intuitive `//> Accept the changes (y/n): -// ``` @@ -96,8 +89,8 @@ Configure Overide using `oi-config.json`: ```json { - "name": "project name", - "ignore": ["node_modules", "*.test.js"] + "name": "project name", + "ignore": ["node_modules", "*.test.js"] } ``` diff --git a/assets/prompt.structure.json b/assets/prompt.structure.json index 2291739..6b068dc 100644 --- a/assets/prompt.structure.json +++ b/assets/prompt.structure.json @@ -1,40 +1,14 @@ { - "openai": { - "systemMessage": "Your task is to provide accurate and efficient code completions and respond in VALID JSON.", - "context": "Below is the current code context:", - "format": "```[\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n},\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n}\n]\n```", - "instructions": [ - "Do not include comments explaining the changes.", - "Make sure that the structure of the file is maintained. 
Imports, Global Variables and should be placed at the top of the file below other imports or global variables.", - "Make sure that the {lines in find} accounts for empty spaces and empty lines in the existing code.", - "Include tabs and spaces in the replace lines to PROPERLY INDENT according to existing code.", - "Use the following JSON format to respond:" - ] - }, - "deepseek": { - "systemMessage": "You are a coding assistant API specialized in generating accurate and efficient code completions and responding in valid JSON.", - "context": "Below is the current code context:", - "format": "[\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n},\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n}\n]\n", - "instructions": [ - "Do not include comments explaining the changes.", - "Please provide the entire code block that should be replaced, and the entire new code block as the replacement, as list of lines.", - "Return the JSON inside a markdown block quote using triple backticks (```).", - "If code to be replaced is separated by even ONE OR MORE empty line then return multiple {find, replace} structures in a list", - "Include proper indentation in code.", - "Use the following JSON format to respond:" - ] - }, - "groq": { - "systemMessage": "You are a coding assistant API specialized in generating accurate and efficient code completions and responding in valid JSON while following the instructions STRICTLY.", - "context": "Below is the current code context:", - "format": "[\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n},\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n}\n]\n", - "instructions": [ - "Do not include comments explaining the changes.", - "Please provide the entire code block that should be replaced, and the entire new code block as the replacement, as list of lines.", - "Return the VALID JSON inside a markdown block quote using triple backticks (```).", - "If code to be replaced is 
separated by even ONE OR MORE empty line then return multiple {find, replace} structures in a list", - "Include proper indentation in code.", - "Use the following JSON format to respond:" - ] - } + "openai": { + "systemMessage": "Your task is to provide accurate and efficient code completions and respond in VALID JSON.", + "context": "Below is the current code context:", + "format": "```[\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n},\n{\n \"find\": [lines to find],\n \"replace\": [lines to replace]\n}\n]\n```", + "instructions": [ + "Do not include comments explaining the changes.", + "Make sure that the structure of the file is maintained. Imports and Global Variables should be placed at the top of the file below other imports or global variables.", + "Make sure that the {lines in find} accounts for empty spaces and empty lines in the existing code.", + "Include tabs and spaces in the replace lines to PROPERLY INDENT according to existing code.", + "Use the following JSON format to respond:" + ] + } } diff --git a/src/commands/command.config.ts b/src/commands/command.config.ts index 576a9f8..1c2dc50 100644 --- a/src/commands/command.config.ts +++ b/src/commands/command.config.ts @@ -17,12 +17,11 @@ import utilParser from '../utilis/util.parser'; /** * The `Config` class is responsible for handling both global and local configurations - * for the `overide` CLI application. It manages configuration settings for different platforms - * (like OpenAI and DeepSeek) and allows users to select an active platform, update config - * details, and manage ignored files and project-specific settings. + * for the `overide` CLI application. It manages configuration settings for OpenAI + * and allows users to update config details, and manage ignored files and project-specific settings. * * Responsibilities: - * - Prompt the user for platform-specific configuration details. + * - Prompt the user for OpenAI configuration details. 
* - Manage global configuration, including setting the active platform and updating platform settings. * - Handle local configuration updates, including project name and ignored files. * - Ensure that required directories and configuration files exist. @@ -37,7 +36,7 @@ class Config extends OiCommand { // Define supported platforms and their respective configuration prompts this.platforms = supportedPlatforms; - // Configuration questions for each platform (OpenAI, DeepSeek, Groq) + // Configuration questions for OpenAI this.platformQuestions = platformQuestions; } @@ -109,18 +108,18 @@ class Config extends OiCommand { } async handleEmbeddingEnable(): Promise { - // Check if OpenAi platform details are available. + // Check if OpenAI platform details are available. const activePlatform = CommandHelper.getActiveServiceDetails(true); if (!activePlatform) { console.warn( - 'Overide supports embeddings over OpenAI\nEnabling this will incure additional cost' + 'Overide supports embeddings over OpenAI\nEnabling this will incur additional cost' ); - // Ask for open ai platform details. + // Ask for OpenAI platform details. const answers = await this.promptPlatformConfig('openai'); // Check if a global config file already exists, if not initialize an empty config const existingConfig = CommandHelper.configExists(true) - ? await CommandHelper.readConfigFileData(true) + ? CommandHelper.readConfigFileData(true) : {}; // Merge the new platform configuration with the existing config @@ -128,12 +127,12 @@ class Config extends OiCommand { ...existingConfig, ['openai']: { ...answers, - isActive: Object.keys(existingConfig as GlobalConfig).length === 0 ? true : false // Set isActive for first platform + isActive: true // OpenAI is always active for embeddings } }; // Save the updated global configuration - await CommandHelper.writeConfigFileData(true, updatedConfig); + CommandHelper.writeConfigFileData(true, updatedConfig); } // Set the embeddings flag to true. 
@@ -179,7 +178,7 @@ class Config extends OiCommand { } // Read the existing global configuration file - const existingConfig = (await CommandHelper.readConfigFileData(true)) as GlobalConfig; + const existingConfig = CommandHelper.readConfigFileData(true) as GlobalConfig; // Get a list of available platforms from the existing configuration const activePlatforms = Object.keys(existingConfig); @@ -209,7 +208,7 @@ class Config extends OiCommand { }); // Save the updated configuration back to the global config file - await CommandHelper.writeConfigFileData(true, updatedConfig); + CommandHelper.writeConfigFileData(true, updatedConfig); console.log(`Successfully updated the active platform to: ${selectedPlatform}`); } @@ -246,7 +245,7 @@ class Config extends OiCommand { // Check if a global config file already exists, if not initialize an empty config const existingConfig = CommandHelper.configExists(true) - ? await CommandHelper.readConfigFileData(true) + ? CommandHelper.readConfigFileData(true) : {}; // Merge the new platform configuration with the existing config @@ -259,7 +258,7 @@ class Config extends OiCommand { }; // Save the updated global configuration - await CommandHelper.writeConfigFileData(true, updatedConfig); + CommandHelper.writeConfigFileData(true, updatedConfig); console.log('Run `overide config global -a | --set-active` to select active platform'); } @@ -285,7 +284,7 @@ class Config extends OiCommand { } // Read the local configuration - const config: LocalConfig = (await CommandHelper.readConfigFileData()) as LocalConfig; + const config: LocalConfig = CommandHelper.readConfigFileData() as LocalConfig; // Update the list of ignored files if provided in options if (options.ignore) { @@ -315,7 +314,7 @@ class Config extends OiCommand { } // Save the updated local configuration - await CommandHelper.writeConfigFileData(false, config); + CommandHelper.writeConfigFileData(false, config); console.log('Local config updated successfully.'); } } diff --git 
a/src/models/model.config.ts b/src/models/model.config.ts index 5328573..5ab9c5d 100644 --- a/src/models/model.config.ts +++ b/src/models/model.config.ts @@ -10,9 +10,8 @@ export interface GlobalConfig { } export interface GlobalPlatformInfo { - apiKey?: string; // Optional, as Ollama does not require an API key - baseUrl?: string; // Optional, as not all platforms may have a baseUrl - orgId?: string; // Optional, specific to platforms like OpenAI + apiKey?: string; + orgId?: string; isActive: boolean; } @@ -21,16 +20,11 @@ export interface ActivePlatformDetails { platformConfig: GlobalPlatformInfo; } -export const supportedPlatforms = ['OpenAI', 'DeepSeek', 'Groq']; +export const supportedPlatforms = ['OpenAI']; export const platformQuestions = { openai: [ { type: 'input', name: 'apiKey', message: 'Enter your API key:' }, { type: 'input', name: 'orgId', message: 'Enter your Organization ID:' } - ], - deepseek: [ - { type: 'input', name: 'apiKey', message: 'Enter your API key:' }, - { type: 'input', name: 'baseUrl', message: 'Enter the BaseUrl to use:' } - ], - groq: [{ type: 'input', name: 'apiKey', message: 'Enter your Groq API key:' }] + ] }; diff --git a/src/models/model.request.ts b/src/models/model.request.ts index 3eaeb6e..7a58234 100644 --- a/src/models/model.request.ts +++ b/src/models/model.request.ts @@ -1,11 +1,10 @@ import { ResponseFormatJSONSchema } from 'openai/resources'; import { ChatCompletionMessageParam as OpenAIChatCompletionMessageParam } from 'openai/resources/chat/completions'; -import { ChatCompletionMessageParam as GroqChatCompletionMessageParam } from 'groq-sdk/resources/chat/completions'; import { ActivePlatformDetails } from './model.config'; export interface GeneralRequestObject { platform: ActivePlatformDetails; - metadata: OpenAiRequestObject | DeepSeekRequestObject | GroqRequestObject; + metadata: OpenAiRequestObject; } export interface OpenAiRequestObject { @@ -19,17 +18,3 @@ export interface OpenAiRequestObject { 
frequency_penalty?: number; response_format: ResponseFormatJSONSchema; } - -export interface DeepSeekRequestObject { - model: string; - messages: OpenAIChatCompletionMessageParam[]; -} - -export interface GroqRequestObject { - model: string; - messages: GroqChatCompletionMessageParam[]; - temperature?: number; - max_tokens?: number; - top_p?: number; - stream?: boolean; -} diff --git a/src/services/service.network.ts b/src/services/service.network.ts index 1c3827c..b169604 100644 --- a/src/services/service.network.ts +++ b/src/services/service.network.ts @@ -1,18 +1,7 @@ import OpenAI from 'openai'; -import Groq from 'groq-sdk'; - -import { - DeepSeekRequestObject, - GeneralRequestObject, - GroqRequestObject, - OpenAiRequestObject -} from '../models/model.request'; -import { - ChatCompletion, - ChatCompletionMessageParam as OpenAIChatCompletionMessageParam -} from 'openai/resources/chat/completions'; +import { GeneralRequestObject, OpenAiRequestObject } from '../models/model.request'; +import { ChatCompletion } from 'openai/resources/chat/completions'; import { ActivePlatformDetails } from '../models/model.config'; -import { ChatCompletionMessageParam as GroqChatCompletionMessageParam } from 'groq-sdk/resources/chat/completions'; import * as dotenv from 'dotenv'; @@ -30,22 +19,11 @@ abstract class NetworkService { activeServiceDetails: ActivePlatformDetails, metadata: OpenAiRequestObject ): Promise; - - abstract handleDeepSeekRequest( - activeServiceDetails: ActivePlatformDetails, - metadata: DeepSeekRequestObject - ): Promise; - - abstract handleGroqRequest( - activeServiceDetails: ActivePlatformDetails, - metadata: GroqRequestObject - ): Promise; } /** - * The `Network` class is responsible for making API requests to different - * services (OpenAI, DeepSeek, and Groq) to generate code based on the - * provided request data. + * The `Network` class is responsible for making API requests to OpenAI + * to generate code based on the provided request data. 
*/ class NetworkServiceImpl extends NetworkService { /** @@ -78,7 +56,7 @@ class NetworkServiceImpl extends NetworkService { } /** - * Generates code based on the active service (OpenAI, DeepSeek, or Groq). + * Generates code based on OpenAI service. * * @param {object} requestData - The request data containing service details and metadata. * @returns {Promise} - The generated code response. @@ -96,22 +74,10 @@ class NetworkServiceImpl extends NetworkService { const platform = activeServiceDetails.platform; // Handle requests based on the selected platform - switch (platform) { - case 'openai': - return this.handleOpenAIRequest(activeServiceDetails, metadata as OpenAiRequestObject); - case 'deepseek': - return this.handleDeepSeekRequest(activeServiceDetails, { - ...metadata, - messages: metadata.messages as OpenAIChatCompletionMessageParam[] - }); - case 'groq': - return this.handleGroqRequest(activeServiceDetails, { - ...metadata, - messages: metadata.messages as GroqChatCompletionMessageParam[] - }); - default: - throw new Error('No valid model or platform selected.'); + if (platform === 'openai') { + return this.handleOpenAIRequest(activeServiceDetails, metadata as OpenAiRequestObject); } + throw new Error('No valid model or platform selected.'); } /** @@ -148,74 +114,6 @@ class NetworkServiceImpl extends NetworkService { throw error; } } - - /** - * Handles requests to the DeepSeek service. - * - * @param {object} activeServiceDetails - The details of the active DeepSeek service. - * @param {object} metadata - The metadata for the API request. - * @returns {Promise} - The generated code response from DeepSeek. - * @throws Will throw an error if the API key or base URL is missing. 
- */ - async handleDeepSeekRequest( - activeServiceDetails: ActivePlatformDetails, - metadata: DeepSeekRequestObject - ): Promise { - const { apiKey, baseUrl } = activeServiceDetails.platformConfig; - - if (!apiKey || !baseUrl) { - throw new Error('API key or BaseUrl missing for DeepSeek.'); - } - - try { - const openai = new OpenAI.OpenAI({ apiKey, baseURL: baseUrl }); - const completions = await openai.chat.completions.create({ - ...metadata, - stream: false - }); - return (completions.choices[0] as ChatCompletion.Choice).message.content || ''; // Return the content string from DeepSeek completion - } catch (error) { - if (error instanceof Error) { - console.error(`Error generating code with DeepSeek: ${error.message}`); - throw error; // Rethrow error for handling at a higher level - } - throw error; - } - } - - /** - * Handles requests to the Groq service. - * - * @param {object} activeServiceDetails - The details of the active Groq service. - * @param {object} metadata - The metadata for the API request. - * @returns {Promise} - The generated code response from Groq. - * @throws Will throw an error if the API key is missing. 
- */ - async handleGroqRequest( - activeServiceDetails: ActivePlatformDetails, - metadata: GroqRequestObject - ): Promise { - const { apiKey } = activeServiceDetails.platformConfig; - - if (!apiKey) { - throw new Error('API key missing for Groq.'); - } - - try { - const groq = new Groq({ apiKey }); - const completions = await groq.chat.completions.create({ - ...metadata, - stream: false - }); - return (completions.choices[0] as ChatCompletion.Choice).message.content || ''; // Return the content string from Groq completion - } catch (error) { - if (error instanceof Error) { - console.error(`Error generating code with Groq: ${error.message}`); - throw error; - } - throw error; - } - } } export default new NetworkServiceImpl(); diff --git a/src/services/service.process/process.request.ts b/src/services/service.process/process.request.ts index 537a2e9..5743057 100644 --- a/src/services/service.process/process.request.ts +++ b/src/services/service.process/process.request.ts @@ -1,24 +1,19 @@ import { ActivePlatformDetails } from '../../models/model.config'; import { InsertionRequestInfo } from '../../models/model.prompts'; -import { - DeepSeekRequestObject, - GeneralRequestObject, - GroqRequestObject, - OpenAiRequestObject -} from '../../models/model.request'; +import { GeneralRequestObject, OpenAiRequestObject } from '../../models/model.request'; import CommandHelper from '../../utilis/util.command.config'; import { systemPromptServiceImpl } from '../service.prompts/service.system.prompt'; /** * The `FormatRequest` class is responsible for creating a dynamic request - * based on the active AI service platform (OpenAI or DeepSeek). It formats + * based on the active AI service platform (OpenAI). It formats * the prompt using `FormatPrompt` and constructs the request body accordingly. */ class ProcessRequest { /** * Creates a dynamic request object based on the active service platform. - * It calls either the OpenAI or DeepSeek-specific request formatting function. 
+ * It calls the OpenAI-specific request formatting function. * * @param prompt - The raw prompt extracted from the file. * @param promptArray - The array of context around the prompt. @@ -39,19 +34,10 @@ class ProcessRequest { } // Determine which platform is active and create the appropriate request - switch (activeServiceDetails.platform) { - case 'openai': - return this.createOpenAIRequest(insertionRequest, activeServiceDetails, verbose); - - case 'deepseek': - return this.createDeepSeekRequest(insertionRequest, activeServiceDetails, verbose); - - case 'groq': - return this.createGroqRequest(insertionRequest, activeServiceDetails, verbose); - - default: - throw new Error(`Unsupported platform: ${activeServiceDetails.platform}`); + if (activeServiceDetails.platform === 'openai') { + return this.createOpenAIRequest(insertionRequest, activeServiceDetails, verbose); } + throw new Error(`Unsupported platform: ${activeServiceDetails.platform}`); } catch (error) { if (error instanceof Error) { console.error(`Error in creating request: ${error.message}`); @@ -128,76 +114,6 @@ class ProcessRequest { metadata: metadata }; } - - /** - * Creates and formats the request for DeepSeek models. - * - * @param prompt - The raw prompt extracted from the file. - * @param promptArray - The array of context around the prompt. - * @param activeServiceDetails - Details about the active service (platform, apiKey, etc.). - * @param completionType - The type of completion being requested. - * @returns The request object for the DeepSeek API. - */ - async createDeepSeekRequest( - insertionRequest: InsertionRequestInfo, - activeServiceDetails: ActivePlatformDetails, - verbose: boolean - ): Promise { - // Getting the final prompt. 
- const messages = await systemPromptServiceImpl.getDeepSeekSystemMessage(insertionRequest); - - if (verbose) { - console.log(`Prompt Text : ${messages}`); - } - - // Making metadata - const metadata: DeepSeekRequestObject = { - model: 'deepseek-chat', - messages: messages - }; - - // Construct the request body for DeepSeek API - return { - platform: activeServiceDetails, - metadata: metadata - }; - } - - /** - * Creates and formats the request for Groq models. - * - * @param prompt - The raw prompt extracted from the file. - * @param promptArray - The array of context around the prompt. - * @param activeServiceDetails - Details about the active service (platform, apiKey, etc.). - * @param completionType - The type of completion being requested. - * @param verbose - Whether to log the request details. - * @returns The request object for the Groq API. - */ - async createGroqRequest( - insertionRequest: InsertionRequestInfo, - activeServiceDetails: ActivePlatformDetails, - verbose: boolean - ): Promise { - const messages = await systemPromptServiceImpl.getGroqSystemMessage(insertionRequest); - - if (verbose) { - console.log(`Prompt Text : ${messages}`); - } - - const metadata: GroqRequestObject = { - model: 'llama-3.1-70b-versatile', - messages: messages, - temperature: 0.94, - max_tokens: 2048, - top_p: 1, - stream: false - }; - - return { - platform: activeServiceDetails, - metadata: metadata - }; - } } export default new ProcessRequest(); diff --git a/src/services/service.process/process.response.ts b/src/services/service.process/process.response.ts index 063b273..124c88d 100644 --- a/src/services/service.process/process.response.ts +++ b/src/services/service.process/process.response.ts @@ -1,43 +1,23 @@ import { ReplacementBlock } from '../../models/model.response'; -import serviceDev from '../service.dev'; /** * The `FormatResponse` class is responsible for formatting the response received from - * AI service platforms like OpenAI, DeepSeek, and Groq. 
It extracts code blocks from - * the response content and returns them for further processing. + * OpenAI. It extracts code blocks from the response content and returns them for + * further processing. */ class ProcessResponse { /** - * Formats the response based on the active service platform. - * Calls the appropriate formatting function for OpenAI, DeepSeek, or Groq. + * Formats the response from OpenAI. * * @param response - The API response object. - * @param completionType - Type of completion to aid in code extraction. * @param verbose - Whether to log the formatting process. * @returns The formatted code block extracted from the response, or null if not found. */ - async formatResponse( - response: string, - verbose: boolean = false - ): Promise { - // IN case of OpenAi we receive a proper json format. + async formatResponse(response: string): Promise { try { return JSON.parse(response)['changes'] as ReplacementBlock[]; } catch (error) { console.error(`Error in formatting response: ${(error as Error).message}`); - } - - try { - const replacementObject: ReplacementBlock[] = serviceDev.extractCodeBlock(response, verbose); - for (const bloc of replacementObject) { - bloc.replace = bloc.replace.filter( - line => !line.includes('//>') || !line.includes('; - - abstract getDeepSeekSystemMessage( - insertionRequest: InsertionRequestInfo - ): Promise; - - abstract getGroqSystemMessage( - insertionRequest: InsertionRequestInfo - ): Promise; } class SystemPromptServiceImpl extends SystemPromptService { private static instance: SystemPromptServiceImpl; private basePrompt: SystemPromptInfo; - private hasDependencyGraph: boolean; - constructor() { + private constructor() { super(); - this.hasDependencyGraph = false; this.basePrompt = promptStructure; } @@ -47,10 +36,6 @@ class SystemPromptServiceImpl extends SystemPromptService { return SystemPromptServiceImpl.instance; } - setDependencyExists(value: boolean): void { - this.hasDependencyGraph = value; - } - /** * Creates 
and formats a prompt for OpenAI models. * @@ -101,102 +86,6 @@ class SystemPromptServiceImpl extends SystemPromptService { } } - /** - * Creates and formats a prompt for DeepSeek models. - * - * @param {Array} contextArray - The array of context around the prompt. - * @param {string} prompt - The raw prompt text. - * @param {string} completionType - The type of completion (e.g., 'complete' or 'update'). - * @returns {Promise} The formatted DeepSeek prompt. - */ - async getDeepSeekSystemMessage( - insertionRequest: InsertionRequestInfo - ): Promise { - try { - const platform = 'deepseek'; - - // In all the cases load the system prompt - const systemPrompt = (this.basePrompt[platform] as SystemPromptPlatformInfo).systemMessage; - const codeContext = this.getCodeContext( - insertionRequest.filePath, - insertionRequest.promptEmbedding ?? [] - ); - const instructions = this.getInstructions(platform); - - let format = ''; - let contextPrompt = ''; - - contextPrompt = (this.basePrompt[platform] as SystemPromptPlatformInfo).context; - format = (this.basePrompt[platform] as SystemPromptPlatformInfo).format; - - const systemContent = `${systemPrompt}\n Instructions:${instructions}\n${format}\n${contextPrompt}:\n${codeContext}`; - const systemMessage: ChatCompletionMessageParam = { - role: 'system', - content: systemContent - }; - - const userMessage: ChatCompletionMessageParam = { - role: 'user', - content: insertionRequest.prompt - }; - - return [systemMessage, userMessage] as ChatCompletionMessageParam[]; - } catch (error) { - if (error instanceof Error) { - console.error(`Error generating DeepSeek prompt: ${error.message}`); - } - throw error; // Re-throw the error for further handling - } - } - - /** - * Creates and formats a prompt for Groq models. - * - * @param {Array} contextArray - The array of context around the prompt. - * @param {string} prompt - The raw prompt text. - * @param {string} completionType - The type of completion (e.g., 'complete' or 'update'). 
- * @returns {Promise} The formatted Groq prompt. - */ - async getGroqSystemMessage( - insertionRequest: InsertionRequestInfo - ): Promise { - try { - const platform = 'groq'; - - // In all the cases load the system prompt - const systemPrompt = (this.basePrompt[platform] as SystemPromptPlatformInfo).systemMessage; - const codeContext = this.getCodeContext( - insertionRequest.filePath, - insertionRequest.promptEmbedding ?? [] - ); - const instructions = this.getInstructions(platform); - - let format = ''; - let contextPrompt = ''; - - contextPrompt = (this.basePrompt[platform] as SystemPromptPlatformInfo).context; - format = (this.basePrompt[platform] as SystemPromptPlatformInfo).format; - - const systemContent = `${systemPrompt}\n Instructions:${instructions}\n${format}\n${contextPrompt}:\n${codeContext}`; - const systemMessage: GroqChatCompletionMessageParam = { - role: 'system', - content: systemContent - }; - - const userMessage: GroqChatCompletionMessageParam = { - role: 'user', - content: insertionRequest.prompt - }; - - return [systemMessage, userMessage] as GroqChatCompletionMessageParam[]; - } catch (error) { - if (error instanceof Error) { - console.error(`Error generating Groq prompt: ${error.message}`); - } - throw error; - } - } - /** * Generates a formatted string containing the file content and user prompt. *