diff --git a/.changeset/beige-crabs-repeat.md b/.changeset/beige-crabs-repeat.md deleted file mode 100644 index ad8e1a1..0000000 --- a/.changeset/beige-crabs-repeat.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"overide": patch ---- - -Refactored the embedding and parsing service to handle the generation of prompt embedding and contexts properly. diff --git a/.changeset/curly-owls-visit.md b/.changeset/curly-owls-visit.md new file mode 100644 index 0000000..414b3bc --- /dev/null +++ b/.changeset/curly-owls-visit.md @@ -0,0 +1,5 @@ +--- +"overide": minor +--- + +Added support for `response_format` when using the OpenAI API diff --git a/.changeset/gorgeous-pugs-peel.md b/.changeset/gorgeous-pugs-peel.md deleted file mode 100644 index 21b32d4..0000000 --- a/.changeset/gorgeous-pugs-peel.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -"overide": minor ---- - -Adds: -1. Support for dependency graph generation with tree sitter. -2. Support for Embedding generation in dependency graph using open-ai. -3. Support for embedding based code relevancy selection for - -Updated: -1. The Network layer is refactored to reduce redundancy. -2. Config command now has two sub-commands 'local' & 'global' with respective options. -3. overide config has a new option `--embedding` to enable embedding. - -Fixes: -1. Fixed ignore file pattern matching. -2. Fixed context generation for programming languages which dows not have tree-sitter support. -3. Fixed embedding generation to exclude non-supported languages. - diff --git a/.changeset/hip-flowers-accept.md b/.changeset/hip-flowers-accept.md deleted file mode 100644 index 81a2062..0000000 --- a/.changeset/hip-flowers-accept.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"overide": patch ---- - -Added a cool 3D view for dependency graph - This can be further improved to give user a proper experiance of manually adding context and viewing the project-wide code dependency. 
diff --git a/.changeset/odd-adults-sneeze.md b/.changeset/odd-adults-sneeze.md deleted file mode 100644 index cc31a5c..0000000 --- a/.changeset/odd-adults-sneeze.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"overide": patch ---- - -Updated the watching file message. diff --git a/CHANGELOG.md b/CHANGELOG.md index eb9e60a..280010b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,33 @@ # overide +## 0.2.0 + +### Minor Changes + +- 1713261: Adds: + + 1. Support for dependency graph generation with tree sitter. + 2. Support for Embedding generation in dependency graph using open-ai. + 3. Support for embedding based code relevancy selection for + + Updated: + + 1. The Network layer is refactored to reduce redundancy. + 2. Config command now has two sub-commands 'local' & 'global' with respective options. + 3. overide config has a new option `--embedding` to enable embedding. + + Fixes: + + 1. Fixed ignore file pattern matching. + 2. Fixed context generation for programming languages which do not have tree-sitter support. + 3. Fixed embedding generation to exclude non-supported languages. + +### Patch Changes + +- 55ee800: Refactored the embedding and parsing service to handle the generation of prompt embedding and contexts properly. +- 6f1ea9b: Added a cool 3D view for dependency graph - This can be further improved to give user a proper experience of manually adding context and viewing the project-wide code dependency. +- 183836d: Updated the watching file message. 
+ ## 0.1.8 ### Patch Changes diff --git a/package.json b/package.json index e9dace3..1541e07 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "overide", - "version": "0.1.8", + "version": "0.2.0", "description": "This is a CLI based Code Generation Tool.", "type": "commonjs", "main": "dist/index.js", @@ -35,7 +35,7 @@ "groq-sdk": "^0.7.0", "inquirer": "^11.1.0", "open": "^10.1.0", - "openai": "^4.67.2", + "openai": "^4.77.0", "three": "^0.170.0", "tree-sitter": "^0.22.0", "tree-sitter-c": "^0.23.1", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e546a17..c64e3ba 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -42,8 +42,8 @@ importers: specifier: ^10.1.0 version: 10.1.0 openai: - specifier: ^4.67.2 - version: 4.69.0 + specifier: ^4.77.0 + version: 4.91.1 three: specifier: ^0.170.0 version: 0.170.0 @@ -1485,12 +1485,15 @@ packages: resolution: {integrity: sha512-mnkeQ1qP5Ue2wd+aivTD3NHd/lZ96Lu0jgf0pwktLPtx6cTZiH7tyeGRRHs0zX0rbrahXPnXlUnbeXyaBBuIaw==} engines: {node: '>=18'} - openai@4.69.0: - resolution: {integrity: sha512-S3hOHSkk609KqwgH+7dwFrSvO3Gm3Nk0YWGyPHNscoMH/Y2tH1qunMi7gtZnLbUv4/N1elqCp6bDior2401kCQ==} + openai@4.91.1: + resolution: {integrity: sha512-DbjrR0hIMQFbxz8+3qBsfPJnh3+I/skPgoSlT7f9eiZuhGBUissPQULNgx6gHNkLoZ3uS0uYS6eXPUdtg4nHzw==} hasBin: true peerDependencies: + ws: ^8.18.0 zod: ^3.23.8 peerDependenciesMeta: + ws: + optional: true zod: optional: true @@ -3509,7 +3512,7 @@ snapshots: is-inside-container: 1.0.0 is-wsl: 3.1.0 - openai@4.69.0: + openai@4.91.1: dependencies: '@types/node': 18.19.61 '@types/node-fetch': 2.6.11 diff --git a/src/models/model.request.ts b/src/models/model.request.ts index b59f882..3eaeb6e 100644 --- a/src/models/model.request.ts +++ b/src/models/model.request.ts @@ -1,3 +1,4 @@ +import { ResponseFormatJSONSchema } from 'openai/resources'; import { ChatCompletionMessageParam as OpenAIChatCompletionMessageParam } from 'openai/resources/chat/completions'; import { ChatCompletionMessageParam as 
GroqChatCompletionMessageParam } from 'groq-sdk/resources/chat/completions'; import { ActivePlatformDetails } from './model.config'; @@ -16,6 +17,7 @@ export interface OpenAiRequestObject { stream?: boolean; presence_penalty?: number; frequency_penalty?: number; + response_format: ResponseFormatJSONSchema; } export interface DeepSeekRequestObject { diff --git a/src/services/service.network.ts b/src/services/service.network.ts index fef4809..1c3827c 100644 --- a/src/services/service.network.ts +++ b/src/services/service.network.ts @@ -98,10 +98,7 @@ class NetworkServiceImpl extends NetworkService { // Handle requests based on the selected platform switch (platform) { case 'openai': - return this.handleOpenAIRequest(activeServiceDetails, { - ...metadata, - messages: metadata.messages as OpenAIChatCompletionMessageParam[] - }); + return this.handleOpenAIRequest(activeServiceDetails, metadata as OpenAiRequestObject); case 'deepseek': return this.handleDeepSeekRequest(activeServiceDetails, { ...metadata, @@ -141,6 +138,7 @@ class NetworkServiceImpl extends NetworkService { ...metadata, stream: false }); + console.log(completions.choices[0]); return (completions.choices[0] as ChatCompletion.Choice).message.content || ''; // Return the content string from OpenAI completion } catch (error) { if (error instanceof Error) { diff --git a/src/services/service.process/process.request.ts b/src/services/service.process/process.request.ts index d8aad9e..537a2e9 100644 --- a/src/services/service.process/process.request.ts +++ b/src/services/service.process/process.request.ts @@ -81,14 +81,45 @@ class ProcessRequest { } const metadata: OpenAiRequestObject = { - model: 'gpt-4o', // Specify the model to use + model: 'gpt-4o', messages: messages, - temperature: 0.5, // Adjust temperature for creativity (lower = more deterministic) - max_tokens: 2500, // Max tokens for the response - n: 1, // Number of completions to generate - stream: false, // Whether to stream results - 
presence_penalty: 0, // Adjusts frequency of introducing new ideas - frequency_penalty: 0 // Adjusts repetition + temperature: 0.5, + max_tokens: 2500, + n: 1, + stream: false, + presence_penalty: 0, + frequency_penalty: 0, + response_format: { + type: 'json_schema', + json_schema: { + name: 'changes', + schema: { + type: 'object', + properties: { + changes: { + type: 'array', + items: { + type: 'object', + properties: { + find: { + type: 'array', + items: { type: 'string' } + }, + replace: { + type: 'array', + items: { type: 'string' } + } + }, + required: ['find', 'replace'], + additionalProperties: false + } + } + }, + required: ['changes'], + additionalProperties: false + } + } + } }; // Construct the request body for OpenAI API diff --git a/src/services/service.process/process.response.ts b/src/services/service.process/process.response.ts index 4a52147..063b273 100644 --- a/src/services/service.process/process.response.ts +++ b/src/services/service.process/process.response.ts @@ -20,6 +20,13 @@ class ProcessResponse { response: string, verbose: boolean = false ): Promise { + // IN case of OpenAi we receive a proper json format. + try { + return JSON.parse(response)['changes'] as ReplacementBlock[]; + } catch (error) { + console.error(`Error in formatting response: ${(error as Error).message}`); + } + try { const replacementObject: ReplacementBlock[] = serviceDev.extractCodeBlock(response, verbose); for (const bloc of replacementObject) {