Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/curly-owls-visit.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"overide": minor
---

Added support for the `response_format` option when using the OpenAI API
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
"groq-sdk": "^0.7.0",
"inquirer": "^11.1.0",
"open": "^10.1.0",
"openai": "^4.67.2",
"openai": "^4.77.0",
"three": "^0.170.0",
"tree-sitter": "^0.22.0",
"tree-sitter-c": "^0.23.1",
Expand Down
13 changes: 8 additions & 5 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions src/models/model.request.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { ResponseFormatJSONSchema } from 'openai/resources';
import { ChatCompletionMessageParam as OpenAIChatCompletionMessageParam } from 'openai/resources/chat/completions';
import { ChatCompletionMessageParam as GroqChatCompletionMessageParam } from 'groq-sdk/resources/chat/completions';
import { ActivePlatformDetails } from './model.config';
Expand All @@ -16,6 +17,7 @@ export interface OpenAiRequestObject {
stream?: boolean;
presence_penalty?: number;
frequency_penalty?: number;
response_format: ResponseFormatJSONSchema;
}

export interface DeepSeekRequestObject {
Expand Down
6 changes: 2 additions & 4 deletions src/services/service.network.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,10 +98,7 @@ class NetworkServiceImpl extends NetworkService {
// Handle requests based on the selected platform
switch (platform) {
case 'openai':
return this.handleOpenAIRequest(activeServiceDetails, {
...metadata,
messages: metadata.messages as OpenAIChatCompletionMessageParam[]
});
return this.handleOpenAIRequest(activeServiceDetails, metadata as OpenAiRequestObject);
case 'deepseek':
return this.handleDeepSeekRequest(activeServiceDetails, {
...metadata,
Expand Down Expand Up @@ -141,6 +138,7 @@ class NetworkServiceImpl extends NetworkService {
...metadata,
stream: false
});
console.log(completions.choices[0]);
return (completions.choices[0] as ChatCompletion.Choice).message.content || ''; // Return the content string from OpenAI completion
} catch (error) {
if (error instanceof Error) {
Expand Down
45 changes: 38 additions & 7 deletions src/services/service.process/process.request.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,14 +81,45 @@ class ProcessRequest {
}

const metadata: OpenAiRequestObject = {
model: 'gpt-4o', // Specify the model to use
model: 'gpt-4o',
messages: messages,
temperature: 0.5, // Adjust temperature for creativity (lower = more deterministic)
max_tokens: 2500, // Max tokens for the response
n: 1, // Number of completions to generate
stream: false, // Whether to stream results
presence_penalty: 0, // Adjusts frequency of introducing new ideas
frequency_penalty: 0 // Adjusts repetition
temperature: 0.5,
max_tokens: 2500,
n: 1,
stream: false,
presence_penalty: 0,
frequency_penalty: 0,
response_format: {
type: 'json_schema',
json_schema: {
name: 'changes',
schema: {
type: 'object',
properties: {
changes: {
type: 'array',
items: {
type: 'object',
properties: {
find: {
type: 'array',
items: { type: 'string' }
},
replace: {
type: 'array',
items: { type: 'string' }
}
},
required: ['find', 'replace'],
additionalProperties: false
}
}
},
required: ['changes'],
additionalProperties: false
}
}
}
};

// Construct the request body for OpenAI API
Expand Down
7 changes: 7 additions & 0 deletions src/services/service.process/process.response.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,13 @@ class ProcessResponse {
response: string,
verbose: boolean = false
): Promise<ReplacementBlock[] | null> {
// In case of OpenAI we receive a properly formatted JSON response.
try {
return JSON.parse(response)['changes'] as ReplacementBlock[];
} catch (error) {
console.error(`Error in formatting response: ${(error as Error).message}`);
}

try {
const replacementObject: ReplacementBlock[] = serviceDev.extractCodeBlock(response, verbose);
for (const bloc of replacementObject) {
Expand Down