Enhance conversation handling: clone entries to avoid mutation, extra… #1631
```diff
@@ -516,16 +516,25 @@ OpenWrap.ai.prototype.__gpttypes = {
       getConversation: () => {
         var _res = _r.conversation.map(r => {
           if (isMap(r)) {
-            if (isUnDef(r.role))
-              r.role = "user"
-            else if (r.role == "assistant") {
-              r.role = "model"
+            // Clone to avoid mutating the original conversation
+            var _entry = clone(r)
+            if (isUnDef(_entry.role))
+              _entry.role = "user"
+            else if (_entry.role == "assistant") {
+              _entry.role = "model"
             }

-            if (isArray(r.parts) && r.parts.length > 0) {
-              r.content = r.parts.reduce((aC, aV) => aC + "\n" + aV.text, "")
-              delete r.parts
+            if (isArray(_entry.parts) && _entry.parts.length > 0) {
+              // Only extract text from parts that have text (skip functionCall, functionResponse, etc.)
+              var textParts = _entry.parts.filter(p => isDef(p) && isDef(p.text))
+              if (textParts.length > 0) {
+                _entry.content = textParts.reduce((aC, aV) => aC + "\n" + aV.text, "")
+              } else {
+                _entry.content = ""
+              }
+              delete _entry.parts
             }
+            return _entry
           }
           return r
         })
```
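The clone guards against a side effect: previously, `getConversation` rewrote `role` and deleted `parts` on the live conversation entries, so the stored history was silently transformed on each call. A minimal sketch of the difference, using a JSON round-trip to stand in for OpenAF's `clone` helper:

```js
// Minimal sketch (plain Node.js); JSON round-trip stands in for OpenAF's clone().
const clone = o => JSON.parse(JSON.stringify(o))

// Old behaviour: the map() callback mutates the shared entries in place.
const before = [{ role: "assistant", parts: [{ text: "hi" }] }]
before.map(r => { r.role = "model"; delete r.parts; return r })
console.log(before[0]) // { role: 'model' } -- the stored history was rewritten

// New behaviour: transform a copy, leaving the history untouched.
const after = [{ role: "assistant", parts: [{ text: "hi" }] }]
const view = after.map(r => { const e = clone(r); e.role = "model"; delete e.parts; return e })
console.log(after[0]) // { role: 'assistant', parts: [ { text: 'hi' } ] }
console.log(view[0])  // { role: 'model' }
```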
```diff
@@ -576,7 +585,9 @@ OpenWrap.ai.prototype.__gpttypes = {
         var __r = _r.rawPrompt(aPrompt, aModel, aTemperature, aJsonFlag, tools)
         if (isArray(__r.candidates) && isArray(__r.candidates[0].content.parts) && __r.candidates[0].content.parts.length > 0) {
           if (__r.candidates[0].finishReason == "STOP") {
-            return __r.candidates[0].content.parts.reduce((aC, aV) => aC + "\n" + aV.text, "")
+            // Only extract text from parts that have text (skip functionCall, functionResponse, etc.)
+            var textParts = __r.candidates[0].content.parts.filter(p => isDef(p) && isDef(p.text))
+            return textParts.reduce((aC, aV) => aC + "\n" + aV.text, "")
           }
         }
         return stringify(__r, __, "")
```
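The filter matters because a Gemini-style `parts` array can mix text parts with `functionCall` or `functionResponse` parts that carry no `text` property; reducing over them splices the literal string `undefined` into the output. A small sketch (the `!= null` checks stand in for OpenAF's `isDef`):

```js
// A Gemini-style parts array can mix text parts with function-call parts.
const parts = [
  { text: "Here is the result:" },
  { functionCall: { name: "getWeather", args: { city: "Lisbon" } } },
  { text: "done." }
]

// Unfiltered reduce stringifies the missing property:
parts.reduce((aC, aV) => aC + "\n" + aV.text, "")
// -> "\nHere is the result:\nundefined\ndone."

// Filtering first keeps only the real text parts:
const textParts = parts.filter(p => p != null && p.text != null)
textParts.reduce((aC, aV) => aC + "\n" + aV.text, "")
// -> "\nHere is the result:\ndone."
```

The same pattern is applied again in the next hunk.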
```diff
@@ -622,7 +633,9 @@ OpenWrap.ai.prototype.__gpttypes = {

         if (isArray(__r.candidates) && isArray(__r.candidates[0].content.parts) && __r.candidates[0].content.parts.length > 0) {
           if (__r.candidates[0].finishReason == "STOP") {
-            return __r.candidates[0].content.parts.reduce((aC, aV) => aC + "\n" + aV.text, "")
+            // Only extract text from parts that have text (skip functionCall, functionResponse, etc.)
+            var textParts = __r.candidates[0].content.parts.filter(p => isDef(p) && isDef(p.text))
+            return textParts.reduce((aC, aV) => aC + "\n" + aV.text, "")
           }
         }
         return __r
```
```diff
@@ -651,12 +664,15 @@ OpenWrap.ai.prototype.__gpttypes = {
         }
         var msgs = [];
         if (isString(aPrompt)) aPrompt = [ { role: "user", parts: [ { text: aPrompt } ] } ];
+        // Preserve the new user prompt(s) before merging with conversation history
+        // This is needed to add them to the conversation when function calls occur
+        var newPrompts = isArray(aPrompt) ? aPrompt.map(toPartsMsg) : [];
         aPrompt = _r.conversation.reduce((acc, r) => {
           if (isDef(r) && (isUnDef(r.role) || r.role != "system")) {
             acc.push(toPartsMsg(r));
           }
           return acc;
-        }, []).concat(aPrompt.map(toPartsMsg));
+        }, []).concat(newPrompts);
         msgs = aPrompt;

         var systemParts = _r.conversation.reduce((acc, r) => {
```
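Capturing `newPrompts` before the overwrite is the key move here: once `aPrompt` is rebuilt as history plus new turns, the new turns alone are no longer separable, yet the function-call path later needs exactly them. A stripped-down sketch of that ordering (hypothetical literal data; `toPartsMsg` and the OpenAF helpers are omitted):

```js
// Hypothetical data; toPartsMsg and OpenAF helpers omitted for brevity.
const history = [{ role: "user", parts: [{ text: "earlier turn" }] }]
let aPrompt   = [{ role: "user", parts: [{ text: "new question" }] }]

const newPrompts = aPrompt.slice()   // saved before aPrompt is overwritten
aPrompt = history.concat(newPrompts) // aPrompt now holds history + new turns

// On a function call, only the new turn(s) are appended to the stored
// conversation; the saved copy keeps them separable from the history.
let conversation = history
conversation = conversation.concat(newPrompts)
console.log(conversation.map(t => t.parts[0].text))
// -> [ 'earlier turn', 'new question' ]
```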
```diff
@@ -764,11 +780,14 @@ OpenWrap.ai.prototype.__gpttypes = {
           }
         })
         if (stopWith) {
-          _r.conversation = _r.conversation.concat(_res.candidates[0].content)
+          // Add the new user prompt(s) and the model response to conversation
+          _r.conversation = _r.conversation.concat(newPrompts).concat(_res.candidates[0].content)
           _captureStats(_res, aModel)
           return _res
         } else {
-          _r.conversation = _r.conversation.concat(_p)
+          // Add the new user prompt(s), then the function call/response to conversation
+          // This ensures the conversation starts with a user turn before the model's functionCall
+          _r.conversation = _r.conversation.concat(newPrompts).concat(_p)
           return _r.rawPrompt([], aModel, aTemperature, aJsonFlag, [])
         }
       } else {
```
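The ordering comment reflects how Gemini-style histories are structured: each exchange should open with the user turn, followed by the model's `functionCall` and the tool's `functionResponse`. A sketch of the resulting role sequence (the part shapes follow Gemini's REST format; names and values are illustrative):

```js
// Part shapes follow Gemini's REST format; names and values are illustrative.
const newPrompts = [
  { role: "user", parts: [{ text: "What's the weather in Lisbon?" }] }
]
const functionTurns = [
  { role: "model", parts: [{ functionCall: { name: "getWeather", args: { city: "Lisbon" } } }] },
  { role: "user",  parts: [{ functionResponse: { name: "getWeather", response: { tempC: 21 } } }] }
]

// Appending newPrompts first keeps the required user -> model ordering:
const conversation = [].concat(newPrompts).concat(functionTurns)
console.log(conversation.map(t => t.role)) // [ 'user', 'model', 'user' ]
```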
A review comment on the `reduce` text extraction above:

> The reduce operation always prepends a newline, resulting in leading whitespace when concatenating text parts. Initialize the accumulator with the first element's text, or use `join('\n')` instead.
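For illustration, the behavior the reviewer describes, alongside the suggested `join` alternative:

```js
const textParts = [{ text: "a" }, { text: "b" }]

// Seeding reduce with "" prepends "\n" to every element, including the first:
console.log(JSON.stringify(textParts.reduce((aC, aV) => aC + "\n" + aV.text, ""))) // "\na\nb"

// join only inserts separators between elements:
console.log(JSON.stringify(textParts.map(p => p.text).join("\n"))) // "a\nb"
```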