diff --git a/commands/predict.js b/commands/predict.js
index b230dd4..3d670d9 100644
--- a/commands/predict.js
+++ b/commands/predict.js
@@ -17,36 +17,31 @@ const { alitaService, workspaceService, windowService } = require("../services")
 module.exports = async function () {
   alitaService.checkLLMConfig();
-  if (alitaService.init_done === 0) {
-    try {
-      await alitaService.serviceProvider.init();
-      alitaService.init_done = 1;
-    } catch (ex) {
-      alitaService.init_done = 0;
-      await vscode.window.showErrorMessage(
-        `Alita is not able to connect to ${alitaService.serviceProvider.getPromptsUrl}`
-      );
-      return;
-    }
-  }
-
-  const promptsList = await workspaceService.updatePrompts();
+  const applicationList = await alitaService.getApplications({});
   // renderring list
-  const entities = [...promptsList]
-    .map((prompt) => ({
-      label: prompt.label.replace(/(_prompt|_datasource)$/, ""),
-      description: prompt.description,
-      iconPath: new vscode.ThemeIcon(
-        prompt.label.endsWith("_datasource") ? "database" : prompt.external ? "terminal" : "remote-explorer"
-      ),
-      full_name: prompt.label,
+  let entities = []
+  entities.push({
+    label: "No agent",
+    description: "",
+    iconPath: "terminal",
+    full_name: "",
+  });
+  [...applicationList]
+    .forEach((application) => entities.push({
+      label: application.name,
+      description: application.description,
+      iconPath: new vscode.ThemeIcon("remote-explorer"),
+      full_name: application.label,
     }));
-  let selection = await windowService.showQuickPick([...entities]);
-  selection = [...promptsList].find((prompt) => prompt.label === selection.full_name);
-  if (!selection) return;
-  // select required version
-  if (!selection.label.endsWith("_datasource") && selection.external) {
-    var prompt_details_response = await alitaService.getPromptDetail(selection.prompt_id);
+  let selection = await windowService.showQuickPick([...entities], {
+    activeItem: entities[entities.length - 1]
+  });
+  selection = [...applicationList].find((application) => application.label === selection.full_name);
+  if (!selection) {
+
+  } else {
+    // select required version
+    var prompt_details_response = await alitaService.getApplicationDetail(selection.id);
     // if prompt has 2+ versions - show them
     selection.version =
@@ -54,7 +49,8 @@ module.exports = async function () {
         ? prompt_details_response.versions[0]
         : await handleVersions(prompt_details_response.versions);
   }
-
+
+
   vscode.window.withProgress(
     {
       location: vscode.ProgressLocation.Window,
diff --git a/commands/syncEmbeddings.js b/commands/syncEmbeddings.js
index a4958b9..98124de 100644
--- a/commands/syncEmbeddings.js
+++ b/commands/syncEmbeddings.js
@@ -24,7 +24,7 @@ module.exports = async function () {
     await workspaceService.updateEmbeddings();
   } catch (e) {
     await vscode.window.showErrorMessage(
-      `Alita is not able to connec to ${alitaService.serviceProvider.getEmbeddingsUrl}`
+      `Alita is not able to connec to ${alitaService.serviceProvider.getConfigurationsUrl}`
     );
   }
 };
diff --git a/package.json b/package.json
index ba3c32b..171f426 100644
--- a/package.json
+++ b/package.json
@@ -23,22 +23,6 @@
         "command": "eliteacode.predict",
         "title": "Predict"
       },
-      {
-        "command": "eliteacode.createPrompt",
-        "title": "Create Prompt"
-      },
-      {
-        "command": "eliteacode.addContext",
-        "title": "Extend Context"
-      },
-      {
-        "command": "eliteacode.syncPrompts",
-        "title": "Sync External Prompts"
-      },
-      {
-        "command": "eliteacode.initAlita",
-        "title": "Init"
-      },
       {
         "command": "eliteacode.getAvailableAIModels",
         "title": "Get available AI models from the server"
@@ -54,24 +38,8 @@
       ],
       "eliteacode.submenu": [
         {
-          "when": "editorHasSelection && editorFocus && !editorReadonly && eliteacode.init",
+          "when": "editorHasSelection && editorFocus && !editorReadonly",
           "command": "eliteacode.predict"
-        },
-        {
-          "when": "editorFocus && !editorReadonly && eliteacode.init && eliteacode.LocalPrompts",
-          "command": "eliteacode.createPrompt"
-        },
-        {
-          "when": "editorFocus && !eliteacode.init && eliteacode.LLMProvider != 'None'",
-          "command": "eliteacode.initAlita"
-        },
-        {
-          "when": "editorHasSelection && editorFocus && !editorReadonly && eliteacode.init && eliteacode.LocalPrompts",
-          "command": "eliteacode.addContext"
-        },
-        {
-          "when": "eliteacode.init && eliteacode.LLMProvider in eliteacode.ExtentablePlatforms",
-          "command": "eliteacode.syncPrompts"
         }
       ]
     },
@@ -241,7 +209,7 @@
     "@vscode/webview-ui-toolkit": "^1.4.0",
     "axios": "^1.6.8",
     "form-data": "^4.0.0",
-    "squirrelly": "7.9.2",
+    "squirrelly": "^9.1.0",
     "yaml": "^2.4.1"
   },
   "devDependencies": {
@@ -251,7 +219,7 @@
     "@types/vscode": "^1.78.0",
     "@vscode/test-electron": "^2.3.2",
     "@vscode/vsce": "^2.25.0",
-    "esbuild": "^0.19.12",
+    "esbuild": "0.25.9",
     "eslint": "^8.41.0",
     "eslint-config-prettier": "^8.8.0",
     "glob": "^8.1.0",
diff --git a/services/alita.service.js b/services/alita.service.js
index 5cbbeb5..ad0b11a 100644
--- a/services/alita.service.js
+++ b/services/alita.service.js
@@ -40,7 +40,7 @@ module.exports = class AlitaService {
         this.serviceProvider = new llmServierProvider[newProvier]();
         this.currentProvider = newProvier;
         this.init_done = 0;
-        this.integrationData = undefined;
+        this.configurationData = undefined;
       }
     } catch (ex) {
       console.log(ex);
@@ -59,7 +59,7 @@ module.exports = class AlitaService {
         return `${fnDesc} not supported by this LLM Provider`;
       }
     } catch (error) {
-      await Notifications.showError({ error, message: `Alita Code ${functionName}`, showOutputButton: true });
+      await Notifications.showError({ error, message: `Elitea Code ${functionName}`, showOutputButton: true });
       return "You need to configure LLM Provider first";
     }
   }
@@ -69,7 +69,7 @@ module.exports = class AlitaService {
       this.checkLLMConfig();
       return await this.serviceProvider.predict(template, prompt, prompt_template);
     } catch (error) {
-      await Notifications.showError({ error, message: "Alita is not able to connect", showOutputButton: true });
able to connect", showOutputButton: true }); + await Notifications.showError({ error, message: "Elitea is not able to connect", showOutputButton: true }); return "You need to configure LLM Provider first"; } } @@ -82,21 +82,6 @@ module.exports = class AlitaService { return this.invokeMethod("getModelSettings", "Get model settings"); } - async getPrompts({ page = 0, query }) { - return await this.invokeMethod("getPrompts", "List prompts", { page, query }); - } - - async getPromptDetail(promptId) { - return await this.invokeMethod("getPromptDetail", "Get prompt detail", promptId); - } - - async getDatasourceDetail(id) { - return await this.invokeMethod("getDatasourceDetail", "Get prompt detail", id); - } - - async getDatasources() { - return await this.invokeMethod("getDatasources", "List datasources"); - } async getApplicationDetail(id) { return await this.invokeMethod("getAppllicationDetail", "Get application detail", id); @@ -106,6 +91,10 @@ module.exports = class AlitaService { return await this.invokeMethod("getApplications", "List applications"); } + async createConversation(name) { + return await this.invokeMethod("createConversation", "Create conversation", name); + } + async getDeployments() { return await this.invokeMethod("getDeployments", "Get deployments"); } @@ -114,10 +103,6 @@ module.exports = class AlitaService { return await this.invokeMethod("stopApplicationTask", "Stop application task", taskId); } - async stopDatasourceTask(taskId) { - return await this.invokeMethod("stopDatasourceTask", "Stop datasource task", taskId); - } - async chat(params) { return await this.invokeMethod("chat", "Chat", params); } @@ -127,64 +112,61 @@ module.exports = class AlitaService { } async getAIModelNames() { - this.integrationData = await this.getEmbeddings(); + this.configurationData = await this.getEmbeddings(); const array = []; - this.integrationData.forEach((entry) => { - if (entry.settings && Array.isArray(entry.settings.models)) { - entry.settings.models.forEach((model) => { - if (model.name && entry.name) { - array.push({ [entry.config.name]: model.name }); - } - }); - } - }); + if (this.configurationData.shared && Array.isArray(this.configurationData.shared.items)) { + this.configurationData.shared.items.forEach((model) => { + if (model.data.name && model.alita_title) { + array.push({ [model.alita_title]: model.data.name }); + } + }); + } return array; } async getAIModelUid(integrationConfigName, isUsedCashedData) { - const data = isUsedCashedData ? this.integrationData : await this.getEmbeddings(); - return data - .filter((integration) => integration.config.name === integrationConfigName) - .map((integration) => integration.uid); + const data = isUsedCashedData ? this.configurationData : await this.getEmbeddings(); + return data.shared.items + .filter((configuration) => configuration.alita_title === integrationConfigName) + .map((configuration) => configuration.uuid); } async getAIModelIntegrationName(integrationConfigName, isUsedCashedData) { - const data = isUsedCashedData ? this.integrationData : await this.getEmbeddings(); - return data - .filter((integration) => integration.config.name === integrationConfigName) - .map((integration) => integration.name); + const data = isUsedCashedData ? 
this.configurationData : await this.getEmbeddings(); + return data.shared.items + .filter((configuration) => configuration.alita_title === integrationConfigName) + .map((configuration) => configuration.data.name); } async getEmbeddings() { return await this.invokeMethod("getEmbeddings", "Get available integrations"); } + async getAIModelNames() { const data = await this.getEmbeddings(); const array = []; - data.forEach((entry) => { - if (entry.settings && Array.isArray(entry.settings.models)) { - entry.settings.models.forEach((model) => { - if (model.name && entry.name) { - array.push({ [entry.config.name]: model.name }); - } - }); - } - }); + if (data.shared && Array.isArray(data.shared.items)) { + data.shared.items.forEach((model) => { + if (model.data.name && model.alita_title) { + array.push({ [model.alita_title]: model.data.name }); + } + }); + } return array; } async getAIModelUid(integrationConfigName) { const data = await this.getEmbeddings(); - return data - .filter((integration) => integration.config.name === integrationConfigName) - .map((integration) => integration.uid); + return data.shared.items + .filter((configuration) => configuration.alita_title === integrationConfigName) + .map((configuration) => configuration.uuid); } async getAIModelIntegrationName(integrationConfigName) { const data = await this.getEmbeddings(); - return data - .filter((integration) => integration.config.name === integrationConfigName) - .map((integration) => integration.name); + return data.shared.items + .filter((configuration) => configuration.alita_title === integrationConfigName) + .map((configuration) => configuration.data.name); } }; diff --git a/services/providers/alita.provider.js b/services/providers/alita.provider.js index 7c29293..722765f 100644 --- a/services/providers/alita.provider.js +++ b/services/providers/alita.provider.js @@ -29,19 +29,14 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider { const apiBasePath = removeTrailingSlash(this.config.LLMserverURL).concat(apiPath); this.codeTagId = -1; this.getCodeTagUrl = `${apiBasePath}/prompt_lib/tags/prompt_lib/${this.config.projectID}`; - this.getPromptsUrl = `${apiBasePath}/prompt_lib/prompts/prompt_lib/${this.config.projectID}`; - this.getPromptDetailUrl = `${apiBasePath}/prompt_lib/prompt/prompt_lib/${this.config.projectID}`; - this.getDatasourcesUrl = `${apiBasePath}/datasources/datasources/prompt_lib/${this.config.projectID}`; - this.getDatasourceDetailUrl = `${apiBasePath}/datasources/datasource/prompt_lib/${this.config.projectID}`; - this.getApplicationsUrl = `${apiBasePath}/applications/applications/prompt_lib/${this.config.projectID}`; + this.getApplicationsUrl = `${apiBasePath}/applications/applications/prompt_lib/${this.config.projectID}?agents_type=classic`; this.getApplicationDetailUrl = `${apiBasePath}/applications/application/prompt_lib/${this.config.projectID}`; - this.updatePromptsUrl = `${apiBasePath}/prompt_lib/version/prompt_lib/${this.config.projectID}`; - this.predictUrl = `${apiBasePath}/prompt_lib/predict/prompt_lib/${this.config.projectID}`; - this.getEmbeddingsUrl = `${apiBasePath}/integrations/integrations/default/${this.config.projectID}`; + this.predictUrl = `${apiBasePath}/applications/predict_llm/prompt_lib/${this.config.projectID}`; + this.applicationPredictUrl = `${apiBasePath}/applications/predict/prompt_lib/${this.config.projectID}`; + this.getConfigurationsUrl = `${apiBasePath}/configurations/configurations/${this.config.projectID}?include_shared=true§ion=llm`; 
     this.sumilarityUrl = `${apiBasePath}/datasources/deduplicate/prompt_lib/${this.config.projectID}`;
-    this.chatWithDatasourceUrl = `${apiBasePath}/datasources/predict/prompt_lib/${this.config.projectID}`;
+    this.getConversationUrl = `${apiBasePath}/chat/conversations/prompt_lib/${this.config.projectID}`;
     this.stopApplicationTaskUrl = `${apiBasePath}/applications/task/prompt_lib/${this.config.projectID}`;
-    this.stopDatasourceTaskUrl = `${apiBasePath}/datasources/task/prompt_lib/${this.config.projectID}`;
     this.getDeploymentsUrl = `${apiBasePath}/integrations/integrations/default/${this.config.projectID}?section=ai`;
   }
@@ -78,155 +73,73 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider {
     var prompt_data = {};
     var display_type = "append";
     var response = {};
-    if (template.external) {
-      prompt_data = template.label.endsWith("_datasource")
-        ? { input: prompt }
-        : {
-            model_settings: this.getModelSettings(),
-            user_input: prompt,
-            chat_history: template.chat_history,
-          };
-      if (template.userSettings) {
-        display_type = template.userSettings.display_type ? template.userSettings.display_type : "append";
-
-        if (template.userSettings.temperature) {
-          prompt_data.temperature = template.userSettings.temperature;
-        }
-        if (template.userSettings.maxTokens) {
-          prompt_data.max_tokens = template.userSettings.maxTokens;
-          prompt_data.max_decode_steps = template.userSettings.maxTokens;
-        }
-        if (template.userSettings.topP) {
-          prompt_data.top_p = template.userSettings.topP;
-        }
-        if (template.userSettings.topK) {
-          prompt_data.top_k = template.userSettings.topK;
-        }
-        if (template.userSettings.modelName) {
-          prompt_data.model_name = template.userSettings.LLMModelName;
-        }
-      }
-
-      // datasource by default
-      let base_url = this.chatWithDatasourceUrl;
-      let prompt_id = template.prompt_id;
-      if (!template.label.endsWith("_datasource")) {
-        // prompt predict
-        base_url = this.predictUrl;
-        let version_details_response = await this.getPromptDetail(prompt_id, template.version.name);
-        let external_variables = version_details_response.version_details.variables.reduce((acc, item) => {
-          acc[item.name] = item.value;
-          return acc;
-        }, {});
-        if (external_variables) {
-          let configured_variables = await this.handleVars(external_variables);
-          prompt_data.variables = Object.entries(configured_variables ? configured_variables : []).map(
-            ([key, value]) => ({ name: key, value: value })
-          );
-        }
-        prompt_id = template.version.id;
+    var resp_data = {}
+    if (!template) {
+      prompt_data = {
+        llm_settings: {
+          temperature: config.temperature,
+          max_tokens: config.maxTokens,
+          top_p: config.topP,
+          top_k: config.topK,
+          model_name: config.LLMmodelName
+        },
+        user_input: prompt,
+        chat_history: []
       }
-
-      // datasouce predict
-      response = await this.request(base_url + "/" + prompt_id)
+      response = await this.request(this.predictUrl)
         .method("POST")
         .headers({ "Content-Type": "application/json" })
         .body(prompt_data)
         .auth(this.authType, this.authToken)
         .send();
+      resp_data = response.data.result.chat_history.filter((chat) => chat.role == "assistant")[0].content
     } else {
-      if (!prompt_template) {
-        prompt_template = await this.getPromptTemplate(config, template.template);
+
+      let version_details_response = await this.getAppllicationDetail(template.id);
+      let external_variables = version_details_response.version_details.variables.reduce((acc, item) => {
+        acc[item.name] = item.value;
+        return acc;
+      }, {});
+      let configured_variables
+      if (external_variables) {
+        configured_variables = await this.handleVars(external_variables);
       }
       prompt_data = {
         project_id: config.projectID,
         model_settings: {
           model: {
-            model_name: prompt_template.model_name ? prompt_template.model_name : config.LLMmodelName,
-            integration_uid: prompt_template.integration_id ? prompt_template.integration_id : config.integrationID,
+            model_name: config.LLMmodelName,
+            integration_uid: config.integrationID,
           },
-          temperature: prompt_template.temperature ? prompt_template.temperature : config.temperature,
-          max_tokens: prompt_template.maxTokens ? prompt_template.maxTokens : config.maxTokens,
-          top_p: prompt_template.topP ? prompt_template.topP : config.topP,
-          top_k: prompt_template.topK ? prompt_template.topK : config.topK,
+          temperature: config.temperature,
+          max_tokens: config.maxTokens,
+          top_p: config.topP,
+          top_k: config.topK,
           stream: true,
         },
-        context: prompt_template.context,
         user_input: prompt,
-        variables: Object.entries(
-          prompt_template.variables ? prompt_template.variables : this.getTemplateDefaults()
-        ).map(([key, value]) => ({ name: key, value: value })),
-        chat_history: prompt_template.chat_history,
+        variables: Object.entries(configured_variables ? configured_variables : []).map(
+          ([key, value]) => ({ name: key, value: value })
+        ),
+        chat_history: [],
       };
-      response = await this.request(this.predictUrl)
+      response = await this.request(this.applicationPredictUrl.concat(`/${template.version.id}`))
         .method("POST")
         .headers({ "Content-Type": "application/json" })
         .body(prompt_data)
         .auth(this.authType, this.authToken)
         .send();
+      resp_data = response.data.chat_history.filter((chat) => chat.role == "assistant")[0].content
     }
-    display_type =
-      prompt_template && prompt_template.display_type
-        ? prompt_template.display_type
-        : this.workspaceService.getWorkspaceConfig().DisplayType;
+    display_type = this.workspaceService.getWorkspaceConfig().DisplayType;
     // escape $ sign as later it try to read it as template variable
-    const resp_data = response.data.response
-      ? response.data.response
-      : response.data.messages.map((message) => message.content).join("\n");
+
     return {
       content: resp_data,
       type: display_type,
     };
   }

-  async syncPrompts() {
-    const prompts = [];
-    let promptData = [];
-    let datasourceData = [];
-    promptData = (await this.getPrompts({})).map((prompt) => ({ ...prompt, name: prompt.name + "_prompt" }));
-    datasourceData = (await this.getDatasources({})).map((ds) => ({ ...ds, name: ds.name + "_datasource" }));
-    prompts.push(...promptData);
-    prompts.push(...datasourceData);
-
-    const _addedPrompts = [];
-    for (var i = 0; i < prompts.length; i++) {
-      var prompt = prompts[i];
-      var tags = prompt.tags.map((tag) => tag.name.toLowerCase());
-      if (tags.includes("code")) {
-        _addedPrompts.push(prompt.name);
-        await this.addPrompt(
-          prompt.name,
-          prompt.description ? prompt.description : "",
-          { prompt_id: prompt.id, integration_uid: prompt.integration_uid },
-          [],
-          {},
-          true
-        );
-      }
-    }
-    const workspaceConfig = this.workspaceService.getWorkspaceConfig();
-    var promptsMapping = await this.workspaceService.readContent(
-      path.join(workspaceConfig.workspacePath, workspaceConfig.promptLib, "./prompts.json"),
-      true
-    );
-    for (const [key, value] of Object.entries(promptsMapping)) {
-      if (!_addedPrompts.includes(key) && value.external) {
-        await this.removePrompt(key);
-      }
-    }
-  }
-
-  async getPromptDetail(promptId, version_name) {
-    const response = await this.request(
-      this.getPromptDetailUrl + "/" + promptId + (version_name ? "/" + version_name : "")
-    )
-      .method("GET")
-      .headers({ "Content-Type": "application/json" })
-      .auth(this.authType, this.authToken)
-      .send();
-    return response.data;
-  }
-
   async getCodeTagId() {
     if (this.codeTagId > 0) return;

@@ -249,44 +162,6 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider {
     return this.codeTagId && this.codeTagId !== -1;
   }

-  async getPrompts() {
-    const response = await this.request(this.getPromptsUrl, {
-      params: {
-        offset: 0,
-        limit: 1000,
-      },
-    })
-      .method("GET")
-      .headers({ "Content-Type": "application/json" })
-      .auth(this.authType, this.authToken)
-      .send();
-    return response.data.rows.filter((row) => row.tags.some((tag) => tag.name === "code")) || [];
-  }
-
-  async getDatasourceDetail(id) {
-    const response = await this.request(this.getDatasourceDetailUrl + "/" + id)
-      .method("GET")
-      .headers({ "Content-Type": "application/json" })
-      .auth(this.authType, this.authToken)
-      .send();
-    return response.data;
-  }
-
-  async getDatasources() {
-    const response = await this.request(this.getDatasourcesUrl, {
-      params: {
-        // remove after BE alignment
-        limit: 1000,
-        offset: 0,
-      },
-    })
-      .method("GET")
-      .headers({ "Content-Type": "application/json" })
-      .auth(this.authType, this.authToken)
-      .send();
-    return response.data.rows.filter((row) => row.tags.some((tag) => tag.name === "code")) || [];
-  }
-
   async getAppllicationDetail(id) {
     const response = await this.request(this.getApplicationDetailUrl + "/" + id)
       .method("GET")
@@ -311,28 +186,18 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider {
   }

   async chat({ prompt_id, datasource_id, user_input, chat_history }) {
-    let url;
-    let body;
-
-    if (prompt_id) {
-      url = this.predictUrl + "/" + prompt_id;
-      body = {
-        user_input,
-        chat_history,
-      };
-    } else if (datasource_id) {
-      url = this.chatWithDatasourceUrl + "/" + datasource_id;
-      body = {
-        input: user_input,
-        chat_history,
-      };
-    } else {
-      url = this.predictUrl;
-      body = {
-        user_input,
-        chat_history,
-        model_settings: this.getModelSettings(),
-      };
+    const url = this.predictUrl;
+    const config = this.workspaceService.getWorkspaceConfig();
+    const body = {
+      llm_settings: {
+        temperature: config.temperature,
+        max_tokens: config.maxTokens,
+        top_p: config.topP,
+        top_k: config.topK,
+        model_name: config.LLMmodelName
+      },
+      user_input,
+      chat_history
     }

     const response = await this.request(url)
@@ -341,12 +206,8 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider {
       .method("POST")
       .headers({ "Content-Type": "application/json" })
       .body(body)
       .auth(this.authType, this.authToken)
       .send();
-    return datasource_id
-      ? {
-          ...response.data,
-          content: response.data.response,
-        }
-      : response.data.messages && response.data.messages[0];
+    return response.data.chat_history
+      && response.data.chat_history.filter((chat) => chat.role == "assistant")[0].content
   }

   async stopApplicationTask(taskId) {
@@ -358,26 +219,48 @@ module.exports = class AlitaServiceProvider extends CarrierServiceProvider {
     return response.status;
   }

-  async stopDatasourceTask(taskId) {
-    const response = await this.request(this.stopDatasourceTaskUrl + "/" + taskId)
-      .method("DELETE")
+  async createConversation(conversationName) {
+    const response = await this.request(this.getConversationUrl)
+      .method("GET")
       .headers({ "Content-Type": "application/json" })
       .auth(this.authType, this.authToken)
       .send();
-    return response.status;
+
+    const existingConversation = response.data.rows.find((conv) => conv.name === conversationName);
+    if (existingConversation) {
+      await this.request(this.getConversationUrl + "/" + existingConversation.id)
+        .method("DELETE")
+        .headers({ "Content-Type": "application/json" })
+        .auth(this.authType, this.authToken)
+        .send();
+    }
+    const body = {
+      name: conversationName,
+      is_private: true,
+      participants: []
+    }
+
+    const createdConversationResponse = await this.request(this.getConversationUrl)
+      .method("POST")
+      .headers({ "Content-Type": "application/json" })
+      .auth(this.authType, this.authToken)
+      .body(body)
+      .send();
+    return createdConversationResponse.data;
+
   }

-  async getDeployments() {
-    const response = await this.request(this.getDeploymentsUrl)
-      .method("GET")
+  async stopDatasourceTask(taskId) {
+    const response = await this.request(this.stopDatasourceTaskUrl + "/" + taskId)
+      .method("DELETE")
       .headers({ "Content-Type": "application/json" })
       .auth(this.authType, this.authToken)
       .send();
-    return response.data;
+    return response.status;
   }

   async getEmbeddings() {
-    const response = await this.request(this.getEmbeddingsUrl)
+    const response = await this.request(this.getConfigurationsUrl)
       .method("GET")
       .headers({ "Content-Type": "application/json" })
       .auth(this.authType, this.authToken)
diff --git a/services/providers/base.provider.js b/services/providers/base.provider.js
index 2de864e..47bdaca 100644
--- a/services/providers/base.provider.js
+++ b/services/providers/base.provider.js
@@ -71,10 +71,6 @@ module.exports = class LlmServiceProvider {
     throw new Error("Not implemented");
   }

-  async syncPrompts() {
-    return true;
-  }
-
   async getEmbeddings() {
     throw new Error("Not implemented");
   }
diff --git a/services/providers/carrier.provider.js b/services/providers/carrier.provider.js
index 56dda7d..e7c51e4 100644
--- a/services/providers/carrier.provider.js
+++ b/services/providers/carrier.provider.js
@@ -22,7 +22,7 @@ module.exports = class CarrierServiceProvider extends LlmServiceProvider {
     this.getPromptsUrl = `${this.config.LLMserverURL}/prompts/prompts/default/${this.config.projectID}`;
     this.updatePromptsUrl = `${this.config.LLMserverURL}/prompts/prompts`;
     this.predictUrl = `${this.config.LLMserverURL}/prompts/predict/default/${this.config.projectID}`;
-    this.getEmbeddingsUrl = `${this.config.LLMserverURL}/embeddings/embedding/default/${this.config.projectID}`;
+    this.getConfigurationsUrl = `${this.config.LLMserverURL}/embeddings/embedding/default/${this.config.projectID}`;
     this.sumilarityUrl = `${this.config.LLMserverURL}/embeddings/similarity/default/${this.config.projectID}`;
     this.authToken = this.config.LLMauthToken;
     this.authType = "Bearer";
@@ -54,7 +54,7 @@ module.exports = class CarrierServiceProvider extends LlmServiceProvider {

   async getEmbeddings() {
     try {
-      const response = await this.request(this.getEmbeddingsUrl)
+      const response = await this.request(this.getConfigurationsUrl)
         .method("GET")
         .headers({ "Content-Type": "application/json" })
         .auth(this.authType, this.authToken)
@@ -66,36 +66,6 @@ module.exports = class CarrierServiceProvider extends LlmServiceProvider {
     }
   }

-  async syncPrompts() {
-    const prompts = await this.getPrompts();
-    const _addedPrompts = [];
-    for (var i = 0; i < prompts.length; i++) {
-      var prompt = prompts[i];
-      var tags = prompt.tags.map((tag) => tag.tag.toLowerCase());
-      if (tags.includes("code")) {
-        _addedPrompts.push(prompt.name);
-        await this.addPrompt(
-          prompt.name,
-          prompt.description ? prompt.description : "",
-          { prompt_id: prompt.id, integration_uid: prompt.integration_uid },
-          [],
-          {},
-          true
-        );
-      }
-    }
-    const workspaceConfig = this.workspaceService.getWorkspaceConfig();
-    var promptsMapping = await this.workspaceService.readContent(
-      path.join(workspaceConfig.workspacePath, workspaceConfig.promptLib, "./prompts.json"),
-      true
-    );
-    for (const [key, value] of Object.entries(promptsMapping)) {
-      if (!_addedPrompts.includes(key) && value.external) {
-        await this.removePrompt(key);
-      }
-    }
-  }
-
   async syncEmbeddings() {
     const embeddings = await this.getEmbeddings();
     for (const embedding of embeddings) {