Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions custom_components/custom_conversation/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@


class CustomLLMAPI(llm.API):
"""An API for the Custom Conversation integration to use to call Home Assistant services."""
"""MCP server for Custom Conversation (intents, scripts, and managed prompts as LLM tools)."""

def __init__(
self,
Expand All @@ -50,7 +50,7 @@ def __init__(
conversation_config_entry: ConfigEntry | None = None,
) -> None:
"""Initialize the API."""
super().__init__(hass=hass, id=LLM_API_ID, name="Custom Conversation LLM API")
super().__init__(hass=hass, id=LLM_API_ID, name="Custom Conversation MCP server")
self.cached_slugify = cache(
partial(unicode_slug.slugify, separator="_", lowercase=False)
)
Expand All @@ -67,7 +67,7 @@ def set_langfuse_client(self, langfuse_client: Any) -> None:
async def async_get_api_instance(
self, llm_context: llm.LLMContext
) -> llm.APIInstance:
"""Return an instance of the Custom Conversation LLM API."""
"""Return an instance of this integration's MCP server API."""
if llm_context.assistant:
exposed_entities: dict | None = _get_exposed_entities(
self.hass, llm_context.assistant
Expand Down
77 changes: 43 additions & 34 deletions custom_components/custom_conversation/cc_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from . import CustomConversationConfigEntry
from .api import CustomLLMAPI
from .const import DOMAIN, LLM_API_ID, LOGGER
from .const import DOMAIN, LLM_API_ID, LOGGER, coerce_llm_hass_api_ids
from .prompt_manager import PromptContext, PromptManager


Expand All @@ -25,7 +25,7 @@ async def async_update_llm_data(
config_entry: CustomConversationConfigEntry,
chat_log: ChatLog,
prompt_manager: PromptManager,
llm_api_name: str | None = None,
llm_hass_api: str | list[str] | None = None,
):
"""Process the incoming message for the LLM.

Expand Down Expand Up @@ -53,36 +53,44 @@ async def async_update_llm_data(
user_name = user.name

llm_api: llm.APIInstance | None = None
api_ids = coerce_llm_hass_api_ids(llm_hass_api)

if llm_api_name:
if api_ids:
try:
if llm_api_name == LLM_API_ID:
LOGGER.debug("Using Custom LLM API for request")
api_instance = CustomLLMAPI(
hass,
user_name,
conversation_config_entry=config_entry,
)
if (
langfuse_client := hass.data.get(DOMAIN,{})
.get(config_entry.entry_id,{})
.get("langfuse_client")
):
LOGGER.debug("Setting langfuse client for Custom LLM API")
api_instance.set_langfuse_client(langfuse_client)
llm_api = await api_instance.async_get_api_instance(llm_context)
registered = {api.id: api for api in llm.async_get_apis(hass)}
apis_to_merge: list[llm.API] = []
for api_id in api_ids:
if api_id == LLM_API_ID:
custom_api = CustomLLMAPI(
hass,
user_name,
conversation_config_entry=config_entry,
)
if (
langfuse_client := hass.data.get(DOMAIN, {})
.get(config_entry.entry_id, {})
.get("langfuse_client")
):
custom_api.set_langfuse_client(langfuse_client)
apis_to_merge.append(custom_api)
LOGGER.debug("Including Custom LLM API in merge")
elif api_id in registered:
apis_to_merge.append(registered[api_id])
LOGGER.debug("Including LLM API %s in merge", api_id)
else:
raise HomeAssistantError(f"API {api_id} not found")

if len(apis_to_merge) == 1:
llm_api = await apis_to_merge[0].async_get_api_instance(llm_context)
else:
LOGGER.debug("Using LLM API with ID %s", llm_api_name)
llm_api = await llm.async_get_api(
hass,
llm_api_name,
llm_context,
llm_api = await llm.MergedAPI(apis_to_merge).async_get_api_instance(
llm_context
)

except HomeAssistantError as err:
LOGGER.error(
"Error getting LLM API %s for %s: %s",
llm_api_name,
"Error getting LLM APIs %s for %s: %s",
api_ids,
DOMAIN,
err,
)
Expand All @@ -92,10 +100,10 @@ async def async_update_llm_data(
"Error preparing LLM API",
)
raise ConverseError(
f"Error getting LLM API {llm_api_name}",
conversation_id=chat_log.conversation_id,
response=intent_response,
) from err
f"Error getting LLM APIs {api_ids}",
conversation_id=chat_log.conversation_id,
response=intent_response,
) from err
prompt_object = None
try:
prompt_context = PromptContext(
Expand Down Expand Up @@ -126,15 +134,16 @@ async def async_update_llm_data(
prompt_object, prompt = prompt
LOGGER.debug("Base prompt: %s", prompt)
else:
# We're using a different API, so we need to combine the base prompt with
# the API prompt
# Non-custom API (or merged APIs): base prompt plus API / merged API text
base_prompt = await prompt_manager.async_get_base_prompt(
prompt_context,
config_entry,
)
prompt_parts = [base_prompt]
prompt_parts.append(llm_api.api_prompt)
prompt = "\n".join(prompt_parts)
if isinstance(base_prompt, tuple):
prompt_object, base_text = base_prompt
else:
base_text = base_prompt
prompt = "\n".join([base_text, llm_api.api_prompt])
LOGGER.debug("Combined prompt: %s", prompt)

except TemplateError as err:
Expand Down
39 changes: 24 additions & 15 deletions custom_components/custom_conversation/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
CONF_TOP_P,
CONFIG_VERSION,
CONFIGURING_SECONDARY_PROVIDER,
coerce_llm_hass_api_ids,
DEFAULT_API_PROMPT_BASE,
DEFAULT_API_PROMPT_DEVICE_KNOWN_LOCATION,
DEFAULT_API_PROMPT_DEVICE_UNKNOWN_LOCATION,
Expand All @@ -83,7 +84,6 @@
_LOGGER = LOGGER

DEFAULT_OPTIONS = {
CONF_LLM_HASS_API: "none",
CONF_AGENTS_SECTION: {
CONF_ENABLE_HASS_AGENT: True,
CONF_ENABLE_LLM_AGENT: True,
Expand Down Expand Up @@ -505,9 +505,13 @@ async def async_step_init(
# Process user input before saving
processed_input = {**user_input} # Start with a copy

# Handle potential "none" value for Hass API control
if processed_input.get(CONF_LLM_HASS_API) == "none":
processed_input.pop(CONF_LLM_HASS_API, None) # Remove if 'none'
# MCP server ids (CONF_LLM_HASS_API): store list or omit when none selected
llm_apis = processed_input.get(CONF_LLM_HASS_API)
normalized = coerce_llm_hass_api_ids(llm_apis)
if normalized:
processed_input[CONF_LLM_HASS_API] = normalized
else:
processed_input.pop(CONF_LLM_HASS_API, None)

# Handle empty ignored intents - use default
ignored_intents_section = processed_input.get(
Expand Down Expand Up @@ -552,12 +556,21 @@ async def async_step_init(
CONF_MAX_TOKENS,
default=options.get(CONF_MAX_TOKENS, DEFAULT_MAX_TOKENS),
): vol.All(vol.Coerce(int), vol.Range(min=1)),
# Hass API Control
# MCP servers (multiple merged like core conversation)
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},
default="none",
): SelectSelector(SelectSelectorConfig(options=hass_apis)),
description={
"suggested_value": coerce_llm_hass_api_ids(
options.get(CONF_LLM_HASS_API)
)
or []
},
default=[],
): SelectSelector(
SelectSelectorConfig(
options=hass_apis, multiple=True, sort=True
)
),
# Agent Section
vol.Required(CONF_AGENTS_SECTION): section(
vol.Schema(
Expand Down Expand Up @@ -756,15 +769,11 @@ async def async_step_init(
)

def _get_hass_apis(self, hass: HomeAssistant) -> list[SelectOptionDict]:
"""Get available Home Assistant LLM APIs."""
hass_apis: list[SelectOptionDict] = [
SelectOptionDict(label="No control", value="none")
]
hass_apis.extend(
"""Get registered MCP servers / LLM tool APIs for multi-select."""
return [
SelectOptionDict(label=api.name, value=api.id)
for api in llm.async_get_apis(hass)
)
return hass_apis
]

async def _get_intents(self, hass: HomeAssistant) -> list[SelectOptionDict]:
"""Get available intents."""
Expand Down
24 changes: 24 additions & 0 deletions custom_components/custom_conversation/const.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
"""Constants for the Custom Conversation integration."""

from __future__ import annotations

import logging
from typing import Any

DOMAIN = "custom_conversation"
LOGGER = logging.getLogger(__package__)
Expand Down Expand Up @@ -117,3 +120,24 @@
# Deprecated constants for migrations only
CONF_CHAT_MODEL = "chat_model"
CONF_BASE_URL = "base_url"


def coerce_llm_hass_api_ids(raw: Any) -> list[str] | None:
    """Normalize CONF_LLM_HASS_API (MCP server / LLM API ids) from options.

    Accepts the legacy single-string form, the current list form, or None.
    The placeholder values "" and "none" mean "no API selected" and are
    dropped. Returns a de-duplicated list of ids in first-seen order, or
    None when nothing meaningful remains (including any unexpected type).
    """
    if isinstance(raw, str):
        # Legacy single-select storage: a lone id, or a "nothing" sentinel.
        return None if raw in ("", "none") else [raw]
    if not isinstance(raw, list):
        # Covers None and any unexpected type: treat as "nothing selected".
        return None
    # Drop falsy entries and the "none" sentinel, then de-duplicate while
    # preserving order (dict.fromkeys keeps first occurrence).
    kept = [entry for entry in raw if entry and entry != "none"]
    unique = list(dict.fromkeys(kept))
    return unique if unique else None
8 changes: 3 additions & 5 deletions custom_components/custom_conversation/conversation.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@
CONF_SECONDARY_PROVIDER,
CONF_SECONDARY_PROVIDER_ENABLED,
CONF_TEMPERATURE,
coerce_llm_hass_api_ids,
CONF_TOP_P,
CONVERSATION_ENDED_EVENT,
CONVERSATION_ERROR_EVENT,
Expand Down Expand Up @@ -341,7 +342,7 @@ def __init__(
model="Custom Conversation",
entry_type=dr.DeviceEntryType.SERVICE,
)
if self.entry.options.get(CONF_LLM_HASS_API):
if coerce_llm_hass_api_ids(self.entry.options.get(CONF_LLM_HASS_API)):
self._attr_supported_features = (
conversation.ConversationEntityFeature.CONTROL
)
Expand Down Expand Up @@ -579,16 +580,13 @@ async def _async_handle_message_with_llm(

try:
LOGGER.debug("Updating LLM Data")
llm_api = self.entry.options.get(CONF_LLM_HASS_API)
if llm_api == "none":
llm_api = None
prompt_object = await async_update_llm_data(
self.hass,
user_input,
self.entry,
chat_log,
self.prompt_manager,
llm_api,
self.entry.options.get(CONF_LLM_HASS_API),
)
if prompt_object:
LOGGER.debug(
Expand Down
13 changes: 7 additions & 6 deletions custom_components/custom_conversation/strings.json
Original file line number Diff line number Diff line change
Expand Up @@ -58,22 +58,23 @@
"init": {
"data": {
"instructions_prompt": "Instructions",
"llm_hass_api": "Choose the API to expose to the LLM"
"llm_hass_api": "MCP servers for the model"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"prompt": "Instruct how the LLM should respond. This can be a template.",
"llm_hass_api": "Each option is an MCP server available to conversation agents in Home Assistant. Assist is the built-in server from core; the MCP Client integration registers additional servers. Custom Conversation MCP server is this integration's server. Pick one or more; their tools and instructions are merged for the LLM."
},
"sections": {
"ignored_intents_section": {
"name": "Ignored Intents",
"description": "Configure which intents should be ignored by the Custom LLM API.\nThose recommended by the current Home Assistant version are marked.",
"description": "Configure which intents this integration's MCP server should not expose as tools.\nThose recommended by the current Home Assistant version are marked.",
"data": {
"ignored_intents": "Ignored Intents"
}
},
"agents": {
"name": "Custom LLM API Agents",
"description": "Configure which agents are enabled for the Custom LLM API.\nEnabling both will try to resolve intents locally first,\nand then fall back to the configured LLM.",
"name": "Conversation agents",
"description": "Configure how requests are handled before the configured model runs.\nEnabling both tries the Home Assistant agent first, then the LLM agent.",
"data": {
"enable_home_assistant_agent": "Enable Home Assistant Agent",
"enable_llm_agent": "Enable LLM Agent"
Expand Down Expand Up @@ -131,7 +132,7 @@
"enable_langfuse": "Enable Langfuse for prompt management",
"base_prompt_id": "The ID Langfuse prompt to use when the API is not enabled.\nHome assistant templating is not supported, but the variables '{{current_time}}' and '{{current_date}}', '{{ha_name}}' and '{{user_name}}' are available.",
"base_prompt_label": "The label to select the version of the base prompt (ie, 'production', or 'latest')",
"api_prompt_id": "The ID of the Langfuse prompt to use when the custom LLM API is enabled.\nNote that because Langfuse does not currently support combining multiple prompts, this will be the only prompt sent, it will not be combined with the one above as is the case with the non-Langfuse support.\nIn addition to the above variables, '{{location}}', '{{supports_timers}}' and '{{exposed_entities}}' are available.",
"api_prompt_id": "The ID of the Langfuse prompt to use when this integration's MCP server (Custom Conversation MCP server) is selected under MCP servers for the model.\nNote that because Langfuse does not currently support combining multiple prompts, this will be the only prompt sent, it will not be combined with the one above as is the case with the non-Langfuse support.\nIn addition to the above variables, '{{location}}', '{{supports_timers}}' and '{{exposed_entities}}' are available.",
"api_prompt_label": "The label to select the version of the API prompt (ie, 'production', or 'latest')",
"langfuse_host": "The host of the Langfuse API",
"langfuse_public_key": "The public key for the Langfuse API",
Expand Down
13 changes: 7 additions & 6 deletions custom_components/custom_conversation/translations/en.json
Original file line number Diff line number Diff line change
Expand Up @@ -58,22 +58,23 @@
"init": {
"data": {
"instructions_prompt": "Instructions",
"llm_hass_api": "Choose the API to expose to the LLM"
"llm_hass_api": "MCP servers for the model"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"prompt": "Instruct how the LLM should respond. This can be a template.",
"llm_hass_api": "Each option is an MCP server available to conversation agents in Home Assistant. Assist is the built-in server from core; the MCP Client integration registers additional servers. Custom Conversation MCP server is this integration's server. Pick one or more; their tools and instructions are merged for the LLM."
},
"sections": {
"ignored_intents_section": {
"name": "Ignored Intents",
"description": "Configure which intents should be ignored by the Custom LLM API.\nThose recommended by the current Home Assistant version are marked.",
"description": "Configure which intents this integration's MCP server should not expose as tools.\nThose recommended by the current Home Assistant version are marked.",
"data": {
"ignored_intents": "Ignored Intents"
}
},
"agents": {
"name": "Custom LLM API Agents",
"description": "Configure which agents are enabled for the Custom LLM API.\nEnabling both will try to resolve intents locally first,\nand then fall back to the configured LLM.",
"name": "Conversation agents",
"description": "Configure how requests are handled before the configured model runs.\nEnabling both tries the Home Assistant agent first, then the LLM agent.",
"data": {
"enable_home_assistant_agent": "Enable Home Assistant Agent",
"enable_llm_agent": "Enable LLM Agent"
Expand Down Expand Up @@ -131,7 +132,7 @@
"enable_langfuse": "Enable Langfuse for prompt management",
"base_prompt_id": "The ID Langfuse prompt to use when the API is not enabled.\nHome assistant templating is not supported, but the variables '{{current_time}}' and '{{current_date}}', '{{ha_name}}' and '{{user_name}}' are available.",
"base_prompt_label": "The label to select the version of the base prompt (ie, 'production', or 'latest')",
"api_prompt_id": "The ID of the Langfuse prompt to use when the custom LLM API is enabled.\nNote that because Langfuse does not currently support combining multiple prompts, this will be the only prompt sent, it will not be combined with the one above as is the case with the non-Langfuse support.\nIn addition to the above variables, '{{location}}', '{{supports_timers}}' and '{{exposed_entities}}' are available.",
"api_prompt_id": "The ID of the Langfuse prompt to use when this integration's MCP server (Custom Conversation MCP server) is selected under MCP servers for the model.\nNote that because Langfuse does not currently support combining multiple prompts, this will be the only prompt sent, it will not be combined with the one above as is the case with the non-Langfuse support.\nIn addition to the above variables, '{{location}}', '{{supports_timers}}' and '{{exposed_entities}}' are available.",
"api_prompt_label": "The label to select the version of the API prompt (ie, 'production', or 'latest')",
"langfuse_host": "The host of the Langfuse API",
"langfuse_public_key": "The public key for the Langfuse API",
Expand Down
2 changes: 1 addition & 1 deletion unit_tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ async def test_custom_llm_api_init(hass, config_entry):
assert api._request_user_name == "Test User"
assert api.conversation_config_entry is config_entry
assert api.id == LLM_API_ID
assert api.name == "Custom Conversation LLM API"
assert api.name == "Custom Conversation MCP server"

def test_custom_llm_api_set_langfuse_client(custom_llm_api, mock_prompt_manager):
"""Test setting the Langfuse client."""
Expand Down
Loading