diff --git a/claw/services/ai/ai_engine.c b/claw/services/ai/ai_engine.c index 1cc3b29..4f444ad 100644 --- a/claw/services/ai/ai_engine.c +++ b/claw/services/ai/ai_engine.c @@ -62,6 +62,7 @@ static char s_channel_hint[512]; * Auto-detected from model name in ai_engine_init(). */ static int s_openai_compat; +static int s_channel = AI_CHANNEL_SHELL; static inline void notify_status(int st, const char *detail) { @@ -84,6 +85,18 @@ void ai_set_channel_hint(const char *hint) } } +void ai_set_channel(int channel_id) +{ + if (channel_id >= 0 && channel_id < AI_CHANNEL_MAX) { + s_channel = channel_id; + } +} + +int ai_get_channel(void) +{ + return s_channel; +} + static const char *SYSTEM_PROMPT = "You are rt-claw, an AI assistant running on an embedded RTOS device. " "You can control hardware peripherals (GPIO, sensors, etc.) through tools. " @@ -689,7 +702,7 @@ int ai_chat(const char *user_msg, char *reply, size_t reply_size) } #endif - ai_memory_add_message("user", user_msg); + ai_memory_add("user", user_msg, s_channel); char *sys_prompt = build_system_prompt(); if (!sys_prompt) { @@ -697,7 +710,7 @@ int ai_chat(const char *user_msg, char *reply, size_t reply_size) return CLAW_ERROR; } - cJSON *messages = ai_memory_build_messages(); + cJSON *messages = ai_memory_build(s_channel); cJSON *tools = claw_tools_to_json(); int ret = ai_chat_with_messages(sys_prompt, messages, tools, @@ -711,7 +724,7 @@ int ai_chat(const char *user_msg, char *reply, size_t reply_size) * "unexpected tool_use_id" errors on the next API call. 
*/ if (ret == CLAW_OK && reply[0] != '\0') { - ai_memory_add_message("assistant", reply); + ai_memory_add("assistant", reply, s_channel); } claw_free(sys_prompt); diff --git a/claw/services/ai/ai_memory.c b/claw/services/ai/ai_memory.c index 0b5f9e4..75f54f6 100644 --- a/claw/services/ai/ai_memory.c +++ b/claw/services/ai/ai_memory.c @@ -7,6 +7,7 @@ #include "osal/claw_os.h" #include "claw_config.h" +#include "claw/services/ai/ai_engine.h" #include "claw/services/ai/ai_memory.h" #include @@ -30,23 +31,43 @@ #endif typedef struct { - char role[12]; /* "user" / "assistant" */ - char *content_json; /* heap: plain string or cJSON array string */ + char role[12]; /* "user" / "assistant" */ + char *content_json; /* heap: plain string or cJSON array string */ + uint8_t channel; /* AI_CHANNEL_* */ } mem_entry_t; static mem_entry_t s_entries[MEM_MAX_MSGS]; static int s_count; static claw_mutex_t s_lock; -static void drop_oldest_pair(void) +/* + * Drop the oldest user+assistant pair for a given channel. + * Falls back to dropping any oldest pair if no channel match. 
+ */ +static void drop_oldest_pair_for(int channel) { if (s_count < 2) { return; } + /* Try to find a pair matching the target channel */ + for (int i = 0; i < s_count - 1; i++) { + if (s_entries[i].channel == channel && + s_entries[i + 1].channel == channel) { + claw_free(s_entries[i].content_json); + claw_free(s_entries[i + 1].content_json); + if (i + 2 < s_count) { + memmove(&s_entries[i], &s_entries[i + 2], + (s_count - i - 2) * sizeof(mem_entry_t)); + } + s_count -= 2; + return; + } + } + + /* Fallback: drop the global oldest pair */ claw_free(s_entries[0].content_json); claw_free(s_entries[1].content_json); memmove(&s_entries[0], &s_entries[2], (s_count - 2) * sizeof(mem_entry_t)); s_count -= 2; @@ -65,7 +86,8 @@ int ai_memory_init(void) return CLAW_OK; } -void ai_memory_add_message(const char *role, const char *content_json) +void ai_memory_add(const char *role, const char *content_json, + int channel) { if (!role || !content_json) { return; @@ -74,11 +96,12 @@ void ai_memory_add_message(const char *role, const char *content_json) claw_mutex_lock(s_lock, CLAW_WAIT_FOREVER); while (s_count >= MEM_MAX_MSGS) { - drop_oldest_pair(); + drop_oldest_pair_for(channel); } mem_entry_t *e = &s_entries[s_count]; snprintf(e->role, sizeof(e->role), "%s", role); + e->channel = (uint8_t)channel; size_t len = strlen(content_json); e->content_json = claw_malloc(len + 1); @@ -90,7 +113,13 @@ void ai_memory_add_message(const char *role, const char *content_json) claw_mutex_unlock(s_lock); } -cJSON *ai_memory_build_messages(void) +/* Backward-compatible wrapper */ +void ai_memory_add_message(const char *role, const char *content_json) +{ + ai_memory_add(role, content_json, AI_CHANNEL_SHELL); +} + +cJSON *ai_memory_build(int channel) { cJSON *messages = cJSON_CreateArray(); if (!messages) { @@ -101,6 +130,10 @@ cJSON *ai_memory_build_messages(void) for (int i = 0; i < s_count; i++) { mem_entry_t *e = &s_entries[i]; + if (e->channel != channel) { + continue; + } + cJSON *msg = 
cJSON_CreateObject(); cJSON_AddStringToObject(msg, "role", e->role); @@ -123,6 +156,46 @@ cJSON *ai_memory_build_messages(void) return messages; } +cJSON *ai_memory_build_messages(void) +{ + return ai_memory_build(AI_CHANNEL_SHELL); +} + +void ai_memory_clear_channel(int channel) +{ + claw_mutex_lock(s_lock, CLAW_WAIT_FOREVER); + + int dst = 0; + for (int i = 0; i < s_count; i++) { + if (s_entries[i].channel == channel) { + claw_free(s_entries[i].content_json); + s_entries[i].content_json = NULL; + } else { + if (dst != i) { + s_entries[dst] = s_entries[i]; + } + dst++; + } + } + s_count = dst; + + claw_mutex_unlock(s_lock); + CLAW_LOGI(TAG, "channel %d memory cleared", channel); +} + +int ai_memory_count_channel(int channel) +{ + int n = 0; + claw_mutex_lock(s_lock, CLAW_WAIT_FOREVER); + for (int i = 0; i < s_count; i++) { + if (s_entries[i].channel == channel) { + n++; + } + } + claw_mutex_unlock(s_lock); + return n; +} + void ai_memory_clear(void) { claw_mutex_lock(s_lock, CLAW_WAIT_FOREVER); diff --git a/claw/services/im/feishu.c b/claw/services/im/feishu.c index 4588d58..d9d2134 100644 --- a/claw/services/im/feishu.c +++ b/claw/services/im/feishu.c @@ -749,6 +749,7 @@ static void ai_worker_thread(void *arg) add_reaction(in.msg_id, "Typing"); } + ai_set_channel(AI_CHANNEL_FEISHU); ai_set_channel_hint( " You are communicating via Feishu IM." 
" All outputs (including scheduled task results)" @@ -768,6 +769,7 @@ static void ai_worker_thread(void *arg) (unsigned long)claw_tick_ms(), ret, (unsigned)esp_get_free_heap_size()); + ai_set_channel(AI_CHANNEL_SHELL); ai_set_channel_hint(NULL); sched_set_reply_context(NULL, NULL); diff --git a/claw/services/im/telegram.c b/claw/services/im/telegram.c index 9f34771..60fc50a 100644 --- a/claw/services/im/telegram.c +++ b/claw/services/im/telegram.c @@ -334,6 +334,7 @@ static void tg_ai_worker(void *arg) /* Typing indicator before AI call */ send_chat_action(in.chat_id); + ai_set_channel(AI_CHANNEL_TELEGRAM); ai_set_channel_hint( " You are communicating via Telegram." " All outputs (including scheduled task results)" @@ -352,6 +353,7 @@ static void tg_ai_worker(void *arg) (unsigned long)claw_tick_ms(), ret, (unsigned)esp_get_free_heap_size()); + ai_set_channel(AI_CHANNEL_SHELL); ai_set_channel_hint(NULL); sched_set_reply_context(NULL, NULL); diff --git a/claw/tools/tool_sched.c b/claw/tools/tool_sched.c index e705f29..8394f25 100644 --- a/claw/tools/tool_sched.c +++ b/claw/tools/tool_sched.c @@ -180,6 +180,7 @@ static void ai_worker_thread(void *arg) claw_mutex_unlock(s_rctx_lock); } + ai_set_channel(AI_CHANNEL_SCHED); if (ctx->reply_fn) { ai_set_channel_hint( " This is a scheduled background task." 
@@ -252,6 +253,7 @@ static void ai_worker_thread(void *arg) } claw_free(reply); + ai_set_channel(AI_CHANNEL_SHELL); ai_set_channel_hint(NULL); claw_mutex_lock(s_worker_lock, CLAW_WAIT_FOREVER); diff --git a/include/claw/services/ai/ai_engine.h b/include/claw/services/ai/ai_engine.h index 4d2aa14..15e84d5 100644 --- a/include/claw/services/ai/ai_engine.h +++ b/include/claw/services/ai/ai_engine.h @@ -8,6 +8,13 @@ #include "osal/claw_os.h" +/* Channel IDs for conversation memory isolation */ +#define AI_CHANNEL_SHELL 0 +#define AI_CHANNEL_FEISHU 1 +#define AI_CHANNEL_TELEGRAM 2 +#define AI_CHANNEL_SCHED 3 +#define AI_CHANNEL_MAX 4 + /* Status phases for the progress callback */ #define AI_STATUS_THINKING 0 /* waiting for API response */ #define AI_STATUS_TOOL_CALL 1 /* executing a tool (detail = name) */ @@ -40,6 +47,15 @@ const char *ai_get_model(void); */ void ai_set_channel_hint(const char *hint); +/** + * Set the active channel ID for conversation memory isolation. + * Each channel maintains separate conversation history so messages + * from different sources (shell, Feishu, Telegram) don't mix. + * Use the AI_CHANNEL_* constants defined in this header (ai_engine.h). + */ +void ai_set_channel(int channel_id); +int ai_get_channel(void); + /** * Send a user message to the LLM and receive a reply. * Stores user/assistant messages in conversation memory. 
diff --git a/include/claw/services/ai/ai_memory.h b/include/claw/services/ai/ai_memory.h index b9694f7..a1a0bf0 100644 --- a/include/claw/services/ai/ai_memory.h +++ b/include/claw/services/ai/ai_memory.h @@ -18,10 +18,13 @@ /* ---- Short-term memory (RAM ring buffer, conversation turns) ---- */ int ai_memory_init(void); -void ai_memory_add_message(const char *role, const char *content_json); -cJSON *ai_memory_build_messages(void); /* caller frees */ +void ai_memory_add(const char *role, const char *content_json, + int channel); +cJSON *ai_memory_build(int channel); /* caller frees */ +void ai_memory_clear_channel(int channel); void ai_memory_clear(void); int ai_memory_count(void); +int ai_memory_count_channel(int channel); /* ---- Long-term memory (NVS Flash, persistent facts) ---- */