Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -160,3 +160,7 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

.entari/
.vscode/
data.db
5 changes: 3 additions & 2 deletions entari.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ basic:
ignores:
- httpx._client
- httpcore._trace
- aiosqlite.core
prefix:
- /
schema: true
Expand All @@ -18,12 +19,11 @@ plugins:
.record_message:
record_send: true
::echo: {}
::help: {}
::inspect: {}
::auto_reload:
watch_config: true
entari_plugin_llm:
api_key: ${{ env.LLM_API_KEY }}
api_key: ${{ env.OPENAI_API_KEY }}
base_url: https://api.openai.com/v1
models:
- name: "deepseek/deepseek-chat"
Expand All @@ -33,6 +33,7 @@ plugins:
api_key: ${{ env.GEMINI_API_KEY }}
base_url: https://generativelanguage.googleapis.com
- name: "gpt-4.1-mini"
alias: "gpt"

prompt: |
你是一个由用户提供信息并回答问题的智能助手。请根据用户提供的信息,尽可能准确和简洁地回答他们的问题。如果信息不足以回答问题,请礼貌地告知用户需要更多信息。
Expand Down
424 changes: 299 additions & 125 deletions pdm.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ dependencies = [
"openai>=2.14.0",
"docstring-parser>=0.17.0",
"litellm>=1.80.9",
"entari-plugin-database>=0.2.3",
]
requires-python = ">=3.10"
readme = "README.md"
Expand Down
11 changes: 9 additions & 2 deletions src/entari_plugin_llm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from arclet.entari import declare_static, metadata
from arclet.entari import declare_static, metadata

from .config import Config
from .events import LLMToolEvent as LLMToolEvent
Expand All @@ -14,5 +14,12 @@
declare_static()
_suppress_litellm_logging()

from . import listeners as listeners
from .handlers import chat as chat
from .handlers import check as check
from .handlers import command as command
from .service import llm as llm

__all__ = [
"llm",
"LLMToolEvent",
]
File renamed without changes.
54 changes: 54 additions & 0 deletions src/entari_plugin_llm/_jsondata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import json
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Any

from arclet.entari import local_data


@dataclass(slots=True)
class LLMState:
default_model: str | None = None

@classmethod
def from_dict(cls, data: dict[str, Any]) -> "LLMState":
value = data.get("default_model")
default_model = value if isinstance(value, str) and value else None
return cls(default_model=default_model)

def to_dict(self) -> dict[str, Any]:
return asdict(self)


def _state_path() -> Path:
    """Return the path of this plugin's persisted JSON state file."""
    return local_data.get_data_file("entari_plugin_llm", "state.json")


def _read_state() -> LLMState:
    """Load the persisted state, falling back to defaults on any failure.

    Any read or parse problem (missing file, I/O error, malformed JSON,
    non-object top level) yields a fresh default ``LLMState`` rather than
    raising — state is best-effort.
    """
    # EAFP: FileNotFoundError is an OSError, so a separate exists() check
    # would only add a TOCTOU race between the check and the read.
    try:
        data = json.loads(_state_path().read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError):
        return LLMState()
    if not isinstance(data, dict):
        return LLMState()
    return LLMState.from_dict(data)


def _write_state(data: LLMState) -> None:
    """Persist *data* as pretty-printed UTF-8 JSON at the state path."""
    path = _state_path()
    # Robustness: make sure the data directory exists so a fresh install
    # (or a wiped data dir) can still write state.
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(
        json.dumps(data.to_dict(), ensure_ascii=False, indent=2), encoding="utf-8"
    )


def get_default_model() -> str | None:
    """Return the persisted default model name, or None if none is stored."""
    return _read_state().default_model


def set_default_model(model_name: str | None) -> None:
    """Persist *model_name* as the default model.

    A falsy value (None or the empty string) clears the stored default.
    """
    state = _read_state()
    state.default_model = model_name or None
    _write_state(state)
19 changes: 15 additions & 4 deletions src/entari_plugin_llm/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
from arclet.entari import BasicConfModel, plugin_config
from arclet.entari.config import model_field

from ._jsondata import get_default_model
from .exception import ModelNotFoundError


class ScopedModel(BasicConfModel):
name: str
Expand Down Expand Up @@ -36,14 +39,22 @@ class Config(BasicConfModel):
def get_model_config(model_name: str | None = None) -> ScopedModel:
    """Resolve *model_name* (a model name or alias) to its configuration.

    When *model_name* is None the persisted default model is used, falling
    back to the first configured model when no default has been stored yet.
    Missing per-model credentials are filled in from the top-level config.

    Raises:
        ModelNotFoundError: if no models are configured, or the name matches
            no configured model's name or alias.
    """
    if model_name is None:
        if not _conf.models:
            raise ModelNotFoundError("No models configured.")
        # Bug fix: get_default_model() may return None before a default has
        # been persisted; fall back to the first configured model instead of
        # failing the lookup with "Model None not found".
        model_name = get_default_model() or _conf.models[0].name

    for model in _conf.models:
        if model.name == model_name or model.alias == model_name:
            # NOTE(review): these fill-ins mutate the shared config object in
            # place, so they persist across calls — confirm this is intended.
            if model.api_key is None:
                model.api_key = _conf.api_key
            if (
                model.base_url == "https://api.openai.com/v1"
                and _conf.base_url != "https://api.openai.com/v1"
            ):
                model.base_url = _conf.base_url
            return model
    raise ModelNotFoundError(f"Model {model_name} not found in config.")


def get_model_list() -> set[str]:
    """Return every model name plus every non-empty alias from the config."""
    names: set[str] = set()
    for model in _conf.models:
        names.add(model.name)
        if model.alias:
            names.add(model.alias)
    return names
2 changes: 2 additions & 0 deletions src/entari_plugin_llm/exception.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
class ModelNotFoundError(Exception):
    """Raised when a requested model name/alias is absent from the config."""
40 changes: 40 additions & 0 deletions src/entari_plugin_llm/handlers/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
from collections import deque

from arclet.entari import MessageCreatedEvent, Session, filter_
from arclet.entari.config import config_model_validate
from arclet.entari.event.config import ConfigReload
from arclet.entari.event.send import SendResponse
from arclet.letoderea import BLOCK, on
from arclet.letoderea.typing import Contexts

from ..config import Config, _conf
from .manager import LLMSessionManager

# Sequence numbers of events the plugin has already replied to. Bounded so
# the memory footprint stays constant.
RECORD = deque(maxlen=16)


@on(SendResponse)
async def _record(event: SendResponse):
    """Remember the sn of every event for which a reply was actually sent."""
    if not event.result or not event.session:
        return
    RECORD.append(event.session.event.sn)


@on(MessageCreatedEvent, priority=1000).if_(filter_.to_me)
async def run_conversation(session: Session, ctx: Contexts):
    """Feed a to-me message to the LLM and send its answer back.

    Always blocks further handlers for the event, whether or not a reply is
    produced here.
    """
    # Skip events we have already replied to (tracked via RECORD).
    if session.event.sn in RECORD:
        return BLOCK

    text = session.elements.extract_plain_text()
    reply = await LLMSessionManager.chat(user_input=text, ctx=ctx, session=session)
    await session.send(reply)
    return BLOCK


@on(ConfigReload)
async def reload_config(event: ConfigReload):
    """Apply a hot-reloaded plugin configuration to the live config object."""
    if event.scope != "plugin" or event.key not in ("entari_plugin_llm", "llm"):
        return
    new_conf = config_model_validate(Config, event.value)
    # Mutate the shared _conf in place so existing references pick up the new
    # values. NOTE(review): api_key/base_url are not refreshed here — confirm
    # whether that is intentional.
    _conf.models = new_conf.models
    _conf.prompt = new_conf.prompt
41 changes: 41 additions & 0 deletions src/entari_plugin_llm/handlers/check.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
from arclet.entari.event.lifespan import Ready
from arclet.letoderea import on

from .._jsondata import get_default_model, set_default_model
from ..config import _conf
from ..log import logger


@on(Ready)
async def _():
    """Validate the persisted default model against the loaded config on startup.

    Clears the stored default when no models are configured, seeds it with the
    first model when absent, resets it when it no longer matches any model,
    and normalizes an alias to the canonical model name.
    """
    if not _conf.models:
        set_default_model(None)
        logger.warning("未配置任何模型,已清空本地默认模型配置")
        return

    fallback = _conf.models[0].name
    stored = get_default_model()
    if not stored:
        set_default_model(fallback)
        logger.info(f"未检测到本地默认模型,已设置为首个模型: {fallback}")
        return

    matched = None
    for model in _conf.models:
        if stored in (model.name, model.alias):
            matched = model
            break

    if matched is None:
        set_default_model(fallback)
        logger.warning(
            f"本地默认模型不存在于当前配置: {stored},已重置为: {fallback}",
        )
        return

    # The stored value may be an alias; normalize to the canonical name.
    if matched.name != stored:
        set_default_model(matched.name)
        logger.info(f"已将本地默认模型标准化为模型名: {matched.name}")

Loading