Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions agents/canvas/src/canvas/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@ async def canvas_agent(
yield "Can't run without a LLM."
return

await context.store(message)
edit_request = await canvas.parse_canvas_edit_request(message=message)

user_text_content = _get_text(message)
Expand Down Expand Up @@ -132,12 +131,6 @@ async def canvas_agent(
parts=[TextPart(text=content_delta)],
)

final_artifact = AgentArtifact(
artifact_id=artifact.artifact_id,
name=artifact.name,
parts=[TextPart(text=buffer)],
)
await context.store(final_artifact)


def serve():
Expand Down
22 changes: 2 additions & 20 deletions agents/chat/src/chat/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,19 +24,18 @@
PlatformApiExtensionServer,
PlatformApiExtensionSpec,
)
from kagenti_adk.a2a.types import AgentArtifact, AgentMessage
from kagenti_adk.a2a.types import AgentArtifact
from kagenti_adk.server import Server
from kagenti_adk.server.context import RunContext
from kagenti_adk.server.middleware.platform_auth_backend import PlatformAuthBackend
from kagenti_adk.server.store.platform_context_store import PlatformContextStore
from beeai_framework.adapters.agentstack.backend.chat import AgentStackChatModel
from beeai_framework.agents.requirement import RequirementAgent
from beeai_framework.agents.requirement.events import (
RequirementAgentFinalAnswerEvent,
RequirementAgentSuccessEvent,
)
from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool
from beeai_framework.backend import AssistantMessage, ChatModelParameters
from beeai_framework.backend import ChatModelParameters
from beeai_framework.errors import FrameworkError
from beeai_framework.middleware.trajectory import GlobalTrajectoryMiddleware
from beeai_framework.tools import AnyTool, Tool
Expand All @@ -45,7 +44,6 @@
from beeai_framework.tools.weather import OpenMeteoTool
from openinference.instrumentation.beeai import BeeAIInstrumentor

from chat.helpers.citations import extract_citations
from chat.helpers.trajectory import TrajectoryContent
from chat.tools.files.file_creator import FileCreatorTool, FileCreatorToolOutput
from chat.tools.files.file_reader import FileReaderTool
Expand Down Expand Up @@ -149,8 +147,6 @@ async def chat(
_p: Annotated[PlatformApiExtensionServer, PlatformApiExtensionSpec()],
):
"""Agent with memory and access to web search, Wikipedia, and weather."""
await context.store(input)

# Send initial trajectory
yield trajectory.trajectory_metadata(title="Starting", content="Received your request")

Expand Down Expand Up @@ -220,7 +216,6 @@ async def chat(
middlewares=[GlobalTrajectoryMiddleware(included=[Tool])],
)

final_answer: AssistantMessage | None = None
new_messages = [to_framework_message(item, extracted_files) for item in history]

try:
Expand All @@ -244,8 +239,6 @@ async def chat(
case RequirementAgentFinalAnswerEvent(delta=delta):
yield delta
case RequirementAgentSuccessEvent(state=state):
final_answer = state.answer

last_step = state.steps[-1]
if last_step.tool and last_step.tool.name == FinalAnswerTool.name: # internal tool
continue
Expand All @@ -259,24 +252,14 @@ async def chat(
group_id=last_step.id,
)
yield metadata
await context.store(AgentMessage(metadata=metadata))

if isinstance(last_step.output, FileCreatorToolOutput):
for file_info in last_step.output.result.files:
part = file_info.file.to_part()
part.filename = file_info.display_filename
artifact = AgentArtifact(name=file_info.display_filename, parts=[part])
yield artifact
await context.store(artifact)

if final_answer:
citations, clean_text = extract_citations(final_answer.text)

message = AgentMessage(
text=clean_text,
metadata=(citation.citation_metadata(citations=citations) if citations else None),
)
await context.store(message)
except FrameworkError as err:
raise RuntimeError(err.explain())

Expand All @@ -287,7 +270,6 @@ def serve():
host=os.getenv("HOST", "127.0.0.1"),
port=int(os.getenv("PORT", 8000)),
configure_telemetry=True,
context_store=PlatformContextStore(),
auth_backend=PlatformAuthBackend(),
)
except KeyboardInterrupt:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,6 @@ async def content_builder_agent(
return

started_at = datetime.now(timezone.utc)
await context.store(data=message)

subagents: list[SubAgent] = []
for sub_agent in AVAILABLE_SUBAGENTS:
Expand Down Expand Up @@ -140,7 +139,6 @@ async def content_builder_agent(
title=data["name"], content=json.dumps(obj=data["args"])
)
yield tool_call_metadata
await context.store(data=AgentMessage(metadata=tool_call_metadata))
tool_calls.clear()

elif last_msg.tool_call_chunks:
Expand All @@ -151,12 +149,10 @@ async def content_builder_agent(
tool_calls[tc_id]["args"] += tc.get("args") or ""
elif last_msg.text:
yield AgentMessage(text=last_msg.text)
await context.store(AgentMessage(text=last_msg.text))

elif isinstance(last_msg, ToolMessage) and last_msg.name and last_msg.text:
tool_message_metadata = trajectory.trajectory_metadata(title=last_msg.name, content=last_msg.text)
yield tool_message_metadata
await context.store(data=AgentMessage(metadata=tool_message_metadata))

updated_files = await agent_stack_backend.alist(order_by="created_at", order="asc", created_after=started_at)
for updated_file in updated_files:
Expand Down
5 changes: 0 additions & 5 deletions agents/rag/src/rag/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
from kagenti_adk.server import Server
from kagenti_adk.server.context import RunContext
from kagenti_adk.server.middleware.platform_auth_backend import PlatformAuthBackend
from kagenti_adk.server.store.platform_context_store import PlatformContextStore
from beeai_framework.adapters.agentstack.backend.chat import AgentStackChatModel
from beeai_framework.agents.requirement import RequirementAgent
from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool
Expand Down Expand Up @@ -116,7 +115,6 @@ async def rag(
_: Annotated[PlatformApiExtensionServer, PlatformApiExtensionSpec()],
):
"""RAG agent that retrieves and generates text based on user queries."""
await context.store(input)
llm, embedding = _get_clients(llm_ext, embedding_ext)

history = [m async for m in context.load_history()]
Expand Down Expand Up @@ -181,7 +179,6 @@ async def rag(
phase="end",
).metadata(trajectory)
yield vector_store_create_metadata
await context.store(AgentMessage(metadata=vector_store_create_metadata))

tools.append(cast(Tool, VectorSearchTool(vector_store_id=vector_store_id, embedding_function=embedding)))
async for item in embed_all_files(
Expand Down Expand Up @@ -300,7 +297,6 @@ async def handle_tool_success(event, meta):
metadata=(citation.citation_metadata(citations=citations) if citations else None),
)
yield message
await context.store(message)


def _get_clients(
Expand Down Expand Up @@ -331,7 +327,6 @@ def serve():
host=os.getenv("HOST", "127.0.0.1"),
port=int(os.getenv("PORT", 8000)),
configure_telemetry=True,
context_store=PlatformContextStore(),
auth_backend=PlatformAuthBackend(),
)
except KeyboardInterrupt:
Expand Down
17 changes: 14 additions & 3 deletions apps/adk-cli/src/kagenti_cli/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@
import httpx
import openai
import pydantic
from a2a.client import A2AClientError, Client, ClientConfig, ClientFactory
from a2a.client import A2AClientError, Client, ClientCallContext, ClientConfig, ClientFactory
from a2a.extensions.common import HTTP_EXTENSION_HEADER
from a2a.types import AgentCard
from kagenti_adk.platform.context import ContextToken
from google.protobuf.json_format import MessageToDict
Expand Down Expand Up @@ -131,16 +132,26 @@ class OpenAPISchema(pydantic.BaseModel):
return None


def make_extension_context(extensions: list[str] | None = None) -> ClientCallContext | None:
    """Build a ClientCallContext advertising the given A2A extension URIs.

    The URIs are joined into a single comma-separated value under the
    standard HTTP extension header service parameter. Returns ``None``
    when ``extensions`` is empty or not provided, so callers can pass the
    result straight through to client methods that accept an optional
    call context.
    """
    if extensions:
        header_value = ",".join(extensions)
        return ClientCallContext(service_parameters={HTTP_EXTENSION_HEADER: header_value})
    return None


@asynccontextmanager
async def a2a_client(agent_card: AgentCard, context_token: ContextToken) -> AsyncIterator[Client]:
async def a2a_client(
agent_card: AgentCard,
context_token: ContextToken,
) -> AsyncIterator[Client]:
try:
async with httpx.AsyncClient(
headers={"Authorization": f"Bearer {context_token.token.get_secret_value()}"},
follow_redirects=True,
timeout=timedelta(hours=1).total_seconds(),
) as httpx_client:
yield ClientFactory(ClientConfig(httpx_client=httpx_client, use_client_preference=True)).create(
card=agent_card
card=agent_card,
)
except A2AClientError as ex:
card_data = json.dumps(
Expand Down
Loading