diff --git a/examples/langgraph-viral-youtube/.env.example b/examples/langgraph-viral-youtube/.env.example new file mode 100644 index 0000000..4ec011c --- /dev/null +++ b/examples/langgraph-viral-youtube/.env.example @@ -0,0 +1,2 @@ +ANTHROPIC_API_KEY=your-anthropic-api-key +TAVILY_API_KEY=your-tavily-api-key diff --git a/examples/langgraph-viral-youtube/README.md b/examples/langgraph-viral-youtube/README.md new file mode 100644 index 0000000..2f4d96e --- /dev/null +++ b/examples/langgraph-viral-youtube/README.md @@ -0,0 +1,162 @@ +# BMasterAI × LangGraph — Viral YouTube Short Generator + +> Four AI agents collaborate in a LangGraph pipeline to produce a complete, +> production-ready YouTube Short package — instrumented end-to-end with +> BMasterAI telemetry. + +--- + +## What It Does + +You give it a topic. Four specialist agents do the rest: + +``` +trend_researcher → hook_writer → script_writer → title_and_tags + ↓ + quality_gate + / \ + approved rejected + ↓ ↓ + END retry from hook +``` + +| Agent | Job | +|---|---| +| **Trend Researcher** | Searches the web (Tavily) for the most viral angle on your topic right now | +| **Hook Writer** | Writes the single best ≤12-word opening line to stop the scroll | +| **Script Writer** | Writes the full 45–60s script: Hook → Conflict → Build → Payoff → CTA | +| **Title & Tags** | Generates a viral title, 10 SEO tags, and thumbnail concept | +| **Quality Gate** | Validates the package; triggers a retry loop (max 2) if it falls short | + +**Output:** title, hook, full script, tags, thumbnail concept — ready to record. + +--- + +## BMasterAI Integration + +Every agent call is instrumented with structured telemetry: + +```python +bm.log_event(EventType.TASK_START, agent_id="hook-writer", metadata={...}) +bm.log_event(EventType.LLM_CALL, agent_id="hook-writer", metadata={...}) +bm.log_event(EventType.TASK_COMPLETE, ...) +bm.log_event(EventType.TASK_ERROR, ...) 
# on failure +``` + +Logs written to: +- `logs/agents.log` — human-readable +- `logs/agents.jsonl` — structured JSONL (import into any observability tool) + +--- + +## Quick Start + +```bash +# 1. Clone and enter +git clone https://github.com/travis-burmaster/bmasterai +cd bmasterai/examples/langgraph-viral-youtube + +# 2. Install deps +pip install -r requirements.txt + +# 3. Set API keys +cp .env.example .env +# Edit .env — add ANTHROPIC_API_KEY and TAVILY_API_KEY + +# 4. Run +python main.py "AI agents taking over software engineering" + +# Or interactive mode +python main.py +``` + +--- + +## Example Output + +``` +═══════════════════════════════════════════════════════════ +🎬 VIRAL YOUTUBE SHORT — PRODUCTION PACKAGE +═══════════════════════════════════════════════════════════ + +📌 TITLE +The AI Agent Nobody Warned You About + +🎣 HOOK (first 3–5 seconds) +Your job isn't safe — and your boss already knows. + +📝 SCRIPT +[HOOK] +Your job isn't safe — and your boss already knows. + +[CONFLICT] +AI agents don't just answer questions anymore. +They write code. They deploy it. They fix bugs at 3am. +No salary. No lunch break. No complaints. + +[BUILD] +Three things agents can already do better than you: +Debug production code in under 30 seconds. +Write and ship a full feature without a ticket. +Review a PR with zero ego. + +[PAYOFF] +This isn't coming. It's already here. +The engineers winning right now aren't fighting agents. +They're building them. + +[CTA] +Follow for the playbook. Your future self will thank you. + +🏷️ TAGS +AI agents, software engineering, future of work, ... 
+ +🖼️ THUMBNAIL CONCEPT +BOLD RED TEXT: "Your job is next" | background: robot at laptop, shocked dev in corner + +✅ Quality gate: PASSED (iteration 1) +``` + +--- + +## Architecture + +``` +main.py ← CLI entry point, invokes graph +graph.py ← LangGraph StateGraph definition + routing +agents.py ← All four agent functions + quality gate +state.py ← Shared VideoState TypedDict +logs/ ← BMasterAI telemetry output +output.json ← Final result saved after each run +``` + +--- + +## API Keys + +| Key | Get it at | +|---|---| +| `ANTHROPIC_API_KEY` | [console.anthropic.com](https://console.anthropic.com) | +| `TAVILY_API_KEY` | [tavily.com](https://tavily.com) (free tier available) | + +--- + +## How It Differs from Other Examples + +| This Example | bmasterai-agentcore | +|---|---| +| LangGraph (framework-agnostic) | Amazon Bedrock AgentCore | +| Multi-agent pipeline | Single research agent | +| Creative content generation | Technical research | +| Conditional retry loop | Linear execution | +| Tavily web search | No external search | + +--- + +## Part of the BMasterAI Examples Collection + +- [`bmasterai-agentcore`](../bmasterai-agentcore) — AWS Bedrock AgentCore + Strands +- [`agentcore-memory-agent-bmasterai`](../agentcore-memory-agent-bmasterai) — Telegram bot with persistent memory +- [`a2a-realestate-multiagent`](../a2a-realestate-multiagent) — A2A multi-agent coordination +- [`webmcp-gcp-agent`](../webmcp-gcp-agent) — GCP Cloud Run + WebMCP browser tools +- **`langgraph-viral-youtube`** ← you are here diff --git a/examples/langgraph-viral-youtube/agents.py b/examples/langgraph-viral-youtube/agents.py new file mode 100644 index 0000000..0bf31a8 --- /dev/null +++ b/examples/langgraph-viral-youtube/agents.py @@ -0,0 +1,263 @@ +""" +Four specialist agents + one quality-gate node. +Each is a plain function: (state) -> partial state dict. +BMasterAI logs every agent call. 
+""" +import os +from bmasterai.logging import configure_logging, EventType, LogLevel +from bmasterai.monitoring import get_monitor +from langchain_anthropic import ChatAnthropic +from langchain_core.messages import HumanMessage +from tavily import TavilyClient + +from state import VideoState + +# ── BMasterAI setup ─────────────────────────────────────────────────────────── +bm = configure_logging( + log_level=LogLevel.INFO, + log_file="logs/agents.log", + json_log_file="logs/agents.jsonl", +) +monitor = get_monitor() +monitor.start_monitoring() + +# ── LLM + tools ─────────────────────────────────────────────────────────────── +llm = ChatAnthropic(model="claude-3-5-sonnet-20241022", max_tokens=2048) +tavily = TavilyClient(api_key=os.environ["TAVILY_API_KEY"]) + + +def _llm(prompt: str, agent_name: str) -> str: + """Call Claude and return the text response.""" + bm.log_event( + EventType.LLM_CALL, + agent_id=agent_name, + metadata={"prompt_chars": len(prompt)}, + ) + response = llm.invoke([HumanMessage(content=prompt)]) + bm.log_event( + EventType.TASK_COMPLETE, + agent_id=agent_name, + metadata={"response_chars": len(response.content)}, + ) + return response.content.strip() + + +# ── Agent 1: Trend Researcher ───────────────────────────────────────────────── +def trend_researcher(state: VideoState) -> dict: + """ + Searches for the most viral angle on the given topic right now. + Uses Tavily to pull real-time context. + """ + agent = "trend-researcher" + bm.log_event(EventType.TASK_START, agent_id=agent, metadata={"topic": state["topic"]}) + + try: + results = tavily.search( + query=f"viral YouTube Shorts {state['topic']} trending 2025", + max_results=5, + search_depth="advanced", + ) + snippets = "\n".join( + f"- {r['title']}: {r['content'][:200]}" + for r in results.get("results", []) + ) + + prompt = f"""You are a viral YouTube Shorts trend analyst. + +Topic: {state["topic"]} + +Recent search results: +{snippets} + +Identify: +1. 
The single most viral angle for a 60-second YouTube Short on this topic right now +2. Why it's trending (2-3 sentences of context) +3. Three example opening hooks from top-performing videos on this angle + +Reply in this exact format: +ANGLE: +CONTEXT: <2-3 sentences> +HOOK_1: +HOOK_2: +HOOK_3: """ + + raw = _llm(prompt, agent) + lines = {l.split(":")[0].strip(): ":".join(l.split(":")[1:]).strip() + for l in raw.splitlines() if ":" in l} + + return { + "trending_angle": lines.get("ANGLE", ""), + "trend_context": lines.get("CONTEXT", ""), + "competitor_hooks": [ + lines.get("HOOK_1", ""), + lines.get("HOOK_2", ""), + lines.get("HOOK_3", ""), + ], + "errors": state.get("errors", []), + } + + except Exception as e: + bm.log_event(EventType.TASK_ERROR, agent_id=agent, metadata={"error": str(e)}) + return {"errors": state.get("errors", []) + [f"{agent}: {e}"]} + + +# ── Agent 2: Hook Writer ────────────────────────────────────────────────────── +def hook_writer(state: VideoState) -> dict: + """ + Writes the single best opening hook — the first 3–5 seconds that stop + the scroll before YouTube decides to keep showing the video. + """ + agent = "hook-writer" + bm.log_event(EventType.TASK_START, agent_id=agent, + metadata={"angle": state.get("trending_angle", "")}) + + try: + prompt = f"""You are an elite YouTube Shorts hook writer. + +Topic: {state["topic"]} +Trending angle: {state["trending_angle"]} +Trend context: {state["trend_context"]} + +Competitor hooks for reference (don't copy, outperform): +{chr(10).join(f"- {h}" for h in state.get("competitor_hooks", []))} + +Write ONE killer opening hook for a 60-second YouTube Short. 
+Rules: +- Maximum 12 words +- Creates immediate curiosity or shock +- Poses a question OR makes a bold claim +- Must make the viewer feel they'll miss something if they scroll +- No filler words ("Hey guys", "Welcome back", "In this video") + +Reply with ONLY the hook text, nothing else.""" + + hook = _llm(prompt, agent) + return {"hook": hook, "errors": state.get("errors", [])} + + except Exception as e: + bm.log_event(EventType.TASK_ERROR, agent_id=agent, metadata={"error": str(e)}) + return {"errors": state.get("errors", []) + [f"{agent}: {e}"]} + + +# ── Agent 3: Script Writer ──────────────────────────────────────────────────── +def script_writer(state: VideoState) -> dict: + """ + Writes the full 45–60 second script using the hook as the opening line. + Structured for maximum retention: hook → conflict → payoff → CTA. + """ + agent = "script-writer" + bm.log_event(EventType.TASK_START, agent_id=agent, + metadata={"hook": state.get("hook", "")[:80]}) + + try: + prompt = f"""You are a viral YouTube Shorts script writer. + +Topic: {state["topic"]} +Angle: {state["trending_angle"]} +Context: {state["trend_context"]} +Opening hook (first line, do not change): {state["hook"]} + +Write a complete 45–60 second spoken script for a YouTube Short. + +Structure: +1. HOOK (0–3s): Use the exact hook above +2. CONFLICT (3–20s): Introduce the problem / tension / surprising fact +3. BUILD (20–45s): Stack the value — 3 punchy points, each under 10 words +4. PAYOFF (45–55s): The satisfying reveal or actionable insight +5. CTA (55–60s): One specific call to action (follow, comment, or share — pick one) + +Rules: +- Write for spoken delivery, not reading +- Short punchy sentences. One idea per line. +- No filler. Every word earns its place. 
+- Label each section clearly: [HOOK] [CONFLICT] [BUILD] [PAYOFF] [CTA] + +Reply with ONLY the script.""" + + script = _llm(prompt, agent) + return {"script": script, "errors": state.get("errors", [])} + + except Exception as e: + bm.log_event(EventType.TASK_ERROR, agent_id=agent, metadata={"error": str(e)}) + return {"errors": state.get("errors", []) + [f"{agent}: {e}"]} + + +# ── Agent 4: Title & Tags ───────────────────────────────────────────────────── +def title_and_tags(state: VideoState) -> dict: + """ + Generates a viral title, SEO tags, and one-line thumbnail concept. + """ + agent = "title-and-tags" + bm.log_event(EventType.TASK_START, agent_id=agent, + metadata={"topic": state["topic"]}) + + try: + prompt = f"""You are a YouTube SEO and packaging expert specialising in Shorts. + +Topic: {state["topic"]} +Angle: {state["trending_angle"]} +Hook: {state["hook"]} +Script excerpt: {state.get("script", "")[:300]} + +Generate: +1. TITLE: A viral YouTube Shorts title (max 70 chars). Curiosity gap or bold claim. No clickbait. +2. TAGS: 10 SEO tags as a comma-separated list (mix broad + niche) +3. THUMBNAIL: One-line concept — bold text overlay + visual description (e.g. "RED TEXT: 'They lied to you' | background: shocked face close-up") + +Reply in this exact format: +TITLE: +TAGS: <tag1>, <tag2>, ... 
+THUMBNAIL: <concept>""" + + raw = _llm(prompt, agent) + lines = {l.split(":")[0].strip(): ":".join(l.split(":")[1:]).strip() + for l in raw.splitlines() if ":" in l} + + tags_raw = lines.get("TAGS", "") + tags = [t.strip() for t in tags_raw.split(",") if t.strip()] + + return { + "title": lines.get("TITLE", ""), + "tags": tags, + "thumbnail_concept": lines.get("THUMBNAIL", ""), + "errors": state.get("errors", []), + } + + except Exception as e: + bm.log_event(EventType.TASK_ERROR, agent_id=agent, metadata={"error": str(e)}) + return {"errors": state.get("errors", []) + [f"{agent}: {e}"]} + + +# ── Quality Gate ────────────────────────────────────────────────────────────── +def quality_gate(state: VideoState) -> dict: + """ + Checks the assembled package for quality. + Returns approved=True or approved=False with feedback in errors. + """ + agent = "quality-gate" + bm.log_event(EventType.TASK_START, agent_id=agent) + + issues = [] + if not state.get("hook") or len(state["hook"].split()) > 15: + issues.append("Hook is missing or too long (>15 words)") + if not state.get("script") or len(state["script"]) < 200: + issues.append("Script too short (<200 chars)") + if not state.get("title"): + issues.append("Title is missing") + if len(state.get("tags", [])) < 5: + issues.append("Not enough tags (<5)") + + approved = len(issues) == 0 + iterations = state.get("iterations", 0) + 1 + + bm.log_event( + EventType.TASK_COMPLETE, + agent_id=agent, + metadata={"approved": approved, "issues": issues, "iteration": iterations}, + ) + + return { + "approved": approved, + "iterations": iterations, + "errors": state.get("errors", []) + issues, + } diff --git a/examples/langgraph-viral-youtube/graph.py b/examples/langgraph-viral-youtube/graph.py new file mode 100644 index 0000000..6cb5200 --- /dev/null +++ b/examples/langgraph-viral-youtube/graph.py @@ -0,0 +1,62 @@ +""" +LangGraph pipeline: 4 specialist agents → quality gate → retry or publish. 
+ +Flow: + trend_researcher → hook_writer → script_writer → title_and_tags + ↓ + quality_gate + / \ + approved rejected (retry ≤2) + ↓ ↓ + END hook_writer (retry) +""" +from langgraph.graph import StateGraph, END + +from state import VideoState +from agents import ( + trend_researcher, + hook_writer, + script_writer, + title_and_tags, + quality_gate, +) + + +def should_retry(state: VideoState) -> str: + """Routing function after quality gate.""" + if state.get("approved"): + return "approved" + if state.get("iterations", 0) >= 2: + # Max retries hit — publish anyway with warnings + return "approved" + return "retry" + + +def build_graph() -> StateGraph: + g = StateGraph(VideoState) + + # Register nodes + g.add_node("trend_researcher", trend_researcher) + g.add_node("hook_writer", hook_writer) + g.add_node("script_writer", script_writer) + g.add_node("title_and_tags", title_and_tags) + g.add_node("quality_gate", quality_gate) + + # Linear pipeline + g.set_entry_point("trend_researcher") + g.add_edge("trend_researcher", "hook_writer") + g.add_edge("hook_writer", "script_writer") + g.add_edge("script_writer", "title_and_tags") + g.add_edge("title_and_tags", "quality_gate") + + # Quality gate routing + g.add_conditional_edges( + "quality_gate", + should_retry, + { + "approved": END, + "retry": "hook_writer", # retry from hook stage + }, + ) + + return g.compile() diff --git a/examples/langgraph-viral-youtube/main.py b/examples/langgraph-viral-youtube/main.py new file mode 100644 index 0000000..c71b6a9 --- /dev/null +++ b/examples/langgraph-viral-youtube/main.py @@ -0,0 +1,102 @@ +""" +Entry point. 
Run from CLI: + python main.py "AI agents taking over software engineering" + python main.py # prompts interactively +""" +import sys +import json +import os +from pathlib import Path +from dotenv import load_dotenv + +load_dotenv() +os.makedirs("logs", exist_ok=True) + +from graph import build_graph +from state import VideoState + + +DIVIDER = "─" * 60 + + +def print_result(state: VideoState) -> None: + print(f"\n{'═'*60}") + print("🎬 VIRAL YOUTUBE SHORT — PRODUCTION PACKAGE") + print('═'*60) + + print(f"\n📌 TITLE\n{DIVIDER}") + print(state.get("title", "—")) + + print(f"\n🎣 HOOK (first 3–5 seconds)") + print(DIVIDER) + print(state.get("hook", "—")) + + print(f"\n📝 SCRIPT") + print(DIVIDER) + print(state.get("script", "—")) + + print(f"\n🏷️ TAGS") + print(DIVIDER) + print(", ".join(state.get("tags", []))) + + print(f"\n🖼️ THUMBNAIL CONCEPT") + print(DIVIDER) + print(state.get("thumbnail_concept", "—")) + + print(f"\n📊 TREND ANGLE") + print(DIVIDER) + print(state.get("trending_angle", "—")) + print(state.get("trend_context", "—")) + + if state.get("errors"): + print(f"\n⚠️ WARNINGS") + print(DIVIDER) + for e in state["errors"]: + print(f" • {e}") + + approved = state.get("approved", False) + iterations = state.get("iterations", 0) + print(f"\n{'✅' if approved else '⚠️ '} Quality gate: {'PASSED' if approved else 'MAX RETRIES'} " + f"(iteration {iterations})") + print('═'*60) + + # Save to file + out_file = Path("output.json") + out_file.write_text(json.dumps(dict(state), indent=2)) + print(f"\n💾 Full output saved to {out_file.resolve()}\n") + + +def main(): + topic = " ".join(sys.argv[1:]).strip() if len(sys.argv) > 1 else "" + if not topic: + topic = input("🎯 Enter your video topic or niche: ").strip() + if not topic: + print("No topic provided. 
Exiting.") + sys.exit(1) + + print(f"\n🚀 Generating viral YouTube Short for: \"{topic}\"") + print(f" 4 agents working in parallel via LangGraph + BMasterAI telemetry\n") + + graph = build_graph() + + initial_state: VideoState = { + "topic": topic, + "trending_angle": "", + "trend_context": "", + "competitor_hooks": [], + "hook": "", + "script": "", + "title": "", + "tags": [], + "thumbnail_concept": "", + "errors": [], + "iterations": 0, + "approved": None, + } + + final_state = graph.invoke(initial_state) + print_result(final_state) + + +if __name__ == "__main__": + main() diff --git a/examples/langgraph-viral-youtube/requirements.txt b/examples/langgraph-viral-youtube/requirements.txt new file mode 100644 index 0000000..cf426a0 --- /dev/null +++ b/examples/langgraph-viral-youtube/requirements.txt @@ -0,0 +1,6 @@ +langgraph>=0.2.0 +langchain-anthropic>=0.3.0 +langchain-core>=0.3.0 +tavily-python>=0.5.0 +bmasterai>=0.2.3 +python-dotenv>=1.0.0 diff --git a/examples/langgraph-viral-youtube/state.py b/examples/langgraph-viral-youtube/state.py new file mode 100644 index 0000000..eda952f --- /dev/null +++ b/examples/langgraph-viral-youtube/state.py @@ -0,0 +1,31 @@ +""" +Shared state flowing through the LangGraph pipeline. +Each agent reads from and writes to this TypedDict. 
"""
from typing import TypedDict, Optional


class VideoState(TypedDict):
    """Shared pipeline state merged across LangGraph nodes.

    Each agent receives the full state and returns a partial dict of only
    the keys it owns; LangGraph merges the partial back into this state.
    """

    # Input
    topic: str                    # User-supplied topic or niche

    # Trend Research Agent output
    trending_angle: str           # The specific viral angle found
    trend_context: str            # Supporting data / why it's trending
    competitor_hooks: list[str]   # Sample hooks from top-performing videos

    # Hook Agent output
    hook: str                     # Opening 3–5 second line to stop the scroll

    # Script Agent output
    script: str                   # Full 45–60 second script

    # Title & Tags Agent output
    title: str                    # Viral-optimised title (< 70 chars)
    tags: list[str]               # SEO tags
    thumbnail_concept: str        # One-line thumbnail text / visual idea

    # Coordinator metadata
    errors: list[str]             # Agent errors + quality-gate issues collected
    iterations: int               # Retry count incremented by the quality gate
    approved: Optional[bool]      # Quality gate result (None until first check)