-
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path.env.example
More file actions
106 lines (90 loc) · 5.11 KB
/
.env.example
File metadata and controls
106 lines (90 loc) · 5.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
# OpenChronicle Environment Configuration
# Copy this file to .env and customize it. Commented-out lines show default
# values; entries set to `changeme` are placeholders you must replace with
# your own credentials. See docs/configuration/env_vars.md for the full reference.
# ──────────────────────────────────────────────
# Core Paths (Docker overrides these to /data, /config, etc.)
# ──────────────────────────────────────────────
OC_DB_PATH=data/openchronicle.db
OC_CONFIG_DIR=config
OC_PLUGIN_DIR=plugins
OC_OUTPUT_DIR=output
OC_ASSETS_DIR=data/assets
# ──────────────────────────────────────────────
# Provider Selection
# Options: stub, openai, ollama, anthropic, groq, gemini
# ──────────────────────────────────────────────
OC_LLM_PROVIDER=stub
# ── OpenAI ──
# OPENAI_API_KEY=changeme
# OPENAI_MODEL=gpt-4o-mini
# OPENAI_BASE_URL=
# ── Anthropic ──
# ANTHROPIC_API_KEY=changeme
# ANTHROPIC_MODEL=claude-sonnet-4-20250514
# ── Groq ──
# GROQ_API_KEY=changeme
# GROQ_MODEL=llama-3.3-70b-versatile
# ── Gemini (Google) ──
# GEMINI_API_KEY=changeme
# GEMINI_MODEL=gemini-2.0-flash
# ── Ollama (local) ──
# OLLAMA_BASE_URL=http://localhost:11434
# OLLAMA_MODEL=llama3.1
# ──────────────────────────────────────────────
# Routing
# ──────────────────────────────────────────────
# OC_LLM_DEFAULT_MODE=fast
# OC_LLM_FAST_POOL=ollama:llama3.1,openai:gpt-4o-mini
# OC_LLM_QUALITY_POOL=openai:gpt-4o,anthropic:claude-sonnet-4-20250514
# ──────────────────────────────────────────────
# Budget and Rate Limiting
# ──────────────────────────────────────────────
# OC_LLM_RPM_LIMIT=
# OC_LLM_TPM_LIMIT=
# OC_MAX_TOKENS_PER_TASK=
# OC_MAX_OUTPUT_TOKENS_PER_CALL=
# ──────────────────────────────────────────────
# Conversation Defaults
# ──────────────────────────────────────────────
# OC_CONVO_TEMPERATURE=0.2
# OC_CONVO_MAX_OUTPUT_TOKENS=512
# OC_CONVO_TOP_K_MEMORY=8
# OC_CONVO_LAST_N=10
# ──────────────────────────────────────────────
# Privacy Gate (outbound PII detection)
# Modes: off, warn, block, redact
# ──────────────────────────────────────────────
# OC_PRIVACY_OUTBOUND_MODE=off
# OC_PRIVACY_OUTBOUND_CATEGORIES=email,phone,ip,ssn,cc,api_key
# ──────────────────────────────────────────────
# Mixture-of-Experts
# ──────────────────────────────────────────────
# OC_MOE_ENABLED=false
# OC_MOE_MIN_EXPERTS=2
# ──────────────────────────────────────────────
# HTTP API (starts with `oc serve`)
# ──────────────────────────────────────────────
# OC_API_HOST=127.0.0.1
# OC_API_PORT=8000
# OC_API_KEY=
# OC_API_RATE_LIMIT_RPM=120
# OC_API_CORS_ORIGINS=
# ──────────────────────────────────────────────
# MCP Server (starts with `oc mcp serve`)
# ──────────────────────────────────────────────
# OC_MCP_TRANSPORT=stdio
# OC_MCP_HOST=127.0.0.1
# OC_MCP_PORT=8080
# ──────────────────────────────────────────────
# Discord Bot (starts with `oc discord start`)
# ──────────────────────────────────────────────
# DISCORD_BOT_TOKEN=changeme
# OC_DISCORD_GUILD_IDS=
# OC_DISCORD_CHANNEL_ALLOWLIST=
# OC_DISCORD_HISTORY_LIMIT=5
# ──────────────────────────────────────────────
# Telemetry
# ──────────────────────────────────────────────
# OC_TELEMETRY_ENABLED=true
# OC_TELEMETRY_MCP_TRACKING_ENABLED=true
# OC_TELEMETRY_MOE_TRACKING_ENABLED=true