-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcreate_cli.py
More file actions
executable file
·221 lines (189 loc) · 6.21 KB
/
create_cli.py
File metadata and controls
executable file
·221 lines (189 loc) · 6.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
#!/usr/bin/env python3
"""Create CLI and documentation"""
import os
def create_file(path, content):
    """Write *content* to *path*, creating parent directories as needed.

    Prints a "✓ <path>" confirmation line so the user can follow progress.

    Args:
        path: Destination file path (relative or absolute).
        content: Full text to write.
    """
    # Only attempt directory creation when the path has a parent component;
    # os.path.dirname("file.txt") is "" and makedirs("") would raise.
    parent = os.path.dirname(path)
    os.makedirs(parent if parent else ".", exist_ok=True)
    # Explicit UTF-8: the generated files contain non-ASCII characters ("✓"),
    # which would fail to encode under a legacy locale (e.g. cp1252 on Windows).
    with open(path, 'w', encoding='utf-8') as f:
        f.write(content)
    print(f"✓ {path}")
print("Creating CLI and documentation...\n")
# CLI main file
cli_content = """import argparse
import sys
from assistant.agent.agent_loop import run_agent
from assistant.index.symbol_index import SymbolIndex, get_shared_index
from assistant.index.semantic_index import SemanticIndex
from assistant.tools.registry import get_tool_descriptions
from assistant.agent.memory import AgentMemory
def cmd_index():
print("Rebuilding indexes...")
print("\\n1. Building symbol index...")
symbol_idx = SymbolIndex()
symbol_count = symbol_idx.build()
print(f" Indexed {symbol_count} symbols")
print("\\n2. Building semantic index...")
semantic_idx = SemanticIndex()
chunk_count = semantic_idx.build(force=True)
print(f" Indexed {chunk_count} code chunks")
print(f"\\n Semantic index cache location:")
print(f" {semantic_idx.cache_file}")
print("\\nIndexing complete!")
def cmd_stats():
print("Index Statistics:")
print("="*60)
try:
index = get_shared_index()
stats = index.get_stats()
print(f"\\nSymbol Index:")
print(f" Symbols: {stats['symbols']}")
print(f" Call graph edges: {stats['calls']}")
except Exception as e:
print(f"\\nSymbol Index: Error - {e}")
try:
semantic_idx = SemanticIndex()
semantic_idx.build()
print(f"\\nSemantic Index:")
print(f" Code chunks: {len(semantic_idx.documents)}")
print(f" Cache: {semantic_idx.cache_file}")
except Exception as e:
print(f"\\nSemantic Index: Error - {e}")
def cmd_tools():
tools = get_tool_descriptions()
print("Available Tools:")
print("="*60)
for tool in tools:
print(f"\\n{tool['name']}")
print(f" {tool['description']}")
def cmd_memory():
memory = AgentMemory()
recent = memory.get_recent(10)
if not recent:
print("No memory entries found.")
return
print("Recent Assistant Memory:")
print("="*60)
for i, entry in enumerate(recent, 1):
print(f"\\n{i}. {entry['summary']}")
if entry.get('files_modified'):
print(f" Files: {', '.join(entry['files_modified'])}")
def main():
parser = argparse.ArgumentParser(
description="Local AI coding assistant powered by Ollama",
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("instruction", type=str, nargs='?', help="Task or question")
parser.add_argument("--debug", action="store_true", help="Enable debug mode")
parser.add_argument("--model", type=str, default=None, help="Override Ollama model")
parser.add_argument("--max-iter", type=int, default=20, help="Max iterations")
parser.add_argument("--no-context", action="store_true", help="Disable context")
parser.add_argument("--index", action="store_true", help="Rebuild indexes")
parser.add_argument("--stats", action="store_true", help="Show statistics")
parser.add_argument("--tools", action="store_true", help="List tools")
parser.add_argument("--memory", action="store_true", help="Show memory")
args = parser.parse_args()
if args.index:
cmd_index()
sys.exit(0)
if args.stats:
cmd_stats()
sys.exit(0)
if args.tools:
cmd_tools()
sys.exit(0)
if args.memory:
cmd_memory()
sys.exit(0)
if not args.instruction:
parser.print_help()
sys.exit(1)
try:
result = run_agent(
instruction=args.instruction,
debug=args.debug,
max_iterations=args.max_iter,
model=args.model,
use_context=not args.no_context
)
if not args.debug:
print(result)
sys.exit(0)
except KeyboardInterrupt:
print("\\n\\nInterrupted by user")
sys.exit(1)
except Exception as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()
"""
create_file("assistant/cli/main.py", cli_content)
# Package entry point so `python -m assistant` invokes the CLI.
# Indentation of the generated body restored (the scrape had flattened it,
# which would make the emitted module invalid Python).
main_content = """from assistant.cli.main import main

if __name__ == "__main__":
    main()
"""
create_file("assistant/__main__.py", main_content)
# README — the Markdown code fence around the Quick Start snippet was lost
# (a bare "bash" line with no ``` markers) and blank lines were stripped;
# both restored so the generated README renders correctly.
readme_content = """# Local AI Coding Assistant

A terminal-based AI coding assistant that runs entirely on your machine using Ollama.

## Quick Start

```bash
# Install Ollama and pull model
ollama pull qwen2.5-coder:3b

# Run assistant
python -m assistant "explain this project"
python -m assistant --tools
python -m assistant --stats
```

## Features

- Fully local (no cloud services)
- 13+ specialized tools
- AST-based code understanding
- Semantic search with embeddings
- Safe code editing with patches
- Self-reflection and planning
- Persistent memory

## Requirements

- Python 3.8+
- Ollama (https://ollama.ai)
- ripgrep
- Git

## Tools

- search_repo, find_symbol, find_callers
- semantic_search (requires: ollama pull nomic-embed-text)
- read_file, write_file, apply_patch, edit_symbol
- generate_test, git_diff, git, shell, list_files

## Safety

- Blocked shell commands (rm -rf, sudo, curl, wget)
- Path validation (no ../.. escapes)
- Patch context validation
- Max 30% file modification per patch
- Self-reflection before finalizing changes

## Project Stats

~1,650 lines of Python | 13 tools | 3 indexes | Zero dependencies (except requests)
"""
create_file("README.md", readme_content)
# .gitignore — keeps Python bytecode/caches, virtualenvs, macOS metadata and
# the assistant's local state directory (.assistant/) out of version control.
gitignore_content = """.assistant/
__pycache__/
*.pyc
*.pyo
.Python
*.egg-info/
.pytest_cache/
.venv/
venv/
.DS_Store
"""
create_file(".gitignore", gitignore_content)
print("\n✓ CLI and documentation created!")
print("\n" + "="*60)
print("PROJECT SETUP COMPLETE!")
print("="*60)
print("\nNext steps:")
print("1. Install Ollama: https://ollama.ai")
print("2. Pull model: ollama pull qwen2.5-coder:3b")
print("3. Test: python -m assistant --tools")
print("4. Try: python -m assistant 'explain this project'")
print("\nOptional:")
print("- Semantic search: ollama pull nomic-embed-text")
print("- Build indexes: python -m assistant --index")