Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
toolvenv/*
#.gitignore
agents/__pycache__/*
prompts/__pycache__/*
tools/__pycache__/*
models/__pycache__/*
toolbox/__pycache__/*
tests/__pycache__/*
*/*.egg-info
*/.pytest_cache
*/build
2 changes: 1 addition & 1 deletion LICENSE
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2024 John Adeojo
Copyright (c) 2024 John Adeojo, Sir Lord Dalibor JONIC, MSc BSc

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
Expand Down
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ A simple project for enabling LLM agents to use tools.
### Clone and Navigate to the Repository
1. **Clone the Repo:**
```bash
git clone https://github.com/john-adeojo/use-tools.git
git clone https://github.com/dalijon-byte/use-tools.git
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this can't be correct?

```
2. **Navigate to the Repo:**
```bash
Expand Down Expand Up @@ -56,8 +56,12 @@ Ollama [API documentation](https://github.com/ollama/ollama/blob/main/docs/api.m
3. Navigate to the bottom of the `agent.py` script and uncomment the Ollama arguments and comment out the OpenAI arguments.

### Run Your Query In Shell

```bash
python -m agents.agent
```
Then enter your query.

## Star History

[![Star History Chart](https://api.star-history.com/svg?repos=dalijon-byte/use-tools&type=Date)](https://star-history.com/#dalijon-byte/use-tools&Date)
Binary file removed agents/__pycache__/__init__.cpython-310.pyc
Binary file not shown.
Binary file removed agents/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file removed agents/__pycache__/agent.cpython-310.pyc
Binary file not shown.
Binary file removed agents/__pycache__/agent.cpython-311.pyc
Binary file not shown.
24 changes: 20 additions & 4 deletions agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@
from prompts.prompts import agent_system_prompt_template
from models.openai_models import OpenAIModel
from models.ollama_models import OllamaModel
from models.groq_models import GroqModel
from tools.basic_calculator import basic_calculator
from tools.reverser import reverse_string
from tools.ddg_searcher import search
from toolbox.toolbox import ToolBox
import webbrowser



class Agent:
Expand Down Expand Up @@ -84,8 +88,14 @@ def work(self, prompt):
for tool in self.tools:
if tool.__name__ == tool_choice:
response = tool(tool_input)

print(colored(response, 'cyan'))
if isinstance(response, list):
for result in response:
if isinstance(result, tuple) and len(result) > 1 and isinstance(result[1], str):
url = result[1]
if url.startswith('http'):
webbrowser.open(url)
break
return
# return tool(tool_input)

Expand All @@ -97,17 +107,22 @@ def work(self, prompt):
# Example usage
if __name__ == "__main__":

tools = [basic_calculator, reverse_string]
tools = [basic_calculator, reverse_string, search]


# Uncoment below to run with OpenAI
# Uncomment below to run with OpenAI
# model_service = OpenAIModel
# model_name = 'gpt-3.5-turbo'
# stop = None

# Uncomment below to run with GroqAI
#model_service = GroqModel
#model_name = 'llama3-70b-8192'
#stop = None

# Uncomment below to run with Ollama
model_service = OllamaModel
model_name = 'llama3:instruct'
model_name = 'codestral:latest'
stop = "<|eot_id|>"

agent = Agent(tools=tools, model_service=model_service, model_name=model_name, stop=stop)
Expand All @@ -116,5 +131,6 @@ def work(self, prompt):
prompt = input("Ask me anything: ")
if prompt.lower() == "exit":
break


agent.work(prompt)
3 changes: 2 additions & 1 deletion configs/config.yaml
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
OPENAI_API_KEY: 'YOUR_API_KEY'
OPENAI_API_KEY: 'YOUR_API_KEY'
GROQ_API_KEY: 'YOUR_API_KEY'
Binary file removed models/__pycache__/__init__.cpython-310.pyc
Binary file not shown.
Binary file removed models/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file removed models/__pycache__/ollama_models.cpython-310.pyc
Binary file not shown.
Binary file removed models/__pycache__/ollama_models.cpython-311.pyc
Binary file not shown.
Binary file removed models/__pycache__/openai_models.cpython-310.pyc
Binary file not shown.
Binary file removed models/__pycache__/openai_models.cpython-311.pyc
Binary file not shown.
49 changes: 49 additions & 0 deletions models/groq_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import requests
import json
import os
from utils.get_keys import load_config

config_path = os.path.join(os.path.dirname(__file__), '..', 'configs', 'config.yaml')
load_config(config_path)

class GroqModel:
    """Thin client for Groq's OpenAI-compatible chat-completions endpoint.

    Sends a system prompt plus a user prompt and asks the model for a JSON
    object reply (``response_format`` is ``json_object``); the reply content
    is parsed and returned as a Python dict.
    """

    def __init__(self, model, system_prompt, temperature):
        # OpenAI-compatible chat-completions endpoint hosted by Groq.
        self.model_endpoint = 'https://api.groq.com/openai/v1/chat/completions'
        self.temperature = temperature
        self.model = model
        self.system_prompt = system_prompt
        # Re-load the config so GROQ_API_KEY is available even if the
        # module-level load ran before the key was written to config.yaml.
        load_config(config_path)
        self.api_key = os.getenv('GROQ_API_KEY')
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {self.api_key}'
        }

    def generate_text(self, prompt):
        """Send *prompt* to the Groq API and return the parsed JSON reply.

        Args:
            prompt (str): User message content.

        Returns:
            dict: The model's JSON-object response.

        Raises:
            requests.HTTPError: If the API returns a non-2xx status.
            KeyError / json.JSONDecodeError: If the reply is not the
                expected chat-completion shape.
        """
        payload = {
            "model": self.model,
            "response_format": {"type": "json_object"},
            "messages": [
                {
                    "role": "system",
                    "content": self.system_prompt
                },
                {
                    "role": "user",
                    "content": prompt
                }
            ],
            "stream": False,
            "temperature": self.temperature,
        }

        # Bound the request so a hung connection cannot stall the agent, and
        # fail loudly on an HTTP error instead of a confusing KeyError below.
        response_dict = requests.post(
            self.model_endpoint,
            headers=self.headers,
            data=json.dumps(payload),
            timeout=60,
        )
        response_dict.raise_for_status()
        response_json = response_dict.json()
        response = json.loads(response_json['choices'][0]['message']['content'])

        print(f"\n\nResponse from Groq model: {response}")

        return response
Binary file removed prompts/__pycache__/__init__.cpython-310.pyc
Binary file not shown.
Binary file removed prompts/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file removed prompts/__pycache__/prompts.cpython-310.pyc
Binary file not shown.
Binary file removed prompts/__pycache__/prompts.cpython-311.pyc
Binary file not shown.
Binary file removed toolbox/__pycache__/toolbox.cpython-310.pyc
Binary file not shown.
Binary file removed toolbox/__pycache__/toolbox.cpython-311.pyc
Binary file not shown.
Binary file removed tools/__pycache__/__init__.cpython-310.pyc
Binary file not shown.
Binary file removed tools/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file removed tools/__pycache__/basic_calculator.cpython-310.pyc
Binary file not shown.
Binary file removed tools/__pycache__/basic_calculator.cpython-311.pyc
Binary file not shown.
Binary file removed tools/__pycache__/reverser.cpython-310.pyc
Binary file not shown.
Binary file removed tools/__pycache__/reverser.cpython-311.pyc
Binary file not shown.
66 changes: 66 additions & 0 deletions tools/ddg_searcher.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# @author Sir Lord Dalibor JONIC, MSc BSc (c)
__author__ = "Sir Lord Dalibor JONIC, MSc BSc"
__copyright__ = "BeMyInspiration 2024"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Sir Lord Dalibor JONIC, MSc BSc"
__email__ = "dali.manu@gmail.com"
__status__ = "Production"
__description__ = "Google searcher with aration"

from duckduckgo_search import DDGS
import time
import json

def search(query, num_results=5, delay=2, max_retries=1):
    """
    Perform a web search using DuckDuckGo and return the top results.

    The agent may pass the query either as a plain string or as a JSON
    string of the form '{"query": "..."}' (sometimes with single quotes);
    all three forms are accepted.

    Args:
        query (str): Search query, or a JSON string with a 'query' key.
        num_results (int): Number of results to return. At least 5.
        delay (int): Base delay between retries in seconds (exponential backoff).
        max_retries (int): Maximum number of retries for failed requests.

    Returns:
        list: List of (title, link) tuples; empty if every attempt fails.
    """
    # Initialise before any parsing so a malformed query can never leave
    # these names undefined (previously a parse failure caused a NameError
    # when the while loop below touched `retries`).
    results = []
    retries = 0

    # Tool input sometimes arrives as a JSON payload; unwrap it if so,
    # otherwise fall back to using the raw string as the query.
    candidate = str(query).strip()
    try:
        parsed = json.loads(candidate)
    except json.JSONDecodeError:
        try:
            # The LLM occasionally emits single-quoted pseudo-JSON.
            parsed = json.loads(candidate.replace("'", '"'))
        except json.JSONDecodeError:
            parsed = None
    if isinstance(parsed, dict) and 'query' in parsed:
        query = parsed['query']

    while retries < max_retries:
        try:
            with DDGS() as ddgs:
                search_results = list(ddgs.text(str(query), max_results=num_results))

            for result in search_results:
                results.append((result['title'], result['href']))

            return results

        except Exception as e:
            print(f"Error during search: {e}")
            retries += 1
            time.sleep(delay * (2 ** retries))

    return results

# Example usage:
#query = "this is all about ai"
#search_results = search(query, num_results=5)
#print(search_results)

# Example usage:
#query = "this is all about ai"
#search_results = search(query, num_results=5)
#print(search_results)
49 changes: 49 additions & 0 deletions tools/searcher.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# @author Sir Lord Dalibor JONIC, MSc BSc (c)
#__author__ = "Sir Lord Dalibor JONIC, MSc BSc"
#__copyright__ = "BeMyInspiration 2024"
#__license__ = "MIT"
#__version__ = "1.0.0"
#__maintainer__ = "Sir Lord Dalibor JONIC, MSc BSc"
#__email__ = "dali.manu@gmail.com"
#__status__ = "Production"
#__description__ = "Google searcher with aration"
import requests
import time
from bs4 import BeautifulSoup # type: ignore
from urllib.parse import quote_plus

def search(query, num_results=5, delay=2, max_retries=3):
    """
    Perform a Google web search by scraping the results page.

    Args:
        query (str): Search query.
        num_results (int): Number of results to return. Do at least 5 please.
        delay (int): Base delay between retries in seconds (exponential backoff).
        max_retries (int): Maximum number of retries for failed requests.

    Returns:
        list: List of (title, link) tuples; empty if every attempt fails.
    """
    query = str(query)  # Tool input may not already be a string
    search_url = f"https://www.google.com/search?q={quote_plus(query)}"
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/113.0'}
    results = []
    retries = 0

    while retries < max_retries:
        try:
            with requests.get(search_url, headers=headers, timeout=10) as response:
                response.raise_for_status()
                soup = BeautifulSoup(response.text, "html.parser")
                # NOTE(review): '%yuRU%' looks like a placeholder for Google's
                # obfuscated result-div class; as written it matches nothing and
                # must be replaced with the current class name — confirm.
                result_divs = soup.find_all('div', class_='%yuRU%')
                for result in result_divs[:num_results]:
                    title_tag = result.find('h3')
                    link_tag = result.find('a')
                    # Skip divs missing the expected children instead of
                    # crashing with an uncaught AttributeError/KeyError.
                    if title_tag is None or link_tag is None or not link_tag.has_attr('href'):
                        continue
                    results.append((title_tag.get_text(), link_tag['href']))
                return results
        except (requests.RequestException, ValueError) as e:
            print(f"Error during search: {e}")
            retries += 1
            time.sleep(delay * (2 ** retries))

    return results
Binary file modified utils/__pycache__/get_keys.cpython-311.pyc
Binary file not shown.