-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathintelligence.py
More file actions
131 lines (104 loc) · 4.25 KB
/
intelligence.py
File metadata and controls
131 lines (104 loc) · 4.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
import openai
import os
import requests
from dotenv import load_dotenv
import json
import base64
from azure.storage.blob import BlobServiceClient
from datetime import datetime
# Load API credentials from a local .env file into the environment.
load_dotenv()
openai.api_key = os.getenv("OPENAI")
azure_connection_string = os.getenv("AZUREBLOBSTORAGE")

# Chat model used for every completion request.
MODEL = "gpt-3.5-turbo-16k-0613"

# Seed system prompts defining the assistant's persona.
# NOTE: this list is module-global and is mutated at runtime — user,
# function, and assistant messages get appended for the process lifetime.
MESSAGES = [
    {"role": "system", "content": "Your name is Toni. It stands for 'The Only Neural Interface'"},
    {"role": "system", "content": "You were inspired by Tony Stark's JARVIS."},
    {"role": "system", "content": "You are a virtual assistant. You will be helpful"},
    {"role": "system", "content": "You are witty and charming yet have speak with confidence and swagger."},
    {"role": "system", "content": "You will incorporate technological jargon and references into your responses."},
    {"role": "system", "content": "You will use pop culture references in your answers as well."},
    {"role": "system", "content": "You will refer to me as 'boss'. If you are given a link, you will display it"},
    {"role": "system", "content": "You will be short and direct with your responses with a slight hint of arrogance"}
]

# Function-calling schemas advertised to the model (OpenAI "functions" API).
# Each entry describes one callable the model may request; names must match
# the dispatch table inside get_chat_response.
FUNCTIONS = [
    {
        "name": "generate_image",
        "description": "Uses the DALL-E API to generate an image",
        "parameters": {
            "type": "object",
            "properties": {
                "parameter": {
                    "type": "string",
                    "description": "This is the prompt for the image generation"
                }
            },
            "required": ["parameter"]
        },
    },
    {
        "name": "get_current_weather",
        "description": "Grabs the current weather of a city",
        "parameters": {
            "type": "object",
            "properties": {
                "parameter": {
                    "type": "string",
                    "description": "This is the city the user wants the weather of"
                }
            },
            "required": ["parameter"]
        },
    }
]
def generate_image(parameter: str) -> str:
    """Generate an image with DALL-E, upload it to Azure Blob Storage,
    and return the blob's public URL.

    Args:
        parameter: Text prompt forwarded to the DALL-E API.

    Returns:
        str: URL of the uploaded PNG blob.
    """
    response = openai.Image.create(
        prompt=parameter,
        n=1,
        size="256x256",
        response_format="b64_json"
    )
    # The API returns the image base64-encoded; decode to raw PNG bytes.
    encoded_image = response["data"][0]["b64_json"]
    image_bytes = base64.b64decode(encoded_image)
    # Create a BlobServiceClient object
    blob_service_client = BlobServiceClient.from_connection_string(azure_connection_string)
    # Create a container client for a specific container
    container_name = "image-container"
    container_client = blob_service_client.get_container_client(container_name)
    # Timestamped blob name. strftime avoids the ':' characters that
    # str(datetime.now().time()) would embed, keeping the name URL-safe
    # and lexicographically sortable; microseconds reduce collision risk.
    blob_name = "toni_image_" + datetime.now().strftime("%Y%m%d_%H%M%S_%f") + ".png"
    # Upload file
    container_client.upload_blob(name=blob_name, data=image_bytes)
    blob_client = container_client.get_blob_client(blob_name)
    return blob_client.url
def get_current_weather(parameter: str) -> str:
    """Fetch the current weather for a city from weatherapi.com.

    Args:
        parameter: City name to look up (may contain spaces/unicode).

    Returns:
        str: JSON-encoded API response body.
    """
    key = os.getenv("WEATHER_API")
    # Pass query arguments via params= so requests URL-encodes them;
    # naive string concatenation breaks on city names containing spaces,
    # '&', '#', or non-ASCII characters (and injects into the query string).
    data = requests.get(
        "http://api.weatherapi.com/v1/current.json",
        params={"key": key, "q": parameter},
    ).json()
    return json.dumps(data)
def get_chat_response(user_input: str) -> str:
    """Send user input to the chat model and return the assistant's reply.

    Appends the user message to the module-global MESSAGES history, asks the
    model for a completion (allowing it to request one of FUNCTIONS), runs
    any requested local function, feeds the result back for a follow-up
    completion, and returns the final reply text.

    Args:
        user_input: The user's message.

    Returns:
        str: The assistant's reply content.

    Side effects:
        Mutates MESSAGES (user message, optional function result, and the
        final assistant message are all appended), so history grows without
        bound across calls.
    """
    MESSAGES.append({"role": "user", "content": user_input})
    chat_completion = openai.ChatCompletion.create(
        model=MODEL,
        messages=MESSAGES,
        functions=FUNCTIONS,
        function_call="auto"  # let the model decide whether to call a function
    )
    response_message = chat_completion["choices"][0]["message"]
    if response_message.get("function_call"):
        # Dispatch table: maps function names the model may request to the
        # local implementations (must mirror the FUNCTIONS schemas).
        available_functions = {
            "generate_image": generate_image,
            "get_current_weather": get_current_weather
        }
        function_name = response_message["function_call"]["name"]
        function_to_call = available_functions[function_name]
        # The model supplies arguments as a JSON string; decode before calling.
        function_args = json.loads(response_message["function_call"]["arguments"])
        function_response = function_to_call(
            parameter=function_args.get("parameter")
        )
        # Record the function result in the history, then request a second
        # completion so the model can phrase a reply around that result.
        MESSAGES.append(
            {"role": "function", "name": function_name, "content": function_response}
        )
        response_message = openai.ChatCompletion.create(
            model=MODEL,
            messages=MESSAGES,
        )["choices"][0]["message"]  # get a new response from GPT where it can see the function response
    MESSAGES.append(response_message)
    return response_message["content"]