-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: LlmCallerObject.cs
More file actions
103 lines (86 loc) · 2.99 KB
/
LlmCallerObject.cs
File metadata and controls
103 lines (86 loc) · 2.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
/*
* Copyright (c) 2026 Yvens R Serpa [https://github.com/YvensFaos/]
*
* This work is licensed under the Creative Commons Attribution 4.0 International License.
* To view a copy of this license, visit http://creativecommons.org/licenses/by/4.0/
* or see the LICENSE file in the root directory of this repository.
*/
using System.Collections;
using System.Collections.Generic;
using FALLA.Exception;
using FALLA.Helper;
using UnityEngine;
using UnityEngine.Events;
namespace FALLA
{
/// <summary>
/// MonoBehaviour wrapper around a <c>BaseLlm</c> instance. Loads an API key from a
/// JSON value-pair file, creates the LLM via <c>LLmFactory</c>, and exposes both a
/// fire-and-forget call (<see cref="CallLlm"/>) and a coroutine-backed callback call
/// (<see cref="CallLlmWithCallback"/>).
/// </summary>
public class LlmCallerObject : MonoBehaviour
{
    [Header("API Configuration")] [SerializeField]
    private string apiKeyFile;

    [Header("Llm Settings")] [SerializeField]
    private LlmType llmType;

    [SerializeField] private string llmModel;
    [SerializeField] private List<LlmTypeKeyPair> llmTypeKeyPairs;

    private BaseLlm _llm;

    // True once the most recent request has completed (successfully or not).
    private bool _ready;
    private LlmGenericResponse _response;

    private void Awake()
    {
        LoadModel(llmModel);
        // The factory may substitute a default model when none was set; reflect it back.
        llmModel = _llm.Model;
        _ready = false;
        _response = LlmGenericResponse.EmptyResponse();
    }

    [ContextMenu("Reload Model")]
    public void ReloadModel()
    {
        LoadModel(llmModel);
    }

    /// <summary>
    /// Resolves the API key for the configured <see cref="llmType"/> and (re)creates
    /// the underlying LLM using <paramref name="newModel"/> (or the factory default
    /// when the model string is empty).
    /// </summary>
    /// <param name="newModel">Model identifier to use; may be null/empty.</param>
    /// <exception cref="LlmKeyNotFoundException">Thrown when no key value is found in the key file.</exception>
    public void LoadModel(string newModel)
    {
        // NOTE(review): assumes llmTypeKeyPairs always contains an entry for llmType;
        // Find returns default otherwise — verify the inspector configuration.
        var llmKeyPair = llmTypeKeyPairs.Find(pair => pair.type == llmType);
        var keyValue = JsonFileReader.GetValueFromValuePairJson(apiKeyFile, llmKeyPair.key);
        if (string.IsNullOrEmpty(keyValue))
        {
            throw new LlmKeyNotFoundException(apiKeyFile, llmType, llmKeyPair.key);
        }
        llmModel = newModel;
        _llm = string.IsNullOrEmpty(llmModel)
            ? LLmFactory.CreateLlm(llmType, keyValue)
            : LLmFactory.CreateLlm(llmType, keyValue, llmModel);
    }

    /// <summary>
    /// Sends <paramref name="prompt"/> to the LLM asynchronously. Does nothing for a
    /// null/empty prompt. Poll <see cref="IsReady"/> / <see cref="GetResponse"/> for the result.
    /// </summary>
    public void CallLlm(string prompt)
    {
        if (string.IsNullOrEmpty(prompt))
        {
            return;
        }
        _ready = false;
        _response = LlmGenericResponse.EmptyResponse();
        SubmitAsync(prompt);
    }

    /// <summary>
    /// Like <see cref="CallLlm"/>, but invokes <paramref name="callback"/> with the
    /// response once the request completes. No-op for a null/empty prompt.
    /// </summary>
    public void CallLlmWithCallback(string prompt, UnityAction<LlmGenericResponse> callback)
    {
        // Guard here too: CallLlm early-returns on an empty prompt without resetting
        // _ready, so starting the coroutine would fire the callback with a stale
        // response left over from a previous request.
        if (string.IsNullOrEmpty(prompt))
        {
            return;
        }
        CallLlm(prompt);
        StartCoroutine(CallLlmCoroutine(callback));
    }

    private IEnumerator CallLlmCoroutine(UnityAction<LlmGenericResponse> callback)
    {
        yield return new WaitUntil(IsReady);
        callback(GetResponse());
    }

    // async void is required here (fire-and-forget from a Unity message/coroutine
    // context), so exceptions must be handled locally: an unhandled exception in an
    // async void method is unobservable, and leaving _ready false would make every
    // WaitUntil(IsReady) coroutine hang forever.
    private async void SubmitAsync(string prompt)
    {
        try
        {
            _response = await _llm.SendRequest(prompt);
        }
        catch (System.Exception exception)
        {
            Debug.LogException(exception, this);
            // Leave _response as the empty response set by CallLlm.
        }
        finally
        {
            // Always release waiters, even on failure.
            _ready = true;
        }
    }

    /// <summary>True once the last submitted request has completed.</summary>
    public bool IsReady() => _ready;

    /// <summary>Last response received (empty response while a request is in flight or after a failure).</summary>
    public LlmGenericResponse GetResponse() => _response;

    public LlmType GetLlmType() => llmType;

    public string GetLlmModel() => llmModel;

    public override string ToString()
    {
        return $"{llmType}|{llmModel}";
    }
}
}