YARP
Yet Another Robot Platform

GPTDevice.cpp
/*
 * SPDX-FileCopyrightText: 2023-2024 Istituto Italiano di Tecnologia (IIT)
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <GPTDevice.h>
#include <yarp/os/LogStream.h>

#include <cstdlib> // std::getenv
#include <fstream>
#include <sstream> // std::ostringstream
#include <string_view>

using json = nlohmann::json;

bool GPTDevice::open(yarp::os::Searchable &config)
{
    // Azure settings
    azure_api_version = config.check("api_version", yarp::os::Value("2023-07-01-preview")).asString();
    azure_deployment_id = std::getenv("DEPLOYMENT_ID");
    azure_resource = std::getenv("AZURE_RESOURCE");

    if (!azure_resource)
    {
        yWarning() << "Could not read env variable AZURE_RESOURCE. Device set in offline mode";
        m_offline = true;
        return true;
    }

    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY"))
    {
        yWarning() << "Invalid or no Azure key provided. Device set in offline mode.";
        m_offline = true;
    }

    // Prompt and functions file
    bool has_prompt_file{config.check("prompt_file")};
    yarp::os::ResourceFinder resource_finder;
    std::string prompt_ctx = config.check("prompt_context", yarp::os::Value("GPTDevice")).asString();
    resource_finder.setDefaultContext(prompt_ctx);

    if (has_prompt_file)
    {
        std::string prompt_file_fullpath = resource_finder.findFile(config.find("prompt_file").asString());
        auto stream = std::ifstream(prompt_file_fullpath);
        if (!stream)
        {
            yWarning() << "File:" << prompt_file_fullpath << "does not exist or path is invalid";
        }
        else
        {
            std::ostringstream sstr;
            sstr << stream.rdbuf(); // Reads the entire file into the stringstream
            if (!setPrompt(sstr.str()))
            {
                return false;
            }
        }
    }

    bool has_function_file{config.check("functions_file")};
    std::string json_ctx = config.check("json_context", yarp::os::Value(prompt_ctx)).asString();
    resource_finder.setDefaultContext(json_ctx);
    if (has_function_file)
    {
        std::string functions_file_fullpath = resource_finder.findFile(config.find("functions_file").asString());
        auto stream = std::ifstream(functions_file_fullpath);
        if (!stream)
        {
            yWarning() << "File:" << functions_file_fullpath << "does not exist or path is invalid.";
        }
        else
        {
            // Read the function file into json format
            // yDebug() << functions_file_fullpath;
            json function_js = json::parse(stream);
            if (!setFunctions(function_js))
            {
                return false;
            }
        }
    }

    return true;
}
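
// Configuration summary for open():
//   api_version     Azure OpenAI API version (default "2023-07-01-preview")
//   prompt_file     text file with the system prompt, resolved via ResourceFinder
//   prompt_context  ResourceFinder context for prompt_file (default "GPTDevice")
//   functions_file  JSON file with the function definitions
//   json_context    ResourceFinder context for functions_file (defaults to prompt_context)
// Azure credentials are read from the environment: AZURE_RESOURCE, DEPLOYMENT_ID
// and AZURE_API_KEY. If AZURE_RESOURCE is missing or the key is invalid, the
// device falls back to offline mode.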

yarp::dev::ReturnValue GPTDevice::ask(const std::string &question, yarp::dev::LLM_Message &oAnswer)
{
    // Add the user question to the conversation
    m_convo->AddUserData(question);
    m_convo_length += 1;

    if (m_offline)
    {
        yWarning() << "Device in offline mode";
        return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
    }

    // Ask GPT for an answer
    try
    {
        liboai::Response res = oai.Azure->create_chat_completion(
            azure_resource, azure_deployment_id, azure_api_version,
            *m_convo);
        m_convo->Update(res);
    }
    catch (const std::exception &e)
    {
        yError() << e.what() << '\n';
        return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
    }

    if (m_convo->LastResponseIsFunctionCall())
    {
        yDebug() << "Last answer was function call";
        auto str_args = m_convo->GetLastFunctionCallArguments();
        std::string function_call_name = m_convo->GetLastFunctionCallName();
        auto j_args = json::parse(str_args);

        std::vector<std::string> parameters_list;
        std::vector<std::string> arguments_list;

        for (const auto &[key, val] : j_args.items())
        {
            parameters_list.push_back(key);
            arguments_list.push_back(val);
        }

        auto function_call_message = yarp::dev::LLM_Message{"function",
                                                            function_call_name,
                                                            parameters_list,
                                                            arguments_list};

        m_function_called.insert({m_convo_length, function_call_message});

        oAnswer = function_call_message;
    }
    else
    {
        oAnswer = yarp::dev::LLM_Message{"assistant",
                                         m_convo->GetLastResponse(),
                                         std::vector<std::string>(),
                                         std::vector<std::string>()};
    }

    m_convo_length += 1;
    return yarp::dev::ReturnValue_ok;
}

yarp::dev::ReturnValue GPTDevice::setPrompt(const std::string &prompt)
{
    std::string aPrompt;

    if (readPrompt(aPrompt))
    {
        yError() << "A prompt is already set. You must delete the conversation first";
        return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
    }

    try
    {
        m_convo->SetSystemData(prompt);
    }
    catch (const std::exception &e)
    {
        yError() << e.what() << '\n';
        return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
    }

    return yarp::dev::ReturnValue_ok;
}

yarp::dev::ReturnValue GPTDevice::readPrompt(std::string &oPrompt)
{
    auto &convo_json = m_convo->GetJSON();
    for (auto &message : convo_json["messages"])
    {
        if (message["role"] == "system")
        {
            oPrompt = message["content"];
            return yarp::dev::ReturnValue_ok;
        }
    }

    return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
}

yarp::dev::ReturnValue GPTDevice::getConversation(std::vector<yarp::dev::LLM_Message> &oConversation)
{
    std::vector<yarp::dev::LLM_Message> conversation;

    auto &convo_json = m_convo->GetJSON();

    if (convo_json["messages"].empty())
    {
        yWarning() << "Conversation is empty!";
        return yarp::dev::ReturnValue::return_code::return_value_error_method_failed;
    }

    for (auto &message : convo_json["messages"])
    {
        std::string type = message["role"].get<std::string>();
        std::string content = message["content"].get<std::string>();

        conversation.push_back(yarp::dev::LLM_Message{type, content, std::vector<std::string>(), std::vector<std::string>()});
    }

    // Adding function calls to the conversation
    for (const auto &[i, answer] : m_function_called)
    {
        auto conv_it = conversation.begin();
        conversation.insert(std::next(conv_it, i), answer);
    }

    oConversation = conversation;
    return yarp::dev::ReturnValue_ok;
}

yarp::dev::ReturnValue GPTDevice::deleteConversation() noexcept
{
    // The API does not provide a method to empty the conversation: we are better off rebuilding the object from scratch
    m_convo.reset(new liboai::Conversation());
    m_convo_length = 0;
    m_function_called.clear();
    return yarp::dev::ReturnValue_ok;
}

yarp::dev::ReturnValue GPTDevice::refreshConversation() noexcept
{
    // Keep the current system prompt but clear the rest of the conversation
    std::string current_prompt = "";
    this->readPrompt(current_prompt);
    this->deleteConversation();
    this->setPrompt(current_prompt);
    return yarp::dev::ReturnValue_ok;
}

bool GPTDevice::close()
{
    return true;
}

bool GPTDevice::setFunctions(const json &function_json)
{
    for (auto &function : function_json.items())
    {
        if (!function.value().contains("name") || !function.value().contains("description"))
        {
            yError() << "Function missing mandatory parameters <name> and/or <description>";
            return false;
        }

        std::string function_name = function.value()["name"].template get<std::string>();
        std::string function_desc = function.value()["description"].template get<std::string>();

        if (!m_functions->AddFunction(function_name))
        {
            yError() << module_name + "::setFunctions(). Cannot add function.";
            return false;
        }

        if (!m_functions->SetDescription(function_name, function_desc))
        {
            yError() << module_name + "::setFunctions(). Cannot set description.";
            return false;
        }

        if (function.value().contains("parameters"))
        {
            auto parameters = function.value()["parameters"]["properties"];
            std::vector<liboai::Functions::FunctionParameter> parameters_vec;
            for (auto &params : parameters.items())
            {
                liboai::Functions::FunctionParameter param;
                param.name = params.key();
                param.description = params.value()["description"];
                param.type = params.value()["type"];
                parameters_vec.push_back(param);
            }
            if (!m_functions->SetParameters(function_name, parameters_vec))
            {
                yError() << module_name + "::setFunctions(). Cannot set parameters.";
                return false;
            }
        }
    }

    if (!m_convo->SetFunctions(*m_functions))
    {
        yError() << module_name + "::setFunctions(). Cannot set functions.";
        return false;
    }

    return true;
}
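
// Example of the JSON layout that setFunctions() expects in functions_file.
// Illustrative sketch inferred from the parsing code above; the function name
// and parameter shown here are hypothetical.
//
// [
//   {
//     "name": "go_to_location",
//     "description": "Send the robot to a named location",
//     "parameters": {
//       "properties": {
//         "location": {
//           "type": "string",
//           "description": "Name of the destination"
//         }
//       }
//     }
//   }
// ]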
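
A minimal sketch of how this device can be instantiated and queried from user code through yarp::dev::PolyDriver and the yarp::dev::ILLM interface it implements. The plugin name "GPTDevice" and the file names below are assumptions, and AZURE_RESOURCE, DEPLOYMENT_ID and AZURE_API_KEY must be set in the environment for online operation.

#include <yarp/dev/ILLM.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/os/LogStream.h>
#include <yarp/os/Network.h>
#include <yarp/os/Property.h>

int main()
{
    yarp::os::Network yarp;

    // Device configuration (assumed plugin name and placeholder resource files)
    yarp::os::Property cfg;
    cfg.put("device", "GPTDevice");
    cfg.put("prompt_file", "prompt.txt");        // hypothetical file name
    cfg.put("functions_file", "functions.json"); // hypothetical file name

    yarp::dev::PolyDriver driver;
    if (!driver.open(cfg)) {
        yError() << "Unable to open the GPTDevice plugin";
        return 1;
    }

    yarp::dev::ILLM *llm = nullptr;
    if (!driver.view(llm)) {
        yError() << "Device does not expose the ILLM interface";
        return 1;
    }

    yarp::dev::LLM_Message answer;
    if (llm->ask("What can you do?", answer)) {
        // answer.type is "assistant" for plain text or "function" for a tool call
        yInfo() << answer.type << ":" << answer.content;
    }

    driver.close();
    return 0;
}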