#include <catch2/catch_amalgamated.hpp>
// ...
REQUIRE(illm != nullptr);
// ...
b = illm->ask("A question", answer);
// ...
CHECK(answer.type == "assistant");
// ...
b = illm->ask("function", answer);
// ...
CHECK(answer.type == "function");
// ...
std::vector<yarp::dev::LLM_Message> conversation;
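The fragments above come from a Catch2-based helper that exercises an ILLM implementation. A minimal sketch of how such a helper is typically driven is shown below; the device name "fakeLLMDevice", the include paths, and the unqualified call to exec_iLLM_test_1 are assumptions for illustration, not taken from this listing.

#include <catch2/catch_amalgamated.hpp>
#include <yarp/dev/PolyDriver.h>
#include <yarp/dev/ILLM.h>
#include <yarp/os/Property.h>
// ... plus the header that declares exec_iLLM_test_1 (path not shown in this listing)

// Sketch: open a device exposing ILLM, view the interface, run the helper.
TEST_CASE("dev::ILLMTest", "[yarp::dev]")
{
    yarp::dev::PolyDriver driver;
    yarp::os::Property options;
    options.put("device", "fakeLLMDevice"); // assumed test device name

    REQUIRE(driver.open(options));

    yarp::dev::ILLM* illm = nullptr;
    REQUIRE(driver.view(illm)); // obtain the ILLM view from the driver

    exec_iLLM_test_1(illm);     // runs the checks shown in the fragments above

    REQUIRE(driver.close());
}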
virtual yarp::dev::ReturnValue deleteConversation()=0
Deletes the conversation and clears the system context from any internally stored context.
virtual yarp::dev::ReturnValue setPrompt(const std::string &prompt)=0
Sets the prompt.
virtual yarp::dev::ReturnValue getConversation(std::vector< yarp::dev::LLM_Message > &conversation)=0
Retrieves the whole conversation.
virtual yarp::dev::ReturnValue refreshConversation()=0
Refreshes the conversation.
virtual yarp::dev::ReturnValue readPrompt(std::string &oPrompt)=0
Retrieves the provided prompt.
virtual yarp::dev::ReturnValue ask(const std::string &question, yarp::dev::LLM_Message &answer)=0
Asks a question and retrieves the answer.
std::vector< std::string > parameters
Possible parameters.
std::string type
Type of message (can be prompt, assistant, function, ...).
std::string content
Content of the message.
std::vector< std::string > arguments
Possible arguments of the parameters.
void exec_iLLM_test_1(yarp::dev::ILLM *illm)
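The interface methods and LLM_Message fields documented above combine into a simple round trip. The sketch below is assembled only from those signatures; the prompt and question strings are placeholders and the include paths are assumptions, so it is an illustration rather than code from the file shown here.

#include <string>
#include <vector>
#include <yarp/dev/ILLM.h>
#include <yarp/dev/LLM_Message.h>

// Sketch of one conversation cycle against the documented ILLM members.
void llm_round_trip(yarp::dev::ILLM* illm)
{
    // Install a prompt and read it back.
    auto ret = illm->setPrompt("You are a helpful assistant"); // placeholder prompt
    std::string prompt;
    ret = illm->readPrompt(prompt);

    // Ask a question; the reply arrives as an LLM_Message.
    yarp::dev::LLM_Message answer;
    ret = illm->ask("A question", answer);
    if (answer.type == "assistant") {
        // answer.content carries the reply text
    } else if (answer.type == "function") {
        // answer.parameters / answer.arguments describe the requested call
    }

    // Inspect the stored conversation, then reset it.
    std::vector<yarp::dev::LLM_Message> conversation;
    ret = illm->getConversation(conversation);
    ret = illm->refreshConversation();  // refresh the conversation
    ret = illm->deleteConversation();   // delete it and clear the stored system context

    (void)ret; // in real code each ReturnValue would be checked
}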