LlamaLib  v2.0.2
Cross-platform library for local LLMs
Loading...
Searching...
No Matches
LLM_agent.cpp
1#include "LLM_agent.h"
2#include <fstream>
3#include <iostream>
4
LLMAgent::LLMAgent(LLMLocal *llm_, const std::string &system_prompt_) : llm(llm_), system_prompt(system_prompt_)
{
    // Claim a free processing slot from the underlying LLM so this agent's
    // requests are routed to a dedicated slot.
    id_slot = llm->get_next_available_slot();
}
10
11void LLMAgent::set_slot(int id_slot_)
12{
13 if (id_slot != -1)
14 {
15 if (LLMClient *client = dynamic_cast<LLMClient *>(llm))
16 {
17 if (client->is_remote())
18 {
19 id_slot_ = -1;
20 std::cerr << "Remote clients can only use id_slot -1" << std::endl;
21 }
22 }
23 }
24 id_slot = id_slot_;
25}
26
28{
29 history = json::array();
30 n_keep = -1;
31}
32
void LLMAgent::set_n_keep()
{
    try
    {
        // Measure the token footprint of the fixed prefix (system prompt plus
        // an empty user turn) so that many tokens are always preserved when
        // the context is truncated.
        json working_history = json::array();
        working_history.push_back(ChatMessage(system_role, system_prompt).to_json());
        working_history.push_back(ChatMessage(USER_ROLE, "").to_json());
        n_keep = tokenize(apply_template(working_history)).size();
    } catch(...){ }  // best-effort: leave n_keep unchanged if templating/tokenizing fails
}
43
44std::string LLMAgent::chat(const std::string &user_prompt, bool add_to_history, CharArrayFn callback, bool return_response_json, bool debug_prompt)
45{
46 if (n_keep == -1) set_n_keep();
47
48 // Add user message to working history
49 json working_history = json::array();
50 working_history.push_back(ChatMessage(system_role, system_prompt).to_json());
51 for (auto &m : history)
52 working_history.push_back(m);
53 ChatMessage user_msg(USER_ROLE, user_prompt);
54 working_history.push_back(user_msg.to_json());
55
56 // Apply template to get the formatted prompt
57 std::string query_prompt = apply_template(working_history);
58 if (debug_prompt)
59 {
61 auto log_callback = registry.get_log_callback();
62 if (log_callback != nullptr) log_callback(query_prompt.c_str());
63 }
64
65 // Call completion with the formatted prompt
66 std::string response = completion(query_prompt, callback, return_response_json);
67 std::string assistant_content = response;
68 if (return_response_json)
69 assistant_content = parse_completion_json(response);
70
71 if (add_to_history)
72 {
73 history.push_back(user_msg.to_json());
74 ChatMessage assistant_msg(ASSISTANT_ROLE, assistant_content);
75 history.push_back(assistant_msg.to_json());
76 }
77
78 return response;
79}
80
81void LLMAgent::add_message(const std::string &role, const std::string &content)
82{
83 ChatMessage msg(role, content);
84 history.push_back(msg.to_json());
85}
86
88{
89 if (!history.empty())
90 {
91 history.erase(history.end() - 1);
92 }
93}
94
95void LLMAgent::save_history(const std::string &filepath) const
96{
97 try
98 {
99 std::ofstream file(filepath);
100 if (file.is_open())
101 {
102 file << history.dump(4); // Pretty print with 4 spaces
103 file.close();
104 }
105 else
106 {
107 std::cerr << "Unable to open file for writing: " << filepath << std::endl;
108 }
109 }
110 catch (const std::exception &e)
111 {
112 std::cerr << "Error saving history to file: " << e.what() << std::endl;
113 }
114}
115
116void LLMAgent::load_history(const std::string &filepath)
117{
118 try
119 {
120 std::ifstream file(filepath);
121 if (file.is_open())
122 {
123 json loaded_history;
124 file >> loaded_history;
125 file.close();
126
127 if (loaded_history.is_array())
128 {
129 history = loaded_history;
130 }
131 else
132 {
133 std::cerr << "Invalid history file format: expected JSON array" << std::endl;
134 }
135 }
136 else
137 {
138 std::cerr << "Unable to open file for reading: " << filepath << std::endl;
139 }
140 }
141 catch (const std::exception &e)
142 {
143 std::cerr << "Error loading history from file: " << e.what() << std::endl;
144 }
145}
146
147//================ C API ================//
148
149LLMAgent *LLMAgent_Construct(LLMLocal *llm, const char *system_prompt_)
150{
151 std::string system_prompt = system_prompt_ ? system_prompt_ : "";
152 return new LLMAgent(llm, system_prompt);
153}
154
155const char *LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history, CharArrayFn callback, bool return_response_json, bool debug_prompt)
156{
157 return stringToCharArray(llm->chat(user_prompt, add_to_history, callback, return_response_json, debug_prompt));
158}
159
160// History management C API implementations
162{
163 llm->clear_history();
164}
165
166void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt)
167{
168 std::string sys_prompt = system_prompt ? system_prompt : "";
169 llm->set_system_prompt(sys_prompt);
170}
171
173{
174 return stringToCharArray(llm->get_system_prompt());
175}
176
178{
179 return stringToCharArray(llm->get_history().dump());
180}
181
/// @brief Set the processing slot (C API). Thin wrapper over LLMAgent::set_slot.
void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id)
{
    llm->set_slot(slot_id);
}
186
188{
189 return llm->get_slot();
190}
191
192void LLMAgent_Set_History(LLMAgent *llm, const char *history_json)
193{
194 try
195 {
196 json history = json::parse(history_json ? history_json : "[]");
197 if (!history.is_array())
198 std::cerr << "Expected JSON array for history." << std::endl;
199 else
200 llm->set_history(history);
201 }
202 catch (const std::exception &e)
203 {
204 std::cerr << "Error parsing history JSON: " << e.what() << std::endl;
205 }
206}
207
/// @brief Append a user message to history (C API). Null content is treated as empty.
void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content)
{
    llm->add_user_message(content ? content : "");
}
212
/// @brief Append an assistant message to history (C API). Null content is treated as empty.
void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content)
{
    llm->add_assistant_message(content ? content : "");
}
217
222
223void LLMAgent_Save_History(LLMAgent *llm, const char *filepath)
224{
225 std::string path = filepath ? filepath : "";
226 if (!path.empty())
227 {
228 llm->save_history(path);
229 }
230}
231
232void LLMAgent_Load_History(LLMAgent *llm, const char *filepath)
233{
234 std::string path = filepath ? filepath : "";
235 if (!path.empty())
236 {
237 llm->load_history(path);
238 }
239}
240
242{
243 return llm->get_history_size();
244}
High-level conversational agent interface for LLMs.
High-level conversational agent for LLM interactions.
Definition LLM_agent.h:60
LLMAgent(LLMLocal *llm, const std::string &system_prompt="")
Constructor for LLM agent.
Definition LLM_agent.cpp:5
void load_history(const std::string &filepath)
Load conversation history from file.
void set_history(const json &history_)
Set conversation history.
Definition LLM_agent.h:184
json get_history() const
Get conversation history.
Definition LLM_agent.h:189
int get_slot()
Get current processing slot ID.
Definition LLM_agent.h:163
void add_user_message(const std::string &content)
Add a user message to conversation history.
Definition LLM_agent.h:196
virtual void add_message(const std::string &role, const std::string &content)
Add a message to conversation history.
Definition LLM_agent.cpp:81
void save_history(const std::string &filepath) const
Save conversation history to file.
Definition LLM_agent.cpp:95
void add_assistant_message(const std::string &content)
Add an assistant message to conversation history.
Definition LLM_agent.h:201
size_t get_history_size() const
Get number of messages in history.
Definition LLM_agent.h:224
void set_slot(int id_slot)
Set processing slot ID.
Definition LLM_agent.cpp:11
void set_system_prompt(const std::string &system_prompt_)
Set system prompt.
Definition LLM_agent.h:175
void remove_last_message()
Remove the last message from history.
Definition LLM_agent.cpp:87
std::string get_system_prompt() const
Get current system prompt.
Definition LLM_agent.h:179
std::string chat(const std::string &user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct a chat interaction.
Definition LLM_agent.cpp:44
void clear_history()
Clear all conversation history.
Definition LLM_agent.cpp:27
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, bool return_response_json=false)
Generate completion with agent's slot.
Definition LLM_agent.h:131
Client for accessing LLM functionality locally or remotely.
Definition LLM_client.h:32
Abstract class for local LLM operations with slot management.
Definition LLM.h:222
virtual int get_next_available_slot()=0
Get an available processing slot.
Registry for managing LLM provider instances.
Definition LLM.h:380
const CharArrayFn get_log_callback()
Get current log callback.
Definition LLM.h:455
static LLMProviderRegistry & instance()
Get the singleton registry instance.
Definition LLM.h:395
int32_t n_keep
Number of tokens to keep from the beginning of the context.
Definition LLM.h:62
virtual std::string apply_template(const json &messages)
Apply template to messages.
Definition LLM.cpp:144
virtual std::string parse_completion_json(const json &result)
Parse completion result.
Definition LLM.cpp:264
virtual std::vector< int > tokenize(const std::string &query)
Tokenize text.
Definition LLM.cpp:170
void LLMAgent_Save_History(LLMAgent *llm, const char *filepath)
Save conversation history to file (C API)
void LLMAgent_Load_History(LLMAgent *llm, const char *filepath)
Load conversation history from file (C API)
size_t LLMAgent_Get_History_Size(LLMAgent *llm)
Get conversation history size (C API)
void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content)
Add assistant message to history (C API)
LLMAgent * LLMAgent_Construct(LLMLocal *llm, const char *system_prompt_)
Construct LLMAgent (C API)
void LLMAgent_Clear_History(LLMAgent *llm)
Clear conversation history (C API)
void LLMAgent_Set_History(LLMAgent *llm, const char *history_json)
Set conversation history (C API)
const char * LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct chat interaction (C API)
int LLMAgent_Get_Slot(LLMAgent *llm)
Get processing slot (C API)
void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id)
Set processing slot (C API)
const char * LLMAgent_Get_History(LLMAgent *llm)
Get conversation history (C API)
void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content)
Add user message to history (C API)
void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt)
Set system prompt (C API)
const char * LLMAgent_Get_System_Prompt(LLMAgent *llm)
Get system prompt (C API)
void LLMAgent_Remove_Last_Message(LLMAgent *llm)
Remove last message from history (C API)
Structure representing a single chat message.
Definition LLM_agent.h:16
json to_json() const
Convert message to JSON representation.
Definition LLM_agent.h:32