LlamaLib  v2.0.2
Cross-platform library for local LLMs
Loading...
Searching...
No Matches
LLM_agent.h
Go to the documentation of this file.
1
6
7#pragma once
8
9#include "LLM.h"
10#include "LLM_client.h"
11
15struct UNDREAMAI_API ChatMessage
16{
17 std::string role;
18 std::string content;
19
21 ChatMessage() = default;
22
26 ChatMessage(const std::string &role_, const std::string &content_)
27 : role(role_), content(content_) {}
28
32 json to_json() const
33 {
34 return json{{"role", role}, {"content", content.empty() ? " " : content}};
35 }
36
42 static ChatMessage from_json(const json &j)
43 {
44 return ChatMessage(j.at("role").get<std::string>(), j.at("content").get<std::string>());
45 }
46
50 bool operator==(const ChatMessage &other) const
51 {
52 return role == other.role && content == other.content;
53 }
54};
55
59class UNDREAMAI_API LLMAgent : public LLMLocal
60{
61public:
62 const std::string USER_ROLE = "user";
63 const std::string ASSISTANT_ROLE = "assistant";
64
69 LLMAgent(LLMLocal *llm, const std::string &system_prompt = "");
70
71 //=================================== LLM METHOD DELEGATES ===================================//
75 std::string tokenize_json(const json &data) override { return llm->tokenize_json(data); }
76
81 std::string detokenize_json(const json &data) override { return llm->detokenize_json(data); }
82
87 std::string embeddings_json(const json &data) override { return llm->embeddings_json(data); }
88
94 std::string completion_json(const json &data, CharArrayFn callback = nullptr, bool callbackWithJSON = true) override { return llm->completion_json(data, callback, callbackWithJSON); }
95
100 std::string apply_template_json(const json &data) override { return llm->apply_template_json(data); }
101
106 std::string slot_json(const json &data) override { return llm->slot_json(data); }
107
110 void cancel(int id_slot) override { return llm->cancel(id_slot); }
111
114 int get_next_available_slot() override { return llm->get_next_available_slot(); }
115
116 //=================================== LLM METHOD DELEGATES ===================================//
117
118 //=================================== Slot-aware method overrides ===================================//
123 virtual json build_completion_json(const std::string &prompt) { return LLMLocal::build_completion_json(prompt, this->id_slot); }
124
131 virtual std::string completion(const std::string &prompt, CharArrayFn callback = nullptr, bool return_response_json = false)
132 {
133 return LLMLocal::completion(prompt, callback, this->id_slot, return_response_json);
134 }
135
141 virtual json build_slot_json(const std::string &action, const std::string &filepath) { return LLMLocal::build_slot_json(this->id_slot, action, filepath); }
142
147 virtual std::string save_slot(const std::string &filepath) { return LLMLocal::save_slot(this->id_slot, filepath); }
148
153 virtual std::string load_slot(const std::string &filepath) { return LLMLocal::load_slot(this->id_slot, filepath); }
154
157 virtual void cancel() { llm->cancel(this->id_slot); }
158 //=================================== Slot-aware method overrides ===================================//
159
163 inline int get_slot() { return id_slot; }
164
168 void set_slot(int id_slot);
169
170 // Prompt configuration methods
171
175 void set_system_prompt(const std::string &system_prompt_) { system_prompt = system_prompt_; }
176
179 std::string get_system_prompt() const { return system_prompt; }
180
184 void set_history(const json &history_) { history = history_; }
185
189 json get_history() const { return history; }
190
191 // History management methods
192
196 void add_user_message(const std::string &content) { add_message(USER_ROLE, content); }
197
201 void add_assistant_message(const std::string &content) { add_message(ASSISTANT_ROLE, content); }
202
205 void clear_history();
206
209 void remove_last_message();
210
214 void save_history(const std::string &filepath) const;
215
219 void load_history(const std::string &filepath);
220
224 size_t get_history_size() const { return history.size(); }
225
226 // Chat functionality
227
237 std::string chat(const std::string &user_prompt, bool add_to_history = true, CharArrayFn callback = nullptr, bool return_response_json = false, bool debug_prompt = false);
238
239protected:
240 void set_n_keep();
241
246 virtual void add_message(const std::string &role, const std::string &content);
247
248private:
249 LLMLocal *llm = nullptr;
250 int id_slot = -1;
251 std::string system_prompt = "";
252 std::string system_role = "system";
253 json history;
254};
255
258
extern "C"
{
    /// @brief Construct an LLMAgent around an existing LLMLocal (C API).
    /// Caller owns the returned agent; the wrapped llm must outlive it.
    UNDREAMAI_API LLMAgent *LLMAgent_Construct(LLMLocal *llm, const char *system_prompt = "");

    /// @brief Set the agent's system prompt (C API).
    UNDREAMAI_API void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt);

    /// @brief Get the agent's system prompt (C API).
    /// NOTE(review): lifetime of the returned buffer is defined in the .cpp —
    /// confirm whether the caller must copy it before the next call.
    UNDREAMAI_API const char *LLMAgent_Get_System_Prompt(LLMAgent *llm);

    /// @brief Set the agent's processing slot (C API).
    UNDREAMAI_API void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id);

    /// @brief Get the agent's processing slot (C API).
    UNDREAMAI_API int LLMAgent_Get_Slot(LLMAgent *llm);

    /// @brief Run one chat turn and return the assistant reply (C API).
    /// Mirrors LLMAgent::chat; see that method for parameter semantics.
    UNDREAMAI_API const char *LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history = true, CharArrayFn callback = nullptr, bool return_response_json = false, bool debug_prompt = false);

    /// @brief Clear the agent's conversation history (C API).
    UNDREAMAI_API void LLMAgent_Clear_History(LLMAgent *llm);

    /// @brief Get the conversation history as a JSON string (C API).
    UNDREAMAI_API const char *LLMAgent_Get_History(LLMAgent *llm);

    /// @brief Replace the conversation history from a JSON string (C API).
    UNDREAMAI_API void LLMAgent_Set_History(LLMAgent *llm, const char *history_json);

    /// @brief Append a user message to the history (C API).
    UNDREAMAI_API void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content);

    /// @brief Append an assistant message to the history (C API).
    UNDREAMAI_API void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content);

    /// @brief Remove the most recent message from the history (C API).
    UNDREAMAI_API void LLMAgent_Remove_Last_Message(LLMAgent *llm);

    /// @brief Save the conversation history to a file (C API).
    UNDREAMAI_API void LLMAgent_Save_History(LLMAgent *llm, const char *filepath);

    /// @brief Load the conversation history from a file (C API).
    UNDREAMAI_API void LLMAgent_Load_History(LLMAgent *llm, const char *filepath);

    /// @brief Get the number of messages in the history (C API).
    UNDREAMAI_API size_t LLMAgent_Get_History_Size(LLMAgent *llm);
}
350
351
Core LLM functionality interface and base classes.
Client interface for local and remote LLM access.
High-level conversational agent for LLM interactions.
Definition LLM_agent.h:60
std::string completion_json(const json &data, CharArrayFn callback=nullptr, bool callbackWithJSON=true) override
Generate completion (delegate to wrapped LLM)
Definition LLM_agent.h:94
virtual std::string save_slot(const std::string &filepath)
Save agent's slot state.
Definition LLM_agent.h:147
void set_history(const json &history_)
Set conversation history.
Definition LLM_agent.h:184
json get_history() const
Get conversation history.
Definition LLM_agent.h:189
int get_slot()
Get current processing slot ID.
Definition LLM_agent.h:163
void add_user_message(const std::string &content)
Add a user message to conversation history.
Definition LLM_agent.h:196
virtual std::string load_slot(const std::string &filepath)
Load agent's slot state.
Definition LLM_agent.h:153
virtual json build_completion_json(const std::string &prompt)
Build completion JSON with agent's slot.
Definition LLM_agent.h:123
std::string embeddings_json(const json &data) override
Generate embeddings with HTTP response support.
Definition LLM_agent.h:87
std::string apply_template_json(const json &data) override
Apply a chat template to message data.
Definition LLM_agent.h:100
int get_next_available_slot() override
Get available slot (delegate to wrapped LLM)
Definition LLM_agent.h:114
void add_assistant_message(const std::string &content)
Add an assistant message to conversation history.
Definition LLM_agent.h:201
size_t get_history_size() const
Get number of messages in history.
Definition LLM_agent.h:224
void cancel(int id_slot) override
Cancel request (delegate to wrapped LLM)
Definition LLM_agent.h:110
virtual json build_slot_json(const std::string &action, const std::string &filepath)
Build slot operation JSON with agent's slot.
Definition LLM_agent.h:141
void set_system_prompt(const std::string &system_prompt_)
Set system prompt.
Definition LLM_agent.h:175
std::string detokenize_json(const json &data) override
Convert tokens back to text.
Definition LLM_agent.h:81
std::string get_system_prompt() const
Get current system prompt.
Definition LLM_agent.h:179
virtual void cancel()
Cancel agent's current request.
Definition LLM_agent.h:157
std::string tokenize_json(const json &data) override
Tokenize input (delegate to wrapped LLM)
Definition LLM_agent.h:75
std::string slot_json(const json &data) override
Manage slots with HTTP response support.
Definition LLM_agent.h:106
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, bool return_response_json=false)
Generate completion with agent's slot.
Definition LLM_agent.h:131
Abstract class for local LLM operations with slot management.
Definition LLM.h:222
virtual std::string slot_json(const json &data)=0
Manage slots with HTTP response support.
virtual std::string load_slot(int id_slot, const std::string &filepath)
Load slot state from file.
Definition LLM.h:238
virtual int get_next_available_slot()=0
Get an available processing slot.
virtual std::string save_slot(int id_slot, const std::string &filepath)
Save slot state to file.
Definition LLM.h:232
virtual void cancel(int id_slot)=0
Cancel request.
virtual json build_slot_json(int id_slot, const std::string &action, const std::string &filepath)
Build JSON for slot operations.
Definition LLM.cpp:296
virtual std::string embeddings_json(const json &data)=0
Generate embeddings with HTTP response support.
virtual std::string apply_template_json(const json &data)=0
Apply a chat template to message data.
virtual json build_completion_json(const std::string &prompt, int id_slot=-1)
Build JSON for completion generation.
Definition LLM.cpp:235
virtual std::string tokenize_json(const json &data)=0
Tokenize input.
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, int id_slot=-1, bool return_response_json=false)
Generate completion.
Definition LLM.cpp:283
virtual std::string completion_json(const json &data, CharArrayFn callback, bool callbackWithJSON)=0
Generate text completion.
virtual std::string detokenize_json(const json &data)=0
Convert tokens back to text.
void LLMAgent_Save_History(LLMAgent *llm, const char *filepath)
Save conversation history to file (C API)
void LLMAgent_Load_History(LLMAgent *llm, const char *filepath)
Load conversation history from file (C API)
size_t LLMAgent_Get_History_Size(LLMAgent *llm)
Get conversation history size (C API)
void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content)
Add assistant message to history (C API)
LLMAgent * LLMAgent_Construct(LLMLocal *llm, const char *system_prompt="")
Construct LLMAgent (C API)
void LLMAgent_Clear_History(LLMAgent *llm)
Clear conversation history (C API)
void LLMAgent_Set_History(LLMAgent *llm, const char *history_json)
Set conversation history (C API)
const char * LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct chat interaction (C API)
int LLMAgent_Get_Slot(LLMAgent *llm)
Get processing slot (C API)
void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id)
Set processing slot (C API)
const char * LLMAgent_Get_History(LLMAgent *llm)
Get conversation history (C API)
void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content)
Add user message to history (C API)
void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt)
Set system prompt (C API)
const char * LLMAgent_Get_System_Prompt(LLMAgent *llm)
Get system prompt (C API)
void LLMAgent_Remove_Last_Message(LLMAgent *llm)
Remove last message from history (C API)
Structure representing a single chat message.
Definition LLM_agent.h:16
std::string role
Message role (e.g., "user", "assistant", "system")
Definition LLM_agent.h:17
ChatMessage(const std::string &role_, const std::string &content_)
Parameterized constructor.
Definition LLM_agent.h:26
std::string content
Message content text.
Definition LLM_agent.h:18
json to_json() const
Convert message to JSON representation.
Definition LLM_agent.h:32
bool operator==(const ChatMessage &other) const
Equality comparison operator.
Definition LLM_agent.h:50
ChatMessage()=default
Default constructor.
static ChatMessage from_json(const json &j)
Create message from JSON representation.
Definition LLM_agent.h:42