26 ChatMessage(
const std::string &role_,
const std::string &content_)
27 : role(role_), content(content_) {}
34 return json{{
"role", role}, {
"content", content.empty() ?
" " : content}};
44 return ChatMessage(j.at(
"role").get<std::string>(), j.at(
"content").get<std::string>());
52 return role == other.
role && content == other.
content;
/// Canonical role string for user-authored messages.
const std::string USER_ROLE = "user";
/// Canonical role string for model-authored messages.
const std::string ASSISTANT_ROLE = "assistant";
94 std::string
completion_json(
const json &data, CharArrayFn callback =
nullptr,
bool callbackWithJSON =
true)
override {
return llm->
completion_json(data, callback, callbackWithJSON); }
131 virtual std::string
completion(
const std::string &prompt, CharArrayFn callback =
nullptr,
bool return_response_json =
false)
168 void set_slot(
int id_slot);
205 void clear_history();
209 void remove_last_message();
214 void save_history(
const std::string &filepath)
const;
219 void load_history(
const std::string &filepath);
237 std::string chat(
const std::string &user_prompt,
bool add_to_history =
true, CharArrayFn callback =
nullptr,
bool return_response_json =
false,
bool debug_prompt =
false);
246 virtual void add_message(
const std::string &role,
const std::string &content);
251 std::string system_prompt =
"";
252 std::string system_role =
"system";
298 UNDREAMAI_API
const char *
LLMAgent_Chat(
LLMAgent *llm,
const char *user_prompt,
bool add_to_history =
true, CharArrayFn callback =
nullptr,
bool return_response_json =
false,
bool debug_prompt =
false);
Core LLM functionality interface and base classes.
Client interface for local and remote LLM access.
High-level conversational agent for LLM interactions.
std::string completion_json(const json &data, CharArrayFn callback=nullptr, bool callbackWithJSON=true) override
Generate completion (delegate to wrapped LLM)
virtual std::string save_slot(const std::string &filepath)
Save agent's slot state.
void set_history(const json &history_)
Set conversation history.
json get_history() const
Get conversation history.
int get_slot()
Get current processing slot ID.
void add_user_message(const std::string &content)
Add a user message to conversation history.
virtual std::string load_slot(const std::string &filepath)
Load agent's slot state.
virtual json build_completion_json(const std::string &prompt)
Build completion JSON with agent's slot.
std::string embeddings_json(const json &data) override
Generate embeddings with HTTP response support.
std::string apply_template_json(const json &data) override
Apply a chat template to message data.
int get_next_available_slot() override
Get available slot (delegate to wrapped LLM)
void add_assistant_message(const std::string &content)
Add an assistant message to conversation history.
size_t get_history_size() const
Get number of messages in history.
void cancel(int id_slot) override
Cancel request (delegate to wrapped LLM)
virtual json build_slot_json(const std::string &action, const std::string &filepath)
Build slot operation JSON with agent's slot.
void set_system_prompt(const std::string &system_prompt_)
Set system prompt.
std::string detokenize_json(const json &data) override
Convert tokens back to text.
std::string get_system_prompt() const
Get current system prompt.
virtual void cancel()
Cancel agent's current request.
std::string tokenize_json(const json &data) override
Convert input text to tokens.
std::string slot_json(const json &data) override
Manage slots with HTTP response support.
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, bool return_response_json=false)
Generate completion with agent's slot.
Abstract class for local LLM operations with slot management.
virtual std::string slot_json(const json &data)=0
Manage slots with HTTP response support.
virtual std::string load_slot(int id_slot, const std::string &filepath)
Load slot state from file.
virtual int get_next_available_slot()=0
Get an available processing slot.
virtual std::string save_slot(int id_slot, const std::string &filepath)
Save slot state to file.
virtual void cancel(int id_slot)=0
Cancel request.
virtual json build_slot_json(int id_slot, const std::string &action, const std::string &filepath)
Build JSON for slot operations.
virtual std::string embeddings_json(const json &data)=0
Generate embeddings with HTTP response support.
virtual std::string apply_template_json(const json &data)=0
Apply a chat template to message data.
virtual json build_completion_json(const std::string &prompt, int id_slot=-1)
Build JSON for completion generation.
virtual std::string tokenize_json(const json &data)=0
Convert input text to tokens.
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, int id_slot=-1, bool return_response_json=false)
Generate completion.
virtual std::string completion_json(const json &data, CharArrayFn callback, bool callbackWithJSON)=0
Generate text completion.
virtual std::string detokenize_json(const json &data)=0
Convert tokens back to text.
void LLMAgent_Save_History(LLMAgent *llm, const char *filepath)
Save conversation history to file (C API)
void LLMAgent_Load_History(LLMAgent *llm, const char *filepath)
Load conversation history from file (C API)
size_t LLMAgent_Get_History_Size(LLMAgent *llm)
Get conversation history size (C API)
void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content)
Add assistant message to history (C API)
LLMAgent * LLMAgent_Construct(LLMLocal *llm, const char *system_prompt="")
Construct LLMAgent (C API)
void LLMAgent_Clear_History(LLMAgent *llm)
Clear conversation history (C API)
void LLMAgent_Set_History(LLMAgent *llm, const char *history_json)
Set conversation history (C API)
const char * LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct chat interaction (C API)
int LLMAgent_Get_Slot(LLMAgent *llm)
Get processing slot (C API)
void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id)
Set processing slot (C API)
const char * LLMAgent_Get_History(LLMAgent *llm)
Get conversation history (C API)
void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content)
Add user message to history (C API)
void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt)
Set system prompt (C API)
const char * LLMAgent_Get_System_Prompt(LLMAgent *llm)
Get system prompt (C API)
void LLMAgent_Remove_Last_Message(LLMAgent *llm)
Remove last message from history (C API)
Structure representing a single chat message.
std::string role
Message role (e.g., "user", "assistant", "system")
ChatMessage(const std::string &role_, const std::string &content_)
Parameterized constructor.
std::string content
Message content text.
json to_json() const
Convert message to JSON representation.
bool operator==(const ChatMessage &other) const
Equality comparison operator.
ChatMessage()=default
Default constructor.
static ChatMessage from_json(const json &j)
Create message from JSON representation.