// Remote backends are restricted to the default slot.
if (client->is_remote())
    std::cerr << "Remote clients can only use id_slot -1" << std::endl;
...
// The conversation history is stored as a JSON array.
history = json::array();
void LLMAgent::set_n_keep()
{
    // Build a minimal history: the system prompt followed by an empty user turn.
    json working_history = json::array();
    working_history.push_back(ChatMessage(system_role, system_prompt).to_json());
    working_history.push_back(ChatMessage(USER_ROLE, "").to_json());
    ...
std::string LLMAgent::chat(const std::string &user_prompt, bool add_to_history,
                           CharArrayFn callback, bool return_response_json,
                           bool debug_prompt)
{
    // Compute n_keep lazily on the first call.
    if (n_keep == -1) set_n_keep();
    ...
    // Assemble the working history: system prompt, prior turns, then the new user message.
    json working_history = json::array();
    working_history.push_back(ChatMessage(system_role, system_prompt).to_json());
    for (auto &m : history)
        working_history.push_back(m);
    ...
    working_history.push_back(user_msg.to_json());
    ...
    if (log_callback != nullptr) log_callback(query_prompt.c_str());
    ...
    std::string response = completion(query_prompt, callback, return_response_json);
    std::string assistant_content = response;
    if (return_response_json)
    ...
    // Record both sides of the exchange in the persistent history (controlled by add_to_history).
    history.push_back(user_msg.to_json());
    ChatMessage assistant_msg(ASSISTANT_ROLE, assistant_content);
    history.push_back(assistant_msg.to_json());
    ...
}

// From add_message: append a message to the history.
history.push_back(msg.to_json());
...
// From remove_last_message: drop the most recent entry.
history.erase(history.end() - 1);
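Putting the public pieces together, a minimal C++ usage sketch (not taken from the source; the header name is an assumption, and obtaining the LLMLocal backend is out of scope here, so it is passed in as a parameter):

#include <iostream>
#include <string>
#include "LLMAgent.h"   // assumed header name

void demo_chat(LLMLocal *backend)
{
    // Constructor and chat() defaults as documented below:
    // add_to_history=true, callback=nullptr, return_response_json=false, debug_prompt=false.
    LLMAgent agent(backend, "You are a concise assistant.");

    std::string first  = agent.chat("Summarise RAII in one sentence.");
    std::string second = agent.chat("Now shorten that to five words.");

    std::cout << first << "\n" << second << "\n"
              << "messages stored: " << agent.get_history_size() << std::endl;
}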
// save_history: serialize the conversation as pretty-printed JSON.
std::ofstream file(filepath);
...
file << history.dump(4);
...
std::cerr << "Unable to open file for writing: " << filepath << std::endl;
...
catch (const std::exception &e)
{
    std::cerr << "Error saving history to file: " << e.what() << std::endl;
}
// load_history: read a JSON array back into the conversation history.
std::ifstream file(filepath);
...
file >> loaded_history;
...
if (loaded_history.is_array())
{
    history = loaded_history;
}
else
{
    std::cerr << "Invalid history file format: expected JSON array" << std::endl;
}
...
std::cerr << "Unable to open file for reading: " << filepath << std::endl;
...
catch (const std::exception &e)
{
    std::cerr << "Error loading history from file: " << e.what() << std::endl;
}
// LLMAgent_Construct: a null system_prompt_ is treated as an empty prompt.
std::string system_prompt = system_prompt_ ? system_prompt_ : "";
return new LLMAgent(llm, system_prompt);
const char *LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history,
                          CharArrayFn callback, bool return_response_json, bool debug_prompt)
{
    return stringToCharArray(
        llm->chat(user_prompt, add_to_history, callback, return_response_json, debug_prompt));
}
// Null C strings are normalised to empty std::strings.
std::string sys_prompt = system_prompt ? system_prompt : "";
...
// LLMAgent_Get_History: hand back the serialized history via stringToCharArray.
return stringToCharArray(llm->get_history().dump());
// LLMAgent_Set_History: parse the incoming JSON, treating a null pointer as an empty array.
json history = json::parse(history_json ? history_json : "[]");
if (!history.is_array())
    std::cerr << "Expected JSON array for history." << std::endl;
...
catch (const std::exception &e)
{
    std::cerr << "Error parsing history JSON: " << e.what() << std::endl;
}
// LLMAgent_Save_History / LLMAgent_Load_History: guard against null filepath pointers.
std::string path = filepath ? filepath : "";
...
std::string path = filepath ? filepath : "";
High-level conversational agent interface for LLMs.
High-level conversational agent for LLM interactions.
LLMAgent(LLMLocal *llm, const std::string &system_prompt="")
Constructor for LLM agent.
void load_history(const std::string &filepath)
Load conversation history from file.
void set_history(const json &history_)
Set conversation history.
json get_history() const
Get conversation history.
int get_slot()
Get current processing slot ID.
void add_user_message(const std::string &content)
Add a user message to conversation history.
virtual void add_message(const std::string &role, const std::string &content)
Add a message to conversation history.
void save_history(const std::string &filepath) const
Save conversation history to file.
void add_assistant_message(const std::string &content)
Add an assistant message to conversation history.
size_t get_history_size() const
Get number of messages in history.
void set_slot(int id_slot)
Set processing slot ID.
void set_system_prompt(const std::string &system_prompt_)
Set system prompt.
void remove_last_message()
Remove the last message from history.
std::string get_system_prompt() const
Get current system prompt.
std::string chat(const std::string &user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct a chat interaction.
void clear_history()
Clear all conversation history.
virtual std::string completion(const std::string &prompt, CharArrayFn callback=nullptr, bool return_response_json=false)
Generate completion with agent's slot.
Client for accessing LLM functionality locally or remotely.
Abstract class for local LLM operations with slot management.
virtual int get_next_available_slot()=0
Get an available processing slot.
Registry for managing LLM provider instances.
const CharArrayFn get_log_callback()
Get current log callback.
static LLMProviderRegistry & instance()
Get the singleton registry instance.
int32_t n_keep
Number of tokens to keep from the beginning of the context.
virtual std::string apply_template(const json &messages)
Apply template to messages.
virtual std::string parse_completion_json(const json &result)
Parse completion result.
virtual std::vector< int > tokenize(const std::string &query)
Tokenize text.
void LLMAgent_Save_History(LLMAgent *llm, const char *filepath)
Save conversation history to file (C API)
void LLMAgent_Load_History(LLMAgent *llm, const char *filepath)
Load conversation history from file (C API)
size_t LLMAgent_Get_History_Size(LLMAgent *llm)
Get conversation history size (C API)
void LLMAgent_Add_Assistant_Message(LLMAgent *llm, const char *content)
Add assistant message to history (C API)
LLMAgent * LLMAgent_Construct(LLMLocal *llm, const char *system_prompt_)
Construct LLMAgent (C API)
void LLMAgent_Clear_History(LLMAgent *llm)
Clear conversation history (C API)
void LLMAgent_Set_History(LLMAgent *llm, const char *history_json)
Set conversation history (C API)
const char * LLMAgent_Chat(LLMAgent *llm, const char *user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
Conduct chat interaction (C API)
int LLMAgent_Get_Slot(LLMAgent *llm)
Get processing slot (C API)
void LLMAgent_Set_Slot(LLMAgent *llm, int slot_id)
Set processing slot (C API)
const char * LLMAgent_Get_History(LLMAgent *llm)
Get conversation history (C API)
void LLMAgent_Add_User_Message(LLMAgent *llm, const char *content)
Add user message to history (C API)
void LLMAgent_Set_System_Prompt(LLMAgent *llm, const char *system_prompt)
Set system prompt (C API)
const char * LLMAgent_Get_System_Prompt(LLMAgent *llm)
Get system prompt (C API)
void LLMAgent_Remove_Last_Message(LLMAgent *llm)
Remove last message from history (C API)
Structure representing a single chat message.
json to_json() const
Convert message to JSON representation.
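Finally, a sketch of seeding a conversation via set_history (not from the source; the per-message JSON keys are an assumption based on the usual role/content layout implied by ChatMessage(role, content), and the header name and json alias are assumed to come from the library header):

#include "LLMAgent.h"   // assumed header name

void seed(LLMAgent &agent)
{
    // Assumed message shape: {"role": ..., "content": ...}.
    json user_turn      = {{"role", "user"},      {"content", "Remember that my name is Ada."}};
    json assistant_turn = {{"role", "assistant"}, {"content", "Noted, Ada."}};

    json history = json::array();
    history.push_back(user_turn);
    history.push_back(assistant_turn);

    agent.set_history(history);   // subsequent chat() calls will include these turns
}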