LlamaLib v2.0.2
Cross-platform library for local LLMs
Loading...
Searching...
No Matches
C API functions

C-compatible API functions. More...

bool Has_GPU_Layers (const char *command)
 Check if command has GPU layers (C API)
 
void LLM_Debug (int debug_level)
 Set global debug level (C API)
 
void LLM_Logging_Callback (CharArrayFn callback)
 Set global logging callback (C API)
 
void LLM_Logging_Stop ()
 Stop global logging (C API)
 
void LLM_Set_Completion_Parameters (LLM *llm, const char *params_json="{}")
 Set completion parameters (C API)
 
const char * LLM_Get_Completion_Parameters (LLM *llm)
 Get completion parameters (C API)
 
void LLM_Set_Grammar (LLM *llm, const char *grammar="")
 Set grammar (C API)
 
const char * LLM_Get_Grammar (LLM *llm)
 Get grammar (C API)
 
const char * LLM_Apply_Template (LLM *llm, const char *messages_as_json)
 Apply chat template (C API)
 
const char * LLM_Tokenize (LLM *llm, const char *query)
 Tokenize text (C API)
 
const char * LLM_Detokenize (LLM *llm, const char *tokens_as_json)
 Detokenize tokens (C API)
 
const char * LLM_Embeddings (LLM *llm, const char *query)
 Generate embeddings (C API)
 
const char * LLM_Completion (LLM *llm, const char *prompt, CharArrayFn callback=nullptr, int id_slot=-1, bool return_response_json=false)
 Generate completion (C API)
 
const char * LLM_Save_Slot (LLMLocal *llm, int id_slot, const char *filepath)
 Save slot state (C API)
 
const char * LLM_Load_Slot (LLMLocal *llm, int id_slot, const char *filepath)
 Load slot state (C API)
 
void LLM_Cancel (LLMLocal *llm, int id_slot)
 Cancel request (C API)
 
bool LLM_Lora_Weight (LLMProvider *llm, const char *loras_as_json)
 Configure LoRA weights (C API)
 
void LLM_Enable_Reasoning (LLMProvider *llm, bool enable_reasoning)
 Enable reasoning (C API)
 
const char * LLM_Lora_List (LLMProvider *llm)
 List LoRA adapters (C API)
 
void LLM_Delete (LLMProvider *llm)
 Delete LLM provider (C API)
 
void LLM_Start (LLMProvider *llm)
 Start LLM service (C API)
 
const bool LLM_Started (LLMProvider *llm)
 Check if service is started (C API)
 
void LLM_Stop (LLMProvider *llm)
 Stop LLM service (C API)
 
void LLM_Start_Server (LLMProvider *llm, const char *host="0.0.0.0", int port=-1, const char *API_key="")
 Start HTTP server (C API)
 
void LLM_Stop_Server (LLMProvider *llm)
 Stop HTTP server (C API)
 
void LLM_Join_Service (LLMProvider *llm)
 Wait for service to complete (C API)
 
void LLM_Join_Server (LLMProvider *llm)
 Wait for server to complete (C API)
 
void LLM_Set_SSL (LLMProvider *llm, const char *SSL_cert, const char *SSL_key)
 Set SSL configuration (C API)
 
const int LLM_Status_Code ()
 Get last operation status code (C API)
 
const char * LLM_Status_Message ()
 Get last operation status message (C API)
 
const int LLM_Embedding_Size (LLMProvider *llm)
 Get embedding vector size (C API)
 
LLMAgent * LLMAgent_Construct (LLMLocal *llm, const char *system_prompt="")
 Construct LLMAgent (C API)
 
void LLMAgent_Set_System_Prompt (LLMAgent *llm, const char *system_prompt)
 Set system prompt (C API)
 
const char * LLMAgent_Get_System_Prompt (LLMAgent *llm)
 Get system prompt (C API)
 
void LLMAgent_Set_Slot (LLMAgent *llm, int slot_id)
 Set processing slot (C API)
 
int LLMAgent_Get_Slot (LLMAgent *llm)
 Get processing slot (C API)
 
const char * LLMAgent_Chat (LLMAgent *llm, const char *user_prompt, bool add_to_history=true, CharArrayFn callback=nullptr, bool return_response_json=false, bool debug_prompt=false)
 Conduct chat interaction (C API)
 
void LLMAgent_Clear_History (LLMAgent *llm)
 Clear conversation history (C API)
 
const char * LLMAgent_Get_History (LLMAgent *llm)
 Get conversation history (C API)
 
void LLMAgent_Set_History (LLMAgent *llm, const char *history_json)
 Set conversation history (C API)
 
void LLMAgent_Add_User_Message (LLMAgent *llm, const char *content)
 Add user message to history (C API)
 
void LLMAgent_Add_Assistant_Message (LLMAgent *llm, const char *content)
 Add assistant message to history (C API)
 
void LLMAgent_Remove_Last_Message (LLMAgent *llm)
 Remove last message from history (C API)
 
void LLMAgent_Save_History (LLMAgent *llm, const char *filepath)
 Save conversation history to file (C API)
 
void LLMAgent_Load_History (LLMAgent *llm, const char *filepath)
 Load conversation history from file (C API)
 
size_t LLMAgent_Get_History_Size (LLMAgent *llm)
 Get conversation history size (C API)
 
bool LLMClient_Is_Server_Alive (LLMClient *llm)
 Check if server is alive (C API)
 
void LLMClient_Set_SSL (LLMClient *llm, const char *SSL_cert)
 Set SSL certificate (C API)
 
LLMClient * LLMClient_Construct (LLMProvider *llm)
 Construct local LLMClient (C API)
 
LLMClient * LLMClient_Construct_Remote (const char *url, const int port, const char *API_key="")
 Construct remote LLMClient (C API)
 
const char * Available_Architectures (bool gpu)
 Get available architectures (C API)
 
void LLMService_Registry (LLMProviderRegistry *existing_instance)
 Set registry for LLMService (C API)
 
LLMService * LLMService_Construct (const char *model_path, int num_slots=1, int num_threads=-1, int num_GPU_layers=0, bool flash_attention=false, int context_size=4096, int batch_size=2048, bool embedding_only=false, int lora_count=0, const char **lora_paths=nullptr)
 Construct LLMService instance (C API)
 
LLMService * LLMService_From_Command (const char *params_string)
 Create LLMService from command string (C API)
 
const char * LLMService_Command (LLMService *llm_service)
 Returns the construct command (C API)
 
void LLMService_InjectErrorState (ErrorState *error_state)
 

Detailed Description

C-compatible API functions.

Function Documentation

◆ Available_Architectures()

const char * Available_Architectures ( bool gpu)

Get available architectures (C API)

Parameters
gpuWhether to include GPU architectures
Returns
JSON string containing available architectures

Definition at line 355 of file LLM_runtime.cpp.

◆ Has_GPU_Layers()

bool Has_GPU_Layers ( const char * command)

Check if command has GPU layers (C API)

Parameters
commandCommand string to check
Returns
true if GPU layers are specified, false otherwise

Definition at line 398 of file LLM.cpp.

◆ LLM_Apply_Template()

const char * LLM_Apply_Template ( LLM * llm,
const char * messages_as_json )

Apply chat template (C API)

Parameters
llmLLM instance pointer
messages_as_jsonJSON string with messages
Returns
Formatted chat string

Definition at line 486 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Cancel()

void LLM_Cancel ( LLMLocal * llm,
int id_slot )

Cancel request (C API)

Parameters
llmLLMLocal instance pointer
id_slotSlot ID to cancel

Definition at line 506 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Completion()

const char * LLM_Completion ( LLM * llm,
const char * prompt,
CharArrayFn callback = nullptr,
int id_slot = -1,
bool return_response_json = false )

Generate completion (C API)

Parameters
llmLLM instance pointer
promptInput prompt
callbackOptional streaming callback
id_slotSlot ID (-1 for auto)
return_response_jsonWhether to return JSON response
Returns
Generated text or JSON response

Definition at line 460 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Debug()

void LLM_Debug ( int debug_level)

Set global debug level (C API)

Parameters
debug_levelDebug verbosity level

Definition at line 403 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Delete()

void LLM_Delete ( LLMProvider * llm)

Delete LLM provider (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 542 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Detokenize()

const char * LLM_Detokenize ( LLM * llm,
const char * tokens_as_json )

Detokenize tokens (C API)

Parameters
llmLLM instance pointer
tokens_as_jsonJSON string with token IDs
Returns
Detokenized text

Definition at line 449 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Embedding_Size()

const int LLM_Embedding_Size ( LLMProvider * llm)

Get embedding vector size (C API)

Parameters
llmLLMProvider instance pointer
Returns
Number of dimensions in embeddings

Definition at line 601 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Embeddings()

const char * LLM_Embeddings ( LLM * llm,
const char * query )

Generate embeddings (C API)

Parameters
llmLLM instance pointer
queryText to embed
Returns
JSON string with embeddings

Definition at line 454 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Enable_Reasoning()

void LLM_Enable_Reasoning ( LLMProvider * llm,
bool enable_reasoning )

Enable reasoning (C API)

Parameters
llmLLMProvider instance pointer
enable_reasoningbool whether to enable reasoning

Definition at line 491 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Get_Completion_Parameters()

const char * LLM_Get_Completion_Parameters ( LLM * llm)

Get completion parameters (C API)

Parameters
llmLLM instance pointer
Returns
JSON string with current parameters

Definition at line 471 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Get_Grammar()

const char * LLM_Get_Grammar ( LLM * llm)

Get grammar (C API)

Parameters
llmLLM instance pointer
Returns
Current grammar string

Definition at line 481 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Join_Server()

void LLM_Join_Server ( LLMProvider * llm)

Wait for server to complete (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 565 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Join_Service()

void LLM_Join_Service ( LLMProvider * llm)

Wait for service to complete (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 560 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Load_Slot()

const char * LLM_Load_Slot ( LLMLocal * llm,
int id_slot,
const char * filepath )

Load slot state (C API)

Parameters
llmLLMLocal instance pointer
id_slotSlot ID to restore
filepathPath to load file
Returns
Operation result string

Definition at line 501 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Logging_Callback()

void LLM_Logging_Callback ( CharArrayFn callback)

Set global logging callback (C API)

Parameters
callbackFunction to receive log messages

Definition at line 413 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Logging_Stop()

void LLM_Logging_Stop ( )

Stop global logging (C API)

Definition at line 423 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Lora_List()

const char * LLM_Lora_List ( LLMProvider * llm)

List LoRA adapters (C API)

Parameters
llmLLMProvider instance pointer
Returns
JSON string with LoRA list

Definition at line 530 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Lora_Weight()

bool LLM_Lora_Weight ( LLMProvider * llm,
const char * loras_as_json )

Configure LoRA weights (C API)

Parameters
llmLLMProvider instance pointer
loras_as_jsonJSON string with LoRA configuration
Returns
true if successful, false otherwise

Definition at line 511 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Save_Slot()

const char * LLM_Save_Slot ( LLMLocal * llm,
int id_slot,
const char * filepath )

Save slot state (C API)

Parameters
llmLLMLocal instance pointer
id_slotSlot ID to save
filepathPath to save file
Returns
Operation result string

Definition at line 496 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Set_Completion_Parameters()

void LLM_Set_Completion_Parameters ( LLM * llm,
const char * params_json = "{}" )

Set completion parameters (C API)

Parameters
llmLLM instance pointer
params_jsonJSON string with parameters (default: "{}")

Definition at line 465 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Set_Grammar()

void LLM_Set_Grammar ( LLM * llm,
const char * grammar = "" )

Set grammar (C API)

Parameters
llmLLM instance pointer
grammarGrammar string (default: "")

Definition at line 476 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Set_SSL()

void LLM_Set_SSL ( LLMProvider * llm,
const char * SSL_cert,
const char * SSL_key )

Set SSL configuration (C API)

Parameters
llmLLMProvider instance pointer
SSL_certPath to certificate file
SSL_keyPath to private key file

Definition at line 585 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Start()

void LLM_Start ( LLMProvider * llm)

Start LLM service (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 570 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Start_Server()

void LLM_Start_Server ( LLMProvider * llm,
const char * host = "0.0.0.0",
int port = -1,
const char * API_key = "" )

Start HTTP server (C API)

Parameters
llmLLMProvider instance pointer
hostHost address (default: "0.0.0.0")
portPort number (default: -1 for auto)
API_keyOptional API key

Definition at line 550 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Started()

const bool LLM_Started ( LLMProvider * llm)

Check if service is started (C API)

Parameters
llmLLMProvider instance pointer
Returns
true if started, false otherwise

Definition at line 575 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Status_Code()

const int LLM_Status_Code ( )

Get last operation status code (C API)

Returns
Status code of last operation

Definition at line 590 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Status_Message()

const char * LLM_Status_Message ( )

Get last operation status message (C API)

Returns
Status message of last operation

Definition at line 595 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Stop()

void LLM_Stop ( LLMProvider * llm)

Stop LLM service (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 580 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Stop_Server()

void LLM_Stop_Server ( LLMProvider * llm)

Stop HTTP server (C API)

Parameters
llmLLMProvider instance pointer

Definition at line 555 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLM_Tokenize()

const char * LLM_Tokenize ( LLM * llm,
const char * query )

Tokenize text (C API)

Parameters
llmLLM instance pointer
queryText to tokenize
Returns
JSON string with token IDs

Definition at line 443 of file LLM.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Add_Assistant_Message()

void LLMAgent_Add_Assistant_Message ( LLMAgent * llm,
const char * content )

Add assistant message to history (C API)

Parameters
llmLLMAgent instance pointer
contentMessage content text

Appends a new message to conversation history

Definition at line 213 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Add_User_Message()

void LLMAgent_Add_User_Message ( LLMAgent * llm,
const char * content )

Add user message to history (C API)

Parameters
llmLLMAgent instance pointer
contentMessage content text

Appends a new message to conversation history

Definition at line 208 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Chat()

const char * LLMAgent_Chat ( LLMAgent * llm,
const char * user_prompt,
bool add_to_history = true,
CharArrayFn callback = nullptr,
bool return_response_json = false,
bool debug_prompt = false )

Conduct chat interaction (C API)

Parameters
llmLLMAgent instance pointer
user_promptUser input message
add_to_historyWhether to save messages to history (default: true)
callbackOptional streaming callback function
return_response_jsonWhether to return JSON response (default: false)
debug_promptWhether to display the complete prompt (default: false)
Returns
Generated assistant response

Main chat method for conversational interactions

Definition at line 155 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Clear_History()

void LLMAgent_Clear_History ( LLMAgent * llm)

Clear conversation history (C API)

Parameters
llmLLMAgent instance pointer

Removes all messages from conversation history

Definition at line 161 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Construct()

LLMAgent * LLMAgent_Construct ( LLMLocal * llm,
const char * system_prompt = "" )

Construct LLMAgent (C API)

Parameters
llmLLMLocal instance to wrap
system_promptInitial system prompt (default: "")
Returns
Pointer to new LLMAgent instance

Creates a conversational agent with the specified configuration

Definition at line 149 of file LLM_agent.cpp.

◆ LLMAgent_Get_History()

const char * LLMAgent_Get_History ( LLMAgent * llm)

Get conversation history (C API)

Parameters
llmLLMAgent instance pointer
Returns
JSON string containing conversation history

Definition at line 177 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Get_History_Size()

size_t LLMAgent_Get_History_Size ( LLMAgent * llm)

Get conversation history size (C API)

Parameters
llmLLMAgent instance pointer
Returns
Number of messages in conversation history

Definition at line 241 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Get_Slot()

int LLMAgent_Get_Slot ( LLMAgent * llm)

Get processing slot (C API)

Parameters
llmLLMAgent instance pointer
Returns
Current slot ID

Definition at line 187 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Get_System_Prompt()

const char * LLMAgent_Get_System_Prompt ( LLMAgent * llm)

Get system prompt (C API)

Parameters
llmLLMAgent instance pointer
Returns
Current system prompt string

Definition at line 172 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Load_History()

void LLMAgent_Load_History ( LLMAgent * llm,
const char * filepath )

Load conversation history from file (C API)

Parameters
llmLLMAgent instance pointer
filepathPath to history file to load

Loads conversation history from JSON file

Definition at line 232 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Remove_Last_Message()

void LLMAgent_Remove_Last_Message ( LLMAgent * llm)

Remove last message from history (C API)

Parameters
llmLLMAgent instance pointer

Removes the most recently added message from history

Definition at line 218 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Save_History()

void LLMAgent_Save_History ( LLMAgent * llm,
const char * filepath )

Save conversation history to file (C API)

Parameters
llmLLMAgent instance pointer
filepathPath to save history file

Saves conversation history as JSON to specified file

Definition at line 223 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Set_History()

void LLMAgent_Set_History ( LLMAgent * llm,
const char * history_json )

Set conversation history (C API)

Parameters
llmLLMAgent instance pointer
history_jsonJSON string containing conversation history

Replaces current history with provided JSON data

Definition at line 192 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Set_Slot()

void LLMAgent_Set_Slot ( LLMAgent * llm,
int slot_id )

Set processing slot (C API)

Parameters
llmLLMAgent instance pointer
slot_idSlot ID to assign

Definition at line 182 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMAgent_Set_System_Prompt()

void LLMAgent_Set_System_Prompt ( LLMAgent * llm,
const char * system_prompt )

Set system prompt (C API)

Parameters
llmLLMAgent instance pointer
system_promptNew system prompt string

Setting system prompt clears conversation history

Definition at line 166 of file LLM_agent.cpp.

Here is the caller graph for this function:

◆ LLMClient_Construct()

LLMClient * LLMClient_Construct ( LLMProvider * llm)

Construct local LLMClient (C API)

Parameters
llmLLMProvider instance to wrap
Returns
Pointer to new LLMClient instance

Creates a client for local LLM provider access

Definition at line 394 of file LLM_client.cpp.

Here is the caller graph for this function:

◆ LLMClient_Construct_Remote()

LLMClient * LLMClient_Construct_Remote ( const char * url,
const int port,
const char * API_key = "" )

Construct remote LLMClient (C API)

Parameters
urlServer URL or hostname
portServer port number
API_keyOptional API key (default: "")
Returns
Pointer to new LLMClient instance

Creates a client for remote LLM server access

Definition at line 399 of file LLM_client.cpp.

Here is the caller graph for this function:

◆ LLMClient_Is_Server_Alive()

bool LLMClient_Is_Server_Alive ( LLMClient * llm)

Definition at line 384 of file LLM_client.cpp.

◆ LLMClient_Set_SSL()

void LLMClient_Set_SSL ( LLMClient * llm,
const char * SSL_cert )

Set SSL certificate (C API)

Parameters
llmLLMClient instance pointer
SSL_certPath to SSL certificate file

Configure SSL certificate for remote client connections

Definition at line 389 of file LLM_client.cpp.

Here is the caller graph for this function:

◆ LLMService_Command()

const char * LLMService_Command ( LLMService * llm_service)

Returns the construct command (C API)

Parameters
llm_serviceThe LLMService instance

Definition at line 751 of file LLM_service.cpp.

Here is the caller graph for this function:

◆ LLMService_Construct()

LLMService * LLMService_Construct ( const char * model_path,
int num_slots = 1,
int num_threads = -1,
int num_GPU_layers = 0,
bool flash_attention = false,
int context_size = 4096,
int batch_size = 2048,
bool embedding_only = false,
int lora_count = 0,
const char ** lora_paths = nullptr )

Construct LLMService instance (C API)

Parameters
model_pathPath to model file
num_slotsNumber of parallel sequences
num_threadsNumber of CPU threads (-1 for auto)
num_GPU_layersNumber of GPU layers
flash_attentionWhether to use flash attention
context_sizeMaximum context size
batch_sizeProcessing batch size
embedding_onlyWhether embedding-only mode
lora_countNumber of LoRA paths provided
lora_pathsArray of LoRA file paths
Returns
Pointer to new LLMService instance

Definition at line 710 of file LLM_service.cpp.

Here is the caller graph for this function:

◆ LLMService_From_Command()

LLMService * LLMService_From_Command ( const char * params_string)

Create LLMService from command string (C API)

Parameters
params_stringCommand line parameter string
Returns
Pointer to new LLMService instance

See https://github.com/ggml-org/llama.cpp/tree/master/tools/server#usage for arguments.

Definition at line 729 of file LLM_service.cpp.

Here is the caller graph for this function:

◆ LLMService_InjectErrorState()

void LLMService_InjectErrorState ( ErrorState * error_state)

Definition at line 756 of file LLM_service.cpp.

◆ LLMService_Registry()

void LLMService_Registry ( LLMProviderRegistry * existing_instance)

Set registry for LLMService (C API)

Parameters
existing_instanceExisting registry instance to use

Allows injection of custom registry for LLMService instances

Definition at line 705 of file LLM_service.cpp.

Here is the caller graph for this function: