4using System.Collections.Generic;
7using System.Threading.Tasks;
8using UndreamAI.LlamaLib;
13 [DefaultExecutionOrder(-1)]
22 #region Inspector Fields
24 [Tooltip(
"Filename for saving chat history (saved in persistentDataPath)")]
28 [Tooltip(
"Debug LLM prompts")]
32 [Tooltip(
"Server slot to use for processing (affects caching behavior)")]
33 [ModelAdvanced, SerializeField]
protected int _slot = -1;
36 [TextArea(5, 10),
Chat, SerializeField]
37 [Tooltip(
"System prompt that defines the AI's personality and behavior")]
38 protected string _systemPrompt =
"A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.";
41 #region Public Properties
51 if (
llmAgent !=
null) llmAgent.SlotId = _slot;
62 if (_systemPrompt != value)
64 _systemPrompt = value;
65 if (
llmAgent !=
null) llmAgent.SystemPrompt = _systemPrompt;
/// <summary>
/// The underlying LLMAgent instance from LlamaLib that performs the actual
/// chat processing. Read-only to callers; assigned internally (protected set),
/// presumably during component setup — confirm against SetupCallerObject.
/// </summary>
71 public UndreamAI.LlamaLib.LLMAgent
llmAgent {
get;
protected set; }
74 public List<ChatMessage>
chat
78 if (
llmAgent ==
null)
return new List<ChatMessage>();
90 var history = value?.Select(m => (UndreamAI.LlamaLib.ChatMessage)m).ToList()
91 ??
new List<UndreamAI.LlamaLib.ChatMessage>();
99 #region Unity Lifecycle and Initialization
106 protected override async Task SetupCallerObject()
108 await base.SetupCallerObject();
110 string exceptionMessage =
"";
117 exceptionMessage = ex.Message;
119 if (
llmAgent ==
null || exceptionMessage !=
"")
121 string error =
"LLMAgent not initialized";
122 if (exceptionMessage !=
"") error +=
", error: " + exceptionMessage;
123 LLMUnitySetup.LogError(error,
true);
130 protected override async Task PostSetupCallerObject()
132 await base.PostSetupCallerObject();
133 if (
slot != -1) llmAgent.SlotId =
slot;
137 protected override void OnValidate()
144 LLMUnitySetup.LogError(
$"Slot must be between 0 and {llm.parallelPrompts - 1}, or -1 for auto-assignment");
148 protected override LLMLocal GetCaller()
156 protected virtual async Task InitHistory()
167 #region File Path Management
185 #region Chat Management
217 #region Chat Functionality
222 public string prompt;
255 SetCompletionParameters();
257 if (
this ==
null)
return null;
299 SetCompletionParameters();
334 LLMUnitySetup.LogError(
$"Failed to save chat history to '{jsonPath}': {ex.Message}",
true);
354 LLMUnitySetup.LogError(
$"Chat history file not found: {jsonPath}");
364 LLMUnitySetup.LogError(
$"Failed to load chat history from '{jsonPath}': {ex.Message}",
true);
370 #region Request Management
/// <summary>
/// Constructs a chat message from an explicit role and content pair,
/// delegating storage to the base LlamaLib ChatMessage type.
/// </summary>
/// <param name="role">Message role (e.g. user/assistant) — forwarded to base.</param>
/// <param name="content">Message text — forwarded to base.</param>
384 public ChatMessage(
string role,
string content) : base(role, content) {}
/// <summary>
/// Converting constructor: wraps a LlamaLib ChatMessage in this Unity-facing
/// type by copying its role and content into the base.
/// </summary>
/// <param name="other">The LlamaLib message to convert.</param>
385 public ChatMessage(UndreamAI.LlamaLib.ChatMessage other) : base(other.role, other.content) {}
Unity MonoBehaviour that implements a conversational AI agent with persistent chat history....
virtual async Task Warmup(Action completionCallback=null)
Warms up the model by processing the system prompt without generating output. This caches the system prompt so that subsequent requests respond faster.
virtual async Task Warmup(string query, Action completionCallback=null)
Warms up the model with a specific prompt without adding it to history. This pre-processes prompts for faster subsequent responses.
string save
Filename for saving chat history (saved in persistentDataPath)
List< ChatMessage > chat
Current conversation history as a list of chat messages.
virtual async Task SaveHistory()
Saves the conversation history and optionally the LLM cache to disk.
virtual async Task AddUserMessage(string content)
Adds a user message to the conversation history.
override void Awake()
Unity Awake method that validates configuration and assigns local LLM if needed.
bool debugPrompt
Debug LLM prompts.
virtual async Task ClearHistory()
Clears the entire conversation history.
string systemPrompt
System prompt defining the agent's behavior and personality.
virtual string GetSavePath()
Gets the full path for a file in the persistent data directory.
void CancelRequests()
Cancels any active requests for this agent.
UndreamAI.LlamaLib.LLMAgent llmAgent
The underlying LLMAgent instance from LlamaLib.
virtual async Task LoadHistory()
Loads conversation history and optionally the LLM cache from disk.
virtual async Task< string > Chat(string query, Action< string > callback=null, Action completionCallback=null, bool addToHistory=true)
Processes a user query asynchronously and generates an AI response using conversation context....
int slot
Server slot ID for this agent's requests.
virtual async Task AddAssistantMessage(string content)
Adds an AI assistant message to the conversation history.
Unity MonoBehaviour base class for LLM client functionality. Handles both local and remote LLM connections.
bool remote
Whether this client uses a remote server connection.
int numPredict
Maximum tokens to generate (-1 = unlimited)
LLM llm
The local LLM instance (null if using remote)
Class implementing helper functions for setup and process management.
Unity MonoBehaviour component that manages a local LLM server instance. Handles model loading and the server lifecycle.
int parallelPrompts
Number of prompts that can be processed in parallel (-1 = auto-detect from clients)
void Register(LLMClient llmClient)
Registers an LLMClient for slot management.