LLM for Unity  v3.0.3
Create characters in Unity with LLMs!
Loading...
Searching...
No Matches
LLMAgent.cs
Go to the documentation of this file.
1
3using System;
4using System.Collections.Generic;
5using System.IO;
6using System.Linq;
7using System.Threading.Tasks;
8using UndreamAI.LlamaLib;
9using UnityEngine;
10
11namespace LLMUnity
12{
13 [DefaultExecutionOrder(-1)]
20 public class LLMAgent : LLMClient
21 {
        #region Inspector Fields
        /// <summary>Filename for saving chat history (saved in persistentDataPath).</summary>
        [Tooltip("Filename for saving chat history (saved in persistentDataPath)")]
        [LLM] public string save = "";

        /// <summary>Debug LLM prompts.</summary>
        [Tooltip("Debug LLM prompts")]
        [LLM] public bool debugPrompt = false;

        /// <summary>Strategy applied when the conversation history exceeds the model's context window.</summary>
        [Tooltip("Strategy to apply when the conversation history exceeds the model's context window")]
        [LLM] public ContextOverflowStrategy overflowStrategy = ContextOverflowStrategy.Truncate;

        /// <summary>Target fraction of the context window to fill after truncation or summarization.</summary>
        // NOTE(review): the tooltip says 0.0–1.0 but the Range attribute clamps to [0.1, 0.95] — confirm which is intended.
        [Tooltip("Target fraction of the context window to fill after truncation or summarization (0.0–1.0)")]
        [Overflow1, Range(0.1f, 0.95f)] public float overflowTargetRatio = 0.5f;

        /// <summary>Custom prompt used when asking the LLM to summarize history (leave empty for default).</summary>
        [Tooltip("Custom prompt used when asking the LLM to summarize history (leave empty for default)")]
        [Overflow2, TextArea(2, 4)] public string overflowSummarizePrompt = "";

        /// <summary>Backing field for the slot property; -1 means auto-assignment.</summary>
        [Tooltip("Server slot to use for processing (affects caching behavior)")]
        [ModelAdvanced, SerializeField] protected int _slot = -1;

        /// <summary>Backing field for the systemPrompt property.</summary>
        [TextArea(5, 10), Chat, SerializeField]
        [Tooltip("System prompt that defines the AI's personality and behavior")]
        protected string _systemPrompt = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.";
        #endregion
52
53 #region Public Properties
55 public int slot
56 {
57 get => _slot;
58 set
59 {
60 if (_slot != value)
61 {
62 _slot = value;
63 if (llmAgent != null) llmAgent.SlotId = _slot;
64 }
65 }
66 }
67
69 public string systemPrompt
70 {
71 get => _systemPrompt;
72 set
73 {
74 if (_systemPrompt != value)
75 {
76 _systemPrompt = value;
77 if (llmAgent != null) llmAgent.SystemPrompt = _systemPrompt;
78 }
79 }
80 }
81
        /// <summary>The underlying LlamaLib agent instance; created in SetupCallerObject.</summary>
        public UndreamAI.LlamaLib.LLMAgent llmAgent { get; protected set; }
84
86 public List<ChatMessage> chat
87 {
88 get
89 {
90 if (llmAgent == null) return new List<ChatMessage>();
91
92 // convert each UndreamAI.LlamaLib.ChatMessage to LLMUnity.ChatMessage
93 return llmAgent.GetHistory()
94 .Select(m => new ChatMessage(m))
95 .ToList();
96 }
97 set
98 {
99 if (llmAgent != null)
100 {
101 // convert LLMUnity.ChatMessage back to UndreamAI.LlamaLib.ChatMessage
102 var history = value?.Select(m => (UndreamAI.LlamaLib.ChatMessage)m).ToList()
103 ?? new List<UndreamAI.LlamaLib.ChatMessage>();
104
105 llmAgent.SetHistory(history);
106 }
107 }
108 }
109 #endregion
110
111 #region Unity Lifecycle and Initialization
112 public override void Awake()
113 {
114 if (!remote) llm?.Register(this);
115 base.Awake();
116 }
117
118 protected override async Task SetupCallerObject()
119 {
120 await base.SetupCallerObject();
121
122 string exceptionMessage = "";
123 try
124 {
125 llmAgent = new UndreamAI.LlamaLib.LLMAgent(llmClient, systemPrompt);
126 }
127 catch (Exception ex)
128 {
129 exceptionMessage = ex.Message;
130 }
131 if (llmAgent == null || exceptionMessage != "")
132 {
133 string error = "LLMAgent not initialized";
134 if (exceptionMessage != "") error += ", error: " + exceptionMessage;
135 LLMUnitySetup.LogError(error, true);
136 }
137 }
138
142 protected override async Task PostSetupCallerObject()
143 {
144 await base.PostSetupCallerObject();
145 if (slot != -1) llmAgent.SlotId = slot;
146 if (overflowStrategy != ContextOverflowStrategy.None)
147 {
148 string prompt = string.IsNullOrEmpty(overflowSummarizePrompt) ? null : overflowSummarizePrompt;
149 llmAgent.SetOverflowStrategy(overflowStrategy, overflowTargetRatio, prompt);
150 }
151 await InitHistory();
152 }
153
        /// <summary>
        /// Unity editor validation: runs base validation, then checks the slot configuration.
        /// </summary>
        protected override void OnValidate()
        {
            base.OnValidate();

            // Validate slot configuration
            // NOTE(review): the `if (...)` condition line is missing from this extract
            // (Doxygen folded out original line 159) — presumably it checks _slot against
            // the valid range [-1, llm.parallelPrompts - 1]; confirm against the full source.
            {
                LLMUnitySetup.LogError($"Slot must be between 0 and {llm.parallelPrompts - 1}, or -1 for auto-assignment");
            }
        }
164
165 protected override LLMLocal GetCaller()
166 {
167 return llmAgent;
168 }
169
        /// <summary>
        /// Restores previously saved conversation history on startup, when a save
        /// filename is configured and the save file exists.
        /// </summary>
        protected virtual async Task InitHistory()
        {
            // NOTE(review): the body of this if-block is missing from this extract
            // (Doxygen folded out original line 178) — presumably `await LoadHistory();`;
            // confirm against the full source.
            if (!string.IsNullOrEmpty(save) && File.Exists(GetSavePath()))
            {
            }
        }
181
182 #endregion
183
184 #region File Path Management
189 public virtual string GetSavePath()
190 {
191 if (string.IsNullOrEmpty(save))
192 {
193 LLMUnitySetup.LogError("No save path specified");
194 return null;
195 }
196
197 return Path.Combine(Application.persistentDataPath, save).Replace('\\', '/');
198 }
199
200 #endregion
201
202 #region Chat Management
        /// <summary>
        /// Clears the entire conversation history. Ensures the caller object exists
        /// first (without requiring a live connection, since clearing is local).
        /// </summary>
        public virtual async Task ClearHistory()
        {
            await CheckCaller(checkConnection: false);
            llmAgent.ClearHistory();
        }
211
        /// <summary>
        /// Adds a user message to the conversation history.
        /// </summary>
        /// <param name="content">Message text to record with the user role.</param>
        public virtual async Task AddUserMessage(string content)
        {
            await CheckCaller();
            llmAgent.AddUserMessage(content);
        }
221
        /// <summary>
        /// Adds an assistant message to the conversation history.
        /// </summary>
        /// <param name="content">Message text to record with the assistant role.</param>
        public virtual async Task AddAssistantMessage(string content)
        {
            await CheckCaller();
            llmAgent.AddAssistantMessage(content);
        }
231
236 public string GetSummary()
237 {
238 return llmAgent?.GetSummary() ?? string.Empty;
239 }
240
244 public void SetSummary(string summary)
245 {
246 llmAgent?.SetSummary(summary ?? string.Empty);
247 }
248
249 #endregion
250
251 #region Chat Functionality
        /// <summary>
        /// Shape of a completion response: the prompt sent and the content returned.
        /// NOTE(review): lowercase field names presumably match the server's JSON
        /// keys during deserialization — do not rename; confirm against the API.
        /// </summary>
        public class CompletionResponseJson
        {
            public string prompt;   // prompt text that was submitted
            public string content;  // generated completion text
        }
        /// <summary>
        /// Processes a user query and generates an AI response using conversation context.
        /// Streaming chunks are delivered through <paramref name="callback"/>;
        /// <paramref name="completionCallback"/> fires once the response is complete.
        /// Errors are logged via LLMUnitySetup.LogError rather than rethrown.
        /// </summary>
        /// <param name="query">User message to send.</param>
        /// <param name="callback">Optional per-chunk streaming callback.</param>
        /// <param name="completionCallback">Optional callback invoked after completion.</param>
        /// <param name="addToHistory">Whether the exchange is recorded (and auto-saved when a save file is set).</param>
        /// <returns>The generated response, or null if this object was destroyed mid-call.</returns>
        // NOTE(review): several lines are missing from this extract (Doxygen folded out
        // original lines 276, 282 and 290) — presumably the declaration of `wrappedCallback`,
        // the IL2CPP assignment wrapping `mainThreadCallback`, and the actual completion call
        // that assigns `result`. Confirm against the full source before editing.
        public virtual async Task<string> Chat(string query, Action<string> callback = null,
            Action completionCallback = null, bool addToHistory = true)
        {
            await CheckCaller();
            string result = "";
            try
            {
                if (callback != null)
                {
#if ENABLE_IL2CPP
                    // For IL2CPP: wrap to IntPtr callback, then wrap for main thread
                    Action<string> mainThreadCallback = Utils.WrapActionForMainThread(callback, this);
#else
                    // For Mono: direct callback wrapping
                    wrappedCallback = Utils.WrapCallbackForAsync(callback, this);
#endif
                }

                SetCompletionParameters();
                // `this == null` uses Unity's overloaded equality: true when the
                // component was destroyed while awaiting.
                if (this == null) return null;
                if (addToHistory && result != null && save != "") _ = SaveHistory();
                if (this != null) completionCallback?.Invoke();
            }
            catch (Exception ex)
            {
                LLMUnitySetup.LogError(ex.Message, true);
            }
            return result;
        }
301
309 {
311 }
312
        /// <summary>
        /// Warms up the model with a specific prompt without adding it to history:
        /// temporarily sets numPredict to 0 so the prompt is processed (and cached)
        /// with no tokens generated, then restores completion parameters.
        /// </summary>
        /// <param name="query">Prompt to pre-process.</param>
        /// <param name="completionCallback">Optional callback invoked after completion.</param>
        // NOTE(review): lines capturing the original numPredict value and restoring it in
        // the finally block are missing from this extract (Doxygen folded out original
        // lines 322 and 332); confirm against the full source.
        public virtual async Task Warmup(string query, Action completionCallback = null)
        {
            try
            {
                // Set to generate no tokens for warmup
                numPredict = 0;
                await Chat(query, null, completionCallback, false);
            }
            finally
            {
                // Restore original setting
                SetCompletionParameters();
            }
        }
336
337 #endregion
338
339 #region Persistence
343 public virtual async Task SaveHistory()
344 {
345 if (string.IsNullOrEmpty(save))
346 {
347 LLMUnitySetup.LogError("No save path specified");
348 return;
349 }
350 await CheckCaller();
351
352 // Save chat history
353 string jsonPath = GetSavePath();
354 string directory = Path.GetDirectoryName(jsonPath);
355
356 if (!Directory.Exists(directory))
357 {
358 Directory.CreateDirectory(directory);
359 }
360
361 try
362 {
363 llmAgent.SaveHistory(jsonPath);
364 LLMUnitySetup.Log($"Saved chat history to: {jsonPath}");
365 }
366 catch (Exception ex)
367 {
368 LLMUnitySetup.LogError($"Failed to save chat history to '{jsonPath}': {ex.Message}", true);
369 }
370 }
371
375 public virtual async Task LoadHistory()
376 {
377 if (string.IsNullOrEmpty(save))
378 {
379 LLMUnitySetup.LogError("No save path specified");
380 return;
381 }
382 await CheckCaller();
383
384 // Load chat history
385 string jsonPath = GetSavePath();
386 if (!File.Exists(jsonPath))
387 {
388 LLMUnitySetup.LogError($"Chat history file not found: {jsonPath}");
389 }
390
391 try
392 {
393 llmAgent.LoadHistory(jsonPath);
394 LLMUnitySetup.Log($"Loaded chat history from: {jsonPath}");
395 }
396 catch (Exception ex)
397 {
398 LLMUnitySetup.LogError($"Failed to load chat history from '{jsonPath}': {ex.Message}", true);
399 }
400 }
401
402 #endregion
403
404 #region Request Management
408 public void CancelRequests()
409 {
410 llmAgent?.Cancel();
411 }
412
413 #endregion
414 }
415
    /// <summary>
    /// LLMUnity-level chat message: a thin wrapper over UndreamAI.LlamaLib.ChatMessage
    /// used to expose history entries with a local type.
    /// </summary>
    public class ChatMessage : UndreamAI.LlamaLib.ChatMessage
    {
        /// <summary>Creates a message with the given role and content.</summary>
        public ChatMessage(string role, string content) : base(role, content) {}
        /// <summary>Copy-constructs from a LlamaLib message (copies role and content; throws NullReferenceException on null).</summary>
        public ChatMessage(UndreamAI.LlamaLib.ChatMessage other) : base(other.role, other.content) {}
    }
421}
Unity MonoBehaviour that implements a conversational AI agent with persistent chat history....
Definition LLMAgent.cs:21
virtual async Task Warmup(Action completionCallback=null)
Warms up the model by processing the system prompt without generating output. This caches the system ...
Definition LLMAgent.cs:308
virtual async Task Warmup(string query, Action completionCallback=null)
Warms up the model with a specific prompt without adding it to history. This pre-processes prompts fo...
Definition LLMAgent.cs:320
string save
Filename for saving chat history (saved in persistentDataPath)
Definition LLMAgent.cs:25
List< ChatMessage > chat
Current conversation history as a list of chat messages.
Definition LLMAgent.cs:87
void SetSummary(string summary)
Overrides the rolling summary directly, e.g. to restore custom state.
Definition LLMAgent.cs:244
virtual async Task SaveHistory()
Saves the conversation history and optionally the LLM cache to disk.
Definition LLMAgent.cs:343
virtual async Task AddUserMessage(string content)
Adds a user message to the conversation history.
Definition LLMAgent.cs:216
ContextOverflowStrategy overflowStrategy
Strategy to apply when the conversation history exceeds the model's context window.
Definition LLMAgent.cs:33
override void Awake()
Unity Awake method that validates configuration and assigns local LLM if needed.
Definition LLMAgent.cs:112
bool debugPrompt
Debug LLM prompts.
Definition LLMAgent.cs:29
virtual async Task ClearHistory()
Clears the entire conversation history.
Definition LLMAgent.cs:206
string overflowSummarizePrompt
Custom prompt used when asking the LLM to summarize history (leave empty for default)
Definition LLMAgent.cs:41
string systemPrompt
System prompt defining the agent's behavior and personality.
Definition LLMAgent.cs:70
virtual string GetSavePath()
Gets the full path for a file in the persistent data directory.
Definition LLMAgent.cs:189
void CancelRequests()
Cancels any active requests for this agent.
Definition LLMAgent.cs:408
UndreamAI.LlamaLib.LLMAgent llmAgent
The underlying LLMAgent instance from LlamaLib.
Definition LLMAgent.cs:83
virtual async Task LoadHistory()
Loads conversation history and optionally the LLM cache from disk.
Definition LLMAgent.cs:375
virtual async Task< string > Chat(string query, Action< string > callback=null, Action completionCallback=null, bool addToHistory=true)
Processes a user query asynchronously and generates an AI response using conversation context....
Definition LLMAgent.cs:269
float overflowTargetRatio
Target fraction of the context window to fill after truncation or summarization (0.0–1.0).
Definition LLMAgent.cs:37
int slot
Server slot ID for this agent's requests.
Definition LLMAgent.cs:56
virtual async Task AddAssistantMessage(string content)
Adds an AI assistant message to the conversation history.
Definition LLMAgent.cs:226
string GetSummary()
Returns the current rolling summary produced by the Summarize overflow strategy. Empty string if no s...
Definition LLMAgent.cs:236
Unity MonoBehaviour base class for LLM client functionality. Handles both local and remote LLM connec...
Definition LLMClient.cs:21
bool remote
Whether this client uses a remote server connection.
Definition LLMClient.cs:128
int numPredict
Maximum tokens to generate (-1 = unlimited)
Definition LLMClient.cs:58
LLM llm
The local LLM instance (null if using remote)
Definition LLMClient.cs:142
Class implementing helper functions for setup and process management.
Unity MonoBehaviour component that manages a local LLM server instance. Handles model loading,...
Definition LLM.cs:21
int parallelPrompts
Number of prompts that can be processed in parallel (-1 = auto-detect from clients)
Definition LLM.cs:132
void Register(LLMClient llmClient)
Registers an LLMClient for slot management.
Definition LLM.cs:636