LLM for Unity  v3.0.0
Create characters in Unity with LLMs!
Loading...
Searching...
No Matches
LLMAgent.cs
Go to the documentation of this file.
1
3using System;
4using System.Collections.Generic;
5using System.IO;
6using System.Linq;
7using System.Threading.Tasks;
8using UndreamAI.LlamaLib;
9using UnityEngine;
10
11namespace LLMUnity
12{
13 [DefaultExecutionOrder(-1)]
20 public class LLMAgent : LLMClient
21 {
22 #region Inspector Fields
// NOTE(review): the leading numerals on each line are Doxygen page line numbers left by the
// HTML extraction, not part of the original source — strip them when restoring the file.
// Serialized inspector fields for the agent's persistence, debugging, slot and prompt settings.
// Filename (relative to Application.persistentDataPath — see GetSavePath) for chat history.
24 [Tooltip("Filename for saving chat history (saved in persistentDataPath)")]
25 [LLM] public string save = "";
26
// When enabled, prompts sent to the LLM are logged for debugging (consumed elsewhere; not read in this file).
28 [Tooltip("Debug LLM prompts")]
29 [LLM] public bool debugPrompt = false;
30
// Backing field for the `slot` property; -1 means auto-assignment (see PostSetupCallerObject).
32 [Tooltip("Server slot to use for processing (affects caching behavior)")]
33 [ModelAdvanced, SerializeField] protected int _slot = -1;
34
// Backing field for the `systemPrompt` property; pushed to the native agent in SetupCallerObject.
36 [TextArea(5, 10), Chat, SerializeField]
37 [Tooltip("System prompt that defines the AI's personality and behavior")]
38 protected string _systemPrompt = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.";
39 #endregion
40
41 #region Public Properties
/// <summary>
/// Server slot ID used for this agent's requests (-1 = auto-assignment).
/// Changing the value is forwarded to the native agent once it exists.
/// </summary>
public int slot
{
    get => _slot;
    set
    {
        // Guard clause: nothing to do when the value is unchanged.
        if (_slot == value) return;
        _slot = value;
        // Only push to the native side after SetupCallerObject has created it.
        if (llmAgent != null) llmAgent.SlotId = _slot;
    }
}
55
/// <summary>
/// System prompt defining the agent's behavior and personality.
/// Changing the value is forwarded to the native agent once it exists.
/// </summary>
public string systemPrompt
{
    get => _systemPrompt;
    set
    {
        // Guard clause: skip redundant assignments.
        if (_systemPrompt == value) return;
        _systemPrompt = value;
        // Only push to the native side after SetupCallerObject has created it.
        if (llmAgent != null) llmAgent.SystemPrompt = _systemPrompt;
    }
}
69
// The underlying LlamaLib agent; created in SetupCallerObject, null before setup completes.
71 public UndreamAI.LlamaLib.LLMAgent llmAgent { get; protected set; }
72
/// <summary>
/// Current conversation history as a list of chat messages.
/// Reads and writes the native agent's history, converting between
/// LLMUnity.ChatMessage and UndreamAI.LlamaLib.ChatMessage.
/// Returns an empty list (and ignores writes) before the agent is initialized.
/// </summary>
public List<ChatMessage> chat
{
    get
    {
        var messages = new List<ChatMessage>();
        if (llmAgent == null) return messages;
        // Wrap each LlamaLib message in the LLMUnity type.
        foreach (var message in llmAgent.GetHistory())
        {
            messages.Add(new ChatMessage(message));
        }
        return messages;
    }
    set
    {
        if (llmAgent == null) return;
        // Downcast each LLMUnity message back to the LlamaLib base type;
        // a null assignment clears the history.
        var history = new List<UndreamAI.LlamaLib.ChatMessage>();
        if (value != null)
        {
            foreach (var message in value)
            {
                history.Add((UndreamAI.LlamaLib.ChatMessage)message);
            }
        }
        llmAgent.SetHistory(history);
    }
}
97 #endregion
98
99 #region Unity Lifecycle and Initialization
/// <summary>
/// Unity Awake: registers this agent with the local LLM (when not remote)
/// before running the base client initialization.
/// </summary>
public override void Awake()
{
    if (!remote)
    {
        // llm may be null when no local LLM has been assigned yet.
        llm?.Register(this);
    }
    base.Awake();
}
105
/// <summary>
/// Creates the underlying LlamaLib agent after the base client has been set up.
/// Logs a fatal error (via LLMUnitySetup.LogError with error=true) when construction
/// throws or yields null.
/// </summary>
protected override async Task SetupCallerObject()
{
    await base.SetupCallerObject();

    try
    {
        llmAgent = new UndreamAI.LlamaLib.LLMAgent(llmClient, systemPrompt);
    }
    catch (Exception ex)
    {
        // Report the failure with the underlying cause and stop: the original
        // used an empty-string message as an error sentinel, which conflated
        // "constructor threw" with "constructor returned null".
        LLMUnitySetup.LogError("LLMAgent not initialized, error: " + ex.Message, true);
        return;
    }
    if (llmAgent == null)
    {
        LLMUnitySetup.LogError("LLMAgent not initialized", true);
    }
}
126
/// <summary>
/// Post-setup hook: applies a non-default slot to the native agent and
/// restores any saved chat history.
/// </summary>
protected override async Task PostSetupCallerObject()
{
    await base.PostSetupCallerObject();
    // A user-chosen slot (anything but the -1 auto-assign default) must be
    // pushed to the native agent before history is initialized.
    if (_slot != -1) llmAgent.SlotId = _slot;
    await InitHistory();
}
136
// Editor-time validation of the serialized configuration.
// NOTE(review): Doxygen source line 142 (the guard condition for the error below) was lost
// in extraction — judging by the message, it presumably checks that _slot is within
// [0, llm.parallelPrompts - 1] or equals -1; confirm against the upstream repository
// before restoring this file.
137 protected override void OnValidate()
138 {
139 base.OnValidate();
140
141 // Validate slot configuration
143 {
144 LLMUnitySetup.LogError($"Slot must be between 0 and {llm.parallelPrompts - 1}, or -1 for auto-assignment");
145 }
146 }
147
/// <summary> Returns the native agent as the caller object used by the base client. </summary>
protected override LLMLocal GetCaller() => llmAgent;
152
// Restores saved chat history on startup when a save file is configured and exists.
// NOTE(review): Doxygen source lines 158 and 161 were lost in extraction — line 161 is the
// body of the if-block and is presumably `await LoadHistory();` (matching the class's
// persistence API); confirm against the upstream repository before restoring this file.
156 protected virtual async Task InitHistory()
157 {
159 if (!string.IsNullOrEmpty(save) && File.Exists(GetSavePath()))
160 {
162 }
163 }
164
165 #endregion
166
167 #region File Path Management
/// <summary>
/// Gets the full path of the chat-history save file inside Application.persistentDataPath.
/// Logs an error and returns null when no save filename is configured.
/// Backslashes are normalized to forward slashes for cross-platform consistency.
/// </summary>
public virtual string GetSavePath()
{
    if (string.IsNullOrEmpty(save))
    {
        LLMUnitySetup.LogError("No save path specified");
        return null;
    }
    string fullPath = Path.Combine(Application.persistentDataPath, save);
    return fullPath.Replace('\\', '/');
}
182
183 #endregion
184
185 #region Chat Management
/// <summary>
/// Clears the entire conversation history on the native agent.
/// Waits for the caller object to be ready (no connection check needed for a local clear).
/// </summary>
public virtual async Task ClearHistory()
{
    await CheckCaller(checkConnection: false);
    llmAgent.ClearHistory();
}
194
/// <summary>
/// Appends a user message to the conversation history.
/// </summary>
/// <param name="content">The message text to add.</param>
public virtual async Task AddUserMessage(string content)
{
    // Ensure the caller object (and connection) is ready before touching history.
    await CheckCaller();
    llmAgent.AddUserMessage(content);
}
204
/// <summary>
/// Appends an assistant (AI) message to the conversation history.
/// </summary>
/// <param name="content">The message text to add.</param>
public virtual async Task AddAssistantMessage(string content)
{
    // Ensure the caller object (and connection) is ready before touching history.
    await CheckCaller();
    llmAgent.AddAssistantMessage(content);
}
214
215 #endregion
216
217 #region Chat Functionality
/// <summary>
/// JSON payload shape for a completion response.
/// Field names are the serialization contract and must not be renamed.
/// </summary>
public class CompletionResponseJson
{
    public string prompt;   // the prompt that was processed
    public string content;  // the generated completion text
}
// Processes a user query and generates an AI response using the conversation context.
// callback receives streamed partial output; completionCallback fires when done;
// addToHistory controls whether the exchange is persisted (and saved when `save` is set).
// NOTE(review): Doxygen source lines 242, 248 and 256 were lost in extraction — they
// presumably hold (242) the declaration of `wrappedCallback`, (248) the IL2CPP-branch
// assignment wrapping `mainThreadCallback`, and (256) the actual awaited call into
// llmAgent that produces `result`; confirm against the upstream repository before
// restoring this file. As visible here, `result` is never assigned after its
// initialization to "".
235 public virtual async Task<string> Chat(string query, Action<string> callback = null,
236 Action completionCallback = null, bool addToHistory = true)
237 {
238 await CheckCaller();
239 string result = "";
240 try
241 {
243 if (callback != null)
244 {
245#if ENABLE_IL2CPP
246 // For IL2CPP: wrap to IntPtr callback, then wrap for main thread
247 Action<string> mainThreadCallback = Utils.WrapActionForMainThread(callback, this);
249#else
250 // For Mono: direct callback wrapping
251 wrappedCallback = Utils.WrapCallbackForAsync(callback, this);
252#endif
253 }
254
255 SetCompletionParameters();
// `this == null` / `this != null` use Unity's overloaded null check: the component
// may have been destroyed while awaiting.
257 if (this == null) return null;
258 if (addToHistory && result != null && save != "") _ = SaveHistory();
259 if (this != null) completionCallback?.Invoke();
260 }
261 catch (Exception ex)
262 {
263 LLMUnitySetup.LogError(ex.Message, true);
264 }
265 return result;
266 }
267
275 {
277 }
278
// Warms up the model with a specific prompt without adding it to history, by running a
// Chat with numPredict forced to 0 (process the prompt, generate nothing).
// NOTE(review): Doxygen source lines 288 and 298 were lost in extraction — they presumably
// save the original numPredict before the try and restore it in the finally (the comments
// at 292 and 297 reference exactly that); confirm against the upstream repository before
// restoring this file.
286 public virtual async Task Warmup(string query, Action completionCallback = null)
287 {
289 try
290 {
291 // Set to generate no tokens for warmup
292 numPredict = 0;
293 await Chat(query, null, completionCallback, false);
294 }
295 finally
296 {
297 // Restore original setting
299 SetCompletionParameters();
300 }
301 }
302
303 #endregion
304
305 #region Persistence
/// <summary>
/// Saves the conversation history to the configured save file under
/// Application.persistentDataPath. Logs an error when no save filename is set
/// or when writing fails.
/// </summary>
public virtual async Task SaveHistory()
{
    if (string.IsNullOrEmpty(save))
    {
        LLMUnitySetup.LogError("No save path specified");
        return;
    }
    await CheckCaller();

    string jsonPath = GetSavePath();
    try
    {
        // CreateDirectory is a no-op when the directory already exists, so the
        // former Directory.Exists pre-check was redundant (and racy); creating it
        // inside the try means IO failures here are reported below instead of
        // escaping the method unhandled.
        Directory.CreateDirectory(Path.GetDirectoryName(jsonPath));
        llmAgent.SaveHistory(jsonPath);
        LLMUnitySetup.Log($"Saved chat history to: {jsonPath}");
    }
    catch (Exception ex)
    {
        LLMUnitySetup.LogError($"Failed to save chat history to '{jsonPath}': {ex.Message}", true);
    }
}
337
/// <summary>
/// Loads the conversation history from the configured save file.
/// Logs an error and returns when no save filename is set, when the file does
/// not exist, or when loading fails.
/// </summary>
public virtual async Task LoadHistory()
{
    if (string.IsNullOrEmpty(save))
    {
        LLMUnitySetup.LogError("No save path specified");
        return;
    }
    await CheckCaller();

    string jsonPath = GetSavePath();
    if (!File.Exists(jsonPath))
    {
        LLMUnitySetup.LogError($"Chat history file not found: {jsonPath}");
        // Bug fix: the original fell through and attempted to load the
        // nonexistent file anyway, producing a second failure log.
        return;
    }

    try
    {
        llmAgent.LoadHistory(jsonPath);
        LLMUnitySetup.Log($"Loaded chat history from: {jsonPath}");
    }
    catch (Exception ex)
    {
        LLMUnitySetup.LogError($"Failed to load chat history from '{jsonPath}': {ex.Message}", true);
    }
}
367
368 #endregion
369
370 #region Request Management
/// <summary>
/// Cancels any active requests for this agent.
/// Safe to call before initialization: a null agent is simply ignored.
/// </summary>
public void CancelRequests()
{
    llmAgent?.Cancel();
}
378
379 #endregion
380 }
381
/// <summary>
/// LLMUnity chat message, a thin wrapper over the LlamaLib message type.
/// </summary>
public class ChatMessage : UndreamAI.LlamaLib.ChatMessage
{
    /// <summary> Creates a message with the given role and content. </summary>
    public ChatMessage(string role, string content) : base(role, content) {}

    /// <summary> Copy-constructs from an existing LlamaLib message. </summary>
    public ChatMessage(UndreamAI.LlamaLib.ChatMessage other) : base(other.role, other.content) {}
}
387}
Unity MonoBehaviour that implements a conversational AI agent with persistent chat history....
Definition LLMAgent.cs:21
virtual async Task Warmup(Action completionCallback=null)
Warms up the model by processing the system prompt without generating output. This caches the system ...
Definition LLMAgent.cs:274
virtual async Task Warmup(string query, Action completionCallback=null)
Warms up the model with a specific prompt without adding it to history. This pre-processes prompts fo...
Definition LLMAgent.cs:286
string save
Filename for saving chat history (saved in persistentDataPath)
Definition LLMAgent.cs:25
List< ChatMessage > chat
Current conversation history as a list of chat messages.
Definition LLMAgent.cs:75
virtual async Task SaveHistory()
Saves the conversation history and optionally the LLM cache to disk.
Definition LLMAgent.cs:309
virtual async Task AddUserMessage(string content)
Adds a user message to the conversation history.
Definition LLMAgent.cs:199
override void Awake()
Unity Awake method that validates configuration and assigns local LLM if needed.
Definition LLMAgent.cs:100
bool debugPrompt
Debug LLM prompts.
Definition LLMAgent.cs:29
virtual async Task ClearHistory()
Clears the entire conversation history.
Definition LLMAgent.cs:189
string systemPrompt
System prompt defining the agent's behavior and personality.
Definition LLMAgent.cs:58
virtual string GetSavePath()
Gets the full path for a file in the persistent data directory.
Definition LLMAgent.cs:172
void CancelRequests()
Cancels any active requests for this agent.
Definition LLMAgent.cs:374
UndreamAI.LlamaLib.LLMAgent llmAgent
The underlying LLMAgent instance from LlamaLib.
Definition LLMAgent.cs:71
virtual async Task LoadHistory()
Loads conversation history and optionally the LLM cache from disk.
Definition LLMAgent.cs:341
virtual async Task< string > Chat(string query, Action< string > callback=null, Action completionCallback=null, bool addToHistory=true)
Processes a user query asynchronously and generates an AI response using conversation context....
Definition LLMAgent.cs:235
int slot
Server slot ID for this agent's requests.
Definition LLMAgent.cs:44
virtual async Task AddAssistantMessage(string content)
Adds an AI assistant message to the conversation history.
Definition LLMAgent.cs:209
Unity MonoBehaviour base class for LLM client functionality. Handles both local and remote LLM connec...
Definition LLMClient.cs:21
bool remote
Whether this client uses a remote server connection.
Definition LLMClient.cs:128
int numPredict
Maximum tokens to generate (-1 = unlimited)
Definition LLMClient.cs:58
LLM llm
The local LLM instance (null if using remote)
Definition LLMClient.cs:142
Class implementing helper functions for setup and process management.
Unity MonoBehaviour component that manages a local LLM server instance. Handles model loading,...
Definition LLM.cs:21
int parallelPrompts
Number of prompts that can be processed in parallel (-1 = auto-detect from clients)
Definition LLM.cs:132
void Register(LLMClient llmClient)
Registers an LLMClient for slot management.
Definition LLM.cs:634