using System.Collections.Generic;
using System.Threading.Tasks;
using UndreamAI.LlamaLib;
13 [DefaultExecutionOrder(-1)]
22 #region Inspector Fields
24 [Tooltip(
"Filename for saving chat history (saved in persistentDataPath)")]
28 [Tooltip(
"Debug LLM prompts")]
32 [Tooltip(
"Strategy to apply when the conversation history exceeds the model's context window")]
36 [Tooltip(
"Target fraction of the context window to fill after truncation or summarization (0.0–1.0)")]
40 [Tooltip(
"Custom prompt used when asking the LLM to summarize history (leave empty for default)")]
44 [Tooltip(
"Server slot to use for processing (affects caching behavior)")]
45 [ModelAdvanced, SerializeField]
protected int _slot = -1;
48 [TextArea(5, 10),
Chat, SerializeField]
49 [Tooltip(
"System prompt that defines the AI's personality and behavior")]
50 protected string _systemPrompt =
"A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.";
53 #region Public Properties
63 if (
llmAgent !=
null) llmAgent.SlotId = _slot;
74 if (_systemPrompt != value)
76 _systemPrompt = value;
77 if (
llmAgent !=
null) llmAgent.SystemPrompt = _systemPrompt;
83 public UndreamAI.LlamaLib.LLMAgent
llmAgent {
get;
protected set; }
86 public List<ChatMessage>
chat
90 if (
llmAgent ==
null)
return new List<ChatMessage>();
102 var history = value?.Select(m => (UndreamAI.LlamaLib.ChatMessage)m).ToList()
103 ??
new List<UndreamAI.LlamaLib.ChatMessage>();
111 #region Unity Lifecycle and Initialization
118 protected override async Task SetupCallerObject()
120 await base.SetupCallerObject();
122 string exceptionMessage =
"";
129 exceptionMessage = ex.Message;
131 if (
llmAgent ==
null || exceptionMessage !=
"")
133 string error =
"LLMAgent not initialized";
134 if (exceptionMessage !=
"") error +=
", error: " + exceptionMessage;
135 LLMUnitySetup.LogError(error,
true);
142 protected override async Task PostSetupCallerObject()
144 await base.PostSetupCallerObject();
145 if (
slot != -1) llmAgent.SlotId =
slot;
154 protected override void OnValidate()
161 LLMUnitySetup.LogError(
$"Slot must be between 0 and {llm.parallelPrompts - 1}, or -1 for auto-assignment");
165 protected override LLMLocal GetCaller()
173 protected virtual async Task InitHistory()
184 #region File Path Management
202 #region Chat Management
238 return llmAgent?.GetSummary() ??
string.Empty;
251 #region Chat Functionality
256 public string prompt;
289 SetCompletionParameters();
291 if (
this ==
null)
return null;
333 SetCompletionParameters();
368 LLMUnitySetup.LogError(
$"Failed to save chat history to '{jsonPath}': {ex.Message}",
true);
388 LLMUnitySetup.LogError(
$"Chat history file not found: {jsonPath}");
398 LLMUnitySetup.LogError(
$"Failed to load chat history from '{jsonPath}': {ex.Message}",
true);
404 #region Request Management
418 public ChatMessage(
string role,
string content) : base(role, content) {}
419 public ChatMessage(UndreamAI.LlamaLib.ChatMessage other) : base(other.role, other.content) {}
Unity MonoBehaviour that implements a conversational AI agent with persistent chat history....
virtual async Task Warmup(Action completionCallback=null)
Warms up the model by processing the system prompt without generating output. This caches the system prompt.
virtual async Task Warmup(string query, Action completionCallback=null)
Warms up the model with a specific prompt without adding it to history. This pre-processes prompts fo...
string save
Filename for saving chat history (saved in persistentDataPath)
List< ChatMessage > chat
Current conversation history as a list of chat messages.
void SetSummary(string summary)
Overrides the rolling summary directly, e.g. to restore custom state.
virtual async Task SaveHistory()
Saves the conversation history and optionally the LLM cache to disk.
virtual async Task AddUserMessage(string content)
Adds a user message to the conversation history.
ContextOverflowStrategy overflowStrategy
Strategy to apply when the conversation history exceeds the model's context window.
override void Awake()
Unity Awake method that validates configuration and assigns local LLM if needed.
bool debugPrompt
Debug LLM prompts.
virtual async Task ClearHistory()
Clears the entire conversation history.
string overflowSummarizePrompt
Custom prompt used when asking the LLM to summarize history (leave empty for default)
string systemPrompt
System prompt defining the agent's behavior and personality.
virtual string GetSavePath()
Gets the full path for a file in the persistent data directory.
void CancelRequests()
Cancels any active requests for this agent.
UndreamAI.LlamaLib.LLMAgent llmAgent
The underlying LLMAgent instance from LlamaLib.
virtual async Task LoadHistory()
Loads conversation history and optionally the LLM cache from disk.
virtual async Task< string > Chat(string query, Action< string > callback=null, Action completionCallback=null, bool addToHistory=true)
Processes a user query asynchronously and generates an AI response using conversation context....
float overflowTargetRatio
Target fraction of the context window to fill after truncation or summarization (0.0–1.0).
int slot
Server slot ID for this agent's requests.
virtual async Task AddAssistantMessage(string content)
Adds an AI assistant message to the conversation history.
string GetSummary()
Returns the current rolling summary produced by the Summarize overflow strategy. Empty string if no summary exists.
Unity MonoBehaviour base class for LLM client functionality. Handles both local and remote LLM connections.
bool remote
Whether this client uses a remote server connection.
int numPredict
Maximum tokens to generate (-1 = unlimited)
LLM llm
The local LLM instance (null if using remote)
Class implementing helper functions for setup and process management.
Unity MonoBehaviour component that manages a local LLM server instance. Handles model loading,...
int parallelPrompts
Number of prompts that can be processed in parallel (-1 = auto-detect from clients)
void Register(LLMClient llmClient)
Registers an LLMClient for slot management.