UseNativeDebugger #346

Open · wants to merge 2 commits into main
73 changes: 64 additions & 9 deletions Runtime/LLM.cs
@@ -36,6 +36,9 @@ public class LLM : MonoBehaviour
/// <summary> log the output of the LLM in the Unity Editor. </summary>
[Tooltip("log the output of the LLM in the Unity Editor.")]
[LLM] public bool debug = false;
/// <summary> Wait for a native debugger to connect to the backend. </summary>
[Tooltip("Wait for a native debugger to connect to the backend.")]
[LLMAdvanced] public bool UseNativeDebugger = false;
/// <summary> number of prompts that can happen in parallel (-1 = number of LLMCaller objects) </summary>
[Tooltip("number of prompts that can happen in parallel (-1 = number of LLMCaller objects)")]
[LLMAdvanced] public int parallelPrompts = -1;
@@ -53,6 +56,8 @@ public class LLM : MonoBehaviour
public bool started { get; protected set; } = false;
/// <summary> Boolean set to true if the server has failed to start. </summary>
public bool failed { get; protected set; } = false;
/// <summary> Boolean set to true if the server has been destroyed. </summary>
public bool destroyed { get; protected set; } = false;
/// <summary> Boolean set to true if the models were not downloaded successfully. </summary>
public static bool modelSetupFailed { get; protected set; } = false;
/// <summary> Boolean set to true if the server has started and is ready to receive requests, false otherwise. </summary>
@@ -127,6 +132,13 @@ void OnValidate()
public async void Awake()
{
if (!enabled) return;
Load();
}

public async Awaitable Load()
{
await Awaitable.BackgroundThreadAsync();

#if !UNITY_EDITOR
modelSetupFailed = !await LLMManager.Setup();
#endif
@@ -142,9 +154,13 @@ public async void Awake()
failed = true;
return;
}
await Task.Run(() => StartLLMServer(arguments));
await StartLLMServerAsync(arguments);
if (!started) return;
if (dontDestroyOnLoad) DontDestroyOnLoad(transform.root.gameObject);
if (dontDestroyOnLoad)
{
await Awaitable.MainThreadAsync();
DontDestroyOnLoad(transform.root.gameObject);
}
}

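For readers unfamiliar with Unity's Awaitable API (Unity 6+): the new Load() path hops to a background thread for the blocking server startup, then hops back to the main thread before calling DontDestroyOnLoad, which is main-thread-only. A minimal sketch of that hop pattern in isolation (class and method names here are illustrative, not part of this PR):

using UnityEngine;

public class ThreadHopExample : MonoBehaviour
{
    async void Start()
    {
        await Awaitable.BackgroundThreadAsync(); // continue off the main thread
        string result = ExpensiveBlockingWork(); // safe: no Unity API calls here
        await Awaitable.MainThreadAsync();       // hop back to the main thread
        gameObject.name = result;                // Unity APIs are allowed again
    }

    static string ExpensiveBlockingWork() => "loaded";
}
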
/// <summary>
@@ -476,10 +492,11 @@ private void StopLogging()
DestroyStreamWrapper(logStreamWrapper);
}

private void StartLLMServer(string arguments)
private async Task StartLLMServerAsync(string arguments)
{
started = false;
failed = false;
destroyed = false;
bool useGPU = numGPULayers > 0;

foreach (string arch in LLMLib.PossibleArchitectures(useGPU))
@@ -488,6 +505,19 @@ private void StartLLMServer(string arguments)
try
{
InitLib(arch);
#if UNITY_EDITOR
if (UseNativeDebugger)
{
if (llmlib?.LLM_IsDebuggerAttached == null)
{
LLMUnitySetup.Log($"Tried architecture: {arch} is not a debug library");
Destroy();
continue;
}

await WaitNativeDebug();
}
#endif
InitService(arguments);
LLMUnitySetup.Log($"Using architecture: {arch}");
break;
@@ -504,6 +534,7 @@
catch (Exception e)
{
error = $"{e.GetType()}: {e.Message}";
Destroy();
}
LLMUnitySetup.Log($"Tried architecture: {arch}, error: " + error);
}
@@ -514,20 +545,39 @@
return;
}
CallWithLock(StartService);
LLMUnitySetup.Log("LLM service created");
if (started)
LLMUnitySetup.Log("LLM service created");
}

private void InitLib(string arch)
{
llmlib = new LLMLib(arch);
CheckLLMStatus(false);
}

#if UNITY_EDITOR
private async Task WaitNativeDebug()
{
if (llmlib?.LLM_IsDebuggerAttached != null)
{
LLMUnitySetup.Log("waiting for native debugger to attach");
while (!destroyed)
{
if (llmlib.LLM_IsDebuggerAttached())
{
LLMUnitySetup.Log("remote debugger attached");
break;
}
await Task.Delay(100);
}
}
}
#endif

void CallWithLock(EmptyCallback fn)
{
lock (startLock)
{
if (llmlib == null) throw new DestroyException();
if (llmlib == null || destroyed) throw new DestroyException();
fn();
}
}
@@ -556,9 +606,12 @@ private void StartService()
{
llmThread = new Thread(() => llmlib.LLM_Start(LLMObject));
llmThread.Start();
while (!llmlib.LLM_Started(LLMObject)) {}
ApplyLoras();
started = true;
while (!llmlib.LLM_Started(LLMObject) && !destroyed) { }
if (!destroyed)
{
ApplyLoras();
started = true;
}
}

/// <summary>
@@ -611,6 +664,7 @@ void AssertStarted()
string error = null;
if (failed) error = "LLM service couldn't be created";
else if (!started) error = "LLM service not started";
else if (destroyed) error = "LLM service is being destroyed";
if (error != null)
{
LLMUnitySetup.LogError(error);
@@ -807,6 +861,7 @@ public void CancelRequest(int id_slot)
/// </summary>
public void Destroy()
{
destroyed = true;
lock (staticLock)
lock (startLock)
{
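Taken together, a possible editor workflow for the new flag (a hedged sketch — it assumes the LLM component starts disabled so that Awake() skips the automatic Load(), and that a native debugger is attached manually once the wait log appears; the bootstrap class itself is illustrative):

using UnityEngine;

public class NativeDebugBootstrap : MonoBehaviour
{
    public LLM llm; // assign an LLM component that is disabled in the inspector

    async void Start()
    {
#if UNITY_EDITOR
        llm.UseNativeDebugger = true; // block startup until a debugger attaches
#endif
        // Load() polls LLM_IsDebuggerAttached() every 100 ms (see WaitNativeDebug
        // above) before initializing the service, so attach lldb/gdb/Visual Studio
        // to the Unity Editor process while this await is pending.
        await llm.Load();
        Debug.Log(llm.started ? "LLM service started" : "LLM failed to start");
    }
}
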
2 changes: 2 additions & 0 deletions Runtime/LLMCaller.cs
@@ -238,6 +238,8 @@ protected virtual async Task<Ret> PostRequestLocal<Res, Ret>(string json, string
// send a post request to the server and call the relevant callbacks to convert the received content and handle it
// this function has streaming functionality i.e. handles the answer while it is being received
while (!llm.failed && !llm.started) await Task.Yield();
if (llm.destroyed)
return default;
string callResult = null;
switch (endpoint)
{
2 changes: 2 additions & 0 deletions Runtime/LLMCharacter.cs
@@ -685,6 +685,8 @@ protected override async Task<Ret> PostRequestLocal<Res, Ret>(string json, string
if (endpoint != "completion") return await base.PostRequestLocal(json, endpoint, getContent, callback);

while (!llm.failed && !llm.started) await Task.Yield();
if (llm.destroyed)
return default;

string callResult = null;
bool callbackCalled = false;
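Both PostRequestLocal overrides now bail out with default (null for reference types) when the LLM is destroyed while a request is still waiting for startup, so callers should tolerate a null result. A hedged sketch (assuming an llmCharacter field of type LLMCharacter; Chat is part of the library's existing API, and the null handling is the illustrative part):

string reply = await llmCharacter.Chat("Hello there");
if (reply == null)
{
    // The LLM was destroyed (e.g. on scene unload) before the request
    // could run; skip any further handling instead of throwing.
    return;
}
Debug.Log(reply);
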
12 changes: 12 additions & 0 deletions Runtime/LLMLib.cs
@@ -497,6 +497,12 @@ public LLMLib(string arch)
StringWrapper_GetString = LibraryLoader.GetSymbolDelegate<StringWrapper_GetStringDelegate>(libraryHandle, "StringWrapper_GetString");
Logging = LibraryLoader.GetSymbolDelegate<LoggingDelegate>(libraryHandle, "Logging");
StopLogging = LibraryLoader.GetSymbolDelegate<StopLoggingDelegate>(libraryHandle, "StopLogging");

// editor only
#if UNITY_EDITOR
var symbol = LibraryLoader.GetSymbol(libraryHandle, "LLM_IsDebuggerAttached");
LLM_IsDebuggerAttached = (symbol != IntPtr.Zero) ? Marshal.GetDelegateForFunctionPointer<LLM_IsDebuggerAttachedDelegate>(symbol) : null;
#endif
}

/// <summary>
@@ -606,6 +612,9 @@ public static string GetArchitecturePath(string arch)
public delegate void StringWrapper_DeleteDelegate(IntPtr instance);
public delegate int StringWrapper_GetStringSizeDelegate(IntPtr instance);
public delegate void StringWrapper_GetStringDelegate(IntPtr instance, IntPtr buffer, int bufferSize, bool clear = false);
#if UNITY_EDITOR
public delegate bool LLM_IsDebuggerAttachedDelegate();
#endif

public LoggingDelegate Logging;
public StopLoggingDelegate StopLogging;
Expand All @@ -631,6 +640,9 @@ public static string GetArchitecturePath(string arch)
public StringWrapper_DeleteDelegate StringWrapper_Delete;
public StringWrapper_GetStringSizeDelegate StringWrapper_GetStringSize;
public StringWrapper_GetStringDelegate StringWrapper_GetString;
#if UNITY_EDITOR
public LLM_IsDebuggerAttachedDelegate LLM_IsDebuggerAttached;
#endif

#endif

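The optional-symbol lookup above generalizes to any export that only some library builds provide. A minimal helper sketch (TryBind is hypothetical; LibraryLoader.GetSymbol and the delegate type come from this file):

// Hypothetical helper: bind a native export if it exists, else return null
// so callers can feature-detect at runtime rather than fail on load.
static T TryBind<T>(IntPtr libraryHandle, string name) where T : Delegate
{
    IntPtr symbol = LibraryLoader.GetSymbol(libraryHandle, name);
    return symbol == IntPtr.Zero ? null : Marshal.GetDelegateForFunctionPointer<T>(symbol);
}

// Usage mirroring the constructor above:
// LLM_IsDebuggerAttached = TryBind<LLM_IsDebuggerAttachedDelegate>(libraryHandle, "LLM_IsDebuggerAttached");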