feat: add experimental auto-download support. #692

Merged · 5 commits · May 22, 2024
1 change: 1 addition & 0 deletions LLama.Examples/LLama.Examples.csproj
@@ -29,6 +29,7 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LLama.Experimental\LLama.Experimental.csproj" />
<ProjectReference Include="..\LLama.KernelMemory\LLamaSharp.KernelMemory.csproj" />
<ProjectReference Include="..\LLama.SemanticKernel\LLamaSharp.SemanticKernel.csproj" />
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
@@ -0,0 +1,76 @@
using LLama.Experimental.Native;

namespace LLama.Native
{
#if NET6_0_OR_GREATER
public static class NativeLibraryAutoDownloadExtension
{
/// <summary>
/// Set whether to automatically download the best-matched native library file when no backend or explicitly specified file can be loaded.
/// A <see cref="NativeLibraryDownloadSettings"/> instance can be passed to customize the download behavior.
///
/// * If auto-download is enabled, please call <see cref="NativeLibraryConfig.DryRun"/> after you have finished setting up your configuration.
/// </summary>
/// <param name="config"></param>
/// <param name="enable"></param>
/// <param name="settings"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public static NativeLibraryConfig WithAutoDownload(this NativeLibraryConfig config, bool enable = true, NativeLibraryDownloadSettings? settings = null)
{
if (config.LibraryHasLoaded)
{
throw new Exception("The library has already loaded, you can't change the configurations. " +
"Please finish the configuration setting before any call to LLamaSharp native APIs." +
"Please use NativeLibraryConfig.DryRun if you want to see whether it's loaded successfully " +
"but still have chance to modify the configurations.");
}
if (enable)
{
if (settings is null)
{
settings = NativeLibraryDownloadSettings.Create();
}
// Don't modify the original settings object passed in by the user; create a new one instead.
// Also, set the default local directory if the user hasn't specified one.
if (string.IsNullOrEmpty(settings.Tag))
{
settings = settings.WithTag(GetNativeLibraryCommitHash());
}
var defaultLocalDir = NativeLibraryDownloadSettings.GetDefaultLocalDir(settings.Tag);
settings = settings.WithLocalDir(settings.LocalDir ?? defaultLocalDir);

// When using auto-download, this should be the only search directory.
List<string> searchDirectoriesForDownload = [settings.LocalDir!];
// unless extra search paths are added by the user.
searchDirectoriesForDownload.AddRange(settings.ExtraSearchDirectories ?? []);
config.WithSearchDirectories(searchDirectoriesForDownload);

config.WithSelectingPolicy(new SelectingPolicyWithAutoDownload(settings));
}
return config;
}

private const string COMMIT_HASH = "a743d7";

private static string GetNativeLibraryCommitHash() => COMMIT_HASH;

/// <summary>
/// Set whether to automatically download the best-matched native library file when no backend or explicitly specified file can be loaded.
/// A <see cref="NativeLibraryDownloadSettings"/> instance can be passed to customize the download behavior.
///
/// If auto-download is enabled, please call <see cref="NativeLibraryConfig.DryRun"/> after you have finished setting up your configuration.
/// </summary>
/// <param name="container"></param>
/// <param name="enable"></param>
/// <param name="settings"></param>
/// <returns></returns>
public static NativeLibraryConfigContainer WithAutoDownload(this NativeLibraryConfigContainer container,
bool enable = true, NativeLibraryDownloadSettings? settings = null)
{
container.ForEach((config) => config.WithAutoDownload(enable, settings));
return container;
}
}
#endif
}
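For reference, a typical consumer of these two extension methods would enable auto-download on the configuration and then perform a dry run before any native call. The snippet below is an illustrative sketch only and not part of this diff; NativeLibraryConfig.All and the DryRun overload are assumed from the doc comments above and may vary between LLamaSharp versions.

using LLama.Native;

// Illustrative usage sketch (assumed API surface, see note above).
NativeLibraryConfig.All
    .WithAutoDownload()        // enable auto-download with default settings
    .DryRun(out var loaded);   // resolve (and download if needed) before any native API call

Console.WriteLine($"Selected native library: {loaded?.Metadata}");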
42 changes: 42 additions & 0 deletions LLama.Experimental/LLama.Experimental.csproj
@@ -0,0 +1,42 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFrameworks>net6;net7;net8;netstandard2.0</TargetFrameworks>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<langversion>12</langversion>
<RootNamespace>LLama</RootNamespace>

<Version>0.12.0</Version>
<Authors>Rinne</Authors>
<Company>SciSharp STACK</Company>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
<RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
<PackageTags>LLama, LLM, GPT, ChatGPT, NLP, AI, Chat Bot, SciSharp</PackageTags>
<Description>
LLamaSharp.Experimental is a package with some experimental features and aggressive updates.
This package includes some important features in advance, but is less stable.
</Description>
<PackageReleaseNotes>
Support native library auto-download.
</PackageReleaseNotes>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageOutputPath>$(SolutionDir)/packages</PackageOutputPath>
<Platforms>AnyCPU;x64;Arm64</Platforms>
<PackageId>LLamaSharp.Experimental</PackageId>
<Configurations>Debug;Release;GPU</Configurations>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="HuggingfaceHub" Version="0.1.3" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>

</Project>
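For projects that consume the published NuGet package rather than adding a project reference (as LLama.Examples does above), the dependency would look roughly like the following; the version shown simply mirrors the Version property in this file and is illustrative.

<ItemGroup>
  <PackageReference Include="LLamaSharp.Experimental" Version="0.12.0" />
</ItemGroup>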
91 changes: 91 additions & 0 deletions LLama.Experimental/Native/AutoDownloadedLibraries.cs
@@ -0,0 +1,91 @@
using LLama.Abstractions;
using LLama.Native;

namespace LLama.Experimental.Native
{
#if NET6_0_OR_GREATER
public class AutoDownloadedLibraries
{
public class Cuda : INativeLibrary
{
private NativeLibraryWithCuda _cudaLibrary;
private NativeLibraryDownloadSettings _settings;

public Cuda(NativeLibraryWithCuda cudaLibrary, NativeLibraryDownloadSettings settings)
{
_cudaLibrary = cudaLibrary;
_settings = settings;
}

public NativeLibraryMetadata? Metadata => _cudaLibrary.Metadata;

public IEnumerable<string> Prepare(SystemInfo systemInfo, NativeLogConfig.LLamaLogCallback? logCallback = null)
{
foreach(var relativePath in _cudaLibrary.Prepare(systemInfo, logCallback))
{
yield return relativePath;
var path = NativeLibraryDownloader.DownloadLibraryFile(_settings, relativePath, logCallback).Result;
if (path is not null)
{
yield return path;
}
}
}
}

public class Avx : INativeLibrary
{
private NativeLibraryWithAvx _avxLibrary;
private NativeLibraryDownloadSettings _settings;

public Avx(NativeLibraryWithAvx avxLibrary, NativeLibraryDownloadSettings settings)
{
_avxLibrary = avxLibrary;
_settings = settings;
}

public NativeLibraryMetadata? Metadata => _avxLibrary.Metadata;

public IEnumerable<string> Prepare(SystemInfo systemInfo, NativeLogConfig.LLamaLogCallback? logCallback = null)
{
foreach (var relativePath in _avxLibrary.Prepare(systemInfo, logCallback))
{
yield return relativePath;
var path = NativeLibraryDownloader.DownloadLibraryFile(_settings, relativePath, logCallback).Result;
if (path is not null)
{
yield return path;
}
}
}
}

public class MacOrFallback : INativeLibrary
{
private NativeLibraryWithMacOrFallback _macLibrary;
private NativeLibraryDownloadSettings _settings;

public MacOrFallback(NativeLibraryWithMacOrFallback macLibrary, NativeLibraryDownloadSettings settings)
{
_macLibrary = macLibrary;
_settings = settings;
}

public NativeLibraryMetadata? Metadata => _macLibrary.Metadata;

public IEnumerable<string> Prepare(SystemInfo systemInfo, NativeLogConfig.LLamaLogCallback? logCallback = null)
{
foreach (var relativePath in _macLibrary.Prepare(systemInfo, logCallback))
{
yield return relativePath;
var path = NativeLibraryDownloader.DownloadLibraryFile(_settings, relativePath, logCallback).Result;
if (path is not null)
{
yield return path;
}
}
}
}
}
#endif
}
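The three wrapper classes above differ only in the concrete library type they hold; the download step inside Prepare is identical in each. As a design note, that shared logic could hypothetically be collapsed into a single decorator over INativeLibrary, sketched below; the class name AutoDownloadDecorator is not part of this PR and is illustrative only.

using LLama.Abstractions;
using LLama.Native;

namespace LLama.Experimental.Native
{
#if NET6_0_OR_GREATER
    // Hypothetical consolidation of the Cuda/Avx/MacOrFallback wrappers above.
    public class AutoDownloadDecorator : INativeLibrary
    {
        private readonly INativeLibrary _inner;
        private readonly NativeLibraryDownloadSettings _settings;

        public AutoDownloadDecorator(INativeLibrary inner, NativeLibraryDownloadSettings settings)
        {
            _inner = inner;
            _settings = settings;
        }

        public NativeLibraryMetadata? Metadata => _inner.Metadata;

        public IEnumerable<string> Prepare(SystemInfo systemInfo, NativeLogConfig.LLamaLogCallback? logCallback = null)
        {
            foreach (var relativePath in _inner.Prepare(systemInfo, logCallback))
            {
                // Offer the locally resolved path first, then fall back to a downloaded copy.
                yield return relativePath;
                var path = NativeLibraryDownloader.DownloadLibraryFile(_settings, relativePath, logCallback).Result;
                if (path is not null)
                    yield return path;
            }
        }
    }
#endif
}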