diff --git a/.gitignore b/.gitignore
index d070475..cdc71e0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -340,3 +340,4 @@ ASALocalRun/
 /src/AntSK/AntSK.db
 /src/AntSK/appsettings.Development.json
 /src/AntSK.db
+/src/AntSK/llama_models
diff --git a/src/AntSK.Domain/Options/LLamaSharpOption.cs b/src/AntSK.Domain/Options/LLamaSharpOption.cs
index c268ec9..b8a26f7 100644
--- a/src/AntSK.Domain/Options/LLamaSharpOption.cs
+++ b/src/AntSK.Domain/Options/LLamaSharpOption.cs
@@ -6,5 +6,7 @@
         public static string Chat { get; set; }
         public static string Embedding { get; set; }
+
+        public static string FileDirectory { get; set; }
     }
 }
diff --git a/src/AntSK/AntSK.csproj b/src/AntSK/AntSK.csproj
index 9e6bbbf..be2276f 100644
--- a/src/AntSK/AntSK.csproj
+++ b/src/AntSK/AntSK.csproj
@@ -15,6 +15,7 @@
+
diff --git a/src/AntSK/Pages/Setting/AIModel/AddModel.razor b/src/AntSK/Pages/Setting/AIModel/AddModel.razor
index 17e5293..6347c6c 100644
--- a/src/AntSK/Pages/Setting/AIModel/AddModel.razor
+++ b/src/AntSK/Pages/Setting/AIModel/AddModel.razor
@@ -25,10 +25,10 @@
             会话模型
-            向量模型
-
-
-            @if (context.AIModelType == AIModelType.Embedding)
+            向量模型
+
+
+            @if (context.AIModelType == AIModelType.Embedding)
             {
                 请不要使用不同维度的向量模型,否则会导致无法向量存储
             }
@@ -74,13 +74,15 @@
         @if (context.AIType == AIType.LLamaSharp)
         {
-
+
+
+            从Haggingface下载
+
+
         }
         @if (context.AIType == AIType.Mock)
         {
-
-
         }
@@ -95,6 +97,27 @@
-@code {
+
+
-}
+
+    支持LLamaSharp的本地模型 支持gguf类型,推荐使用llama或者qwen
+    如果模型加载报内存错误,可能是和llama.cpp版本不一致
+    打开下载地址
+
+
+
+
+    @if (!_downloadStarted)
+    {
+        开始
+    }
+    else
+    {
+        停止
+    }
+
+
+
+
diff --git a/src/AntSK/Pages/Setting/AIModel/AddModel.razor.cs b/src/AntSK/Pages/Setting/AIModel/AddModel.razor.cs
index 4e2d2e5..57fd878 100644
--- a/src/AntSK/Pages/Setting/AIModel/AddModel.razor.cs
+++ b/src/AntSK/Pages/Setting/AIModel/AddModel.razor.cs
@@ -1,8 +1,11 @@
 using AntDesign;
 using AntDesign.ProLayout;
+using AntSK.Domain.Options;
 using AntSK.Domain.Repositories;
 using AntSK.Domain.Utils;
+using Downloader;
 using Microsoft.AspNetCore.Components;
+using System.ComponentModel;
 
 namespace AntSK.Pages.Setting.AIModel
 {
@@ -16,6 +19,17 @@ namespace AntSK.Pages.Setting.AIModel
 
         private AIModels _aiModel = new AIModels();
 
+        private string _downloadUrl;
+        private bool _downloadModalVisible;
+        private double _downloadProgress;
+        private bool _downloadFinished;
+        private bool _downloadStarted;
+        IDownload _download;
+
+        private Modal _modal;
+
+        string[] _modelFiles;
+
         IEnumerable _menuKeys;
 
         private List menuList = new List();
@@ -26,6 +40,8 @@ namespace AntSK.Pages.Setting.AIModel
             {
                 _aiModel = _aimodels_Repositories.GetFirst(p => p.Id == ModelId);
             }
+
+            _modelFiles = Directory.GetFiles(Path.Combine(Directory.GetCurrentDirectory(), LLamaSharpOption.FileDirectory));
         }
 
         private void HandleSubmit()
@@ -69,5 +85,68 @@
         {
             NavigationManager.NavigateTo("/setting/modellist");
         }
+
+        private async Task StartDownload()
+        {
+            if (string.IsNullOrWhiteSpace(_downloadUrl))
+            {
+                return;
+            }
+
+            _download = DownloadBuilder.New()
+                .WithUrl(_downloadUrl)
+                .WithDirectory(Path.Combine(Directory.GetCurrentDirectory(), LLamaSharpOption.FileDirectory))
+                .WithConfiguration(new DownloadConfiguration()
+                {
+                    ParallelCount = 5,
+                })
+                .Build();
+
+            _download.DownloadProgressChanged += DownloadProgressChanged;
+            _download.DownloadFileCompleted += DownloadFileCompleted;
+            _download.DownloadStarted += DownloadStarted;
+
+            await _download.StartAsync();
+
+            //download.Stop(); // cancel current download
+        }
+
+        private void DownloadProgressChanged(object? sender, DownloadProgressChangedEventArgs e)
+        {
+            _downloadProgress = e.ProgressPercentage;
+            InvokeAsync(StateHasChanged);
+        }
+
+        private void DownloadFileCompleted(object? sender, AsyncCompletedEventArgs e)
+        {
+            _downloadFinished = true;
+            _aiModel.ModelName = _download.Package.FileName;
+            _downloadModalVisible = false;
+            _downloadStarted = false;
+            _modelFiles = Directory.GetFiles(Path.Combine(Directory.GetCurrentDirectory(), LLamaSharpOption.FileDirectory));
+            InvokeAsync(StateHasChanged);
+        }
+
+        private void DownloadStarted(object? sender, DownloadStartedEventArgs e)
+        {
+            _downloadStarted = true;
+            InvokeAsync(StateHasChanged);
+        }
+
+        private void OnCancel()
+        {
+            if (_downloadStarted)
+            {
+                return;
+            }
+
+            _downloadModalVisible = false;
+        }
+
+        private void Stop()
+        {
+            _downloadStarted = false;
+            _download?.Stop();
+        }
     }
 }
diff --git a/src/AntSK/appsettings.json b/src/AntSK/appsettings.json
index da01d6d..d55e33c 100644
--- a/src/AntSK/appsettings.json
+++ b/src/AntSK/appsettings.json
@@ -33,9 +33,10 @@
     "TableNamePrefix": "km-"
   },
   "LLamaSharp": {
-    "RunType": "GPU",
+    "RunType": "GPU",
     "Chat": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
-    "Embedding": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf"
+    "Embedding": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
+    "FileDirectory": "./llama_models"
   },
   "Login": {
     "User": "admin",
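Note: the AntSK.csproj hunk above lost its XML content; given the new "using Downloader;" and the DownloadBuilder calls, the added line is presumably a PackageReference to the Downloader NuGet package (exact version not recoverable from this diff). The new "FileDirectory" key under "LLamaSharp" in appsettings.json backs the static LLamaSharpOption.FileDirectory property that the download code uses as its target directory. A minimal sketch of how that section might be copied into the static option class at startup, assuming a plain ConfigurationBuilder; the actual wiring in AntSK is not part of this diff and may differ:

    using AntSK.Domain.Options;
    using Microsoft.Extensions.Configuration;

    // Assumption: the "LLamaSharp" section of appsettings.json is copied into
    // the static LLamaSharpOption properties during application startup.
    var config = new ConfigurationBuilder()
        .AddJsonFile("appsettings.json", optional: false)
        .Build();

    var llama = config.GetSection("LLamaSharp");
    LLamaSharpOption.Chat = llama["Chat"];
    LLamaSharpOption.Embedding = llama["Embedding"];
    LLamaSharpOption.FileDirectory = llama["FileDirectory"]; // new key, "./llama_models" in this change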
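The hint text in the new modal (gguf models only, llama or qwen recommended; a memory error on load may indicate a llama.cpp version mismatch) concerns loading the downloaded file with LLamaSharp. A rough illustration of consuming a model from LLamaSharpOption.FileDirectory using LLamaSharp's LLamaWeights/ModelParams API; the file name is taken from appsettings.json and the parameter values are placeholders, since the actual loading code is not part of this diff:

    using AntSK.Domain.Options;
    using LLama;
    using LLama.Common;

    // Illustration only: load a gguf model from the download directory.
    var modelPath = Path.Combine(
        Directory.GetCurrentDirectory(),
        LLamaSharpOption.FileDirectory,
        "qwen1_5-1_8b-chat-q8_0.gguf");

    var parameters = new ModelParams(modelPath)
    {
        ContextSize = 2048,   // placeholder value
        GpuLayerCount = 20    // only meaningful when RunType is "GPU"
    };

    using var weights = LLamaWeights.LoadFromFile(parameters);
    using var context = weights.CreateContext(parameters);
    var executor = new InteractiveExecutor(context);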