fix directory name casing issue

This commit is contained in:
zyxucp
2024-08-05 12:52:59 +08:00
parent 531b4473e8
commit e084317a46
23 changed files with 0 additions and 1344 deletions

View File

@@ -1,52 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SemanticKernel" Version="$(SKVersion)" />
<PackageReference Include="Newtonsoft.Json" Version="$(NewtonsoftVersion)" />
<PackageReference Include="RestSharp" Version="$(RestSharpVersion)" />
<PackageReference Include="Cnblogs.KernelMemory.AI.DashScope" Version="0.3.0" />
<PackageReference Include="Cnblogs.SemanticKernel.Connectors.DashScope" Version="0.3.2" />
<PackageReference Include="Sdcb.SparkDesk" Version="3.0.0" />
<PackageReference Include="System.Drawing.Common" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<None Update="OllamaModelList.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\CPU\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\CPU\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda11\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda11\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda12\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda12\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\ROCm\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\ROCm\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusionModelList.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -1,17 +0,0 @@
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.LLM.LLamaFactory
{
public class LLMFactoryOptions
{
public string ModelPath { get; set; }
public string ModelName { get; set; }
}
}

View File

@@ -1,48 +0,0 @@
using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextGeneration;
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using System.Text.Json;
using System.Text.Unicode;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using System.Runtime.Intrinsics.Arm;
using RestSharp;
using System.Text.Json.Nodes;
using Newtonsoft.Json;
namespace AntSK.LLM.LLamaFactory
{
public class LLMFactoryTextCompletion : ITextGenerationService, IAIService
{
private readonly Dictionary<string, object?> _attributes = new();
private string _chatId;
private readonly LLMFactoryOptions _options;
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public LLMFactoryTextCompletion(LLMFactoryOptions options, string chatId)
{
_options = options;
_chatId = chatId;
}
public Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
throw new NotImplementedException();
}
public IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
throw new NotImplementedException();
}
}
}

View File

@@ -1,55 +0,0 @@
using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel;
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using System.Text.Json;
using System.Text.Unicode;
using System.Threading.Tasks;
namespace AntSK.LLM.Mock
{
public class MockChatCompletion : IChatCompletionService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public MockChatCompletion()
{
}
public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{chatHistory.LastOrDefault().ToString()}";
return [new(AuthorRole.Assistant, result.ToString())];
}
public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{chatHistory.LastOrDefault().ToString()}";
foreach (var c in result)
{
yield return new StreamingChatMessageContent(AuthorRole.Assistant, c.ToString());
}
}
}
}
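
For reference, a minimal sketch of how the mock service above could be wired into a Semantic Kernel instance for offline chat testing. The builder setup below is an assumption about typical SK 1.x usage, not part of this commit or AntSK's actual registration code.

using AntSK.LLM.Mock;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

// Register the mock as the kernel's chat completion service.
var builder = Kernel.CreateBuilder();
builder.Services.AddSingleton<IChatCompletionService>(new MockChatCompletion());
var kernel = builder.Build();

// Send one message and print the echoed mock reply.
var chat = kernel.GetRequiredService<IChatCompletionService>();
var history = new ChatHistory();
history.AddUserMessage("hello");
var replies = await chat.GetChatMessageContentsAsync(history);
Console.WriteLine(replies[0].Content);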

View File

@@ -1,56 +0,0 @@
using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextGeneration;
using Sdcb.SparkDesk;
using System;
using System.ComponentModel;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.Unicode;
namespace AntSK.LLM.Mock
{
public class MockTextCompletion : ITextGenerationService, IAIService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public MockTextCompletion()
{
}
public async Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{prompt}";
return [new(result.ToString())];
}
public async IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{prompt}";
foreach (var c in result)
{
var streamingTextContent = new StreamingTextContent(c.ToString(), modelId: "mock");
yield return streamingTextContent;
}
}
}
}

View File

@@ -1,105 +0,0 @@
gemma2
gemma2:27b
gemma:2b
gemma:7b
llama3
llama3:70b
yi:6b
yi:9b
yi:34b
qwen2:0.5b
qwen2:1.5b
qwen2:7b
qwen2:72b
qwen:0.5b
qwen:1.8b
qwen:4b
qwen:7b
qwen:14b
qwen:32b
qwen:72b
qwen:110b
deepseek-coder:1.3b
deepseek-coder:6.7b
deepseek-coder:33b
deepseek-coder-v2:16b
deepseek-coder-v2:236b
phi:2.7b
phi3:mini
phi3:medium
phi3:medium-128k
aya:8b
aya:35b
mistral:7b
mixtral:8x22b
mixtral:8x7b
codegemma:2b
codegemma:7b
command-r:35b
llava
gemma:2b
gemma:7b
llama2:7b
llama2:13b
llama2:70b
llama2-chinese:7b
llama2-chinese:13b
llama3.1:8b
llama3.1:70b
llama3.1:405b
codellama:7b
codellama:13b
codellama:34b
codellama:70b
dolphin-mistral:7b
dolphin-mixtral:8x22b
dolphin-mixtral:8x7b
llama2-uncensored:7b
llama2-uncensored:70b
tinyllama:1.1b
openchat:7b
orca-mini:3b
orca-mini:7b
orca-mini:13b
orca-mini:70b
mistral-openorca:7b
dolphin-llama3:8b
dolphin-llama3:70b
starcoder:1b
starcoder:3b
starcoder:7b
starcoder:15b
starcoder2:3b
starcoder2:7b
starcoder2:15b
zephyr:7b
zephyr:141b
nous-hermes2:10.7b
nous-hermes2:34b
vicuna:7b
vicuna:13b
vicuna:33b
wizard-vicuna-uncensored:7b
wizard-vicuna-uncensored:13b
wizard-vicuna-uncensored:30b
wizardlm2:7b
codestral:22b
tinydolphin:1.1b
openhermes:v2.5
neural-chat:7b
codeqwen:7b
phind-codellama:34b
nous-hermes:7b
nous-hermes:13b
starling-lm:7b
llama3-gradient:8b
llama3-gradient:70b
yarn-llama2:7b
yarn-llama2:13b
llava-llama3:8b
llama-pro:instruct
everythinglm:13b
llava-phi3:3.8b
mistrallite:7b
notus:7b

View File

@@ -1,231 +0,0 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel;
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using System.Text.Json;
using System.Text.Unicode;
using System.Threading.Tasks;
namespace AntSK.LLM.SparkDesk
{
public class SparkDeskChatCompletion : IChatCompletionService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public SparkDeskChatCompletion(SparkDeskOptions options, string chatId)
{
_options = options;
_chatId = chatId;
_client = new(options.AppId, options.ApiKey, options.ApiSecret);
}
public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
List<ChatMessage> messages = GetSparkMessage(chatHistory);
var result = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken);
if (result.FunctionCall != null)
{
var func = functions.Where(x => x.Name == result.FunctionCall.Name).FirstOrDefault();
if (func == null)
{
return new List<ChatMessageContent> { new(AuthorRole.Assistant, $"插件{result.FunctionCall.Name}未注册") }.AsReadOnly();
}
if (kernel.Plugins.TryGetFunction(func.PluginName, func.Name, out var function))
{
var arguments = new KernelArguments();
var JsonElement = JsonDocument.Parse(result.FunctionCall.Arguments).RootElement;
foreach (var parameter in func.Parameters)
{
var error = "";
try
{
if (JsonElement.TryGetProperty(parameter.Name, out var property))
{
arguments.Add(parameter.Name, property.Deserialize(parameter.ParameterType!, _jsonSerializerOptions));
}
}
catch (Exception ex)
{
error = $"参数{parameter.Name}解析错误:{ex.Message}";
}
if (!string.IsNullOrEmpty(error))
{
return new List<ChatMessageContent> { new(AuthorRole.Assistant, error) }.AsReadOnly();
}
}
var functionResult = await function.InvokeAsync(kernel, arguments, cancellationToken);
messages = [ ChatMessage.FromUser(messages.LastOrDefault().Content),
ChatMessage.FromSystem($@"
执行函数调用成功
函数描述:{func.Description}
函数执行结果:{functionResult}
"),
ChatMessage.FromUser("请根据函数调用结果回答我的问题,不要超出函数调用结果的返回,以及不要有多余描述:")];
var callResult = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, null);
ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, callResult.Text.ToString(), modelId: "SparkDesk");
return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
}
return new List<ChatMessageContent> { new(AuthorRole.Assistant, "未找到插件") }.AsReadOnly();
}
else
{
ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, result.Text.ToString(), modelId: "SparkDesk");
return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
}
}
public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
List<ChatMessage> messages = GetSparkMessage(chatHistory);
await foreach (StreamedChatResponse msg in _client.ChatAsStreamAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken))
{
yield return new StreamingChatMessageContent(AuthorRole.Assistant, msg);
};
}
private static List<ChatMessage> GetSparkMessage(ChatHistory chatHistory)
{
List<ChatMessage> messages = new List<ChatMessage>();
foreach (var msg in chatHistory.ToList())
{
string role = "";
if (msg.Role == AuthorRole.User)
{
role = "user";
}
else if (msg.Role == AuthorRole.System)
{
role = "system";
}
else
{
role = "assistant";
}
messages.Add(new ChatMessage(role, msg.ToString()));
}
return messages;
}
private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior)
{
if (functionResult is string stringResult)
{
return stringResult;
}
if (functionResult is ChatMessageContent chatMessageContent)
{
return chatMessageContent.ToString();
}
return JsonSerializer.Serialize(functionResult, _jsonSerializerOptions);
}
public static Dictionary<string, object> ParseJsonElement(JsonElement element, string propertyName)
{
Dictionary<string, object> dict = new();
switch (element.ValueKind)
{
case JsonValueKind.Object:
foreach (JsonProperty property in element.EnumerateObject())
{
dict.Add(property.Name, ParseJsonElement(property.Value, property.Name));
}
break;
case JsonValueKind.Array:
List<object> list = new List<object>();
foreach (JsonElement arrayElement in element.EnumerateArray())
{
list.Add(ParseJsonElement(arrayElement, ""));
}
dict.Add(propertyName, list);
break;
case JsonValueKind.String:
dict.Add(propertyName, element.GetString());
break;
case JsonValueKind.Number:
dict.Add(propertyName, element.GetInt32());
break;
case JsonValueKind.True:
case JsonValueKind.False:
dict.Add(propertyName, element.GetBoolean());
break;
default:
dict.Add(propertyName, "Unsupported value type");
break;
}
return dict;
}
}
}

View File

@@ -1,20 +0,0 @@
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.LLM.SparkDesk
{
public class SparkDeskOptions
{
public string AppId { get; set; }
public string ApiKey { get; set; }
public string ApiSecret { get; set; }
public ModelVersion ModelVersion { get; set; }
}
}
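
A small sketch (not part of this commit) showing how these options might be filled in and handed to the SparkDesk connector; the credential values are placeholders and ModelVersion is left at its default here.

using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel.ChatCompletion;

var options = new SparkDeskOptions
{
    AppId = "your-app-id",          // placeholder
    ApiKey = "your-api-key",        // placeholder
    ApiSecret = "your-api-secret"   // placeholder
    // ModelVersion left at its default; set it to the desired Spark model version.
};
IChatCompletionService chat = new SparkDeskChatCompletion(options, chatId: Guid.NewGuid().ToString());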

View File

@@ -1,212 +0,0 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextGeneration;
using Sdcb.SparkDesk;
using System;
using System.ComponentModel;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.Unicode;
namespace AntSK.LLM.SparkDesk
{
public class SparkDeskTextCompletion : ITextGenerationService, IAIService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public SparkDeskTextCompletion(SparkDeskOptions options, string chatId)
{
_options = options;
_chatId = chatId;
_client = new(options.AppId, options.ApiKey, options.ApiSecret);
}
public async Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
await foreach (StreamedChatResponse msg in _client.ChatAsStreamAsync(_options.ModelVersion, GetHistories(prompt), parameters))
{
sb.Append(msg);
};
return [new(sb.ToString())];
}
public IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
//var messages = GetHistories(prompt);
var messages = new ChatMessage[] { new ChatMessage("user", prompt) };
return GetStreamingMessageAsync(messages, parameters, functionDefs, cancellationToken);
async IAsyncEnumerable<StreamingTextContent> GetStreamingMessageAsync(ChatMessage[] messages, ChatRequestParameters parameters, List<FunctionDef> functionDefs, CancellationToken cancellationToken)
{
await foreach (StreamedChatResponse msg in _client.ChatAsStreamAsync(_options.ModelVersion, messages, parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken))
{
if (msg.FunctionCall != null)
{
var func = functions.Where(x => x.Name == msg.FunctionCall.Name).FirstOrDefault();
if (func == null)
{
yield return new($"插件{msg.FunctionCall.Name}未注册");
yield break;
}
if (kernel.Plugins.TryGetFunction(func.PluginName, func.Name, out var function))
{
var arguments = new KernelArguments();
var JsonElement = JsonDocument.Parse(msg.FunctionCall.Arguments).RootElement;
foreach (var parameter in func.Parameters)
{
var error = "";
try
{
if (JsonElement.TryGetProperty(parameter.Name, out var property))
{
arguments.Add(parameter.Name, property.Deserialize(parameter.ParameterType!, _jsonSerializerOptions));
}
}
catch (Exception ex)
{
error = $"参数{parameter.Name}解析错误:{ex.Message}";
}
if (!string.IsNullOrEmpty(error))
{
yield return new(error);
yield break;
}
}
var result = (await function.InvokeAsync(kernel, arguments, cancellationToken)).GetValue<object>() ?? string.Empty;
var stringResult = ProcessFunctionResult(result, chatExecutionSettings.ToolCallBehavior);
messages = [ChatMessage.FromSystem($"""
用户意图{func.Description}结果是:{stringResult}
请结合用户的提问回复:
"""), ChatMessage.FromUser(prompt)];
functionDefs.Clear();
await foreach (var content in GetStreamingMessageAsync(messages, parameters, functionDefs, cancellationToken))
{
yield return content;
}
}
}
else
{
yield return new(msg);
}
};
}
}
private ChatMessage[] GetHistories(string prompt)
{
var histories = prompt.Replace("history", "")
.Split("\r\n")
.Select(m => m.Split(":", 2))
.Where(m => m.Length == 2)
.Select(pair => new ChatMessage(pair[0].Trim() == "user" ? "user" : "assistant", pair[1])).ToArray();
return histories;
}
private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior)
{
if (functionResult is string stringResult)
{
return stringResult;
}
if (functionResult is ChatMessageContent chatMessageContent)
{
return chatMessageContent.ToString();
}
return JsonSerializer.Serialize(functionResult, _jsonSerializerOptions);
}
public static Dictionary<string, object> ParseJsonElement(JsonElement element, string propertyName)
{
Dictionary<string, object> dict = new();
switch (element.ValueKind)
{
case JsonValueKind.Object:
foreach (JsonProperty property in element.EnumerateObject())
{
dict.Add(property.Name, ParseJsonElement(property.Value, property.Name));
}
break;
case JsonValueKind.Array:
List<object> list = new List<object>();
foreach (JsonElement arrayElement in element.EnumerateArray())
{
list.Add(ParseJsonElement(arrayElement, ""));
}
dict.Add(propertyName, list);
break;
case JsonValueKind.String:
dict.Add(propertyName, element.GetString());
break;
case JsonValueKind.Number:
dict.Add(propertyName, element.GetInt32());
break;
case JsonValueKind.True:
case JsonValueKind.False:
dict.Add(propertyName, element.GetBoolean());
break;
default:
dict.Add(propertyName, "Unsupported value type");
break;
}
return dict;
}
}
}
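
Illustrative only: the flattened history format that GetHistories appears to expect, inferred from the parsing above (the leading "history" marker is stripped, then each line is split into a role and content on the first colon). This is an assumption, not documented behavior.

var prompt = "history\r\n" +
             "user: What's the weather like tomorrow?\r\n" +
             "assistant: Could you tell me which city you're in?\r\n" +
             "user: Shanghai";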

View File

@@ -1,108 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using int32_t = Int32;
using int64_t = Int64;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
internal class Native
{
const string DllName = "stable-diffusion";
internal delegate void SdLogCallback(SdLogLevel level, [MarshalAs(UnmanagedType.LPStr)] string text, IntPtr data);
internal delegate void SdProgressCallback(int step, int steps, float time, IntPtr data);
[DllImport(DllName, EntryPoint = "new_sd_ctx", CallingConvention = CallingConvention.Cdecl)]
internal extern static SdContext new_sd_ctx(string model_path,
string vae_path,
string taesd_path,
string control_net_path_c_str,
string lora_model_dir,
string embed_dir_c_str,
string stacked_id_embed_dir_c_str,
bool vae_decode_only,
bool vae_tiling,
bool free_params_immediately,
int n_threads,
WeightType weightType,
RngType rng_type,
ScheduleType s,
bool keep_clip_on_cpu,
bool keep_control_net_cpu,
bool keep_vae_on_cpu);
[DllImport(DllName, EntryPoint = "txt2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr txt2img(SdContext sd_ctx,
string prompt,
string negative_prompt,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
int64_t seed,
int batch_count,
SDImagePtr control_cond,
float control_strength,
float style_strength,
bool normalize_input,
string input_id_images_path);
[DllImport(DllName, EntryPoint = "img2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr img2img(SdContext sd_ctx,
SDImage init_image,
string prompt_c_str,
string negative_prompt_c_str,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
float strength,
int64_t seed,
int batch_count);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern IntPtr preprocess_canny(IntPtr imgData,
int width,
int height,
float high_threshold,
float low_threshold,
float weak,
float strong,
bool inverse);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern UpscalerContext new_upscaler_ctx(string esrgan_path,
int n_threads,
WeightType wtype);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern int32_t get_num_physical_cores();
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_sd_ctx(SdContext sd_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_upscaler_ctx(UpscalerContext upscaler_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImage upscale(UpscalerContext upscaler_ctx, SDImage input_image, int upscale_factor);
[DllImport(DllName, EntryPoint = "sd_set_log_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_log_callback(SdLogCallback cb, IntPtr data);
[DllImport(DllName, EntryPoint = "sd_set_progress_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_progress_callback(SdProgressCallback cb, IntPtr data);
}
}
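
As context for the backend folders copied by the project file (CPU, Cuda11, Cuda12, ROCm), here is a sketch of one possible way to steer the "stable-diffusion" DllImport above toward a specific folder. The resolver approach, class name, and folder choice are assumptions for illustration, not AntSK's actual loading logic.

using System;
using System.IO;
using System.Runtime.InteropServices;

namespace AntSK.LLM.StableDiffusion
{
    internal static class NativeLoader
    {
        // Call once, before any Native/SDHelper call, to pick a backend folder.
        public static void UseBackend(string backend) // e.g. "CPU", "Cuda11", "Cuda12", "ROCm"
        {
            NativeLibrary.SetDllImportResolver(typeof(Native).Assembly, (name, _, _) =>
            {
                if (name != "stable-diffusion")
                    return IntPtr.Zero; // defer to default probing for other libraries

                string file = OperatingSystem.IsWindows() ? "stable-diffusion.dll" : "stable-diffusion.so";
                string path = Path.Combine(AppContext.BaseDirectory, "StableDiffusion", "Backend", backend, file);
                return NativeLibrary.Load(path);
            });
        }
    }
}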

View File

@@ -1,234 +0,0 @@
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
public static class SDHelper
{
public static bool IsInitialized => SdContext.Zero != sd_ctx;
public static bool IsUpscalerInitialized => UpscalerContext.Zero != upscaler_ctx;
private static SdContext sd_ctx = new SdContext();
private static UpscalerContext upscaler_ctx = new UpscalerContext();
public static event EventHandler<StableDiffusionEventArgs.StableDiffusionLogEventArgs> Log;
public static event EventHandler<StableDiffusionEventArgs.StableDiffusionProgressEventArgs> Progress;
static readonly Native.SdLogCallback sd_Log_Cb;
static readonly Native.SdProgressCallback sd_Progress_Cb;
//The static constructor below is commented out so that the native log/progress output still shows in the console.
//static SDHelper()
//{
// sd_Log_Cb = new Native.SdLogCallback(OnNativeLog);
// Native.sd_set_log_callback(sd_Log_Cb, IntPtr.Zero);
// sd_Progress_Cb = new Native.SdProgressCallback(OnProgressRunning);
// Native.sd_set_progress_callback(sd_Progress_Cb, IntPtr.Zero);
//}
public static bool Initialize(ModelParams modelParams)
{
sd_ctx = Native.new_sd_ctx(modelParams.ModelPath,
modelParams.VaePath,
modelParams.TaesdPath,
modelParams.ControlnetPath,
modelParams.LoraModelDir,
modelParams.EmbeddingsPath,
modelParams.StackedIdEmbeddingsPath,
modelParams.VaeDecodeOnly,
modelParams.VaeTiling,
modelParams.FreeParamsImmediately,
modelParams.Threads,
modelParams.SdType,
modelParams.RngType,
modelParams.Schedule,
modelParams.KeepClipOnCpu,
modelParams.KeepControlNetOnCpu,
modelParams.KeepVaeOnCpu);
return SdContext.Zero != sd_ctx;
}
public static bool InitializeUpscaler(UpscalerParams @params)
{
upscaler_ctx = Native.new_upscaler_ctx(@params.ESRGANPath, @params.Threads, @params.SdType);
return UpscalerContext.Zero != upscaler_ctx;
}
public static void FreeSD()
{
if (SdContext.Zero != sd_ctx)
{
Native.free_sd_ctx(sd_ctx);
sd_ctx = SdContext.Zero;
}
}
public static void FreeUpscaler()
{
if (UpscalerContext.Zero != upscaler_ctx)
{
Native.free_upscaler_ctx(upscaler_ctx);
upscaler_ctx = UpscalerContext.Zero;
}
}
public static Bitmap[] TextToImage(TextToImageParams textToImageParams)
{
if (!IsInitialized) throw new InvalidOperationException("Model not loaded!");
IntPtr cnPtr = IntPtr.Zero;
if (textToImageParams.ControlCond != null)
{
if (textToImageParams.ControlCond.Width > 1)
{
SDImage cnImg = GetSDImageFromBitmap(textToImageParams.ControlCond);
cnPtr = GetPtrFromImage(cnImg);
}
}
SDImagePtr sd_Image_ptr = Native.txt2img(sd_ctx,
textToImageParams.Prompt,
textToImageParams.NegativePrompt,
textToImageParams.ClipSkip,
textToImageParams.CfgScale,
textToImageParams.Width,
textToImageParams.Height,
textToImageParams.SampleMethod,
textToImageParams.SampleSteps,
textToImageParams.Seed,
textToImageParams.BatchCount,
cnPtr,
textToImageParams.ControlStrength,
textToImageParams.StyleStrength,
textToImageParams.NormalizeInput,
textToImageParams.InputIdImagesPath);
Bitmap[] images = new Bitmap[textToImageParams.BatchCount];
for (int i = 0; i < textToImageParams.BatchCount; i++)
{
SDImage sd_image = Marshal.PtrToStructure<SDImage>(sd_Image_ptr + i * Marshal.SizeOf<SDImage>());
images[i] = GetBitmapFromSdImage(sd_image);
}
return images;
}
public static Bitmap ImageToImage(ImageToImageParams imageToImageParams)
{
if (!IsInitialized) throw new InvalidOperationException("Model not loaded!");
SDImage input_sd_image = GetSDImageFromBitmap(imageToImageParams.InputImage);
SDImagePtr sdImgPtr = Native.img2img(sd_ctx,
input_sd_image,
imageToImageParams.Prompt,
imageToImageParams.NegativePrompt,
imageToImageParams.ClipSkip,
imageToImageParams.CfgScale,
imageToImageParams.Width,
imageToImageParams.Height,
imageToImageParams.SampleMethod,
imageToImageParams.SampleSteps,
imageToImageParams.Strength,
imageToImageParams.Seed,
imageToImageParams.BatchCount);
SDImage sdImg = Marshal.PtrToStructure<SDImage>(sdImgPtr);
return GetBitmapFromSdImage(sdImg);
}
public static Bitmap UpscaleImage(Bitmap image, int upscaleFactor)
{
if (!IsUpscalerInitialized) throw new InvalidOperationException("Upscaler not loaded!");
SDImage inputSDImg = GetSDImageFromBitmap(image);
SDImage result = Native.upscale(upscaler_ctx, inputSDImg, upscaleFactor);
return GetBitmapFromSdImage(result);
}
private static Bitmap GetBitmapFromSdImage(SDImage sd_Image)
{
int width = (int)sd_Image.Width;
int height = (int)sd_Image.Height;
int channel = (int)sd_Image.Channel;
byte[] bytes = new byte[width * height * channel];
Marshal.Copy(sd_Image.Data, bytes, 0, bytes.Length);
Bitmap bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb);
int stride = bmp.Width * channel;
byte[] des = new byte[bytes.Length];
for (int i = 0; i < height; i++)
{
for (int j = 0; j < width; j++)
{
des[stride * i + channel * j + 0] = bytes[stride * i + channel * j + 2];
des[stride * i + channel * j + 1] = bytes[stride * i + channel * j + 1];
des[stride * i + channel * j + 2] = bytes[stride * i + channel * j + 0];
}
}
BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, bmp.PixelFormat);
Marshal.Copy(des, 0, bitmapData.Scan0, bytes.Length);
bmp.UnlockBits(bitmapData);
return bmp;
}
private static SDImage GetSDImageFromBitmap(Bitmap bmp)
{
int width = bmp.Width;
int height = bmp.Height;
int channel = Bitmap.GetPixelFormatSize(bmp.PixelFormat) / 8;
int stride = width * channel;
byte[] bytes = new byte[width * height * channel];
BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, bmp.PixelFormat);
Marshal.Copy(bitmapData.Scan0, bytes, 0, bytes.Length);
bmp.UnlockBits(bitmapData);
byte[] sdImageBytes = new byte[bytes.Length];
for (int i = 0; i < height; i++)
{
for (int j = 0; j < width; j++)
{
sdImageBytes[stride * i + j * 3 + 0] = bytes[stride * i + j * 3 + 2];
sdImageBytes[stride * i + j * 3 + 1] = bytes[stride * i + j * 3 + 1];
sdImageBytes[stride * i + j * 3 + 2] = bytes[stride * i + j * 3 + 0];
}
}
SDImage sd_Image = new SDImage
{
Width = (uint)width,
Height = (uint)height,
Channel = 3,
Data = Marshal.UnsafeAddrOfPinnedArrayElement(sdImageBytes, 0),
};
return sd_Image;
}
private static IntPtr GetPtrFromImage(SDImage sdImg)
{
IntPtr imgPtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(SDImage)));
Marshal.StructureToPtr(sdImg, imgPtr, false);
return imgPtr;
}
private static void OnNativeLog(SdLogLevel level, string text, IntPtr data)
{
Log?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionLogEventArgs { Level = level, Text = text });
}
private static void OnProgressRunning(int step, int steps, float time, IntPtr data)
{
Progress?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionProgressEventArgs { Step = step, Steps = steps, Time = time });
}
}
}
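
A minimal usage sketch for the helper above (not part of this commit): load a model, render one image, and free the native context. The model path and prompts are placeholders.

using System.Drawing;
using System.Drawing.Imaging;
using AntSK.LLM.StableDiffusion;
using static AntSK.LLM.StableDiffusion.Structs;

// Load the model once; Initialize returns false if the native context could not be created.
if (!SDHelper.Initialize(new ModelParams { ModelPath = @"models\sd-v1-5.safetensors" })) // placeholder path
    throw new InvalidOperationException("Failed to load the Stable Diffusion model.");

Bitmap[] images = SDHelper.TextToImage(new TextToImageParams
{
    Prompt = "a lighthouse at sunset, oil painting",
    NegativePrompt = "blurry, low quality",
    Width = 512,
    Height = 512,
    SampleSteps = 20
});
images[0].Save("output.png", ImageFormat.Png);
SDHelper.FreeSD();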

View File

@@ -1,33 +0,0 @@
using System;
using static AntSK.LLM.StableDiffusion.Structs;
namespace AntSK.LLM.StableDiffusion
{
public class StableDiffusionEventArgs
{
public class StableDiffusionProgressEventArgs : EventArgs
{
#region Properties & Fields
public int Step { get; set; }
public int Steps { get; set; }
public float Time { get; set; }
public IntPtr Data { get; set; }
public double Progress => (double)Step / Steps;
public float IterationsPerSecond => 1.0f / Time;
#endregion
}
public class StableDiffusionLogEventArgs : EventArgs
{
#region Properties & Fields
public SdLogLevel Level { get; set; }
public string Text { get; set; }
#endregion
}
}
}

View File

@@ -1,13 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.LLM.StableDiffusion
{
public static class StableDiffusionService
{
}
}

View File

@@ -1,154 +0,0 @@
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using int64_t = Int64;
using uint32_t = UInt32;
public class Structs
{
public class ModelParams
{
public string ModelPath = string.Empty;
public string VaePath = string.Empty;
public string TaesdPath = string.Empty;
public string ControlnetPath = string.Empty;
public string LoraModelDir = string.Empty;
public string EmbeddingsPath = string.Empty;
public string StackedIdEmbeddingsPath = string.Empty;
public bool VaeDecodeOnly = false;
public bool VaeTiling = true;
public bool FreeParamsImmediately = false;
public int Threads = Native.get_num_physical_cores();
public WeightType SdType = WeightType.SD_TYPE_COUNT;
public RngType RngType = RngType.CUDA_RNG;
public ScheduleType Schedule = ScheduleType.DEFAULT;
public bool KeepClipOnCpu = false;
public bool KeepControlNetOnCpu = false;
public bool KeepVaeOnCpu = false;
}
public class TextToImageParams
{
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = 0;
public float CfgScale = 7;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
public int64_t Seed = -1;
public int BatchCount = 1;
public Bitmap ControlCond = new Bitmap(1, 1);
public float ControlStrength = 0.9f;
public float StyleStrength = 0.75f;
public bool NormalizeInput = false;
public string InputIdImagesPath = string.Empty;
}
public class ImageToImageParams
{
public Bitmap InputImage;
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = -1;
public float CfgScale = 7.0f;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
public float Strength = 0.75f;
public int64_t Seed = 42;
public int BatchCount = 1;
}
public class UpscalerParams
{
public string ESRGANPath = string.Empty;
public int Threads = Native.get_num_physical_cores();
public WeightType SdType = WeightType.SD_TYPE_COUNT;
}
[StructLayout(LayoutKind.Sequential)]
internal struct SDImage
{
public uint32_t Width;
public uint32_t Height;
public uint32_t Channel;
public IntPtr Data;
}
public enum WeightType
{
SD_TYPE_F32 = 0,
SD_TYPE_F16 = 1,
SD_TYPE_Q4_0 = 2,
SD_TYPE_Q4_1 = 3,
// SD_TYPE_Q4_2 = 4, support has been removed
// SD_TYPE_Q4_3 (5) support has been removed
SD_TYPE_Q5_0 = 6,
SD_TYPE_Q5_1 = 7,
SD_TYPE_Q8_0 = 8,
SD_TYPE_Q8_1 = 9,
// k-quantizations
SD_TYPE_Q2_K = 10,
SD_TYPE_Q3_K = 11,
SD_TYPE_Q4_K = 12,
SD_TYPE_Q5_K = 13,
SD_TYPE_Q6_K = 14,
SD_TYPE_Q8_K = 15,
SD_TYPE_IQ2_XXS = 16,
SD_TYPE_IQ2_XS = 17,
SD_TYPE_IQ3_XXS = 18,
SD_TYPE_IQ1_S = 19,
SD_TYPE_IQ4_NL = 20,
SD_TYPE_IQ3_S = 21,
SD_TYPE_IQ2_S = 22,
SD_TYPE_IQ4_XS = 23,
SD_TYPE_I8,
SD_TYPE_I16,
SD_TYPE_I32,
SD_TYPE_COUNT,
};
public enum RngType
{
STD_DEFAULT_RNG,
CUDA_RNG
};
public enum ScheduleType
{
DEFAULT,
DISCRETE,
KARRAS,
N_SCHEDULES
};
public enum SampleMethod
{
EULER_A,
EULER,
HEUN,
DPM2,
DPMPP2S_A,
DPMPP2M,
DPMPP2Mv2,
LCM,
N_SAMPLE_METHODS
};
public enum SdLogLevel
{
Debug,
Info,
Warn,
Error
}
}
}

View File

@@ -1,6 +0,0 @@
AsAHuman/chilloutmix
GraMpa7/dreamsharper
Airic/Anything-V4.5
liqira/anythingv3
wind1/MoYou
Reuploadingfromcivitai/DosMix