Compare commits

...

119 Commits
0.2.3 ... 0.2.5

Author SHA1 Message Date
zyxucp
74406d88a0 Merge pull request #58 from AIDotNet/feature_StableDiffusion
fix 修改为静态类
2024-04-01 23:57:12 +08:00
zeyu xu
e5f9d97560 fix 修改为静态类 2024-04-01 23:56:44 +08:00
zyxucp
59e768aaea Merge pull request #57 from AIDotNet/feature_StableDiffusion
Feature stable diffusion
2024-04-01 23:39:22 +08:00
zeyu xu
6a7cb24a5b add sd 2024-04-01 23:08:53 +08:00
zeyu xu
1db40d534c add apptype 2024-04-01 22:14:18 +08:00
zeyu xu
11d6e30f7e add sd function 2024-04-01 22:03:00 +08:00
zeyu xu
9d5214aaae add sdmodel 2024-04-01 21:57:18 +08:00
zeyu xu
010b906271 add sd 2024-04-01 21:35:51 +08:00
zeyu xu
16bf944edf add sd 2024-04-01 21:31:15 +08:00
zeyu xu
5bae5a099a margin 2024-04-01 21:01:29 +08:00
zyxucp
f771ea9521 Merge branch 'main' of https://github.com/AIDotNet/AntSK 2024-04-01 13:54:53 +08:00
zyxucp
994efbf37c update nuget 2024-04-01 13:54:20 +08:00
zyxucp
938cd86c88 Update README.md 2024-03-31 13:24:21 +08:00
zeyu xu
1339cbadbc fix 修改menukey 2024-03-31 13:07:30 +08:00
zeyu xu
bd0ad570ad add 增加使用文档 2024-03-31 13:07:08 +08:00
zeyu xu
234e649a7e fix 优化部分内容 2024-03-31 12:38:17 +08:00
zyxucp
c431dbc842 Update README.md 2024-03-31 00:28:16 +08:00
zyxucp
76283060d9 Update docker-compose.simple.yml 2024-03-30 23:28:52 +08:00
zyxucp
75ba506db4 Update docker-compose.yml 2024-03-30 23:28:33 +08:00
zyxucp
e086ca60df Merge pull request #56 from AIDotNet/feature_chatview
add chathistory to localstorage
2024-03-30 22:40:06 +08:00
zeyu xu
04acaa9b12 add chathistory to localstorage 2024-03-30 22:39:26 +08:00
zyxucp
7a824bf18c Merge pull request #55 from AIDotNet/feature_chatview
fix 处理文件上传会话 不配embedding模型则隐藏
2024-03-30 22:26:55 +08:00
zeyu xu
769de2e526 fix 处理文件上传会话 不配embedding模型则隐藏 2024-03-30 22:23:01 +08:00
zyxucp
c9950609c9 Merge pull request #54 from AIDotNet/feature_chatview
fix 删除不要元素
2024-03-30 21:58:08 +08:00
zeyu xu
ccee6cfea5 fix 删除不要元素 2024-03-30 21:57:38 +08:00
zyxucp
f626c618be Merge pull request #53 from AIDotNet/feature_chatview
Feature chatview
2024-03-30 21:53:01 +08:00
zeyu xu
3b601a9e3d fix 调整样式 2024-03-30 21:48:29 +08:00
zeyu xu
3d5f63d595 add chatview 2024-03-30 21:45:37 +08:00
zeyu xu
6933f2f495 add openchat file 2024-03-30 20:39:02 +08:00
zeyu xu
79c7e8626a add chatview 2024-03-30 20:28:40 +08:00
zyxucp
4a017d311c Update README.md 2024-03-30 20:03:57 +08:00
zeyu xu
0c8ad5fe8d add loadding 2024-03-30 19:50:29 +08:00
zeyu xu
68ce0db011 fix 样式修改 2024-03-30 17:35:40 +08:00
zeyu xu
c36de1a1e9 add 选项控制 2024-03-30 17:25:58 +08:00
zeyu xu
44ef759abd fix 修改控件 2024-03-30 14:47:29 +08:00
longdream
0c3d9844be Merge pull request #52 from longdream/main
bge embedding模型添加,bge用的CPU。
2024-03-29 21:51:35 +08:00
longdream
854c62a4ca 合并 2024-03-29 21:50:17 +08:00
longdream
5ed4fd5299 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-29 20:00:53 +08:00
longdream
af5ec43571 修改设置界面 2024-03-29 20:00:49 +08:00
zyxucp
24d685879e Update README.md 2024-03-29 18:43:10 +08:00
zyxucp
e801a2ec46 Update README.md 2024-03-29 18:42:18 +08:00
junlong
d7b56d1590 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-29 15:34:08 +08:00
longdream
b925f8890b 修改token长度 2024-03-28 23:06:21 +08:00
longdream
5d80ee994a 解决线程冲突问题 2024-03-28 19:04:11 +08:00
zeyu xu
da8f955ca2 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-28 18:34:59 +08:00
zeyu xu
2e04582c5e fix prompt 2024-03-28 18:34:46 +08:00
zyxucp
e69994f727 Merge pull request #50 from ElderJames/fix/sparkdesk-func-call
fix function calling result for sparkdesk
2024-03-28 11:14:52 +08:00
James Yeung
d8dc26127d fix function calling result for sparkdesk 2024-03-28 10:49:50 +08:00
longdream
f73bd2dfda 增减embedding 2024-03-27 22:53:45 +08:00
zyxucp
9f08b60348 Update README.md 2024-03-27 20:25:36 +08:00
zeyu xu
75c2f36b30 update nuget 2024-03-27 19:27:59 +08:00
zyxucp
39c02a6064 Update README.en.md 2024-03-27 19:20:21 +08:00
zyxucp
52c119befd Update README.md 2024-03-27 19:19:12 +08:00
zyxucp
23903ded3f Update README.en.md 2024-03-27 19:11:46 +08:00
zyxucp
4799fbac72 Update README.md 2024-03-27 19:11:02 +08:00
zyxucp
16a7d55271 Merge pull request #49 from AIDotNet/feature_vctordb
add AzureAISearch
2024-03-27 19:10:27 +08:00
zeyu xu
be0bafcc50 add AzureAISearch 2024-03-27 19:09:42 +08:00
zyxucp
defc51a074 Update README.md 2024-03-27 18:46:51 +08:00
zyxucp
09709c210d Merge pull request #48 from AIDotNet/feature_vctordb
add RedisMemoryDb
2024-03-27 18:44:59 +08:00
zeyu xu
8ebb2f54eb add RedisMemoryDb 2024-03-27 18:44:26 +08:00
zyxucp
b831aab115 Update README.md 2024-03-27 17:49:27 +08:00
zyxucp
8e322162cc Update README.md 2024-03-27 17:48:40 +08:00
zyxucp
6a8a6509b8 Merge pull request #47 from AIDotNet/feature_vctordb
add QdrantMemoryDb
2024-03-27 17:42:12 +08:00
zeyu xu
707dff09f8 add QdrantMemoryDb 2024-03-27 17:41:15 +08:00
zyxucp
17c8fca40f Merge pull request #45 from duyanming/main
文字纠正
2024-03-27 15:49:29 +08:00
zeyu xu
415f9757e9 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-27 15:40:57 +08:00
zeyu xu
27394f0699 fix 修改bearer 示例错误 2024-03-27 15:40:47 +08:00
duyanming
8a9ca40bb6 文字纠正 2024-03-27 08:09:23 +08:00
longdream
f340ee1088 embedding封装 2024-03-26 23:14:55 +08:00
zyxucp
080eb5765e Update README.md 2024-03-26 21:24:26 +08:00
zeyu xu
36c8ff184a Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-26 21:22:26 +08:00
zeyu xu
0486f67b50 add gzh 2024-03-26 21:21:57 +08:00
zyxucp
aa7e8d545c Update README.md 2024-03-26 21:07:02 +08:00
zyxucp
59f6a899a6 Update README.en.md 2024-03-26 21:06:28 +08:00
zyxucp
0fc98d42aa Update README.md 2024-03-26 21:04:21 +08:00
longdream
edad2644aa 删除没必要的py文件 2024-03-26 20:48:49 +08:00
longdream
8a56a0393a Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-26 20:48:07 +08:00
zyxucp
f4cbf9a40a Update README.md 2024-03-26 00:03:18 +08:00
zyxucp
fb5b92f499 Update README.en.md 2024-03-26 00:02:59 +08:00
zeyu xu
c286258f2b del 删除LLamaSharp早起http版本 2024-03-25 23:04:17 +08:00
zeyu xu
4416651589 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-25 22:29:16 +08:00
zeyu xu
48a33e8977 fix 修复格式变更 2024-03-25 22:29:04 +08:00
junlong
bd5ca06d8f test 2024-03-25 16:55:41 +08:00
junlong
e0985ecec3 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-25 16:48:21 +08:00
junlong
e56b74d4af 删除chat以外的文件 2024-03-25 16:48:11 +08:00
zyxucp
c417098c2c Merge pull request #44 from ElderJames/fix/sparkdesk-func-issue
fix: sparkdesk function call definition conversion
2024-03-25 14:15:03 +08:00
zyxucp
93527215a7 Merge branch 'main' of https://github.com/AIDotNet/AntSK 2024-03-25 13:43:57 +08:00
James Yeung
0cf3945693 fix: sparkdesk function call definition conversion 2024-03-25 13:16:10 +08:00
zyxucp
ced2a9b2e2 fix 调整llamafactory加载顺序 2024-03-25 12:03:09 +08:00
zyxucp
987b231c4d Update README.md 2024-03-25 11:16:26 +08:00
zyxucp
7a541c1da1 Update README.md 2024-03-25 11:14:43 +08:00
zyxucp
74e323158d fix 修改变量名规范 2024-03-24 23:46:14 +08:00
zyxucp
563a7409f6 Merge pull request #42 from AIDotNet/feature_chathistory
Feature chathistory
2024-03-24 23:44:20 +08:00
zyxucp
b13b93e04e Merge branch 'feature_chathistory' of https://github.com/AIDotNet/AntSK into feature_chathistory 2024-03-24 23:43:34 +08:00
zyxucp
44568c8d65 fix 修改OpenAIService 历史对话 2024-03-24 23:43:07 +08:00
zyxucp
fb277dff80 Merge pull request #41 from AIDotNet/feature_chathistory
Feature chathistory
2024-03-24 23:12:45 +08:00
zeyu xu
efae890650 fix 调整kms提示词 2024-03-24 23:08:32 +08:00
zyxucp
a146f6059e fix 调整历史记录会话 2024-03-24 22:51:20 +08:00
zyxucp
3c67096cd8 Update README.md 2024-03-24 19:56:13 +08:00
zeyu xu
a993a60f95 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-24 13:47:51 +08:00
zeyu xu
d3fdc77600 add 增加文档 2024-03-24 13:47:38 +08:00
zyxucp
b62c56e36f Update README.md 2024-03-24 13:05:18 +08:00
zeyu xu
7d72911239 fix 修改gpu默认分层为20 2024-03-24 12:23:27 +08:00
zeyu xu
9e24d7cc67 add Authors 2024-03-24 12:07:31 +08:00
zeyu xu
9baa24b496 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-03-24 12:06:02 +08:00
zeyu xu
da826525f7 add py环境docker file 2024-03-24 12:05:50 +08:00
zyxucp
62dfab41fd Update README.en.md 2024-03-24 12:03:08 +08:00
zeyu xu
04fc811c2c fix 修改首页样式和github链接样式 2024-03-24 12:02:05 +08:00
zeyu xu
8638ecbe29 fix 修改描述一致 2024-03-24 10:55:30 +08:00
zeyu xu
6f1f93fbaf update 更新AntDesign.ProLayout、SemanticKernel、KernelMemory 版本 2024-03-24 10:52:44 +08:00
zyxucp
dc38d83f89 Update README.md 2024-03-23 23:14:20 +08:00
zeyu xu
fd780780c5 update docker-compose.yml 2024-03-23 22:39:51 +08:00
zyxucp
6fd918f33b Update README.md 2024-03-23 21:48:17 +08:00
zyxucp
8fcfa8974b Update README.md 2024-03-23 16:49:15 +08:00
zyxucp
7e23c32c6c Update README.md 2024-03-23 16:48:24 +08:00
zyxucp
7fdfceeea5 Update docker-compose.yml 2024-03-23 13:01:03 +08:00
longdream
849b18f677 Merge branch 'AIDotNet:main' into main 2024-03-22 19:36:20 +08:00
junlong
344128e49d Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-21 19:38:03 +08:00
junlong
56fc9dd517 test 2024-03-21 19:37:56 +08:00
90 changed files with 2015 additions and 1311 deletions

28
Dockerfile-py Normal file
View File

@@ -0,0 +1,28 @@
# 1. Define the Python image to use for getting pip
FROM pytorch/pytorch AS python-base
# 2. Define the .NET SDK image to build your application
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /src
COPY ["src/AntSK/AntSK.csproj", "AntSK/"]
RUN dotnet restore "AntSK/AntSK.csproj"
COPY src/ .
WORKDIR "/src/AntSK"
RUN dotnet build "AntSK.csproj" -c Release -o /app/build
RUN dotnet publish "AntSK.csproj" -c Release -o /app/publish
# 3. Define the final image that will contain both .NET runtime and Python
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS final
# Copy the Python/pip installation from the official Python image
COPY --from=python-base /usr/local /usr/local
COPY --from=python-base /opt/conda/ /opt/conda/
WORKDIR /app
COPY --from=build /app/publish .
# Make sure the app and Python directories are in PATH
ENV PATH="/app:/opt/conda/bin:/usr/local/bin:${PATH}"
RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
RUN echo 'Asia/Shanghai' >/etc/timezone
RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
ENTRYPOINT ["dotnet", "AntSK.dll"]

View File

@@ -1,152 +1,89 @@
[简体中文](./README.md) | English
# AntSK
## AI Knowledge Base/Intelligent Agent built on .Net8+AntBlazor+SemanticKernel
## An AI knowledge base/intelligent agent built with .Net 8+AntBlazor+SemanticKernel
## ⭐Core Features
- **Semantic Kernel**: Utilizes advanced natural language processing technology to accurately understand, process, and respond to complex semantic queries, providing users with precise information retrieval and recommendation services.
- **Kernel Memory**: Capable of continuous learning and storing knowledge points, AntSK has long-term memory function, accumulates experience, and provides a more personalized interaction experience.
## Core Features
- **Knowledge Base**: Import knowledge base through documents (Word, PDF, Excel, Txt, Markdown, Json, PPT) and perform knowledge base Q&A.
- **GPT Generation**: This platform supports creating personalized GPT models, enabling users to build their own GPT models.
- **API Interface Publishing**: Exposes internal functions in the form of APIs, enabling developers to integrate AntSK into other applications and enhance application intelligence.
- **Semantic Kernel**: Utilizes advanced natural language processing technologies to accurately understand, process, and respond to complex semantic queries, providing users with precise information retrieval and recommendation services.
- **API Plugin System**: Open API plugin system that allows third-party developers or service providers to easily integrate their services into AntSK, continuously enhancing application functionality.
- **.Net Plugin System**: Open dll plugin system that allows third-party developers or service providers to easily integrate their business functions by generating dll in standard format code, continuously enhancing application functionality.
- **Kernel Memory**: Capable of continuous learning and knowledge storage, AntSK has a long-term memory function, accumulating experience to offer more personalized interaction experiences.
- **Online Search**: AntSK, real-time access to the latest information, ensuring users receive the most timely and relevant data.
- **Model Management**: Adapts and manages integration of different models from different manufacturers, including gguf types supported by **llama.cpp** and models offline running supported by **llamafactory**.
- **Knowledge base**: Import knowledge into the database through documents (Word, PDF, Excel, Txt, Markdown, Json, PPT) and manage knowledge base documents.
- **GPTs Generation**: The platform supports the creation of personalized GPT models, try building your own GPT model.
- **API Interface Release**: Internal functions are provided as APIs for developers to integrate AntSK into other applications, enhancing application intelligence.
- **API Plugin System**: An open API plugin system allows third-party developers or service providers to easily integrate their services into AntSK, continuously enhancing application functions.
- **.Net Plugin System**: An open dll plugin system allows third-party developers or service providers to integrate their business functions into AntSK by generating dlls with the standard format codes, continuously enhancing application functions.
- **Internet Search**: AntSK can retrieve the latest information in real-time, ensuring that the information users receive is always timely and relevant.
- **Model management**: Adapts and manages different models from various manufacturers. It also supports offline running of models in 'gguf' format supported by llama.cpp.
- **National Information Creation**: AntSK supports domestic models and databases, and can operate under information creation conditions.
## Application scenarios
- **Domestic Innovation**: AntSK supports domestic models and databases and can run under domestic innovation conditions.
- **Model Fine-Tuning**: Planned based on llamafactory for model fine-tuning.
## ⛪Application Scenarios
AntSK is suitable for various business scenarios, such as:
- Corporate knowledge management systems
- Automated customer service and chatbots
- Enterprise Search Engine
- Enterprise knowledge management system
- Automatic customer service and chatbots
- Enterprise search engine
- Personalized recommendation system
- Intelligent writing assistance
- Education and online learning platforms
- Other interesting AI Apps
- Intelligent assisted writing
- Education and online learning platform
- Other interesting AI applications
## Function example
First, you need to create a knowledge base
![Knowledge base](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%9F%A5%E8%AF%86%E5%BA%93.png)
In the knowledge base, you can use documents or urls to import
![Knowledge base details](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%9F%A5%E8%AF%86%E5%BA%93%E8%AF%A6%E6%83%85.png)
Click View to view the document slicing of the knowledge base
![Document Slice](https://github.com/xuzeyu91/AntSK/blob/main/images/%E6%96%87%E6%A1%A3%E5%88%87%E7%89%87.png)
Then we need to create applications, which can create dialog applications and knowledge bases.
![Application](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%BA%94%E7%94%A8.png)
The application of knowledge base needs to select the existing knowledge base, which can be multiple
![Application Configuration](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%BA%94%E7%94%A8%E9%85%8D%E7%BD%AE.png)
Then you can ask questions about the knowledge base documents in the dialogue
![Q&A](https://github.com/xuzeyu91/AntSK/blob/main/images/%E9%97%AE%E7%AD%94.png)
In addition, we can also create dialogue applications, and configure prompt word templates in corresponding applications
![Conversation application](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%AE%80%E5%8D%95%E5%AF%B9%E8%AF%9D.png)
Let's see the effect
![Conversation effect](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%AF%B9%E8%AF%9D%E6%95%88%E6%9E%9C.png)
## How do I get started?
Here I am using Postgres as a data and vector store, because the Semantic Kernel and Kernel Memory both support it, though you can switch to others.
The model by default supports local models in 'gguf' format from openai, azure openai, and llama. If you need to use other models, you can integrate them using the one-api.
The login configuration in the configuration file is the default account and password.
The following configuration files are needed:
## Using Docker Compose
An appsettings.json for the pg version and a simplified version (Sqlite+disk) docker-compose.simple.yml are provided.
Download docker-compose.yml from the project root directory, then place the configuration file appsettings.json in the same directory,
The pg image has already been prepared. You can modify the default account and password in the docker-compose.yml, and then your appsettings.json database connection needs to be consistent.
Then you can enter the directory and execute
## ✏Function Examples
### Online Demo
```
docker compose up -d
https://antsk.ai-dotnet.com/
```
to start AntSK.
How to mount local models and model download directories in docker
```
# Non-host version, does not use local proxy
Default account: test
Default password: test
Due to the low configuration of the cloud server, the local model cannot be run, so the system settings permissions have been closed. You can simply view the interface. If you want to use the local model, please download and use it on your own.
```
### Other Function Examples
[Video Demonstration](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
## ❓How to get started?
Here I am using Postgres as the data and vector storage because Semantic Kernel and Kernel Memory support it, but you can also use other options.
The model by default supports the local model of openai, azure openai, and llama. If you need to use other models, you can integrate them using one-api.
The Login configuration in the configuration file is the default login account and password.
The following configuration file needs to be configured
## 1⃣Using docker-compose
Provided the pg version **appsettings.json** and simplified version (Sqlite+disk) **docker-compose.simple.yml**
Download **docker-compose.yml** from the project root directory and place the configuration file **appsettings.json** in the same directory.
The pg image has already been prepared. You can modify the default username and password in docker-compose.yml, and then the database connection in your **appsettings.json** needs to be consistent.
Then you can execute the following command in the directory to start AntSK
```
docker-compose up -d
```
## 2⃣How to mount local models and model download directory in docker
```
# Non-host version, do not use local proxy
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.1
ports:
    image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.1.5
    ports:
- 5000:5000
networks:
- antsk
@@ -156,48 +93,30 @@ services:
environment:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # local configuration file must be placed in the same directory
- ./appsettings.json:/app/appsettings.json # Local configuration file needs to be placed in the same directory
- D://model:/app/model
networks:
antsk:
```
Using this as an example, the meaning is to mount the local folder D://model from Windows into the container /app/model. If so, your appsettings.json model directory should be configured as
Taking this as an example, it means mounting the local D://model folder of Windows into the container /app/model. If so, the model address in your appsettings.json should be configured as
```
model/xxx.gguf
```
Some meanings of the configuration file
// (The rest of the information is omitted as it's unnecessary for the translation example context.)
Solving the missing style issue:
Execute under AntSK/src/AntSK:
## 3⃣Some meanings of configuration file
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
Then go to AntSK/src/AntSK/bin/Release/net8.0/publish
```
dotnet AntSK.dll
```
```
{
"DBConnection": {
"DbType": "Sqlite",
"DbType": "Sqlite",
"ConnectionStrings": "Data Source=AntSK.db;"
},
"KernelMemory": {
"VectorDb": "Disk",
"VectorDb": "Disk",
"ConnectionString": "Host=;Port=;Database=antsk;Username=;Password=",
"TableNamePrefix": "km-"
},
"LLamaSharp": {
"RunType": "GPU",
"Chat": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"Embedding": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"RunType": "GPU",
"FileDirectory": "D:\\Code\\AI\\AntBlazor\\model\\"
},
"Login": {
@@ -210,44 +129,86 @@ dotnet AntSK.dll
}
}
}
```
```
//Supports multiple databases, including SqlSugar, MySql, SqlServer, Sqlite, Oracle, PostgreSQL, Dm, Kdbndp, Oscar, MySqlConnector, Access, OpenGaussian, QuestDB, HG, ClickHouse, GBase, Odbc, OceanBaseForOracle, TDengine, GaussDB, OceanBase, Tidb, Vastbase, PolarDB, Custom
DBConnection DbType
//Connection string, corresponding strings need to be used according to different DB types
DBConnection ConnectionStrings
//The type of vector storage supports Postgres Disk Memory, where Postgres requires the configuration of ConnectionString
KernelMemory VectorDb
//The running mode used by the local model is GPU or CPU. If using an online API, either option can be used
LLamaSharp RunType
//The model path of the local session model should pay attention to distinguishing between Linux and Windows drive letters
LLamaSharp Chat
//The model path of the local vector model should pay attention to distinguishing between Linux and Windows drive letters
LLamaSharp Embedding
//Default administrator account password
// Supports various databases, you can check SqlSugar, MySql, SqlServer, Sqlite, Oracle, PostgreSQL, Dm, Kdbndp, Oscar, MySqlConnector, Access, OpenGauss, QuestDB, HG, ClickHouse, GBase, Odbc, OceanBaseForOracle, TDengine, GaussDB, OceanBase, Tidb, Vastbase, PolarDB, Custom
DBConnection.DbType
// Connection string, need to use the corresponding string according to the different DB types
DBConnection.ConnectionStrings
//The type of vector storage, supporting Postgres, Disk, Memory, Qdrant, Redis, AzureAISearch
//Postgres and Redis require ConnectionString configuration
//The ConnectionString of Qdrant and AzureAISearch uses Endpoint | APIKey
KernelMemory.VectorDb
//Local model execution options: GPU and CPU. When using the online API, any option can be used.
LLamaSharp.RunType
//Local model path, used for quick selection of models under llama, as well as saving downloaded models.
LLamaSharp.FileDirectory
//Default admin account password
Login
//The number of threads for importing asynchronous processing can be higher when using online APIs. Local models suggest 1, otherwise memory overflow and crash may occur
BackgroundTaskBroker ImportKMSTask WorkerCount
//Import asynchronous processing thread count. A higher count can be used for online API, but for local models, 1 is recommended to avoid memory overflow issues.
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```
## ⚠Fixing Style Issues:
Run the following in AntSK/src/AntSK:
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
Then navigate to AntSK/src/AntSK/bin/Release/net8.0/publish and run:
```
dotnet AntSK.dll
```
The styles should now be applied after starting.
To learn more or start using **AntSK**, you can follow my public account and join the exchange group.
I'm using CodeFirst mode for the database, so as long as the database connection is properly configured, the table structure will be created automatically.
## ✔Using llamafactory
```
1. First, ensure that Python and pip are installed in your environment. This step is not necessary if using an image, such as version v0.2.3.2, which already includes the complete Python environment.
2. Go to the model add page and select llamafactory.
3. Click "Initialize" to check whether the 'pip install' environment setup is complete.
4. Choose a model that you like.
5. Click "Start" to begin downloading the model from ModelScope (魔搭). This may involve a somewhat lengthy wait.
6. After the model has finished downloading, enter http://localhost:8000/ in the request address. The default port is 8000.
7. Click "Save" and start chatting.
8. Many people ask about the difference between LLamaSharp and llamafactory. In fact, LLamaSharp is a .NET implementation of llama.cpp, but only supports local gguf models, while llamafactory supports a wider variety of models and uses Python implementation. The main difference lies here. Additionally, llamafactory has the ability to fine-tune models, which is an area we will focus on integrating in the future.
```
## 🤝 Contributing
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://github.com/AIDotNet/AntSK/pulls)
If you would like to contribute, feel free to create a [Pull Request](https://github.com/AIDotNet/AntSK/pulls), or give us [Bug Report](https://github.com/AIDotNet/AntSK/issues/new).
## 💕 Contributors
## Contact me
This project exists thanks to all the people who contribute.
If you have any questions or suggestions, please follow my public account through the following ways, and send a message to me. We also have an exchange group, which can send messages such as joining the group, and then I will bring you into the exchange group
<a href="https://github.com/AIDotNet/AntSK/graphs/contributors">
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>
![Official account](https://github.com/xuzeyu91/Avalonia-Assistant/blob/main/img/gzh.jpg)
## 🚨 Code of Conduct
This project has adopted the code of conduct defined by the Contributor Covenant to clarify expected behavior in our community.
For more information see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
To learn more or get started with **AntSK**, follow my official WeChat account and join the discussion group.
## ☎Contact Me
If you have any questions or suggestions, please contact me through my official WeChat account. We also have a discussion group where you can send a message to join, and then I will add you to the group.
![Official WeChat Account](https://github.com/AIDotNet/Avalonia-Assistant/blob/main/img/gzh.jpg)
---
We appreciate your interest in **AntSK** and look forward to working with you to create an intelligent future!
We appreciate your interest in **AntSK** and look forward to collaborating with you to create an intelligent future!

122
README.md
View File

@@ -1,14 +1,14 @@
中文|[English](https://github.com/xuzeyu91/AntSK/blob/main/README.en.md)
中文|[English](https://github.com/AIDotNet/AntSK/blob/main/README.en.md)
# AntSK
## 基于.Net8+AntBlazor+SemanticKernel 打造的AI知识库/智能体
## 使用.Net8+Blazor+SemanticKernel 打造的AI知识库/智能体
## 核心功能
## 核心功能
- **语义内核 (Semantic Kernel)**:采用领先的自然语言处理技术,准确理解、处理和响应复杂的语义查询,为用户提供精确的信息检索和推荐服务。
- **内存内核 (Kernel Memory)**具备持续学习和存储知识点的能力AntSK 拥有长期记忆功能,累积经验,提供更个性化的交互体验。
- **知识库**通过文档Word、PDF、Excel、Txt、Markdown、Json、PPT等形式导入知识库可以进行知识库文档
- **知识库**通过文档Word、PDF、Excel、Txt、Markdown、Json、PPT等形式导入知识库可以进行知识库问答
- **GPTs 生成**此平台支持创建个性化的GPT模型尝试构建您自己的GPT模型。
@@ -20,11 +20,14 @@
- **联网搜索**AntSK实时获取最新信息确保用户接受到的资料总是最及时、最相关的。
- **模型管理**适配和管理集成不同厂商的不同模型。并且支持llama.cpp所支持的gguf类型以及llamafactory所支持的模型离线运行
- **模型管理**:适配和管理集成不同厂商的不同模型。并且支持**llama.cpp**所支持的gguf类型以及**llamafactory**所支持的模型离线运行
- **国产信创**AntSK支持国产模型和国产数据库可以在信创条件下运行
## 应用场景
- **模型微调**规划中基于llamafactory进行模型微调
## ⛪应用场景
AntSK 适用于多种业务场景,例如:
- 企业级知识管理系统
@@ -35,57 +38,37 @@ AntSK 适用于多种业务场景,例如:
- 教育与在线学习平台
- 其他有意思的AI App
## 功能示例
## ✏️功能示例
### 在线演示
```
https://antsk.ai-dotnet.com/
```
默认账号admin
```
默认账号test
默认密码:xuzeyu
默认密码:test
由于云服务器配置较低,无法运行本地模型,所以把系统设置权限关闭了,大家看看界面即可,要使用本地模型,请下载自行使用
```
### 其他功能示例
[视频示例](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
首先需要创建知识库
![知识库](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%9F%A5%E8%AF%86%E5%BA%93.png)
[在线文档http://antsk.cn](http://antsk.cn)
在知识库里可以使用文档或者url进行导入
![知识库详情](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%9F%A5%E8%AF%86%E5%BA%93%E8%AF%A6%E6%83%85.png)
点击查看可以查看知识库的文档切片情况
![文档切片](https://github.com/xuzeyu91/AntSK/blob/main/images/%E6%96%87%E6%A1%A3%E5%88%87%E7%89%87.png)
然后我们需要创建应用,可以创建对话应用和知识库。
![应用](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%BA%94%E7%94%A8.png)
知识库应用需要选择已有的知识库,可以选多个
![应用配置](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%BA%94%E7%94%A8%E9%85%8D%E7%BD%AE.png)
然后再对话中可以对知识库的文档进行提问
![问答](https://github.com/xuzeyu91/AntSK/blob/main/images/%E9%97%AE%E7%AD%94.png)
另外我们也可以创建对话应用,可以在对应应用中配置提示词模板
![对话应用](https://github.com/xuzeyu91/AntSK/blob/main/images/%E7%AE%80%E5%8D%95%E5%AF%B9%E8%AF%9D.png)
下面来看看效果吧
![对话效果](https://github.com/xuzeyu91/AntSK/blob/main/images/%E5%AF%B9%E8%AF%9D%E6%95%88%E6%9E%9C.png)
## 如何开始?
## ❓如何开始?
在这里我使用的是Postgres 作为数据存储和向量存储因为Semantic Kernel和Kernel Memory都支持他当然你也可以换成其他的。
模型默认支持openai、azure openai 和llama支持的gguf本地模型,如果需要使用其他模型可以使用one-api进行集成。
模型默认支持openai、azure openai、讯飞星火、阿里云积、 和llama支持的gguf本地模型 以及llamafactory的本地模型,如果需要使用其他模型可以使用one-api进行集成。
配置文件中的Login配置是默认的登账号和密码
配置文件中的Login配置是默认的登账号和密码
需要配置如下的配置文件
## 使用docker-compose
## 1使用docker-compose
提供了pg版本 **appsettings.json** 和 简化版本Sqlite+disk **docker-compose.simple.yml**
提供了pg版本 **appsettings.json** 和 简化版本(**Sqlite+disk** **docker-compose.simple.yml**
从项目根目录下载**docker-compose.yml**,然后把配置文件**appsettings.json**和它放在统一目录,
@@ -97,14 +80,14 @@ docker-compose up -d
```
来启动AntSK
## 如何在docker中挂载本地模型和模型下载的目录
## 2如何在docker中挂载本地模型和模型下载的目录
```
# 非 host 版本, 不使用本机代理
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.1.5
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.2.3
ports:
- 5000:5000
networks:
@@ -125,7 +108,7 @@ networks:
model/xxx.gguf
```
## 配置文件的一些含义
## 3配置文件的一些含义
```
{
"DBConnection": {
@@ -139,8 +122,6 @@ model/xxx.gguf
},
"LLamaSharp": {
"RunType": "GPU",
"Chat": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"Embedding": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"FileDirectory": "D:\\Code\\AI\\AntBlazor\\model\\"
},
"Login": {
@@ -160,15 +141,14 @@ DBConnection.DbType
//连接字符串需要根据不同DB类型用对应的字符串
DBConnection.ConnectionStrings
//向量存储的类型,支持 Postgres Disk Memory 其中Postgres需要配置 ConnectionString
//向量存储的类型,支持 PostgresDiskMemory、Qdrant、Redis、AzureAISearch
//Postgres、Redis需要配置 ConnectionString
//Qdrant 和AzureAISearch 的 ConnectionString 使用 Endpoint|APIKey
KernelMemory.VectorDb
//本地模型使用的运行方式 GPU CPU ,如果用在线API 这个随意使用一个即可
LLamaSharp.RunType
//本地会话模型的模型路径 注意区分linux和windows盘符不同
LLamaSharp.Chat
//本地向量模型的模型路径 注意区分linux和windows盘符不同
LLamaSharp.Embedding
//本地模型路径用于在选择llama时可以快速选择目录下的模型以及保存下载的模型
LLamaSharp.FileDirectory
@@ -178,7 +158,7 @@ Login
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```
## 找不到样式问题解决:
## ⚠️找不到样式问题解决:
AntSK/src/AntSK下执行:
```
dotnet clean
@@ -193,15 +173,49 @@ dotnet AntSK.dll
DB我使用的是CodeFirst模式只要配置好数据库链接表结构是自动创建的
## ✔使用llamafactory
```
1、首先需要确保你的环境已经安装了python和pip如果使用镜像例如p0.2.4版本已经包含了 python全套环境则无需此步骤
2、进入模型添加页面选择llamafactory
3、点击初始化可以检查pip install 环境是否完成
4、选择一个喜欢的模型
5、点击启动,这会开始从魔塔下载模型,你可能需要有一个较为漫长的等待
6、等待模型下载完毕后在请求地址输入 http://localhost:8000/ 这里默认是使用8000端口
7、点击保存然后就可以开始聊天了
8、很多人会问 LLamaSharp与llamafactory有什么区别其实这两者LLamaSharp是llama.cpp的 dotnet实现但是只支持本地gguf模型 而llamafactory 支持的模型种类更多但使用的是python的实现其主要差异在这里另外llamafactory具有模型微调的能力这也是我们下一步需要重点集成的部分。
```
## 🤝 贡献
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://github.com/AIDotNet/AntSK/pulls)

如果你想贡献,可以创建一个[拉取请求](https://github.com/AIDotNet/AntSK/pulls), 或给我们[错误报告](https://github.com/AIDotNet/AntSK/issues/new).


## 💕 贡献者
这个项目的存在要感谢所有的贡献者。

<a href="https://github.com/AIDotNet/AntSK/graphs/contributors">
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>

## 🚨 行为准则
该项目采用了贡献者公约定义的行为准则,以阐明我们社区的预期行为。有关更多信息,请参见 .NET Foundation 行为准则。 [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
想了解更多信息或开始使用 **AntSK**,可以关注我的公众号以及加入交流群。
## 联系我
## ☎️联系我
如有任何问题或建议,请通过以下方式关注我的公众号,发消息与我联系,我们也有交流群,可以发送进群等消息,然后我会拉你进交流群
![公众号](https://github.com/xuzeyu91/Avalonia-Assistant/blob/main/img/gzh.jpg)
![公众号](https://github.com/AIDotNet/AntSK/blob/main/images/gzh.jpg)
---
我们对您在**AntSK**的兴趣表示感谢,并期待与您携手共创智能化的未来!
## 🌟 Star History
<a href="https://github.com/AIDotNet/AntSK/stargazers" target="_blank" style="display: block" align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
</picture>
</a>

View File

@@ -3,7 +3,9 @@ version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.1
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.4
# 如果需要pytorch环境需要使用下面这个镜像镜像比较大
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:p0.2.4
ports:
- 5000:5000
networks:

View File

@@ -18,7 +18,9 @@ services:
- ./pg/data:/var/lib/postgresql/data
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.2.1
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.4
# 如果需要pytorch环境需要使用下面这个镜像镜像比较大
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:p0.2.4
ports:
- 5000:5000
networks:

View File

@@ -0,0 +1,14 @@
{
"position": 3,
"label": "部署",
"collapsible": true,
"collapsed": false,
"className": "red",
"link": {
"type": "generated-index",
"title": "使用案例"
},
"customProps": {
"description": "提供快速使用AntSK的一些案例"
}
}

56
docs/deploy/settings.md Normal file
View File

@@ -0,0 +1,56 @@
---
sidebar_position: 2
---
# 配置文件的一些含义
```
{
"DBConnection": {
"DbType": "Sqlite",
"ConnectionStrings": "Data Source=AntSK.db;"
},
"KernelMemory": {
"VectorDb": "Disk",
"ConnectionString": "Host=;Port=;Database=antsk;Username=;Password=",
"TableNamePrefix": "km-"
},
"LLamaSharp": {
"RunType": "GPU",
"Chat": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"Embedding": "D:\\Code\\AI\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"FileDirectory": "D:\\Code\\AI\\AntBlazor\\model\\"
},
"Login": {
"User": "admin",
"Password": "xuzeyu"
},
"BackgroundTaskBroker": {
"ImportKMSTask": {
"WorkerCount": 1
}
}
}
```
```
//支持多种数据库具体可以查看SqlSugarMySqlSqlServerSqliteOraclePostgreSQLDmKdbndpOscarMySqlConnectorAccessOpenGaussQuestDBHGClickHouseGBaseOdbcOceanBaseForOracleTDengineGaussDBOceanBaseTidbVastbasePolarDBCustom
DBConnection.DbType
//连接字符串需要根据不同DB类型用对应的字符串
DBConnection.ConnectionStrings
//向量存储的类型,支持 Postgres Disk Memory 其中Postgres需要配置 ConnectionString
KernelMemory.VectorDb
//本地模型使用的运行方式 GPU CPU ,如果用在线API 这个随意使用一个即可
LLamaSharp.RunType
//本地会话模型的模型路径 注意区分linux和windows盘符不同
LLamaSharp.Chat
//本地向量模型的模型路径 注意区分linux和windows盘符不同
LLamaSharp.Embedding
//本地模型路径用于在选择llama时可以快速选择目录下的模型以及保存下载的模型
LLamaSharp.FileDirectory
//默认管理员账号密码
Login
//导入异步处理的线程数使用在线API可以高一点本地模型建议1 否则容易内存溢出崩掉
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```

57
docs/deploy/start.md Normal file
View File

@@ -0,0 +1,57 @@
---
sidebar_position: 1
---
# 如何开始?
在这里我使用的是Postgres 作为数据存储和向量存储因为Semantic Kernel和Kernel Memory都支持他当然你也可以换成其他的。
模型默认支持openai、azure openai 和llama支持的gguf本地模型,如果需要使用其他模型可以使用one-api进行集成。
配置文件中的Login配置是默认的登陆账号和密码
需要配置如下的配置文件
## 使用docker-compose
提供了pg版本 **appsettings.json** 和 简化版本Sqlite+disk **docker-compose.simple.yml**
从项目根目录下载**docker-compose.yml**,然后把配置文件**appsettings.json**和它放在统一目录,
这里已经把pg的镜像做好了。在docker-compose.yml中可以修改默认账号密码然后你的**appsettings.json**的数据库连接需要保持一致。
然后你可以进入到目录后执行
```
docker-compose up -d
```
来启动AntSK
## 如何在docker中挂载本地模型和模型下载的目录
```
# 非 host 版本, 不使用本机代理
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.1.5
ports:
- 5000:5000
networks:
- antsk
depends_on:
- antskpg
restart: always
environment:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # 本地配置文件 需要放在同级目录
- D://model:/app/model
networks:
antsk:
```
以这个为示例意思是把windows本地D://model的文件夹挂载进 容器内/app/model 如果是这样你的appsettings.json中的模型地址应该配置为
```
model/xxx.gguf
```
DB我使用的是CodeFirst模式只要配置好数据库连接表结构是自动创建的

16
docs/deploy/style.md Normal file
View File

@@ -0,0 +1,16 @@
---
sidebar_position: 3
---
# 找不到样式问题解决
AntSK/src/AntSK下执行:
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
再去AntSK/src/AntSK/bin/Release/net8.0/publish下
```
dotnet AntSK.dll
```
然后启动就有样式了

View File

@@ -0,0 +1,14 @@
{
"position": 2,
"label": "快速开发",
"collapsible": true,
"collapsed": false,
"className": "red",
"link": {
"type": "generated-index",
"title": "快速开发"
},
"customProps": {
"description": "快速基于项目二次开发!"
}
}

View File

@@ -0,0 +1,14 @@
{
"position": 2,
"label": "介绍",
"collapsible": true,
"collapsed": false,
"className": "red",
"link": {
"type": "generated-index",
"title": "使用案例"
},
"customProps": {
"description": "提供快速使用AntSK的一些案例"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 101 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 54 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 53 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 202 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 55 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 170 KiB

70
docs/introduce/readme.md Normal file
View File

@@ -0,0 +1,70 @@
---
sidebar_position: 1
---
# AntSK功能介绍
## 基于.Net8+AntBlazor+SemanticKernel 打造的AI知识库/智能体
## 核心功能
- **语义内核 (Semantic Kernel)**:采用领先的自然语言处理技术,准确理解、处理和响应复杂的语义查询,为用户提供精确的信息检索和推荐服务。
- **内存内核 (Kernel Memory)**具备持续学习和存储知识点的能力AntSK 拥有长期记忆功能,累积经验,提供更个性化的交互体验。
- **知识库**通过文档Word、PDF、Excel、Txt、Markdown、Json、PPT等形式导入知识库可以进行知识库问答。
- **GPTs 生成**此平台支持创建个性化的GPT模型尝试构建您自己的GPT模型。
- **API接口发布**将内部功能以API的形式对外提供便于开发者将AntSK 集成进其他应用,增强应用智慧。
- **API插件系统**开放式API插件系统允许第三方开发者或服务商轻松将其服务集成到AntSK不断增强应用功能。
- **.Net插件系统**开放式dll插件系统允许第三方开发者或服务商轻松将其业务功能通过标准格式的代码生成dll后集成到AntSK不断增强应用功能。
- **联网搜索**AntSK实时获取最新信息确保用户接受到的资料总是最及时、最相关的。
- **模型管理**:适配和管理集成不同厂商的不同模型。并且支持**llama.cpp**所支持的gguf类型以及**llamafactory**所支持的模型离线运行
- **国产信创**AntSK支持国产模型和国产数据库可以在信创条件下运行
- **模型微调**规划中基于llamafactory进行模型微调
## 应用场景
AntSK 适用于多种业务场景,例如:
- 企业级知识管理系统
- 自动客服与聊天机器人
- 企业级搜索引擎
- 个性化推荐系统
- 智能辅助写作
- 教育与在线学习平台
- 其他有意思的AI App
## 功能示例
[视频示例](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
首先需要创建知识库
![知识库](./img/知识库.png)
在知识库里可以使用文档或者url进行导入
![知识库详情](./img/知识库详情.png)
点击查看可以查看知识库的文档切片情况
![文档切片](./img/文档切片.png)
然后我们需要创建应用,可以创建对话应用和知识库。
![应用](./img/应用.png)
知识库应用需要选择已有的知识库,可以选多个
![应用配置](./img/应用配置.png)
然后再对话中可以对知识库的文档进行提问
![问答](./img/问答.png)
另外我们也可以创建对话应用,可以在对应应用中配置提示词模板
![对话应用](./img/简单对话.png)
下面来看看效果吧
![对话效果](./img/对话效果.png)

BIN
images/gzh.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 180 KiB

View File

@@ -9,8 +9,11 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AntDesign.Charts" Version="0.5.1" />
<PackageReference Include="AntDesign.ProLayout" Version="0.18.0" />
<PackageReference Include="AntDesign.ProLayout" Version="0.18.1" />
<PackageReference Include="BlazorComponents.Terminal" Version="0.6.0" />
<PackageReference Include="Blazored.LocalStorage" Version="4.5.0" />
<PackageReference Include="pythonnet" Version="3.0.3" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
@@ -18,21 +21,24 @@
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
<PackageReference Include="Markdig" Version="0.36.2" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SqlSugarCore" Version="5.1.4.145" />
<PackageReference Include="SqlSugarCore" Version="5.1.4.149" />
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.118" />
<PackageReference Include="RestSharp" Version="110.2.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.2" />
<PackageReference Include="Microsoft.SemanticKernel.Core" Version="1.6.2" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.6.2-alpha" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.34.240313.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Postgres" Version="0.34.240313.1" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.3" />
<PackageReference Include="Microsoft.SemanticKernel.Core" Version="1.6.3" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.6.3-alpha" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Postgres" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Qdrant" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Redis" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.AzureAISearch" Version="0.35.240321.1" />
<PackageReference Include="LLamaSharp" Version="0.10.0" />
<PackageReference Include="LLamaSharp.Backend.Cpu" Version="0.10.0" />
<PackageReference Include="LLamaSharp.Backend.Cuda12" Version="0.10.0" />
<PackageReference Include="LLamaSharp.kernel-memory" Version="0.10.0" />
<PackageReference Include="LLamaSharp.semantic-kernel" Version="0.10.0" />
<PackageReference Include="LLamaSharp" Version="0.11.1" />
<PackageReference Include="LLamaSharp.Backend.Cpu" Version="0.11.1" />
<PackageReference Include="LLamaSharp.Backend.Cuda12" Version="0.11.1" />
<PackageReference Include="LLamaSharp.kernel-memory" Version="0.11.1" />
<PackageReference Include="LLamaSharp.semantic-kernel" Version="0.11.1" />
</ItemGroup>

View File

@@ -99,12 +99,17 @@
总数
</summary>
</member>
<member name="M:AntSK.Domain.Domain.Other.EmbeddingConfig.LoadModel(System.String,System.String)">
<summary>
模型写死
</summary>
</member>
<member name="F:AntSK.Domain.Domain.Other.LLamaConfig.dicLLamaWeights">
<summary>
避免模型重复加载,本地缓存
</summary>
</member>
<member name="M:AntSK.Domain.Domain.Service.ChatService.SendChatByAppAsync(AntSK.Domain.Repositories.Apps,System.String,System.String)">
<member name="M:AntSK.Domain.Domain.Service.ChatService.SendChatByAppAsync(AntSK.Domain.Repositories.Apps,System.String,Microsoft.SemanticKernel.ChatCompletion.ChatHistory)">
<summary>
发送消息
</summary>
@@ -771,6 +776,14 @@
<param name="parameters"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.ConvertUtils.ComparisonIgnoreCase(System.String,System.String)">
<summary>
忽略大小写匹配
</summary>
<param name="s"></param>
<param name="value"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.RepoFiles.SamplePluginsPath">
<summary>
Scan the local folders from the repo, looking for "samples/plugins" folder.

View File

@@ -0,0 +1,21 @@
using LLamaSharp.KernelMemory;
using Microsoft.KernelMemory.AI;
using Microsoft.KernelMemory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Common.Embedding
{
/// <summary>
/// Kernel-memory builder extensions for the HuggingFace bge embedding generator.
/// </summary>
public static class BuilderBgeExtensions
{
/// <summary>
/// Registers <paramref name="textEmbeddingGenerator"/> both as the query-time
/// <see cref="ITextEmbeddingGenerator"/> and as the ingestion-time embedding generator.
/// </summary>
/// <param name="builder">The kernel-memory builder being configured.</param>
/// <param name="textEmbeddingGenerator">Generator instance to register for both pipelines.</param>
/// <returns>The same builder, for fluent chaining.</returns>
public static IKernelMemoryBuilder WithBgeTextEmbeddingGeneration(this IKernelMemoryBuilder builder, HuggingfaceTextEmbeddingGenerator textEmbeddingGenerator)
{
builder.AddSingleton((ITextEmbeddingGenerator)textEmbeddingGenerator);
builder.AddIngestionEmbeddingGenerator(textEmbeddingGenerator);
return builder;
}
}
}

View File

@@ -0,0 +1,56 @@
using LLama.Common;
using LLama;
using LLamaSharp.KernelMemory;
using Microsoft.KernelMemory.AI;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using AntSK.Domain.Domain.Other;
namespace AntSK.Domain.Common.Embedding
{
    /// <summary>
    /// Text embedding generator backed by a HuggingFace bge model that is loaded
    /// through the embedded Python runtime via <see cref="EmbeddingConfig"/>.
    /// </summary>
    public class HuggingfaceTextEmbeddingGenerator : ITextEmbeddingGenerator, ITextTokenizer, IDisposable
    {
        // Fixed token limits for the bge embedding models used by this generator.
        public int MaxTokens => 1024;

        public int MaxTokenTotal => 1024;

        // Python model handle returned by EmbeddingConfig.LoadModel (dynamic Python.NET object).
        private readonly dynamic _embedder;

        /// <summary>
        /// Loads (or reuses the process-wide cached) Python embedding model.
        /// </summary>
        /// <param name="pyDllPath">Full path to the Python DLL, e.g. python311.dll.</param>
        /// <param name="modelName">ModelScope model name to download and load.</param>
        public HuggingfaceTextEmbeddingGenerator(string pyDllPath, string modelName)
        {
            _embedder = EmbeddingConfig.LoadModel(pyDllPath, modelName);
        }

        public void Dispose()
        {
            // NOTE(review): EmbeddingConfig keeps a single shared static model, so this
            // call does not actually release the Python model; it only logs.
            EmbeddingConfig.Dispose();
        }

        /// <summary>
        /// Generates an embedding vector for <paramref name="text"/>.
        /// </summary>
        public async Task<Microsoft.KernelMemory.Embedding> GenerateEmbeddingAsync(string text, CancellationToken cancellationToken = default)
        {
            var embeddings = await EmbeddingConfig.GetEmbedding(text);
            return new Microsoft.KernelMemory.Embedding(embeddings);
        }

        /// <summary>
        /// Counts tokens for <paramref name="text"/> using the model's own tokenizer.
        /// </summary>
        public int CountTokens(string text)
        {
            return EmbeddingConfig.TokenCount(text);
        }
    }
}

View File

@@ -1,8 +1,11 @@
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Repositories;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
@@ -11,8 +14,10 @@ namespace AntSK.Domain.Domain.Interface
{
public interface IChatService
{
IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, string history);
IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, ChatHistory history);
IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, string history, string filePath, List<RelevantSource> relevantSources = null);
IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, ChatHistory history, string filePath, List<RelevantSource> relevantSources = null);
Task<string> SendImgByAppAsync(Apps app, string questions);
Task<ChatHistory> GetChatHistory(List<MessageInfo> MessageList);
}
}

View File

@@ -21,12 +21,16 @@ namespace AntSK.Domain.Domain.Model.Enum
[Display(Name = "灵积大模型")]
DashScope = 5,
[Display(Name = "LLamaFactory")]
LLamaFactory = 6,
[Display(Name = "Bge Embedding")]
BgeEmbedding = 7,
[Display(Name = "StableDiffusion")]
StableDiffusion = 8,
[Display(Name = "模拟输出")]
Mock = 100,
}
/// <summary>
@@ -36,5 +40,6 @@ namespace AntSK.Domain.Domain.Model.Enum
{
Chat = 1,
Embedding = 2,
Image=3,
}
}

View File

@@ -9,6 +9,7 @@ namespace AntSK.Domain.Domain.Model.Enum
public enum AppType
{
chat = 1,
kms = 2
kms = 2,
img=3
}
}

View File

@@ -18,7 +18,7 @@ namespace AntSK.Domain.Domain.Model.hfmirror
public string Author { get; set; }
public HfAuthorData AuthorData { get; set; }
public int Downloads { get; set; }
public bool Gated { get; set; }
public object Gated { get; set; }
public string Id { get; set; }
public DateTime LastModified { get; set; }
public int Likes { get; set; }

View File

@@ -0,0 +1,88 @@
using Python.Runtime;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static Python.Runtime.Py;
namespace AntSK.Domain.Domain.Other
{
    /// <summary>
    /// Loads and caches a single HuggingFace bge embedding model through Python.NET.
    /// The model is process-wide static state guarded by a lock.
    /// </summary>
    public static class EmbeddingConfig
    {
        // Cached Python model handle; loaded at most once per process.
        public static dynamic model { get; set; }

        // Guards one-time Python engine initialization and model loading.
        static object lockobj = new object();

        /// <summary>
        /// Initializes the embedded Python runtime (once) and loads the embedding model
        /// from ModelScope; subsequent calls return the cached model.
        /// </summary>
        /// <param name="pythondllPath">Full path to the Python DLL, e.g. python311.dll.</param>
        /// <param name="modelName">ModelScope model name, e.g. AI-ModelScope/bge-large-zh-v1.5.</param>
        /// <returns>The dynamic Python embedding model object.</returns>
        public static dynamic LoadModel(string pythondllPath, string modelName)
        {
            lock (lockobj)
            {
                if (model == null)
                {
                    Runtime.PythonDLL = pythondllPath;
                    PythonEngine.Initialize();
                    PythonEngine.BeginAllowThreads();
                    try
                    {
                        using (Py.GIL()) // acquire Python's Global Interpreter Lock
                        {
                            dynamic modelscope = Py.Import("modelscope");
                            // Download (or reuse a cached copy of) the model from ModelScope.
                            dynamic model_dir = modelscope.snapshot_download(modelName, revision: "master");
                            dynamic HuggingFaceBgeEmbeddingstemp = Py.Import("langchain.embeddings");
                            dynamic HuggingFaceBgeEmbeddings = HuggingFaceBgeEmbeddingstemp.HuggingFaceBgeEmbeddings;
                            dynamic model_kwargs = new PyDict();
                            model_kwargs["device"] = new PyString("cpu");
                            dynamic hugginmodel = HuggingFaceBgeEmbeddings(
                                model_name: model_dir,
                                model_kwargs: model_kwargs
                            );
                            model = hugginmodel;
                            return hugginmodel;
                        }
                    }
                    catch (Exception)
                    {
                        // Rethrow without resetting the stack trace ("throw ex" would lose it).
                        throw;
                    }
                }
                else
                    return model;
            }
        }

        /// <summary>
        /// Computes the embedding vector for a query string.
        /// </summary>
        /// <param name="queryStr">Text to embed.</param>
        public static Task<float[]> GetEmbedding(string queryStr)
        {
            using (Py.GIL())
            {
                PyObject queryResult = model.embed_query(queryStr);
                var floatList = queryResult.As<float[]>();
                return Task.FromResult(floatList);
            }
        }

        /// <summary>
        /// Counts tokens for a string using the loaded model's tokenizer.
        /// </summary>
        /// <param name="queryStr">Text to tokenize.</param>
        public static int TokenCount(string queryStr)
        {
            using (Py.GIL())
            {
                PyObject queryResult = model.client.tokenize(queryStr);
                int len = (int)(queryResult.Length());
                return len;
            }
        }

        public static void Dispose()
        {
            // NOTE(review): the Python engine/model is intentionally kept alive for the
            // process lifetime; this method only logs.
            Console.WriteLine("python dispose");
        }
    }
}

View File

@@ -31,7 +31,7 @@ namespace AntSK.Domain.Domain.Other
{
ContextSize = lsConfig?.ContextSize ?? 2048,
Seed = lsConfig?.Seed ?? 0,
GpuLayerCount = lsConfig?.GpuLayerCount ?? 10,
GpuLayerCount = lsConfig?.GpuLayerCount ?? 20,
EmbeddingMode = true
};
var weights = LLamaWeights.LoadFromFile(parameters);

View File

@@ -12,6 +12,12 @@ using System.Reflection.Metadata;
using Microsoft.KernelMemory;
using System.Collections.Generic;
using Markdig;
using ChatHistory = Microsoft.SemanticKernel.ChatCompletion.ChatHistory;
using Microsoft.SemanticKernel.Plugins.Core;
using Azure.Core;
using AntSK.Domain.Domain.Model;
using AntSK.LLM.StableDiffusion;
using System.Drawing;
namespace AntSK.Domain.Domain.Service
{
@@ -19,7 +25,8 @@ namespace AntSK.Domain.Domain.Service
public class ChatService(
IKernelService _kernelService,
IKMService _kMService,
IKmsDetails_Repositories _kmsDetails_Repositories
IKmsDetails_Repositories _kmsDetails_Repositories,
IAIModels_Repositories _aIModels_Repositories
) : IChatService
{
/// <summary>
@@ -29,13 +36,31 @@ namespace AntSK.Domain.Domain.Service
/// <param name="questions"></param>
/// <param name="history"></param>
/// <returns></returns>
public async IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, string history)
public async IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, ChatHistory history)
{
if (string.IsNullOrEmpty(app.Prompt) || !app.Prompt.Contains("{{$input}}"))
{
//如果模板为空,给默认提示词
app.Prompt = app.Prompt.ConvertToString() + "{{$input}}";
}
KernelArguments args =new KernelArguments();
if (history.Count > 10)
{
app.Prompt = @"${{ConversationSummaryPlugin.SummarizeConversation $history}}" + app.Prompt;
args = new() {
{ "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) },
{ "input", questions }
};
}
else
{
args=new()
{
{ "input", $"{string.Join("\n", history.Select(x => x.Role + ": " + x.Content))}{Environment.NewLine} user:{questions}" }
};
}
var _kernel = _kernelService.GetKernelByApp(app);
var temperature = app.Temperature / 100;//存的是0~100需要缩小
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
@@ -45,14 +70,15 @@ namespace AntSK.Domain.Domain.Service
settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
}
var func = _kernel.CreateFunctionFromPrompt(app.Prompt, settings);
var chatResult = _kernel.InvokeStreamingAsync(function: func, arguments: new KernelArguments() { ["input"] = $"{history}{Environment.NewLine} user:{questions}" });
var chatResult = _kernel.InvokeStreamingAsync(function: func,
arguments: args);
await foreach (var content in chatResult)
{
yield return content;
}
}
public async IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, string history, string filePath, List<RelevantSource> relevantSources = null)
public async IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, ChatHistory history, string filePath, List<RelevantSource> relevantSources = null)
{
var relevantSourceList = await _kMService.GetRelevantSourceList(app.KmsIdList, questions);
var _kernel = _kernelService.GetKernelByApp(app);
@@ -84,9 +110,9 @@ namespace AntSK.Domain.Domain.Service
dataMsg.AppendLine(item.ToString());
}
KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask");
KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask1");
var chatResult = _kernel.InvokeStreamingAsync(function: jsonFun,
arguments: new KernelArguments() { ["doc"] = dataMsg, ["history"] = history, ["questions"] = questions });
arguments: new KernelArguments() { ["doc"] = dataMsg, ["history"] = string.Join("\n", history.Select(x => x.Role + ": " + x.Content)), ["questions"] = questions });
await foreach (var content in chatResult)
{
@@ -98,5 +124,78 @@ namespace AntSK.Domain.Domain.Service
yield return new StreamingTextContent(KmsConstantcs.KmsSearchNull);
}
}
public async Task<string> SendImgByAppAsync(Apps app, string questions)
{
var imageModel = _aIModels_Repositories.GetFirst(p => p.Id == app.ImageModelID);
KernelArguments args = new() {
{ "input", questions }
};
var _kernel = _kernelService.GetKernelByApp(app);
var temperature = app.Temperature / 100; //存的是0~100需要缩小
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
var func = _kernel.CreateFunctionFromPrompt("你是一个StableDiffusion提示词助手,需要将用户问题转化为StableDiffusion的英文提示词并返回,请注意只返回提示词不要有其他多余内容,用户的问题是:{{$input}}", settings);
var chatResult = await _kernel.InvokeAsync(function: func, arguments: args);
if (chatResult.IsNotNull())
{
string prompt = chatResult.GetValue<string>();
if (!SDHelper.IsInitialized)
{
Structs.ModelParams modelParams = new Structs.ModelParams
{
ModelPath = imageModel.ModelName,
RngType = Structs.RngType.CUDA_RNG,
//VaePath = vaePath,
//KeepVaeOnCpu = keepVaeOnCpu,
//VaeTiling = vaeTiling,
//LoraModelDir = loraModelDir,
};
bool result = SDHelper.Initialize(modelParams);
}
Structs.TextToImageParams textToImageParams = new Structs.TextToImageParams
{
Prompt = prompt,
NegativePrompt = "2d, 3d, cartoon, paintings",
SampleMethod = (Structs.SampleMethod)Enum.Parse(typeof(Structs.SampleMethod), "EULER_A"),
Width = 256,
Height = 256,
NormalizeInput = true,
ClipSkip = -1,
CfgScale = 7,
SampleSteps = 20,
Seed = -1,
};
Bitmap[] outputImages = SDHelper.TextToImage(textToImageParams);
var base64 = ImageUtils.BitmapToBase64(outputImages[0]);
return base64;
}
else
{
return "";
}
}
public async Task<ChatHistory> GetChatHistory(List<MessageInfo> MessageList)
{
ChatHistory history = new ChatHistory();
if (MessageList.Count > 1)
{
foreach (var item in MessageList)
{
if (item.IsSend)
{
history.AddUserMessage(item.Context);
}
else
{
history.AddAssistantMessage(item.Context);
}
}
}
return history;
}
}
}

View File

@@ -1,5 +1,6 @@
using AntDesign;
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Common.Embedding;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Domain.Model.Dto;
@@ -147,6 +148,11 @@ namespace AntSK.Domain.Domain.Service
var embedder = new LLamaEmbedder(weights, parameters);
memory.WithLLamaSharpTextEmbeddingGeneration(new LLamaSharpTextEmbeddingGenerator(embedder));
break;
case Model.Enum.AIType.BgeEmbedding:
string pyDll = embedModel.EndPoint;
string bgeEmbeddingModelName = embedModel.ModelName;
memory.WithBgeTextEmbeddingGeneration(new HuggingfaceTextEmbeddingGenerator(pyDll,bgeEmbeddingModelName));
break;
case Model.Enum.AIType.DashScope:
memory.WithDashScopeDefaults(embedModel.ModelKey);
break;
@@ -183,6 +189,14 @@ namespace AntSK.Domain.Domain.Service
var executor = new StatelessExecutor(weights, parameters);
memory.WithLLamaSharpTextGeneration(new LlamaSharpTextGenerator(weights, context, executor));
break;
case Model.Enum.AIType.LLamaFactory:
memory.WithOpenAITextGeneration(new OpenAIConfig()
{
APIKey = "123",
TextModel = chatModel.ModelName
}, null, chatHttpClient);
break;
case Model.Enum.AIType.DashScope:
memory.WithDashScopeTextGeneration(new Cnblogs.KernelMemory.AI.DashScope.DashScopeConfig
{
@@ -220,6 +234,20 @@ namespace AntSK.Domain.Domain.Service
StorageType = FileSystemTypes.Volatile
});
break;
case "Qdrant":
var qdrantConfig = ConnectionString.Split("|");
memory.WithQdrantMemoryDb(qdrantConfig[0],qdrantConfig[1]);
break;
case "Redis":
memory.WithRedisMemoryDb(new RedisConfig()
{
ConnectionString = ConnectionString,
});
break;
case "AzureAISearch":
var aisearchConfig = ConnectionString.Split("|");
memory.WithAzureAISearchMemoryDb(aisearchConfig[0], aisearchConfig[1]);
break;
}
}

View File

@@ -162,7 +162,7 @@ namespace AntSK.Domain.Domain.Service
new KernelParameterMetadata("jsonbody"){
Name="json参数字符串",
ParameterType=typeof(string),
Description=$"需要根据背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.Query}"
Description=$"背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.Query}"
}
};
functions.Add(_kernel.CreateFunctionFromMethod((string jsonbody) =>
@@ -201,7 +201,7 @@ namespace AntSK.Domain.Domain.Service
new KernelParameterMetadata("jsonbody"){
Name="json参数字符串",
ParameterType=typeof(string),
Description=$"需要根据背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.JsonBody}"
Description=$"背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.JsonBody}"
}
};
functions.Add(_kernel.CreateFunctionFromMethod((string jsonBody) =>

View File

@@ -3,10 +3,6 @@
public class LLamaSharpOption
{
public static string RunType { get; set; }
public static string Chat { get; set; }
public static string Embedding { get; set; }
public static string FileDirectory { get; set; } = Directory.GetCurrentDirectory();
}
}

View File

@@ -44,6 +44,7 @@ namespace AntSK.Domain.Repositories
/// </summary>
public string? EmbeddingModelID { get; set; }
public string? ImageModelID { get; set; }
/// <summary>
/// 温度
/// </summary>

View File

@@ -250,5 +250,16 @@ namespace AntSK.Domain.Utils
return nameValueCollection.ToString();
}
/// <summary>
/// 忽略大小写匹配
/// </summary>
/// <param name="s"></param>
/// <param name="value"></param>
/// <returns></returns>
public static bool ComparisonIgnoreCase(this string s, string value)
{
return s.Equals(value, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -0,0 +1,39 @@
using System;
using System.Collections.Generic;
using System.Drawing.Imaging;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Utils
{
    /// <summary>
    /// Helpers for converting GDI+ bitmaps to Base64 strings.
    /// Marked static: the class only exposes static members (CA1052).
    /// </summary>
    public static class ImageUtils
    {
        /// <summary>
        /// Encodes a bitmap as JPEG and returns the Base64 string of the encoded bytes.
        /// </summary>
        /// <param name="bitmap">Bitmap to encode; caller retains ownership.</param>
        public static string BitmapToBase64(Bitmap bitmap)
        {
            using (MemoryStream memoryStream = new MemoryStream())
            {
                // 保存为JPEG格式也可以选择PngGif等等
                bitmap.Save(memoryStream, ImageFormat.Jpeg);
                // 获取内存流的字节数组
                byte[] imageBytes = memoryStream.ToArray();
                // 将字节转换为Base64字符串
                return Convert.ToBase64String(imageBytes);
            }
        }

        /// <summary>
        /// Converts each bitmap in <paramref name="bitmaps"/> to a Base64 string,
        /// preserving order.
        /// </summary>
        public static List<string> BitmapListToBase64(Bitmap[] bitmaps)
        {
            // Presize to avoid intermediate list growth.
            List<string> base64Strings = new List<string>(bitmaps.Length);
            foreach (Bitmap bitmap in bitmaps)
            {
                base64Strings.Add(BitmapToBase64(bitmap));
            }
            return base64Strings;
        }
    }
}

View File

@@ -1,27 +0,0 @@
import subprocess
import shlex
import os
class Start(object):
    """Launches the llamafactory OpenAI-style API server for a given model."""

    def __init__(self, model_name_or_path):
        # Path or hub id of the model to serve.
        self.model_name_or_path = model_name_or_path

    def StartCommand(self):
        """Run api_demo.py on GPU 0, serving on port 8000 (blocks until exit)."""
        os.environ['CUDA_VISIBLE_DEVICES'] = '0'
        os.environ['API_PORT'] = '8000'
        # Build the command; use the configured model instead of a hard-coded path.
        command = (
            'python api_demo.py'
            ' --model_name_or_path ' + self.model_name_or_path +
            ' --template default '
        )
        # shlex.split() safely tokenizes the command string into an argv list.
        # With a list argv, shell=True must not be used (it misbehaves on POSIX).
        subprocess.run(shlex.split(command))


if __name__ == "__main__":
    star = Start('model_name_or_path')
    star.StartCommand()

View File

@@ -1,49 +0,0 @@
from llmtuner import ChatModel
from llmtuner.extras.misc import torch_gc
try:
import platform
if platform.system() != "Windows":
import readline # noqa: F401
except ImportError:
print("Install `readline` for a better experience.")
def main():
chat_model = ChatModel()
messages = []
print("Welcome to the CLI application, use `clear` to remove the history, use `exit` to exit the application.")
while True:
try:
query = input("\nUser: ")
except UnicodeDecodeError:
print("Detected decoding error at the inputs, please set the terminal encoding to utf-8.")
continue
except Exception:
raise
if query.strip() == "exit":
break
if query.strip() == "clear":
messages = []
torch_gc()
print("History has been removed.")
continue
messages.append({"role": "user", "content": query})
print("Assistant: ", end="", flush=True)
response = ""
for new_text in chat_model.stream_chat(messages):
print(new_text, end="", flush=True)
response += new_text
print()
messages.append({"role": "assistant", "content": response})
if __name__ == "__main__":
main()

View File

@@ -1,10 +0,0 @@
from llmtuner import Evaluator
def main():
evaluator = Evaluator()
evaluator.eval()
if __name__ == "__main__":
main()

View File

@@ -1,9 +0,0 @@
from llmtuner import export_model
def main():
export_model()
if __name__ == "__main__":
main()

View File

@@ -1,14 +0,0 @@
from llmtuner import run_exp
def main():
run_exp()
def _mp_fn(index):
# For xla_spawn (TPUs)
main()
if __name__ == "__main__":
main()

View File

@@ -1,11 +0,0 @@
from llmtuner import create_ui
def main():
demo = create_ui()
demo.queue()
demo.launch(server_name="0.0.0.0", share=False, inbrowser=True)
if __name__ == "__main__":
main()

View File

@@ -1,11 +0,0 @@
from llmtuner import create_web_demo
def main():
demo = create_web_demo()
demo.queue()
demo.launch(server_name="0.0.0.0", share=False, inbrowser=True)
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,4 @@
torch>=1.13.1
torch>=1.13.1 --index-url https://download.pytorch.org/whl/cu121
transformers>=4.37.2
datasets>=2.14.3
accelerate>=0.27.2
@@ -15,4 +15,4 @@ fastapi
sse-starlette
matplotlib
fire
modelscope
modelscope

View File

@@ -0,0 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,7 @@
namespace AntSK.PyNet
{
// Placeholder type; the AntSK.PyNet project currently contains no implementation.
public class Class1
{
}
}

View File

@@ -22,7 +22,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AntSK.LLM", "AntSk.LLM\AntS
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AntSK.Test", "AntSK.Test\AntSK.Test.csproj", "{6AD71410-127F-4C83-95A8-F699C39B44FF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AntSK.LLamaFactory", "AntSK.LLamaFactory\AntSK.LLamaFactory.csproj", "{664DFA1F-68B7-49C7-B889-FA14D1756D3D}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AntSK.LLamaFactory", "AntSK.LLamaFactory\AntSK.LLamaFactory.csproj", "{664DFA1F-68B7-49C7-B889-FA14D1756D3D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution

View File

@@ -7,6 +7,13 @@
<DocumentationFile>AntSK.xml</DocumentationFile>
<NoWarn>CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102</NoWarn>
</PropertyGroup>
<ItemGroup>
<Compile Remove="llamafactory\**" />
<Content Remove="llamafactory\**" />
<EmbeddedResource Remove="llamafactory\**" />
<None Remove="llamafactory\**" />
</ItemGroup>
<ItemGroup>

View File

@@ -3,7 +3,7 @@
@inherits AntDomComponentBase
<Space Class="@ClassMapper.Class" Size="@("26")">
<Image Src="http://img.shields.io/github/stars/aidotnet/antsk?style=social" Width="88px" Height="20px;"></Image>
<button class="github_btn" onclick="window.open('https://github.com/AIDotNet/AntSK')"></button>
<SpaceItem Style="margin-left:20px;">
<AvatarDropdown Name="@context.Identity.Name"
Avatar="@_currentUser.Avatar"
@@ -12,3 +12,14 @@
</SpaceItem>
</Space>
<style>
.github_btn
{
cursor:pointer;
border:none;
background-image: url('http://img.shields.io/github/stars/aidotnet/antsk?style=social');
width:88px;
height:20px;
}
</style>

View File

@@ -35,7 +35,6 @@ namespace AntSK.Controllers
[HttpPost]
public async Task<IActionResult> ImportKMSTask(ImportKMSTaskDTO model)
{
Console.WriteLine("api/kms/ImportKMSTask 开始");
ImportKMSTaskReq req = model.ToDTO<ImportKMSTaskReq>();
KmsDetails detail = new KmsDetails()
{
@@ -49,7 +48,6 @@ namespace AntSK.Controllers
await _kmsDetailsRepositories.InsertAsync(detail);
req.KmsDetail = detail;
_taskBroker.QueueWorkItem(req);
Console.WriteLine("api/kms/ImportKMSTask 结束");
return Ok();
}
}

View File

@@ -1,45 +0,0 @@
using AntSK.Domain.Domain.Model.Dto.OpenAPI;
using AntSK.Services.LLamaSharp;
using Microsoft.AspNetCore.Mvc;
namespace AntSK.Controllers
{
[ApiController]
public class LLamaSharpController(ILLamaSharpService _lLamaSharpService) : ControllerBase
{
/// <summary>
/// 本地会话接口
/// </summary>
/// <returns></returns>
[HttpPost]
[Route("llama/v1/chat/completions")]
public async Task chat(OpenAIModel model)
{
Console.WriteLine("开始llama/v1/chat/completions");
if (model.stream)
{
await _lLamaSharpService.ChatStream(model, HttpContext);
}
else
{
await _lLamaSharpService.Chat(model, HttpContext);
}
Console.WriteLine("结束llama/v1/chat/completions");
}
/// <summary>
/// 本地嵌入接口
/// </summary>
/// <param name="model"></param>
/// <returns></returns>
[HttpPost]
[Route("llama/v1/embeddings")]
public async Task embedding(OpenAIEmbeddingModel model)
{
Console.WriteLine("开始llama/v1/embeddings");
await _lLamaSharpService.Embedding(model, HttpContext);
Console.WriteLine("结束llama/v1/embeddings");
}
}
}

View File

@@ -95,7 +95,7 @@
{
Key = "许泽宇的技术分享",
Title = "许泽宇的技术分享",
Href = "http://studiogpt.cn/",
Href = "./assets/gzh.jpg",
BlankTarget = true,
},
new LinkItem

View File

@@ -29,7 +29,7 @@
{
Key = "许泽宇的技术分享",
Title = "许泽宇的技术分享",
Href = "http://studiogpt.cn/",
Href = "./assets/gzh.jpg",
BlankTarget = true,
},
new LinkItem

View File

@@ -26,30 +26,45 @@
<FormItem Label="类型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<RadioGroup @bind-Value="context.Type">
<Radio RadioButton Value="@AppType.chat.ToString()">会话应用</Radio>
<Radio RadioButton Value="@AppType.kms.ToString()">知识库</Radio>
</RadioGroup>
</FormItem>
<FormItem Label="描述" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Input Placeholder="请输入描述" @bind-Value="@context.Describe" />
</FormItem>
<Radio RadioButton Value="@AppType.kms.ToString()">知识库</Radio>
<Radio RadioButton Value="@AppType.img.ToString()">做图应用</Radio>
</RadioGroup>
</FormItem>
<FormItem Label="描述" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Input Placeholder="请输入描述" @bind-Value="@context.Describe" />
</FormItem>
<FormItem Label="会话模型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@_chatList"
@bind-Value="@context.ChatModelID"
ValueProperty="c=>c.Id"
LabelProperty="c=>'【'+c.AIType.ToString()+'】'+c.ModelDescription">
</Select>
<Button Type="@ButtonType.Link" OnClick="NavigateModelList">去创建</Button>
</FormItem>
<FormItem Label="Embedding模型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@_embedignList"
@bind-Value="@context.EmbeddingModelID"
<FormItem Label="会话模型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@_chatList"
@bind-Value="@context.ChatModelID"
ValueProperty="c=>c.Id"
LabelProperty="c=>'【'+c.AIType.ToString()+'】'+c.ModelDescription">
</Select>
<Button Type="@ButtonType.Link" OnClick="NavigateModelList">去创建</Button>
</FormItem>
@if (@context.Type == AppType.chat.ToString())
@if (@context.Type != AppType.img.ToString())
{
<FormItem Label="向量模型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@_embedingList"
@bind-Value="@context.EmbeddingModelID"
ValueProperty="c=>c.Id"
LabelProperty="c=>'【'+c.AIType.ToString()+'】'+c.ModelDescription">
</Select>
<Button Type="@ButtonType.Link" OnClick="NavigateModelList">去创建</Button>
</FormItem>
}
else
{
<FormItem Label="图片模型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@_imageList"
@bind-Value="@context.ImageModelID"
ValueProperty="c=>c.Id"
LabelProperty="c=>'【'+c.AIType.ToString()+'】'+c.ModelDescription">
</Select>
<Button Type="@ButtonType.Link" OnClick="NavigateModelList">去创建</Button>
</FormItem>
}
@if (@context.Type == AppType.chat.ToString())
{
<FormItem Label="提示词" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
@@ -61,8 +76,8 @@
<Slider TValue="double" Style="display: inline-block;width: 300px; " Min="0" Max="100" DefaultValue="70" @bind-Value="@context.Temperature" />
<span>更发散</span>
</FormItem>
<FormItem Label="API插件列表" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<FormItem Label="API插件列表" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select Mode="multiple"
@bind-Values="apiIds"
Placeholder="选择API插件, 选择后会开启自动调用"

View File

@@ -45,7 +45,8 @@ namespace AntSK.Pages.AppPage
public Dictionary<string, string> _funList = new Dictionary<string, string>();
private List<AIModels> _chatList;
private List<AIModels> _embedignList;
private List<AIModels> _embedingList;
private List<AIModels> _imageList;
protected override async Task OnInitializedAsync()
{
await base.OnInitializedAsync();
@@ -53,7 +54,8 @@ namespace AntSK.Pages.AppPage
_apiList = _apis_Repositories.GetList();
var models=_aimodels_Repositories.GetList();
_chatList = models.Where(p => p.AIModelType == AIModelType.Chat).ToList();
_embedignList = models.Where(p => p.AIModelType == AIModelType.Embedding).ToList();
_embedingList = models.Where(p => p.AIModelType == AIModelType.Embedding).ToList();
_imageList = models.Where(p => p.AIModelType == AIModelType.Image).ToList();
_functionService.SearchMarkedMethods();
foreach (var func in _functionService.Functions)

View File

@@ -58,6 +58,10 @@
<Tag Color="@PresetColor.Green.ToString()">知识库</Tag>
}
else if (context.Type == AppType.img.ToString())
{
<Tag Color="@PresetColor.Lime.ToString()">做图应用</Tag>
}
</DescriptionTemplate>
</CardMeta>
</Card>

View File

@@ -58,7 +58,13 @@ namespace AntSK.Pages.AppPage
private void GetDesc()
{
_desc = @$"为了方便其他应用对接接口符合openai规范省略了温度TopP等参数。{Environment.NewLine}BaseUrl:{Environment.NewLine}{_openApiUrl} {Environment.NewLine}headers:{Environment.NewLine}Authorization:Bearer ""{_appModel.SecretKey}"" {Environment.NewLine}Body: {Environment.NewLine}{JsonConvert.SerializeObject(new OpenAIModel() { messages = new List<OpenAIMessage>() { new OpenAIMessage() { role = "user", content = "" } } }, Formatting.Indented)}";
_desc = @$"为了方便其他应用对接接口符合openai规范省略了温度TopP等参数。
BaseUrl:
{_openApiUrl}
headers:
Authorization:Bearer {_appModel.SecretKey}
Body:
{JsonConvert.SerializeObject(new OpenAIModel() { messages = new List<OpenAIMessage>() { new OpenAIMessage() { role = "user", content = "" } } }, Formatting.Indented)}";
}
private void GetScript()

View File

@@ -2,6 +2,7 @@
@using AntSK.Domain.Repositories
@using AntSK.Models
@using Microsoft.AspNetCore.Components.Web.Virtualization
@using AntSK.Pages.ChatPage.Components
@page "/Chat"
@page "/Chat/{AppId}"
@using AntSK.Services.Auth
@@ -9,89 +10,25 @@
<GridRow Gutter="(16, 16)">
<GridCol Span="12">
<Spin Size="large" Tip="请稍等..." Spinning="@(_loading)">
<Card Style="height:75vh;overflow: auto;">
<TitleTemplate>
<Icon Type="setting" /> 选择应用
<Select DataSource="@_list"
@bind-Value="@AppId"
DefaultValue="@("lucy")"
ValueProperty="c=>c.Id"
LabelProperty="c=>c.Name"
Style="width:200px">
</Select>
<a href="@( NavigationManager.BaseUri + "openchat/" + AppId)" target="_blank">分享使用</a>
</TitleTemplate>
<Body>
<div id="scrollDiv" style="height: calc(75vh - 190px); overflow-y: auto; overflow-x: hidden;">
<GridRow Gutter="(8, 8)" Style="margin:0">
<Virtualize Items="@(MessageList.OrderBy(o => o.CreateTime).ToList())" Context="item">
@if (item.IsSend)
{
<GridRow Style="width:100%">
<GridCol Span="23">
<div class="chat-bubble sent">
<Popover Title="@item.CreateTime.ToString()">
<Unbound>
<Flex Vertical RefBack="context">
@if (item.FileName != null)
{
<p class="message-file">
<Upload DefaultFileList="[new(){ FileName= item.FileName }]" />
</p>
}
<p>@(item.Context)</p>
</Flex>
</Unbound>
</Popover>
</div>
<Icon Style="float:right;margin-top:10px;" Type="copy" Theme="outline" OnClick="async () =>await OnCopyAsync(item)" />
</GridCol>
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;margin-right:10px;" Src="https://gw.alipayobjects.com/zos/rmsportal/KDpgvguMpGfqaHPjicRK.svg" />
</GridCol>
</GridRow>
}
else
{
<GridRow Style="width:100%">
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;" Src="https://gw.alipayobjects.com/zos/antfincdn/aPkFc8Sj7n/method-draw-image.svg" />
</GridCol>
<GridCol Span="23">
<div class="chat-bubble received">
@((MarkupString)(item.HtmlAnswers))
</div>
</GridCol>
</GridRow>
}
</Virtualize>
</GridRow>
</div>
<Flex Vertical>
@if (fileList.Count > 0)
{
<Upload DefaultFileList="fileList" OnRemove="HandleFileRemove" />
}
<AntDesign.Input @bind-Value="@(_messageInput)" DebounceMilliseconds="@(-1)" Placeholder="输入消息回车发送" OnPressEnter="@(async () => await OnSendAsync())" Disabled="@Sendding"></AntDesign.Input>
</Flex>
<Flex Justify="end">
<Upload Action="@("api/File/UploadFile")"
Name="file"
Accept="*/*"
ShowUploadList="false"
BeforeUpload="_kMService.BeforeUpload"
OnSingleCompleted="OnSingleCompleted" >
<Button Icon="@IconType.Outline.Upload" Type="@(ButtonType.Link)" Disabled="@Sendding" />
</Upload>
<Button Icon="clear" Type="@(ButtonType.Link)" OnClick="@(async () => await OnClearAsync())" Disabled="@Sendding"></Button>
<Button Icon="send" Type="@(ButtonType.Link)" OnClick="@(async () => await OnSendAsync())" Disabled="@Sendding"></Button>
</Flex>
</Body>
</Card>
</Spin>
<Card Style="height:75vh;overflow: auto;">
<TitleTemplate>
<Icon Type="setting" /> 选择应用
<Select DataSource="@_list"
@bind-Value="@AppId"
DefaultValue="@("lucy")"
ValueProperty="c=>c.Id"
LabelProperty="c=>c.Name"
Style="width:200px">
</Select>
<a href="@( NavigationManager.BaseUri + "openchat/" + AppId)" target="_blank">分享使用</a>
</TitleTemplate>
<Body>
@if (!string.IsNullOrEmpty(AppId))
{
<ChatView AppId="@AppId" ShowTitle=false OnRelevantSources="OnRelevantSources"></ChatView>
}
</Body>
</Card>
</GridCol>
<GridCol Span="12">
<Card Style="height: 75vh;overflow: auto;">
@@ -116,6 +53,15 @@
</GridRow>
<style>
#chat {
height: calc(75vh - 120px);
display: flex;
flex-direction: column;
overflow-x: hidden;
overflow-y: auto;
font-family: Arial, sans-serif;
margin: 0;
}
body {
font-family: Arial, sans-serif;
@@ -123,39 +69,7 @@
justify-content: center;
align-items: flex-start;
height: 100vh;
}
.chat-container {
width: 350px;
border: 1px solid #ccc;
border-radius: 5px;
overflow: hidden;
display: flex;
flex-direction: column;
background-color: #fff;
padding-bottom: 15px;
}
.chat-bubble {
padding: 10px;
margin: 10px;
margin-bottom: 0;
border-radius: 5px;
max-width: 70%;
position: relative;
}
.received {
background-color: #f0f0f0;
align-self: flex-start;
float: left;
}
.sent {
background-color: #daf8cb;
align-self: flex-end;
float: right;
position: relative;
overflow-y: hidden;
}
</style>
@code {

View File

@@ -8,288 +8,30 @@ using System.Text;
using Markdig;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Domain.Model.Dto;
using Microsoft.SemanticKernel.ChatCompletion;
namespace AntSK.Pages.ChatPage
{
public partial class Chat
{
[Parameter] public string AppId { get; set; }
[Inject] protected MessageService? Message { get; set; }
[Inject] protected IApps_Repositories _apps_Repositories { get; set; }
[Inject] protected IApis_Repositories _apis_Repositories { get; set; }
[Inject] protected IKmss_Repositories _kmss_Repositories { get; set; }
[Inject] protected IKmsDetails_Repositories _kmsDetails_Repositories { get; set; }
[Inject] private IJSRuntime _JSRuntime { get; set; }
[Inject] protected IKernelService _kernelService { get; set; }
[Inject] protected IKMService _kMService { get; set; }
[Inject] private IConfirmService _confirmService { get; set; }
[Inject] private IChatService _chatService { get; set; }
[Inject] private ILogger<Chat> Logger { get; set; }
protected bool _loading = false;
protected List<MessageInfo> MessageList = [];
protected string? _messageInput;
protected string _json = "";
protected bool Sendding = false;
private List<RelevantSource> _relevantSources = new List<RelevantSource>();
protected List<Apps> _list = new List<Apps>();
private List<UploadFileItem> fileList = [];
private Upload _uploadRef;
protected override async Task OnInitializedAsync()
{
await base.OnInitializedAsync();
_list = _apps_Repositories.GetList();
}
protected async Task OnSendAsync()
private void OnRelevantSources(List<RelevantSource> relevantSources)
{
try
{
if (string.IsNullOrWhiteSpace(_messageInput))
{
_ = Message.Info("请输入消息", 2);
return;
}
if (string.IsNullOrWhiteSpace(AppId))
{
_ = Message.Info("请选择应用进行测试", 2);
return;
}
var filePath = fileList.FirstOrDefault()?.Url;
var fileName = fileList.FirstOrDefault()?.FileName;
MessageList.Add(new MessageInfo()
{
ID = Guid.NewGuid().ToString(),
Context = _messageInput,
CreateTime = DateTime.Now,
IsSend = true,
FilePath = filePath,
FileName = fileName
});
var prompt = _messageInput;
_messageInput = "";
fileList.Clear();
Sendding = true;
await SendAsync(prompt, filePath);
Sendding = false;
}
catch (System.Exception ex)
{
Sendding = false;
Logger.LogError(ex, "对话异常");
_ = Message.Error("异常:" + ex.Message, 2);
}
}
protected async Task OnCopyAsync(MessageInfo item)
{
await Task.Run(() => { _messageInput = item.Context; });
}
protected async Task OnClearAsync()
{
if (MessageList.Count > 0)
{
var content = "是否要清理会话记录";
var title = "清理";
var result = await _confirmService.Show(content, title, ConfirmButtons.YesNo);
if (result == ConfirmResult.Yes)
{
MessageList.Clear();
_ = Message.Info("清理成功");
}
}
else
{
_ = Message.Info("没有会话记录");
}
}
protected async Task<bool> SendAsync(string questions, string? filePath)
{
string msg = "";
//处理多轮会话
Apps app = _apps_Repositories.GetFirst(p => p.Id == AppId);
if (MessageList.Count > 0)
{
msg = await HistorySummarize(app, questions);
}
switch (app.Type)
{
case "chat" when filePath == null:
//普通会话
await SendChat(questions, msg, app);
break;
default:
//知识库问答
await SendKms(questions, msg, filePath, app);
break;
}
return await Task.FromResult(true);
}
/// <summary>
/// 发送知识库问答
/// </summary>
/// <param name="questions"></param>
/// <param name="msg"></param>
/// <param name="filePath"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task SendKms(string questions, string msg, string filePath, Apps app)
{
MessageInfo info = null;
var chatResult = _chatService.SendKmsByAppAsync(app, questions, msg, filePath, _relevantSources);
await foreach (var content in chatResult)
{
if (info == null)
{
info = new MessageInfo();
info.ID = Guid.NewGuid().ToString();
info.Context = content?.ConvertToString();
info.HtmlAnswers = content?.ConvertToString();
info.CreateTime = DateTime.Now;
MessageList.Add(info);
}
else
{
info.HtmlAnswers += content.ConvertToString();
await Task.Delay(50);
}
await InvokeAsync(StateHasChanged);
}
//全部处理完后再处理一次Markdown
await MarkDown(info);
}
/// <summary>
/// 发送普通对话
/// </summary>
/// <param name="questions"></param>
/// <param name="history"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task SendChat(string questions, string history, Apps app)
{
MessageInfo info = null;
var chatResult = _chatService.SendChatByAppAsync(app, questions, history);
await foreach (var content in chatResult)
{
if (info == null)
{
info = new MessageInfo();
info.ID = Guid.NewGuid().ToString();
info.Context = content?.ConvertToString();
info.HtmlAnswers = content?.ConvertToString();
info.CreateTime = DateTime.Now;
MessageList.Add(info);
}
else
{
info.HtmlAnswers += content.ConvertToString();
await Task.Delay(50);
}
await InvokeAsync(StateHasChanged);
}
//全部处理完后再处理一次Markdown
await MarkDown(info);
}
private async Task MarkDown(MessageInfo info)
{
if (info.IsNotNull())
{
// info!.HtmlAnswers = markdown.Transform(info.HtmlAnswers);
info!.HtmlAnswers = Markdown.ToHtml(info.HtmlAnswers);
}
await InvokeAsync(StateHasChanged);
await _JSRuntime.InvokeVoidAsync("Prism.highlightAll");
await _JSRuntime.ScrollToBottomAsync("scrollDiv");
}
/// <summary>
/// 历史会话的会话总结
/// </summary>
/// <param name="questions"></param>
/// <returns></returns>
private async Task<string> HistorySummarize(Apps app, string questions)
{
var _kernel = _kernelService.GetKernelByApp(app);
if (MessageList.Count > 1)
{
StringBuilder history = new StringBuilder();
foreach (var item in MessageList)
{
if (item.IsSend)
{
history.Append($"user:{item.Context}{Environment.NewLine}");
}
else
{
history.Append($"assistant:{item.Context}{Environment.NewLine}");
}
}
if (MessageList.Count > 10)
{
//历史会话大于10条进行总结
var msg = await _kernelService.HistorySummarize(_kernel, questions, history.ToString());
return msg;
}
else
{
var msg =
$"history{Environment.NewLine}{history.ToString()}{Environment.NewLine}{Environment.NewLine}";
return msg;
}
}
else
{
return "";
}
}
private void OnSingleCompleted(UploadInfo fileInfo)
{
fileList.Add(new()
{
FileName = fileInfo.File.FileName,
Url = fileInfo.File.Url = fileInfo.File.Response,
Ext = fileInfo.File.Ext,
State = UploadState.Success,
});
_kMService.OnSingleCompleted(fileInfo);
}
private async Task<bool> HandleFileRemove(UploadFileItem file)
{
fileList.RemoveAll(x => x.FileName == file.FileName);
await Task.Yield();
return true;
_relevantSources = relevantSources;
InvokeAsync(StateHasChanged);
}
}
}

View File

@@ -0,0 +1,132 @@
@namespace AntSK.Pages.ChatPage.Components
@using AntSK.Domain.Repositories
@using AntSK.Models
@using Microsoft.AspNetCore.Components.Web.Virtualization
@layout OpenLayout
@inherits AntDomComponentBase
<div id="chat">
@if (ShowTitle)
{
<PageHeader Class="site-page-header" Title="@app.Name" Subtitle="@app.Describe" />
}
<div id="scrollDiv" style="flex:1; width:100%; overflow-y:auto; overflow-x:hidden;padding:10px;">
<Virtualize Items="@(MessageList.OrderBy(o => o.CreateTime).ToList())" Context="item">
@if (item.IsSend)
{
<GridRow>
<GridCol Span="23">
<div class="chat-bubble sent">
<Popover Title="@item.CreateTime.ToString()">
<Unbound>
<Flex Vertical RefBack="context">
@if (item.FileName != null)
{
<p class="message-file">
<Upload DefaultFileList="[new(){ FileName= item.FileName }]" />
</p>
}
<p>@(item.Context)</p>
</Flex>
</Unbound>
</Popover>
</div>
<Icon Style="float:right;margin-top:10px;" Type="copy" Theme="outline" OnClick="async () =>await OnCopyAsync(item)" />
</GridCol>
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;" Src="https://gw.alipayobjects.com/zos/rmsportal/KDpgvguMpGfqaHPjicRK.svg" />
</GridCol>
</GridRow>
}
else
{
<GridRow>
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;" Src="https://gw.alipayobjects.com/zos/antfincdn/aPkFc8Sj7n/method-draw-image.svg" />
</GridCol>
<GridCol Span="23">
<div class="chat-bubble received">
@((MarkupString)(item.HtmlAnswers))
</div>
</GridCol>
</GridRow>
}
</Virtualize>
</div>
@if (fileList.Count > 0)
{
<Flex Vertical>
<Upload DefaultFileList="fileList" OnRemove="HandleFileRemove" />
</Flex>
}
<Flex Justify="end">
<AntDesign.Input @bind-Value="@(_messageInput)" DebounceMilliseconds="@(-1)" Placeholder="输入消息回车发送" OnPressEnter="@(async () => await OnSendAsync())" Disabled="@Sendding"></AntDesign.Input>
@if (app.EmbeddingModelID!=null)
{
<Upload Action="@("api/File/UploadFile")"
Name="file"
Accept="*/*"
ShowUploadList="false"
BeforeUpload="_kMService.BeforeUpload"
OnSingleCompleted="OnSingleCompleted">
<Button Icon="@IconType.Outline.Upload" Type="@(ButtonType.Link)" Disabled="@Sendding" />
</Upload>
}
<Button Icon="clear" Type="@(ButtonType.Link)" OnClick="@(async () => await OnClearAsync())" Disabled="@Sendding"></Button>
<Button Icon="send" Type="@(ButtonType.Link)" OnClick="@(async () => await OnSendAsync())" Disabled="@Sendding"></Button>
</Flex>
</div>
<style>
body {
font-family: Arial, sans-serif;
margin: 0;
justify-content: center;
align-items: flex-start;
height: 100vh;
}
.chat-container {
width: 350px;
border: 1px solid #ccc;
border-radius: 5px;
overflow: hidden;
display: flex;
flex-direction: column;
background-color: #fff;
padding-bottom: 15px;
}
.chat-bubble {
padding: 10px;
margin: 10px;
margin-bottom: 0;
border-radius: 5px;
max-width: 70%;
position: relative;
}
.received {
background-color: #f0f0f0;
align-self: flex-start;
float: left;
}
.sent {
background-color: #daf8cb;
align-self: flex-end;
float: right;
}
.ant-card-body {
height: 90% !important;
}
</style>
@code {
}

View File

@@ -1,44 +1,42 @@
using AntDesign;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Domain.Model.Enum;
using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using Microsoft.AspNetCore.Components;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using SqlSugar;
using System.Text;
using AntSK.Domain.Utils;
using Microsoft.JSInterop;
using AntSK.LLM.StableDiffusion;
using Blazored.LocalStorage;
using Markdig;
using AntSK.Domain.Domain.Model;
using Microsoft.AspNetCore.Components;
using Microsoft.JSInterop;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Newtonsoft.Json;
namespace AntSK.Pages.ChatPage
namespace AntSK.Pages.ChatPage.Components
{
public partial class OpenChat
public partial class ChatView
{
[Parameter]
public string AppId { get; set; }
[Inject]
protected MessageService? Message { get; set; }
[Inject]
protected IApps_Repositories _apps_Repositories { get; set; }
[Inject]
protected IKmss_Repositories _kmss_Repositories { get; set; }
[Inject]
protected IKmsDetails_Repositories _kmsDetails_Repositories { get; set; }
[Inject]
protected IKernelService _kernelService { get; set; }
[Inject]
protected IKMService _kMService { get; set; }
[Inject]
IConfirmService _confirmService { get; set; }
[Inject]
IChatService _chatService { get; set; }
[Parameter]
public bool ShowTitle { get; set; } = false;
[Parameter]
public EventCallback<List<RelevantSource>> OnRelevantSources { get; set; }
[Inject] protected MessageService? Message { get; set; }
[Inject] protected IApps_Repositories _apps_Repositories { get; set; }
[Inject] protected IKmss_Repositories _kmss_Repositories { get; set; }
[Inject] protected IKmsDetails_Repositories _kmsDetails_Repositories { get; set; }
[Inject] protected IKernelService _kernelService { get; set; }
[Inject] protected IKMService _kMService { get; set; }
[Inject] IConfirmService _confirmService { get; set; }
[Inject] IChatService _chatService { get; set; }
[Inject] IJSRuntime _JSRuntime { get; set; }
[Inject] ILocalStorageService _localStorage { get; set; }
protected bool _loading = false;
protected List<MessageInfo> MessageList = [];
protected string? _messageInput;
protected string _json = "";
@@ -46,10 +44,29 @@ namespace AntSK.Pages.ChatPage
protected Apps app = new Apps();
private List<UploadFileItem> fileList = [];
private List<RelevantSource> _relevantSources = new List<RelevantSource>();
protected override async Task OnInitializedAsync()
{
await base.OnInitializedAsync();
app = _apps_Repositories.GetFirst(p => p.Id == AppId);
await LoadData();
var msgs = await _localStorage.GetItemAsync<List<MessageInfo>>("msgs");
if (msgs != null && msgs.Count > 0)
{
MessageList = msgs;
}
}
protected override async Task OnParametersSetAsync()
{
await LoadData();
}
private async Task LoadData()
{
app =await _apps_Repositories.GetFirstAsync(p => p.Id == AppId);
}
protected async Task OnClearAsync()
@@ -62,7 +79,10 @@ namespace AntSK.Pages.ChatPage
if (result == ConfirmResult.Yes)
{
MessageList.Clear();
await _localStorage.SetItemAsync<List<MessageInfo>>("msgs", MessageList);
await InvokeAsync(StateHasChanged);
_ = Message.Info("清理成功");
}
}
else
@@ -79,6 +99,8 @@ namespace AntSK.Pages.ChatPage
_ = Message.Info("请输入消息", 2);
return;
}
var filePath = fileList.FirstOrDefault()?.Url;
var fileName = fileList.FirstOrDefault()?.FileName;
MessageList.Add(new MessageInfo()
{
@@ -87,12 +109,11 @@ namespace AntSK.Pages.ChatPage
CreateTime = DateTime.Now,
IsSend = true
});
Sendding = true;
await SendAsync(_messageInput);
await SendAsync(_messageInput,filePath);
_messageInput = "";
Sendding = false;
Sendding = false;
}
catch (System.Exception ex)
{
@@ -118,30 +139,57 @@ namespace AntSK.Pages.ChatPage
});
}
protected async Task<bool> SendAsync(string questions)
protected async Task<bool> SendAsync(string questions, string? filePath)
{
string msg = "";
ChatHistory history = new ChatHistory();
//处理多轮会话
Apps app = _apps_Repositories.GetFirst(p => p.Id == AppId);
if (MessageList.Count > 0)
{
msg = await HistorySummarize(app, questions);
}
switch (app.Type)
{
case "chat":
//普通会话
await SendChat(questions, msg, app);
break;
case "kms":
//知识库问答
await SendKms(questions, msg, app);
break;
history = await _chatService.GetChatHistory(MessageList);
}
if (app.Type == AppType.chat.ToString() && (filePath == null || app.EmbeddingModelID.IsNull()))
{
await SendChat(questions, history, app);
}
else if (app.Type == AppType.kms.ToString() || filePath != null || app.EmbeddingModelID.IsNotNull())
{
await SendKms(questions, history, app, filePath);
}
else if (app.Type == AppType.img.ToString())
{
await SendImg(questions,app);
}
//缓存消息记录
if (app.Type != AppType.img.ToString())
{
await _localStorage.SetItemAsync<List<MessageInfo>>("msgs", MessageList);
}
return await Task.FromResult(true);
}
private async Task SendImg(string questions,Apps app)
{
MessageInfo info = new MessageInfo();
info.ID = Guid.NewGuid().ToString();
info.CreateTime = DateTime.Now;
var base64= await _chatService.SendImgByAppAsync(app, questions);
if (string.IsNullOrEmpty(base64))
{
info.HtmlAnswers = "生成失败";
}
else
{
info.HtmlAnswers = $"<img src=\"data:image/jpeg;base64,{base64}\" alt=\"Base64 Image\" />";
}
MessageList.Add(info);
}
/// <summary>
/// 发送知识库问答
/// </summary>
@@ -149,10 +197,10 @@ namespace AntSK.Pages.ChatPage
/// <param name="msg"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task SendKms(string questions, string msg, Apps app)
private async Task SendKms(string questions, ChatHistory history, Apps app, string? filePath)
{
MessageInfo info = null;
var chatResult=_chatService.SendKmsByAppAsync(app, questions, "" ,msg);
var chatResult = _chatService.SendKmsByAppAsync(app, questions, history, filePath, _relevantSources);
await foreach (var content in chatResult)
{
if (info == null)
@@ -172,6 +220,7 @@ namespace AntSK.Pages.ChatPage
}
await InvokeAsync(StateHasChanged);
}
await OnRelevantSources.InvokeAsync(_relevantSources);
//全部处理完后再处理一次Markdown
await MarkDown(info);
}
@@ -183,7 +232,7 @@ namespace AntSK.Pages.ChatPage
/// <param name="history"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task SendChat(string questions, string history, Apps app)
private async Task SendChat(string questions, ChatHistory history, Apps app)
{
MessageInfo info = null;
var chatResult = _chatService.SendChatByAppAsync(app, questions, history);
@@ -222,44 +271,23 @@ namespace AntSK.Pages.ChatPage
await _JSRuntime.InvokeVoidAsync("Prism.highlightAll");
await _JSRuntime.ScrollToBottomAsync("scrollDiv");
}
/// <summary>
/// 历史会话的会话总结
/// </summary>
/// <param name="questions"></param>
/// <returns></returns>
private async Task<string> HistorySummarize(Apps app, string questions)
private void OnSingleCompleted(UploadInfo fileInfo)
{
var _kernel = _kernelService.GetKernelByApp(app);
if (MessageList.Count > 1)
fileList.Add(new()
{
StringBuilder history = new StringBuilder();
foreach (var item in MessageList)
{
if (item.IsSend)
{
history.Append($"user:{item.Context}{Environment.NewLine}");
}
else
{
history.Append($"assistant:{item.Context}{Environment.NewLine}");
}
}
if (MessageList.Count > 10)
{
//历史会话大于10条进行总结
var msg = await _kernelService.HistorySummarize(_kernel, questions, history.ToString());
return msg;
}
else
{
var msg = $"history{history.ToString()}{Environment.NewLine} user{questions}"; ;
return msg;
}
}
else
{
return "";
}
FileName = fileInfo.File.FileName,
Url = fileInfo.File.Url = fileInfo.File.Response,
Ext = fileInfo.File.Ext,
State = UploadState.Success,
});
_kMService.OnSingleCompleted(fileInfo);
}
private async Task<bool> HandleFileRemove(UploadFileItem file)
{
fileList.RemoveAll(x => x.FileName == file.FileName);
await Task.Yield();
return true;
}
}
}

View File

@@ -2,100 +2,26 @@
@using AntSK.Domain.Repositories
@using AntSK.Models
@using Microsoft.AspNetCore.Components.Web.Virtualization
@using AntSK.Pages.ChatPage.Components
@page "/OpenChat/{AppId}"
@layout OpenLayout
<div id="chat" style="display:flex; flex-direction:column; height:100%; overflow-x:hidden;">
<PageHeader Class="site-page-header" Title="@app.Name" Subtitle="@app.Describe" />
<div id="scrollDiv" style="flex:1; width:100%; overflow-y:auto; overflow-x:hidden;padding:10px;">
<Virtualize Items="@(MessageList.OrderBy(o => o.CreateTime).ToList())" Context="item">
@if (item.IsSend)
{
<GridRow>
<GridCol Span="23">
<div class="chat-bubble sent">
<Popover Title="@item.CreateTime.ToString()">
@(item.Context)
</Popover>
</div>
<Icon Style="float:right;margin-top:10px;" Type="copy" Theme="outline" OnClick="async () =>await OnCopyAsync(item)" />
</GridCol>
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;" Src="https://gw.alipayobjects.com/zos/rmsportal/KDpgvguMpGfqaHPjicRK.svg" />
</GridCol>
</GridRow>
}
else
{
<GridRow>
<GridCol Span="1">
<Image Width="25px" Height="25px" Style="margin-top:10px;" Src="https://gw.alipayobjects.com/zos/antfincdn/aPkFc8Sj7n/method-draw-image.svg" />
</GridCol>
<GridCol Span="23">
<div class="chat-bubble received">
@((MarkupString)(item.HtmlAnswers))
</div>
<ChatView AppId="@AppId" ShowTitle=true>
</GridCol>
</GridRow>
}
</Virtualize>
</div>
<div style="flex-shrink:0;margin:10px;">
<AntDesign.Input @bind-Value="@(_messageInput)" DebounceMilliseconds="@(-1)" Placeholder="输入消息回车发送" OnPressEnter="@(async () => await OnSendAsync())" Disabled="@Sendding">
<Suffix>
<Button Icon="clear" Type="@(ButtonType.Link)" OnClick="@(async () => await OnClearAsync())" Disabled="@Sendding"></Button>
<Button Icon="send" Type="@(ButtonType.Link)" OnClick="@(async () => await OnSendAsync())" Disabled="@Sendding"></Button>
</Suffix>
</AntDesign.Input>
</div>
</div>
</ChatView>
<style>
body {
#chat{
height:100% ;
display:flex;
flex-direction: column;
overflow-x: hidden;
font-family: Arial, sans-serif;
margin: 0;
padding: 10px;
justify-content: center;
align-items: flex-start;
height: 100vh;
}
.chat-container {
width: 350px;
border: 1px solid #ccc;
border-radius: 5px;
overflow: hidden;
display: flex;
flex-direction: column;
background-color: #fff;
padding-bottom: 15px;
}
.chat-bubble {
padding: 10px;
margin: 10px;
margin-bottom: 0;
border-radius: 5px;
max-width: 70%;
position: relative;
}
.received {
background-color: #f0f0f0;
align-self: flex-start;
float: left;
}
.sent {
background-color: #daf8cb;
align-self: flex-end;
float: right;
}
</style>
@code {
}
@code{
[Parameter]
public string AppId { get; set; }
}

View File

@@ -1,3 +0,0 @@
.ant-card-body {
height: 90% !important;
}

View File

@@ -116,7 +116,7 @@
</Card>
</SpaceItem>
<SpaceItem>
<Card Bordered="true" Title=@("📱模型管理") Hoverable="true" Style="height:260px;width:300px">
<Card Bordered="true" Title=@("📱模型集成与管理") Hoverable="true" Style="height:260px;width:300px">
<Body>
<Text>1、适配和管理集成不同厂商的不同模型</Text>
<br>
@@ -124,8 +124,12 @@
2、并且支持llama.cpp所支持的gguf类型的模型离线运行
</Text>
<br>
<Text>
3、支持llamafactory所支持的模型包括baichuan、gemma、yuan、yi等
</Text>
<br>
<Text>
3、未来将实现模型的训练、微调、部署一站式服务
4、未来将实现模型的训练、微调、部署一站式服务
</Text>
<br>
</Body>
@@ -148,16 +152,17 @@
</Card>
</SpaceItem>
<SpaceItem>
<Card Bordered="true" Title=@("🥤如果本项目帮助到了您") Hoverable="true" Style="height:260px;width:300px">
<Card Bordered="true" Title=@("❤项目最新进展") Hoverable="true" Style="height:260px;width:300px">
<Body>
<Image Height="170" Src="./assets/zfb.png" />
<Image Height="170" Src="./assets/gzh.jpg" />
</Body>
</Card>
</SpaceItem>
<SpaceItem>
<Card Bordered="true" Title=@("您可用以下方式支持~🥤") Hoverable="true" Style="height:260px;width:300px">
<Card Bordered="true" Title=@("🥤项目赞助") Hoverable="true" Style="height:260px;width:300px">
<Body>
<Image Height="170" Src="./assets/wx.png" />
<Image Width="110" Src="./assets/zfb.png" />
<Image Width="110" Src="./assets/wx.png" />
</Body>
</Card>
</SpaceItem>

View File

@@ -24,18 +24,32 @@
</FormItem>
<FormItem Label="AI类型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<EnumRadioGroup @bind-Value="context.AIType"></EnumRadioGroup>
<EnumRadioGroup @bind-Value="context.AIType" ButtonStyle="RadioButtonStyle.Solid"> </EnumRadioGroup>
</FormItem>
<FormItem Label="模型类型" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<RadioGroup @bind-Value="context.AIModelType">
<Radio RadioButton Value="@(AIModelType.Chat)">会话模型</Radio>
<Radio RadioButton Value="@(AIModelType.Embedding)">向量模型</Radio>
@if (context.AIType == AIType.StableDiffusion)
{
<Radio RadioButton Value="@(AIModelType.Image)">图片模型</Radio>
}
else
{
@if (context.AIType != AIType.BgeEmbedding)
{
<Radio RadioButton Value="@(AIModelType.Chat)">会话模型</Radio>
}
@if (context.AIType != AIType.LLamaFactory && context.AIType != AIType.Mock)
{
<Radio RadioButton Value="@(AIModelType.Embedding)">向量模型</Radio>
}
}
</RadioGroup>
</FormItem>
@if (context.AIModelType == AIModelType.Embedding)
{
<FormItem Label="注意事项" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<b>请不要使用不同维度的向量模型,否则会导致无法向量存储</b>
<span style="color:red"><b>请不要使用不同维度的向量模型,否则会导致无法向量存储</b></span>
</FormItem>
}
@@ -84,7 +98,7 @@
<Input Placeholder="请输入模型名称" @bind-Value="@context.ModelName" />
</FormItem>
}
@if (context.AIType == AIType.LLamaSharp)
@if (context.AIType == AIType.LLamaSharp || context.AIType == AIType.StableDiffusion)
{
<FormItem Label="模型路径" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<InputGroup>
@@ -125,10 +139,30 @@
}
</InputGroup>
</FormItem>
<FormItem Label="环境安装" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Button Type="primary" OnClick="PipInstall" >初始化</Button>
<FormItem Label="环境安装" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Button Type="primary" OnClick="PipInstall">初始化</Button>
</FormItem>
}
@if (context.AIType == AIType.BgeEmbedding)
{
<FormItem Label="模型名称" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Select DataSource="@bgeEmbeddingList"
@bind-Value="@context.ModelName"
ValueProperty="c=>c"
LabelProperty="c=>c">
</Select>
</FormItem>
<FormItem Label="PythonDll路径" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Input Placeholder="D:\Programs\Python\Python311\python311.dll" @bind-Value="@context.EndPoint" />
</FormItem>
<FormItem Label="下载并初始化" LabelCol="LayoutModel._formItemLayout.LabelCol" WrapperCol="LayoutModel._formItemLayout.WrapperCol">
<Spin Tip="请等待..." Spinning="@(BgeIsStart)" >
<Button Type="primary" Disabled="@(BgeIsStart)" OnClick="BgeDownload">@BgeBtnText</Button>
</Spin>
</FormItem>
}
@if (context.AIType == AIType.Mock)
{
}
@@ -168,3 +202,7 @@
</Modal>
@code
{
}

View File

@@ -3,6 +3,7 @@ using AntDesign.ProLayout;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Domain.Model.Enum;
using AntSK.Domain.Domain.Other;
using AntSK.Domain.Domain.Service;
using AntSK.Domain.Options;
using AntSK.Domain.Repositories;
@@ -12,6 +13,7 @@ using BlazorComponents.Terminal;
using DocumentFormat.OpenXml.Office2010.Excel;
using Downloader;
using Microsoft.AspNetCore.Components;
using NRedisStack.Search;
using System.ComponentModel;
namespace AntSK.Pages.Setting.AIModel
@@ -58,6 +60,10 @@ namespace AntSK.Pages.Setting.AIModel
private TerminalParagraph para;
private bool _logModalVisible;
private List<string> bgeEmbeddingList = new List<string>() { "AI-ModelScope/bge-small-zh-v1.5", "AI-ModelScope/bge-base-zh-v1.5", "AI-ModelScope/bge-large-zh-v1.5" };
private bool BgeIsStart = false;
private string BgeBtnText = "初始化";
protected override async Task OnInitializedAsync()
{
try
@@ -67,23 +73,37 @@ namespace AntSK.Pages.Setting.AIModel
{
_aiModel = _aimodels_Repositories.GetFirst(p => p.Id == ModelId);
}
//目前只支持gguf的 所以筛选一下
_modelFiles = Directory.GetFiles(Path.Combine(Directory.GetCurrentDirectory(), LLamaSharpOption.FileDirectory)).Where(p=>p.Contains(".gguf")).ToArray();
if (!string.IsNullOrEmpty(ModelPath))
{
//下载页跳入
_aiModel.AIType = AIType.LLamaSharp;
_downloadModalVisible = true;
_downloadUrl = $"https://hf-mirror.com{ModelPath.Replace("---","/")}";
}
modelList = _ILLamaFactoryService.GetLLamaFactoryModels();
llamaFactoryDic = await _IDics_Repositories.GetFirstAsync(p => p.Type == LLamaFactoryConstantcs.LLamaFactorDic && p.Key == LLamaFactoryConstantcs.IsStartKey);
if (llamaFactoryDic != null)
{
llamaFactoryIsStart= llamaFactoryDic.Value== "false" ? false:true;
llamaFactoryIsStart = llamaFactoryDic.Value == "false" ? false : true;
}
//目前只支持gguf的 所以筛选一下
_modelFiles = Directory.GetFiles(Path.Combine(Directory.GetCurrentDirectory(), LLamaSharpOption.FileDirectory)).Where(p=> p.Contains(".gguf")||p.Contains(".ckpt")|| p.Contains(".safetensors")).ToArray();
if (!string.IsNullOrEmpty(ModelPath))
{
string extension = Path.GetExtension(ModelPath);
switch (extension)
{
case ".gguf":
_aiModel.AIType = AIType.LLamaSharp;
break;
case ".safetensors":
case ".ckpt":
_aiModel.AIType = AIType.StableDiffusion;
break;
}
//下载页跳入
_downloadModalVisible = true;
_downloadUrl = $"https://hf-mirror.com{ModelPath.Replace("---","/")}";
}
}
catch
{
@@ -244,9 +264,40 @@ namespace AntSK.Pages.Setting.AIModel
{
_logModalVisible = true;
_ILLamaFactoryService.LogMessageReceived += CmdLogHandler;
_ILLamaFactoryService.PipInstall();
await _ILLamaFactoryService.PipInstall();
}
}
private async Task BgeDownload()
{
if (string.IsNullOrEmpty(_aiModel.ModelName))
{
_ = Message.Error("请输入模型名称!", 2);
return;
}
if (string.IsNullOrEmpty(_aiModel.EndPoint))
{
_ = Message.Error("请输入正确的Python dll路径", 2);
return;
}
BgeIsStart = true;
BgeBtnText = "正在初始化...";
await Task.Run(() =>
{
try
{
EmbeddingConfig.LoadModel(_aiModel.EndPoint, _aiModel.ModelName);
BgeBtnText = "初始化完成";
BgeIsStart = false;
}
catch (System.Exception ex)
{
_ = Message.Error(ex.Message, 2);
BgeIsStart = false;
}
});
}
private async Task CmdLogHandler(string message)
{
await InvokeAsync(() =>
@@ -262,5 +313,10 @@ namespace AntSK.Pages.Setting.AIModel
private void OnCancelLog() {
_logModalVisible = false;
}
private void AITypeModelChange()
{
}
}
}

View File

@@ -10,12 +10,19 @@
<PageContainer Title="模型列表">
<Content>
<RadioGroup @bind-Value="@_modelType">
<Radio Value="@("gguf")" DefaultChecked=true>LLama本地模型(gguf)</Radio>
<Radio Value="@("safetensors")">StableDiffusion(safetensors)</Radio>
<Radio Value="@("ckpt")">StableDiffusion2(ckpt)</Radio>
</RadioGroup>
<div style="text-align: center;">
<Search Placeholder="输入回车"
EnterButton="@("搜索")"
Size="large"
Style="max-width: 522px; width: 100%;"
OnSearch="Search" />
</div>
</Content>
<ChildContent>

View File

@@ -16,7 +16,7 @@ namespace AntSK.Pages.Setting.AIModel
private readonly IList<string> _selectCategories = new List<string>();
private List<HfModels> _modelList = new List<HfModels>();
private string _modelType;
protected override async Task OnInitializedAsync()
{
await base.OnInitializedAsync();
@@ -27,7 +27,7 @@ namespace AntSK.Pages.Setting.AIModel
{
var param = searchKey.ConvertToString().Split(" ");
string urlBase = "https://hf-mirror.com/models-json?sort=trending&search=gguf";
string urlBase = $"https://hf-mirror.com/models-json?sort=trending&search={_modelType}";
if (param.Count() > 0)
{
urlBase += "+" + string.Join("+", param);

View File

@@ -70,6 +70,14 @@
{
<Tag Color="@PresetColor.Cyan.ToString()">LLamaFactory</Tag>
}
else if (context.AIType == AIType.BgeEmbedding)
{
<Tag Color="@PresetColor.Gold.ToString()">BgeEmbedding</Tag>
}
else if (context.AIType == AIType.StableDiffusion)
{
<Tag Color="@PresetColor.Lime.ToString()">StableDiffusion</Tag>
}
</p>
</div>
@@ -85,6 +93,10 @@
{
<Tag Color="@PresetColor.Green.ToString()">向量模型</Tag>
}
else if (context.AIModelType == AIModelType.Image)
{
<Tag Color="@PresetColor.Lime.ToString()">图片模型</Tag>
}
</p>
</div>
<div class="listContentItem" style="width:20%">

View File

@@ -8,7 +8,7 @@
<div class="login">
<Form Model="@_model" OnFinish="HandleSubmit">
<Tabs ActiveKey="@context.LoginType">
<TabPane Key="1" Tab="账号登">
<TabPane Key="1" Tab="账号登">
<FormItem>
<AntDesign.Input Placeholder="请输入账号" Size="large" @bind-Value="@context.UserName">
<Prefix><Icon Type="user" /></Prefix>
@@ -21,7 +21,7 @@
</FormItem>
</TabPane>
</Tabs>
<Button Type="primary" HtmlType="submit" Class="submit" Size="large" Block>登</Button>
<Button Type="primary" HtmlType="submit" Class="submit" Size="large" Block>登</Button>
</Form>
</div>
</div>

View File

@@ -9,6 +9,7 @@ using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using AntSK.plugins.Functions;
using AntSK.Services.Auth;
using Blazored.LocalStorage;
using LLama.Native;
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.Components.Authorization;
@@ -48,6 +49,8 @@ builder.Services.AddServicesFromAssemblies("AntSK.Domain");
builder.Services.AddSingleton(sp => new FunctionService(sp, [typeof(AntSK.App).Assembly]));
builder.Services.AddScoped<FunctionTest>();
builder.Services.AddAntSKSwagger();
builder.Services.AddBlazoredLocalStorage(config =>
config.JsonSerializerOptions.WriteIndented = true);
//Mapper
builder.Services.AddMapper();
//后台队列任务

View File

@@ -1,102 +0,0 @@
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Options;
using LLama;
using LLama.Common;
namespace AntSK.Services.LLamaSharp
{
public interface ILLamaChatService
{
Task<string> ChatAsync(string input);
IAsyncEnumerable<string> ChatStreamAsync(string input);
}
/// <summary>
///
/// </summary>
[ServiceDescription(typeof(ILLamaChatService), Domain.Common.DependencyInjection.ServiceLifetime.Singleton)]
public class LLamaChatService : IDisposable, ILLamaChatService
{
private readonly ChatSession _session;
private readonly LLamaContext _context;
private readonly ILogger<LLamaChatService> _logger;
private bool _continue = false;
private const string SystemPrompt = "You are a personal assistant who needs to help users .";
public LLamaChatService(ILogger<LLamaChatService> logger)
{
var @params = new ModelParams(LLamaSharpOption.Chat)
{
ContextSize = 2048,
};
// todo: share weights from a central service
using var weights = LLamaWeights.LoadFromFile(@params);
_logger = logger;
_context = new LLamaContext(weights, @params);
_session = new ChatSession(new InteractiveExecutor(_context));
_session.History.AddMessage(AuthorRole.System, SystemPrompt);
}
public void Dispose()
{
_context?.Dispose();
}
public async Task<string> ChatAsync(string input)
{
if (!_continue)
{
_logger.LogInformation("Prompt: {text}", SystemPrompt);
_continue = true;
}
_logger.LogInformation("Input: {text}", input);
var outputs = _session.ChatAsync(
new ChatHistory.Message(AuthorRole.User, input),
new InferenceParams()
{
RepeatPenalty = 1.0f,
AntiPrompts = new string[] { "User:" },
});
var result = "";
await foreach (var output in outputs)
{
_logger.LogInformation("Message: {output}", output);
result += output;
}
return result;
}
public async IAsyncEnumerable<string> ChatStreamAsync(string input)
{
if (!_continue)
{
_logger.LogInformation(SystemPrompt);
_continue = true;
}
_logger.LogInformation(input);
var outputs = _session.ChatAsync(
new ChatHistory.Message(AuthorRole.User, input!)
, new InferenceParams()
{
RepeatPenalty = 1.0f,
AntiPrompts = new string[] { "User:" },
});
await foreach (var output in outputs)
{
_logger.LogInformation(output);
yield return output;
}
}
}
}

View File

@@ -1,40 +0,0 @@
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Options;
using LLama;
using LLama.Common;
namespace AntSK.Services.LLamaSharp
{
public interface ILLamaEmbeddingService
{
Task<List<float>> Embedding(string text);
}
/// <summary>
/// 本地Embedding
/// </summary>
[ServiceDescription(typeof(ILLamaEmbeddingService), Domain.Common.DependencyInjection.ServiceLifetime.Singleton)]
public class LLamaEmbeddingService : IDisposable, ILLamaEmbeddingService
{
private LLamaEmbedder _embedder;
public LLamaEmbeddingService()
{
var @params = new ModelParams(LLamaSharpOption.Embedding) { EmbeddingMode = true };
using var weights = LLamaWeights.LoadFromFile(@params);
_embedder = new LLamaEmbedder(weights, @params);
}
public void Dispose()
{
_embedder?.Dispose();
}
public async Task<List<float>> Embedding(string text)
{
float[] embeddings = await _embedder.GetEmbeddings(text);
//PG只有1536维
return embeddings.ToList();
}
}
}

View File

@@ -1,68 +0,0 @@
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Domain.Model.Dto.OpenAPI;
using AntSK.Domain.Utils;
using Newtonsoft.Json;
using System.Text;
using ServiceLifetime = AntSK.Domain.Common.DependencyInjection.ServiceLifetime;
namespace AntSK.Services.LLamaSharp
{
public interface ILLamaSharpService
{
Task Chat(OpenAIModel model, HttpContext HttpContext);
Task ChatStream(OpenAIModel model, HttpContext HttpContext);
Task Embedding(OpenAIEmbeddingModel model, HttpContext HttpContext);
}
[ServiceDescription(typeof(ILLamaSharpService), ServiceLifetime.Scoped)]
public class LLamaSharpService(
ILLamaEmbeddingService _lLamaEmbeddingService,
ILLamaChatService _lLamaChatService
) : ILLamaSharpService
{
public async Task ChatStream(OpenAIModel model, HttpContext HttpContext)
{
HttpContext.Response.Headers.Add("Content-Type", "text/event-stream");
OpenAIStreamResult result = new OpenAIStreamResult();
result.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result.choices = new List<StreamChoicesModel>() { new StreamChoicesModel() { delta = new OpenAIMessage() { role = "assistant" } } };
string questions = model.messages.LastOrDefault().content;
await foreach (var r in _lLamaChatService.ChatStreamAsync(questions))
{
result.choices[0].delta.content = r.ConvertToString();
string message = $"data: {JsonConvert.SerializeObject(result)}\n\n";
await HttpContext.Response.WriteAsync(message, Encoding.UTF8);
await HttpContext.Response.Body.FlushAsync();
}
await HttpContext.Response.WriteAsync("data: [DONE]");
await HttpContext.Response.Body.FlushAsync();
await HttpContext.Response.CompleteAsync();
}
public async Task Chat(OpenAIModel model, HttpContext HttpContext)
{
string questions = model.messages.LastOrDefault().content;
OpenAIResult result = new OpenAIResult();
result.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result.choices = new List<ChoicesModel>() { new ChoicesModel() { message = new OpenAIMessage() { role = "assistant" } } };
result.choices[0].message.content = await _lLamaChatService.ChatAsync(questions); ;
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result));
await HttpContext.Response.CompleteAsync();
}
public async Task Embedding(OpenAIEmbeddingModel model, HttpContext HttpContext)
{
var result = new OpenAIEmbeddingResult();
result.data[0].embedding = await _lLamaEmbeddingService.Embedding(model.input[0]);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result));
await HttpContext.Response.CompleteAsync();
}
}
}

View File

@@ -5,6 +5,7 @@ using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Newtonsoft.Json;
using System.Text;
@@ -35,7 +36,7 @@ namespace AntSK.Services.OpenApi
Apps app = _apps_Repositories.GetFirst(p => p.SecretKey == token);
if (app.IsNotNull())
{
string msg = await HistorySummarize(app, model);
(string questions,ChatHistory history) = await GetHistory(model);
switch (app.Type)
{
case "chat":
@@ -46,7 +47,7 @@ namespace AntSK.Services.OpenApi
result1.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result1.choices = new List<StreamChoicesModel>()
{ new StreamChoicesModel() { delta = new OpenAIMessage() { role = "assistant" } } };
await SendChatStream(HttpContext, result1, app, msg);
await SendChatStream(HttpContext, result1, app, questions,history);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result1));
await HttpContext.Response.CompleteAsync();
@@ -58,7 +59,7 @@ namespace AntSK.Services.OpenApi
result2.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result2.choices = new List<ChoicesModel>()
{ new ChoicesModel() { message = new OpenAIMessage() { role = "assistant" } } };
result2.choices[0].message.content = await SendChat(msg, app);
result2.choices[0].message.content = await SendChat(questions,history, app);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result2));
await HttpContext.Response.CompleteAsync();
@@ -74,7 +75,7 @@ namespace AntSK.Services.OpenApi
result3.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result3.choices = new List<StreamChoicesModel>()
{ new StreamChoicesModel() { delta = new OpenAIMessage() { role = "assistant" } } };
await SendKmsStream(HttpContext, result3, app, msg);
await SendKmsStream(HttpContext, result3, app, questions,history);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result3));
await HttpContext.Response.CompleteAsync();
@@ -85,7 +86,7 @@ namespace AntSK.Services.OpenApi
result4.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result4.choices = new List<ChoicesModel>()
{ new ChoicesModel() { message = new OpenAIMessage() { role = "assistant" } } };
result4.choices[0].message.content = await SendKms(msg, app);
result4.choices[0].message.content = await SendKms(questions,history, app);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result4));
await HttpContext.Response.CompleteAsync();
@@ -96,10 +97,10 @@ namespace AntSK.Services.OpenApi
}
}
private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app, string msg)
private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app,string questions, ChatHistory history)
{
HttpContext.Response.Headers.Add("Content-Type", "text/event-stream");
var chatResult = _chatService.SendChatByAppAsync(app, msg, "");
var chatResult = _chatService.SendChatByAppAsync(app, questions, history);
await foreach (var content in chatResult)
{
result.choices[0].delta.content = content.ConvertToString();
@@ -119,17 +120,35 @@ namespace AntSK.Services.OpenApi
/// <summary>
/// 发送普通对话
/// </summary>
/// <param name="msg"></param>
/// <param name="questions"></param>
/// <param name="history"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task<string> SendChat(string msg, Apps app)
private async Task<string> SendChat(string questions, ChatHistory history, Apps app)
{
string result = "";
if (string.IsNullOrEmpty(app.Prompt) || !app.Prompt.Contains("{{$input}}"))
{
//如果模板为空,给默认提示词
app.Prompt = app.Prompt.ConvertToString() + "{{$input}}";
}
KernelArguments args = new KernelArguments();
if (history.Count > 10)
{
app.Prompt = @"${{ConversationSummaryPlugin.SummarizeConversation $history}}" + app.Prompt;
args = new() {
{ "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) },
{ "input", questions }
};
}
else
{
args = new()
{
{ "input", $"{string.Join("\n", history.Select(x => x.Role + ": " + x.Content))}{Environment.NewLine} user:{questions}" }
};
}
var _kernel = _kernelService.GetKernelByApp(app);
var temperature = app.Temperature / 100; //存的是0~100需要缩小
@@ -139,9 +158,8 @@ namespace AntSK.Services.OpenApi
_kernelService.ImportFunctionsByApp(app, _kernel);
settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
}
var func = _kernel.CreateFunctionFromPrompt(app.Prompt, settings);
var chatResult =await _kernel.InvokeAsync(function: func, arguments: new KernelArguments() { ["input"] = msg });
var chatResult =await _kernel.InvokeAsync(function: func, arguments: args);
if (chatResult.IsNotNull())
{
string answers = chatResult.GetValue<string>();
@@ -151,10 +169,10 @@ namespace AntSK.Services.OpenApi
return result;
}
private async Task SendKmsStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app, string msg)
private async Task SendKmsStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app, string questions,ChatHistory history)
{
HttpContext.Response.Headers.Add("Content-Type", "text/event-stream");
var chatResult = _chatService.SendKmsByAppAsync(app, msg,"", "");
var chatResult = _chatService.SendKmsByAppAsync(app, questions, history, "");
int i = 0;
await foreach (var content in chatResult)
{
@@ -175,15 +193,15 @@ namespace AntSK.Services.OpenApi
/// <summary>
/// 发送知识库问答
/// </summary>
/// <param name="msg"></param>
/// <param name="questions"></param>
/// <param name="app"></param>
/// <returns></returns>
private async Task<string> SendKms(string msg, Apps app)
private async Task<string> SendKms(string questions, ChatHistory history, Apps app)
{
string result = "";
var _kernel = _kernelService.GetKernelByApp(app);
var relevantSource = await _kMService.GetRelevantSourceList(app.KmsIdList, msg);
var relevantSource = await _kMService.GetRelevantSourceList(app.KmsIdList, questions);
var dataMsg = new StringBuilder();
if (relevantSource.Any())
{
@@ -192,9 +210,9 @@ namespace AntSK.Services.OpenApi
dataMsg.AppendLine(item.ToString());
}
KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask");
KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask1");
var chatResult = await _kernel.InvokeAsync(function: jsonFun,
arguments: new KernelArguments() { ["doc"] = dataMsg, ["history"] = "", ["questions"] = msg });
arguments: new KernelArguments() { ["doc"] = dataMsg, ["history"] = string.Join("\n", history.Select(x => x.Role + ": " + x.Content)), ["questions"] = questions });
if (chatResult.IsNotNull())
{
string answers = chatResult.GetValue<string>();
@@ -211,29 +229,27 @@ namespace AntSK.Services.OpenApi
/// <param name="app"></param>
/// <param name="model"></param>
/// <returns></returns>
private async Task<string> HistorySummarize(Apps app, OpenAIModel model)
private async Task<(string,ChatHistory)> GetHistory(OpenAIModel model)
{
var _kernel = _kernelService.GetKernelByApp(app);
StringBuilder history = new StringBuilder();
ChatHistory history = new ChatHistory();
string questions = model.messages[model.messages.Count - 1].content;
for (int i = 0; i < model.messages.Count() - 1; i++)
{
var item = model.messages[i];
history.Append($"{item.role}:{item.content}{Environment.NewLine}");
}
if (model.messages.Count() > 10)
{
//历史会话大于10条进行总结
var msg = await _kernelService.HistorySummarize(_kernel, questions, history.ToString());
return msg;
}
else
{
var msg = $"history{history.ToString()}{Environment.NewLine} user{questions}";
;
return msg;
if (item.role.ComparisonIgnoreCase("user"))
{
history.AddUserMessage(item.content);
}
else if (item.role.ComparisonIgnoreCase("assistant"))
{
history.AddAssistantMessage(item.content);
}
else if (item.role.ComparisonIgnoreCase("system"))
{
history.AddSystemMessage(item.content);
}
}
return (questions,history);
}
}
}

View File

@@ -35,8 +35,6 @@
},
"LLamaSharp": {
"RunType": "GPU",
"Chat": "D:\\git\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"Embedding": "D:\\git\\AntBlazor\\model\\qwen1_5-1_8b-chat-q8_0.gguf",
"FileDirectory": "D:\\git\\AntBlazor\\model"
},
"Login": {

View File

@@ -1,7 +1,7 @@
Facts:
{{$doc}}
--------------------------
History:{{$history}}
History:{{ConversationSummaryPlugin.SummarizeConversation $history}}
--------------------------
Question: {{$questions}}
--------------------------

View File

@@ -11,7 +11,7 @@
- 如果Markdown有图片则正常显示
--------------------------
历史聊天记录:{{$history}}
历史聊天记录:{{ConversationSummaryPlugin.SummarizeConversation $history}}
--------------------------
用户问题: {{$questions}}

Binary file not shown.

After

Width:  |  Height:  |  Size: 180 KiB

View File

@@ -57,5 +57,11 @@
"key": "setting.modeldown"
}
]
},
{
"path": "http://antsk.cn/",
"name": "文档",
"key": "antskdoc",
"icon": "question-circle"
}
]

View File

@@ -10,9 +10,16 @@
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="RestSharp" Version="110.2.0" />
<PackageReference Include="Cnblogs.KernelMemory.AI.DashScope" Version="0.1.0" />
<PackageReference Include="Cnblogs.SemanticKernel.Connectors.DashScope" Version="0.2.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.2" />
<PackageReference Include="Cnblogs.SemanticKernel.Connectors.DashScope" Version="0.3.2" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.3" />
<PackageReference Include="Sdcb.SparkDesk" Version="3.0.0" />
<PackageReference Include="System.Drawing.Common" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<None Update="stable-diffusion.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -68,10 +68,10 @@ namespace AntSK.LLM.SparkDesk
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", func.Description, p.IsRequired)).ToList())).ToList();
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
//var messages = GetHistories(prompt);
var messages = new ChatMessage[] { new ChatMessage("user", prompt) };
var messages = new ChatMessage[] { new ChatMessage("user", prompt) };
return GetStreamingMessageAsync(messages, parameters, functionDefs, cancellationToken);
@@ -108,7 +108,7 @@ namespace AntSK.LLM.SparkDesk
{
error = $"参数{parameter.Name}解析错误:{ex.Message}";
}
if (!string.IsNullOrEmpty(error))
{
yield return new(error);
@@ -118,12 +118,13 @@ namespace AntSK.LLM.SparkDesk
var result = (await function.InvokeAsync(kernel, arguments, cancellationToken)).GetValue<object>() ?? string.Empty;
var stringResult = ProcessFunctionResult(result, chatExecutionSettings.ToolCallBehavior);
messages = [.. messages, ChatMessage.FromUser($"""
function call result:
{stringResult}
""")];
messages = [ChatMessage.FromSystem($"""
用户意图{func.Description}结果是:{stringResult}
functionDefs.RemoveAll(x => x.Name == msg.FunctionCall.Name);
请结合用户的提问回复:
"""), ChatMessage.FromUser(prompt)];
functionDefs.Clear();
await foreach (var content in GetStreamingMessageAsync(messages, parameters, functionDefs, cancellationToken))
{

View File

@@ -0,0 +1,108 @@
using System;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using int32_t = Int32;
using int64_t = Int64;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
internal class Native
{
const string DllName = "stable-diffusion";
internal delegate void SdLogCallback(SdLogLevel level, [MarshalAs(UnmanagedType.LPStr)] string text, IntPtr data);
internal delegate void SdProgressCallback(int step, int steps, float time, IntPtr data);
[DllImport(DllName, EntryPoint = "new_sd_ctx", CallingConvention = CallingConvention.Cdecl)]
internal extern static SdContext new_sd_ctx(string model_path,
string vae_path,
string taesd_path,
string control_net_path_c_str,
string lora_model_dir,
string embed_dir_c_str,
string stacked_id_embed_dir_c_str,
bool vae_decode_only,
bool vae_tiling,
bool free_params_immediately,
int n_threads,
WeightType weightType,
RngType rng_type,
ScheduleType s,
bool keep_clip_on_cpu,
bool keep_control_net_cpu,
bool keep_vae_on_cpu);
[DllImport(DllName, EntryPoint = "txt2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr txt2img(SdContext sd_ctx,
string prompt,
string negative_prompt,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
int64_t seed,
int batch_count,
SDImagePtr control_cond,
float control_strength,
float style_strength,
bool normalize_input,
string input_id_images_path);
[DllImport(DllName, EntryPoint = "img2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr img2img(SdContext sd_ctx,
SDImage init_image,
string prompt_c_str,
string negative_prompt_c_str,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
float strength,
int64_t seed,
int batch_count);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern IntPtr preprocess_canny(IntPtr imgData,
int width,
int height,
float high_threshold,
float low_threshold,
float weak,
float strong,
bool inverse);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern UpscalerContext new_upscaler_ctx(string esrgan_path,
int n_threads,
WeightType wtype);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern int32_t get_num_physical_cores();
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_sd_ctx(SdContext sd_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_upscaler_ctx(UpscalerContext upscaler_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImage upscale(UpscalerContext upscaler_ctx, SDImage input_image, int upscale_factor);
[DllImport(DllName, EntryPoint = "sd_set_log_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_log_callback(SdLogCallback cb, IntPtr data);
[DllImport(DllName, EntryPoint = "sd_set_progress_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_progress_callback(SdProgressCallback cb, IntPtr data);
}
}

View File

@@ -0,0 +1,217 @@
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
/// <summary>
/// Managed facade over the native stable-diffusion library: loads model /
/// upscaler contexts, runs txt2img / img2img / upscaling, and converts
/// between native <see cref="SDImage"/> buffers and GDI+ <see cref="Bitmap"/>s.
/// </summary>
public static class SDHelper
{
    /// <summary>True when a diffusion model context is loaded (see <see cref="Initialize"/>).</summary>
    public static bool IsInitialized => SdContext.Zero != sd_ctx;

    /// <summary>True when an upscaler context is loaded (see <see cref="InitializeUpscaler"/>).</summary>
    public static bool IsUpscalerInitialized => UpscalerContext.Zero != upscaler_ctx;

    // Native handles (IntPtr aliases); Zero means "not loaded".
    private static SdContext sd_ctx = new SdContext();
    private static UpscalerContext upscaler_ctx = new UpscalerContext();

    /// <summary>Raised for each log message emitted by the native library.</summary>
    public static event EventHandler<StableDiffusionEventArgs.StableDiffusionLogEventArgs> Log;

    /// <summary>Raised after each sampling step with progress information.</summary>
    public static event EventHandler<StableDiffusionEventArgs.StableDiffusionProgressEventArgs> Progress;

    // Held in static readonly fields so the GC cannot collect the delegates
    // while the native library still holds function pointers to them.
    static readonly Native.SdLogCallback sd_Log_Cb;
    static readonly Native.SdProgressCallback sd_Progress_Cb;

    static SDHelper()
    {
        sd_Log_Cb = new Native.SdLogCallback(OnNativeLog);
        Native.sd_set_log_callback(sd_Log_Cb, IntPtr.Zero);
        sd_Progress_Cb = new Native.SdProgressCallback(OnProgressRunning);
        Native.sd_set_progress_callback(sd_Progress_Cb, IntPtr.Zero);
    }

    /// <summary>
    /// Creates the native diffusion context from <paramref name="modelParams"/>.
    /// </summary>
    /// <returns>True when the native context was created successfully.</returns>
    public static bool Initialize(ModelParams modelParams)
    {
        sd_ctx = Native.new_sd_ctx(modelParams.ModelPath,
            modelParams.VaePath,
            modelParams.TaesdPath,
            modelParams.ControlnetPath,
            modelParams.LoraModelDir,
            modelParams.EmbeddingsPath,
            modelParams.StackedIdEmbeddingsPath,
            modelParams.VaeDecodeOnly,
            modelParams.VaeTiling,
            modelParams.FreeParamsImmediately,
            modelParams.Threads,
            modelParams.SdType,
            modelParams.RngType,
            modelParams.Schedule,
            modelParams.KeepClipOnCpu,
            modelParams.KeepControlNetOnCpu,
            modelParams.KeepVaeOnCpu);
        return SdContext.Zero != sd_ctx;
    }

    /// <summary>
    /// Creates the native ESRGAN upscaler context.
    /// </summary>
    /// <returns>True when the native context was created successfully.</returns>
    public static bool InitializeUpscaler(UpscalerParams @params)
    {
        upscaler_ctx = Native.new_upscaler_ctx(@params.ESRGANPath, @params.Threads, @params.SdType);
        return UpscalerContext.Zero != upscaler_ctx;
    }

    /// <summary>Frees the diffusion context if one is loaded; safe to call repeatedly.</summary>
    public static void FreeSD()
    {
        if (SdContext.Zero != sd_ctx)
        {
            Native.free_sd_ctx(sd_ctx);
            sd_ctx = SdContext.Zero;
        }
    }

    /// <summary>Frees the upscaler context if one is loaded; safe to call repeatedly.</summary>
    public static void FreeUpscaler()
    {
        if (UpscalerContext.Zero != upscaler_ctx)
        {
            Native.free_upscaler_ctx(upscaler_ctx);
            upscaler_ctx = UpscalerContext.Zero;
        }
    }

    /// <summary>
    /// Runs text-to-image generation and returns one bitmap per batch item.
    /// </summary>
    /// <exception cref="InvalidOperationException">The model has not been initialized.</exception>
    public static Bitmap[] TextToImage(TextToImageParams textToImageParams)
    {
        // BUGFIX: was ArgumentNullException("Model not loaded!") which passed the
        // message as the parameter name; an uninitialized context is a state error.
        if (!IsInitialized) throw new InvalidOperationException("Model not loaded!");
        // NOTE(review): textToImageParams.ControlCond is never marshalled here —
        // SDImagePtr.Zero is always passed for the control image. Confirm intent.
        SDImagePtr sd_Image_ptr = Native.txt2img(sd_ctx,
            textToImageParams.Prompt,
            textToImageParams.NegativePrompt,
            textToImageParams.ClipSkip,
            textToImageParams.CfgScale,
            textToImageParams.Width,
            textToImageParams.Height,
            textToImageParams.SampleMethod,
            textToImageParams.SampleSteps,
            textToImageParams.Seed,
            textToImageParams.BatchCount,
            SDImagePtr.Zero,
            textToImageParams.ControlStrength,
            textToImageParams.StyleStrength,
            textToImageParams.NormalizeInput,
            textToImageParams.InputIdImagesPath);
        // The native call returns an array of BatchCount SDImage structs.
        Bitmap[] images = new Bitmap[textToImageParams.BatchCount];
        for (int i = 0; i < textToImageParams.BatchCount; i++)
        {
            SDImage sd_image = Marshal.PtrToStructure<SDImage>(sd_Image_ptr + i * Marshal.SizeOf<SDImage>());
            images[i] = GetBitmapFromSdImage(sd_image);
        }
        // NOTE(review): the native image buffers are copied but never freed here;
        // a native free routine for txt2img results is not visible in this file — verify.
        return images;
    }

    /// <summary>
    /// Runs image-to-image generation and returns the first result image.
    /// </summary>
    /// <exception cref="InvalidOperationException">The model has not been initialized.</exception>
    public static Bitmap ImageToImage(ImageToImageParams imageToImageParams)
    {
        // BUGFIX: same exception-type correction as TextToImage.
        if (!IsInitialized) throw new InvalidOperationException("Model not loaded!");
        SDImage input_sd_image = GetSDImageFromBitmap(imageToImageParams.InputImage);
        SDImagePtr sdImgPtr = Native.img2img(sd_ctx,
            input_sd_image,
            imageToImageParams.Prompt,
            imageToImageParams.NegativePrompt,
            imageToImageParams.ClipSkip,
            imageToImageParams.CfgScale,
            imageToImageParams.Width,
            imageToImageParams.Height,
            imageToImageParams.SampleMethod,
            imageToImageParams.SampleSteps,
            imageToImageParams.Strength,
            imageToImageParams.Seed,
            imageToImageParams.BatchCount);
        // Only the first batch result is marshalled, even when BatchCount > 1.
        SDImage sdImg = Marshal.PtrToStructure<SDImage>(sdImgPtr);
        return GetBitmapFromSdImage(sdImg);
    }

    /// <summary>
    /// Upscales <paramref name="image"/> by <paramref name="upscaleFactor"/> via ESRGAN.
    /// </summary>
    /// <exception cref="InvalidOperationException">The upscaler has not been initialized.</exception>
    public static Bitmap UpscaleImage(Bitmap image, int upscaleFactor)
    {
        // BUGFIX: same exception-type correction as TextToImage.
        if (!IsUpscalerInitialized) throw new InvalidOperationException("Upscaler not loaded!");
        SDImage inputSDImg = GetSDImageFromBitmap(image);
        SDImage result = Native.upscale(upscaler_ctx, inputSDImg, upscaleFactor);
        return GetBitmapFromSdImage(result);
    }

    // Converts a native RGB SDImage buffer into a 24bpp BGR Bitmap.
    private static Bitmap GetBitmapFromSdImage(SDImage sd_Image)
    {
        int width = (int)sd_Image.Width;
        int height = (int)sd_Image.Height;
        int channel = (int)sd_Image.Channel;
        byte[] bytes = new byte[width * height * channel];
        Marshal.Copy(sd_Image.Data, bytes, 0, bytes.Length);

        // Swap RGB (native) -> BGR (GDI+) in a tightly-packed staging buffer.
        int rowBytes = width * channel;
        byte[] des = new byte[bytes.Length];
        for (int i = 0; i < height; i++)
        {
            for (int j = 0; j < width; j++)
            {
                des[rowBytes * i + channel * j + 0] = bytes[rowBytes * i + channel * j + 2];
                des[rowBytes * i + channel * j + 1] = bytes[rowBytes * i + channel * j + 1];
                des[rowBytes * i + channel * j + 2] = bytes[rowBytes * i + channel * j + 0];
            }
        }

        Bitmap bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb);
        BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, bmp.PixelFormat);
        try
        {
            // BUGFIX: copy row-by-row honoring BitmapData.Stride. Bitmap rows are
            // padded to a 4-byte boundary, so a single bulk copy of width*channel
            // bytes per row corrupts output (and can overrun) when width % 4 != 0.
            for (int i = 0; i < height; i++)
            {
                Marshal.Copy(des, i * rowBytes, bitmapData.Scan0 + i * bitmapData.Stride, rowBytes);
            }
        }
        finally
        {
            bmp.UnlockBits(bitmapData);
        }
        return bmp;
    }

    // Converts a Bitmap into a tightly-packed RGB SDImage for the native API.
    private static SDImage GetSDImageFromBitmap(Bitmap bmp)
    {
        int width = bmp.Width;
        int height = bmp.Height;
        int channel = Bitmap.GetPixelFormatSize(bmp.PixelFormat) / 8;
        int rowBytes = width * channel;
        byte[] bytes = new byte[width * height * channel];
        BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, bmp.PixelFormat);
        try
        {
            // BUGFIX: read row-by-row honoring BitmapData.Stride (rows are padded
            // to a 4-byte boundary); a single bulk copy reads padding bytes into
            // pixel data when width % 4 != 0.
            for (int i = 0; i < height; i++)
            {
                Marshal.Copy(bitmapData.Scan0 + i * bitmapData.Stride, bytes, i * rowBytes, rowBytes);
            }
        }
        finally
        {
            bmp.UnlockBits(bitmapData);
        }

        // Swap BGR (GDI+) -> RGB (native). Assumes a 3-channel (24bpp) bitmap —
        // TODO confirm callers never pass 32bpp input.
        byte[] sdImageBytes = new byte[bytes.Length];
        for (int i = 0; i < height; i++)
        {
            for (int j = 0; j < width; j++)
            {
                sdImageBytes[rowBytes * i + j * 3 + 0] = bytes[rowBytes * i + j * 3 + 2];
                sdImageBytes[rowBytes * i + j * 3 + 1] = bytes[rowBytes * i + j * 3 + 1];
                sdImageBytes[rowBytes * i + j * 3 + 2] = bytes[rowBytes * i + j * 3 + 0];
            }
        }
        SDImage sd_Image = new SDImage
        {
            Width = (uint)width,
            Height = (uint)height,
            Channel = 3,
            // NOTE(review): the array is NOT actually pinned; UnsafeAddrOfPinnedArrayElement
            // on an unpinned array is a GC hazard if a collection moves it before the
            // native call consumes the pointer. A GCHandle.Alloc(..., Pinned) with an
            // explicit lifetime would be safer — flagged, not changed (interface impact).
            Data = Marshal.UnsafeAddrOfPinnedArrayElement(sdImageBytes, 0),
        };
        return sd_Image;
    }

    // Native log callback: forwards to the managed Log event.
    private static void OnNativeLog(SdLogLevel level, string text, IntPtr data)
    {
        Log?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionLogEventArgs { Level = level, Text = text });
    }

    // Native progress callback: forwards to the managed Progress event.
    private static void OnProgressRunning(int step, int steps, float time, IntPtr data)
    {
        Progress?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionProgressEventArgs { Step = step, Steps = steps, Time = time });
    }
}
}

View File

@@ -0,0 +1,33 @@
using System;
using static AntSK.LLM.StableDiffusion.Structs;
namespace AntSK.LLM.StableDiffusion
{
/// <summary>
/// Container for the event-argument types raised by the Stable Diffusion wrapper.
/// </summary>
public class StableDiffusionEventArgs
{
    /// <summary>
    /// Payload for progress events: the current sampling step out of the total,
    /// plus the time spent on the step.
    /// </summary>
    public class StableDiffusionProgressEventArgs : EventArgs
    {
        /// <summary>Index of the step just completed.</summary>
        public int Step { get; set; }

        /// <summary>Total number of sampling steps.</summary>
        public int Steps { get; set; }

        /// <summary>Seconds taken by the current step.</summary>
        public float Time { get; set; }

        /// <summary>Opaque user-data pointer from the native callback.</summary>
        public IntPtr Data { get; set; }

        /// <summary>Completed fraction, i.e. Step divided by Steps.</summary>
        public double Progress => Step / (double)Steps;

        /// <summary>Step rate derived from the per-step time.</summary>
        public float IterationsPerSecond => 1.0f / Time;
    }

    /// <summary>
    /// Payload for log events: a severity level and the message text.
    /// </summary>
    public class StableDiffusionLogEventArgs : EventArgs
    {
        /// <summary>Severity reported by the native library.</summary>
        public SdLogLevel Level { get; set; }

        /// <summary>Log message text.</summary>
        public string Text { get; set; }
    }
}

View File

@@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.LLM.StableDiffusion
{
// Currently empty — presumably a placeholder for service-level Stable Diffusion
// APIs to be added later. TODO confirm intent or remove if unused.
public static class StableDiffusionService
{
}
}

View File

@@ -0,0 +1,154 @@
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using int64_t = Int64;
using uint32_t = UInt32;
// Parameter holders, interop structs and enums shared between the managed
// wrapper (SDHelper) and the native stable-diffusion library (Native).
// Field order and explicit enum values mirror the native API — keep in sync.
public class Structs
{
// Options passed to Native.new_sd_ctx when loading a diffusion model.
public class ModelParams
{
public string ModelPath = string.Empty;
public string VaePath = string.Empty;
public string TaesdPath = string.Empty;
public string ControlnetPath = string.Empty;
public string LoraModelDir = string.Empty;
public string EmbeddingsPath = string.Empty;
public string StackedIdEmbeddingsPath = string.Empty;
public bool VaeDecodeOnly = false;
public bool VaeTiling = true;
public bool FreeParamsImmediately = false;
// Defaults to the physical core count reported by the native library.
public int Threads = Native.get_num_physical_cores();
// SD_TYPE_COUNT appears to act as an "unspecified/auto" sentinel — TODO confirm.
public WeightType SdType = WeightType.SD_TYPE_COUNT;
public RngType RngType = RngType.CUDA_RNG;
public ScheduleType Schedule = ScheduleType.DEFAULT;
public bool KeepClipOnCpu = false;
public bool KeepControlNetOnCpu = false;
public bool KeepVaeOnCpu = false;
}
// Options for SDHelper.TextToImage / Native.txt2img.
public class TextToImageParams
{
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = 0;
public float CfgScale = 7;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
// -1 is commonly a "random seed" sentinel in SD tooling — TODO confirm native semantics.
public int64_t Seed = -1;
public int BatchCount = 1;
// NOTE(review): not currently marshalled by SDHelper.TextToImage (Zero is passed).
public Bitmap ControlCond = new Bitmap(1, 1);
public float ControlStrength = 0.9f;
public float StyleStrength = 0.75f;
public bool NormalizeInput = false;
public string InputIdImagesPath = string.Empty;
}
// Options for SDHelper.ImageToImage / Native.img2img.
public class ImageToImageParams
{
// Source image; converted to a packed RGB SDImage before the native call.
public Bitmap InputImage;
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = -1;
public float CfgScale = 7.0f;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
// Denoising strength: how strongly the input image is altered.
public float Strength = 0.75f;
public int64_t Seed = 42;
public int BatchCount = 1;
}
// Options for SDHelper.InitializeUpscaler / Native.new_upscaler_ctx.
public class UpscalerParams
{
public string ESRGANPath = string.Empty;
public int Threads = Native.get_num_physical_cores();
public WeightType SdType = WeightType.SD_TYPE_COUNT;
}
// Interop mirror of the native image struct: packed pixel buffer plus dimensions.
// Sequential layout must match the native definition exactly.
[StructLayout(LayoutKind.Sequential)]
internal struct SDImage
{
public uint32_t Width;
public uint32_t Height;
public uint32_t Channel;
// Pointer to the raw pixel bytes (Width * Height * Channel).
public IntPtr Data;
}
// Weight/quantization types; explicit values mirror the native enum — do not renumber.
public enum WeightType
{
SD_TYPE_F32 = 0,
SD_TYPE_F16 = 1,
SD_TYPE_Q4_0 = 2,
SD_TYPE_Q4_1 = 3,
// SD_TYPE_Q4_2 = 4, support has been removed
// SD_TYPE_Q4_3 (5) support has been removed
SD_TYPE_Q5_0 = 6,
SD_TYPE_Q5_1 = 7,
SD_TYPE_Q8_0 = 8,
SD_TYPE_Q8_1 = 9,
// k-quantizations
SD_TYPE_Q2_K = 10,
SD_TYPE_Q3_K = 11,
SD_TYPE_Q4_K = 12,
SD_TYPE_Q5_K = 13,
SD_TYPE_Q6_K = 14,
SD_TYPE_Q8_K = 15,
SD_TYPE_IQ2_XXS = 16,
SD_TYPE_IQ2_XS = 17,
SD_TYPE_IQ3_XXS = 18,
SD_TYPE_IQ1_S = 19,
SD_TYPE_IQ4_NL = 20,
SD_TYPE_IQ3_S = 21,
SD_TYPE_IQ2_S = 22,
SD_TYPE_IQ4_XS = 23,
SD_TYPE_I8,
SD_TYPE_I16,
SD_TYPE_I32,
SD_TYPE_COUNT,
};
// Random-number-generator selection for sampling.
public enum RngType
{
STD_DEFAULT_RNG,
CUDA_RNG
};
// Noise schedule selection; N_SCHEDULES is the count sentinel.
public enum ScheduleType
{
DEFAULT,
DISCRETE,
KARRAS,
N_SCHEDULES
};
// Sampler selection; N_SAMPLE_METHODS is the count sentinel.
public enum SampleMethod
{
EULER_A,
EULER,
HEUN,
DPM2,
DPMPP2S_A,
DPMPP2M,
DPMPP2Mv2,
LCM,
N_SAMPLE_METHODS
};
// Log severity passed to the native log callback.
public enum SdLogLevel
{
Debug,
Info,
Warn,
Error
}
}
}

View File

@@ -10,6 +10,7 @@
</PropertyGroup>
<PropertyGroup>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Authors>xuzeyu91</Authors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />