Compare commits

...

385 Commits
0.2.4 ... 0.6.0

Author SHA1 Message Date
zyxucp
48a8670f01 update llamafactory 2025-02-18 15:23:46 +08:00
zyxucp
2c8efd2eef Merge branch 'main' of https://github.com/AIDotNet/AntSK 2025-02-17 16:34:02 +08:00
zyxucp
a4acb80151 add embedding model 2025-02-17 16:28:31 +08:00
zyxucp
a8f143995d Update LICENSE 2025-02-17 10:44:48 +08:00
zyxucp
e46b02d70b Update LICENSE 2025-02-17 10:41:06 +08:00
zyxucp
31ea75ecba Merge branch 'main' of https://github.com/AIDotNet/AntSK 2025-02-13 17:23:28 +08:00
zyxucp
e2142deb4a update 2025-02-13 17:22:30 +08:00
zyxucp
a6b11bb33d Update README.zh.md 2025-02-11 22:57:43 +08:00
zyxucp
773073b847 update comments 2025-02-11 18:44:16 +08:00
zyxucp
2bd38b96e7 update: fix file upload Q&A bug 2025-02-11 18:43:30 +08:00
zyxucp
f7bcbcfc95 update single-file publish 2025-02-11 16:55:04 +08:00
zyxucp
6f5429695c update 2025-02-11 16:07:41 +08:00
zyxucp
547a3b5919 update: adjust minimum retrieval similarity 2025-02-06 15:13:28 +08:00
zyxucp
358909892c update deepseek thinking 2025-02-06 11:01:42 +08:00
zyxucp
6c1bc4be04 add deepseek-r1 ollama 2025-02-03 18:37:14 +08:00
zyxucp
431c754f42 Update README.md 2025-01-13 14:29:39 +08:00
zyxucp
f46faa7679 Update README.md 2025-01-13 14:27:33 +08:00
zyxucp
195053cbd7 update Spark 2024-12-17 12:14:33 +08:00
zyxucp
b41d79c247 Merge pull request #108 from itchangc/main
BAAI/bge-reranker-v2-minicpm-layerwise
2024-11-20 14:24:40 +08:00
zhaochang
5acab7a46f todo 2024-11-17 20:12:34 +08:00
zhaochang
5b83c844a1 BAAI/bge-reranker-v2-minicpm-layerwise 2024-11-17 20:11:51 +08:00
token
2325c58092 Update Chats.cs 2024-11-04 05:16:07 +08:00
zyxucp
04d16148b5 add rerank api 2024-10-30 11:13:39 +08:00
zyxucp
f0f14889c7 update docker yml 2024-10-24 15:11:52 +08:00
zyxucp
dedafdd108 fix sk update bug 2024-10-24 15:11:25 +08:00
zyxucp
fad2b4e733 Update docker-compose.yml 2024-10-23 18:05:39 +08:00
zyxucp
032de7a447 Update docker-compose.simple.yml 2024-10-23 18:05:24 +08:00
zyxucp
cb0df4d4af update sk and km version 2024-10-23 18:03:21 +08:00
zyxucp
c810f85cea update nuget 2024-10-23 17:06:27 +08:00
zyxucp
6700b75684 update del icon 2024-10-23 09:40:15 +08:00
zyxucp
ab15dd6e99 add SecretKey search 2024-10-18 09:52:47 +08:00
zyxucp
13a4419705 update index 2024-10-14 14:10:04 +08:00
zyxucp
cf03465e23 Update README.zh.md 2024-10-06 10:35:45 +08:00
zyxucp
a441730508 Update README.md 2024-10-06 10:34:45 +08:00
zyxucp
6e30886bd2 Update README.md 2024-10-06 10:34:21 +08:00
zyxucp
bc2af42724 Update README.md 2024-10-06 10:31:35 +08:00
zyxucp
919fc5dea7 Update docker-compose.yml 2024-10-06 10:30:50 +08:00
zyxucp
9ab287fabd Update docker-compose.simple.yml 2024-10-06 10:30:36 +08:00
zyxucp
cf02efc2da update nuget 2024-10-05 18:33:30 +08:00
zyxucp
1c34ad5987 fix bug 2024-10-05 18:26:36 +08:00
zyxucp
4e8039703e Update README.zh.md 2024-09-29 10:57:48 +08:00
zyxucp
c2b97c7f82 Update README.md 2024-09-29 10:57:17 +08:00
zyxucp
1a621f5cbc Update docker-compose.yml 2024-09-29 10:54:39 +08:00
zyxucp
836f898ffe Update docker-compose.simple.yml 2024-09-29 10:54:11 +08:00
zyxucp
0a9a737709 Merge pull request #105 from AIDotNet/feature_0.5.1
Feature 0.5.1
2024-09-29 10:53:09 +08:00
zyxucp
0df8c74ec2 del sd 2024-09-29 10:52:27 +08:00
zyxucp
5e3ff74eaa update AntBlazor 2024-09-29 10:48:58 +08:00
zyxucp
7c49ff0a6c update exception 2024-09-29 10:43:44 +08:00
zyxucp
63f5267bca update 2024-09-29 10:41:09 +08:00
zyxucp
e0c35aac06 update style 2024-09-29 10:36:30 +08:00
zyxucp
27d52d3331 update nuget 2024-09-03 09:22:01 +08:00
zyxucp
e7b2c6e193 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-09-02 09:37:45 +08:00
zyxucp
7600397b79 update style 2024-09-02 09:37:34 +08:00
zyxucp
874b8e5d7f Update LICENSE 2024-08-28 14:48:23 +08:00
zyxucp
3ac18086a1 Update README.zh.md 2024-08-28 12:41:04 +08:00
zyxucp
16049c7413 Update README.md 2024-08-28 12:40:37 +08:00
zyxucp
7bb7a41bb3 Update menu.json 2024-08-28 10:30:25 +08:00
zyxucp
6c37ed66b2 Update menu.json 2024-08-28 10:18:31 +08:00
zyxucp
bc86f96159 Update README.md 2024-08-27 23:43:01 +08:00
zyxucp
8eb09fb783 Update README.zh.md 2024-08-27 22:33:33 +08:00
zyxucp
eff5f69f0f fix rerank failures caused by environment dependencies 2024-08-26 10:15:31 +08:00
zyxucp
e3f966d4f2 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-08-23 18:08:55 +08:00
zyxucp
015f51b99c update KmsIdList length 2024-08-23 18:08:44 +08:00
zyxucp
cd66b61014 Update README.md 2024-08-20 10:23:49 +08:00
zyxucp
f0bef7d2fa Update docker-compose.simple.yml 2024-08-18 14:13:17 +08:00
zyxucp
de051b047d Update docker-compose.yml 2024-08-18 14:13:04 +08:00
zyxucp
ed6f5dada2 Update README.md 2024-08-18 13:54:33 +08:00
zyxucp
d2e3fde829 Update README.zh.md 2024-08-18 13:54:03 +08:00
zyxucp
195551e9c1 Update README.md 2024-08-18 13:51:02 +08:00
zyxucp
855103c2a4 Update README.md 2024-08-18 13:50:03 +08:00
zyxucp
6150d543d3 Update README.md 2024-08-18 13:47:08 +08:00
zyxucp
d968d78982 Update LICENSE 2024-08-18 13:44:10 +08:00
zyxucp
0ec5d1f1cf Update README.zh.md 2024-08-18 13:39:33 +08:00
zyxucp
31f44c1758 Update README.md 2024-08-18 13:39:15 +08:00
zyxucp
e5f63d605d Merge pull request #102 from AIDotNet/feature_delllamasharp
Feature delllamasharp
2024-08-18 13:37:51 +08:00
zyxucp
7db62e3dc6 fix: prompt changes 2024-08-18 13:37:20 +08:00
zyxucp
4408fa4345 add default values 2024-08-18 13:36:42 +08:00
zyxucp
c5e952b98e update 2024-08-18 13:33:17 +08:00
zyxucp
bedfeaf53d add usage 2024-08-18 13:22:34 +08:00
zyxucp
d605fd6685 fix: remove llamasharp 2024-08-18 13:20:40 +08:00
zyxucp
e5e3f7cd8f Update README.md 2024-08-15 09:52:24 +08:00
zyxucp
657949694c Update README.zh.md 2024-08-15 09:50:50 +08:00
zyxucp
10b6035f84 Update README.md 2024-08-15 09:49:54 +08:00
zyxucp
3f9fe27456 Update README.md 2024-08-15 09:46:14 +08:00
zyxucp
da3a0681e5 Update README.zh.md 2024-08-07 14:38:37 +08:00
zyxucp
57b7948d86 Update README.zh.md 2024-08-07 14:17:00 +08:00
zyxucp
40b8bd0439 Update README.zh.md 2024-08-07 14:05:21 +08:00
zyxucp
6ed9cc9b70 Update README.md 2024-08-07 14:03:01 +08:00
zyxucp
e51bf35217 update version 2024-08-05 22:23:03 +08:00
zyxucp
28f88438e7 Update README.md 2024-08-05 22:21:57 +08:00
zyxucp
85f4a330d5 Merge pull request #100 from AIDotNet/feature_request-encoding
add request encoding handling
2024-08-05 22:20:42 +08:00
zyxucp
21d7c719f1 add request encoding handling 2024-08-05 22:20:18 +08:00
zyxucp
4ef398bd57 Merge pull request #99 from AIDotNet/feature_offline
fix: handle offline resource files
2024-08-05 22:15:39 +08:00
zyxucp
dc70270362 fix: handle offline resource files 2024-08-05 22:13:00 +08:00
zyxucp
97b7211cce fix case-sensitivity issue 2024-08-05 12:54:37 +08:00
zyxucp
3e762e13af fix directory case-sensitivity issue 2024-08-05 12:53:33 +08:00
zyxucp
e084317a46 fix directory case-sensitivity issue 2024-08-05 12:52:59 +08:00
zyxucp
531b4473e8 Merge pull request #98 from AIDotNet/feature_nuget
Feature nuget
2024-08-05 11:31:09 +08:00
zyxucp
aefd0d2775 update 2024-08-05 11:30:52 +08:00
zyxucp
960468edf0 update nuget 2024-08-05 11:16:32 +08:00
zyxucp
07ad1f58b5 update sk nuget 2024-08-05 11:03:51 +08:00
zyxucp
095428be50 add llama3 2024-08-05 10:52:51 +08:00
zyxucp
87fc8911fa Merge pull request #97 from AIDotNet/feature_css
fix local JS issue
2024-08-05 10:50:02 +08:00
zyxucp
58272e1ce8 fix local JS issue 2024-08-05 10:49:39 +08:00
zyxucp
700bbcb63f update nuget 2024-07-20 18:14:42 +08:00
zyxucp
dde1d68876 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-07-20 18:13:04 +08:00
zyxucp
71553a6153 fix: set bge default to CPU 2024-07-20 18:12:52 +08:00
zyxucp
d4f8de3e21 Merge pull request #96 from AIDotNet/feature_pyruntime
fix pyruntime
2024-07-20 18:11:25 +08:00
zyxucp
6cf5dea10d fix pyruntime 2024-07-20 18:11:03 +08:00
zyxucp
05379dfee6 Update README.zh.md 2024-07-13 14:18:57 +08:00
zyxucp
5a6d49ff64 Update README.md 2024-07-13 14:18:35 +08:00
zyxucp
64ab940a26 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-07-03 21:22:18 +08:00
zyxucp
55982ea36d add ollama model list 2024-07-03 21:22:08 +08:00
zyxucp
21efcf2479 update copy function 2024-07-03 13:07:26 +08:00
zyxucp
0dc7bfcadb add delay 2024-07-01 17:10:24 +08:00
zyxucp
22d99091e1 fix string += 2024-07-01 11:52:18 +08:00
zyxucp
7558d3ffdc add ollama modellist 2024-06-30 18:51:01 +08:00
zyxucp
85ae41c44c add utf8 2024-06-30 18:43:48 +08:00
zyxucp
91193850dd fix 2024-06-30 18:32:58 +08:00
zyxucp
7cc04e3364 add code handling 2024-06-30 17:55:19 +08:00
zyxucp
3da28090c6 fix final code-handling logic 2024-06-30 17:46:58 +08:00
zyxucp
1595ef2c0a Merge branch 'main' of github.com:AIDotNet/AntSK 2024-06-30 17:30:37 +08:00
zyxucp
83e3d81de7 fix markdown 2024-06-30 17:30:26 +08:00
zyxucp
18437ddda4 Update README.zh.md 2024-06-30 17:22:29 +08:00
zyxucp
fd503171a1 Update README.md 2024-06-30 17:22:04 +08:00
zyxucp
7022139780 update docker file and yaml 2024-06-30 17:15:06 +08:00
zyxucp
1e508e45af fix modellist 2024-06-30 17:11:21 +08:00
zyxucp
03d9ec2cad Merge pull request #94 from duyanming/main
Resolve the poor experience of having to wait for generation to finish before converting to Markdown when the content is long.
2024-06-30 17:02:45 +08:00
zyxucp
86fb48bab7 Merge pull request #95 from AIDotNet/feature_ollama
Feature ollama
2024-06-30 17:01:39 +08:00
zyxucp
a4bc1e4a55 fix 2024-06-30 17:00:24 +08:00
zyxucp
8681e15da5 add ollama 2024-06-30 16:59:46 +08:00
zyxucp
ebc82f8b1b add ollamatype 2024-06-30 15:55:42 +08:00
duyanming
3bcd7bd7e1 1. Convert the output to Markdown as it is generated, resolving the poor experience of waiting for generation to finish when the content is long.
2. Remove the simulated delay, fixing the impression that generation is very slow
2024-06-30 14:16:36 +08:00
zyxucp
b64d8669b1 fix AntDesign.ProLayout bug 2024-06-29 22:43:02 +08:00
zyxucp
0489044098 fix rerank 2024-06-29 10:57:58 +08:00
zyxucp
17e2062b72 margin 2024-06-29 10:57:18 +08:00
zyxucp
4e4f5a698d update nuget 2024-06-29 10:56:47 +08:00
zyxucp
b879d04bcd update nuget 2024-06-23 23:57:01 +08:00
zyxucp
95f918f4c7 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-06-19 23:08:36 +08:00
zyxucp
f0e1ad6088 fix Spark model secret key being shown in the model list, and add Spark model versions 2024-06-19 23:08:18 +08:00
zyxucp
61773af48d Update docker-compose.simple.yml 2024-06-12 21:57:21 +08:00
zyxucp
54cd04c3bf Update docker-compose.yml 2024-06-12 21:57:03 +08:00
zyxucp
cd9f4ae11b Update README.md 2024-06-12 21:05:58 +08:00
zyxucp
3f9c748b41 update nuget 2024-06-12 11:20:59 +08:00
zyxucp
d483005531 add API address 2024-06-10 22:01:57 +08:00
zyxucp
1d2db6a896 Update docker-compose.simple.yml 2024-06-08 18:49:09 +08:00
zyxucp
9a7a263055 Update docker-compose.yml 2024-06-08 18:48:52 +08:00
zyxucp
6beb0b52c7 Merge pull request #92 from AIDotNet/feature_llamafactory
update llamafactory 0.8.0
2024-06-08 18:47:13 +08:00
zyxucp
0ea167a204 update llamafactory 0.8.0 2024-06-08 18:29:37 +08:00
zyxucp
6e6afa2a7c Update docker-compose.simple.yml 2024-06-08 11:36:19 +08:00
zyxucp
7a2a5d86bb Update docker-compose.yml 2024-06-08 11:36:04 +08:00
zyxucp
a1a36c3494 update nuget 2024-06-08 11:31:24 +08:00
zyxucp
4f350081dd update llamasharp 2024-06-08 11:23:02 +08:00
zyxucp
b3ea0c4e1a add llamasharp configuration 2024-06-08 11:04:14 +08:00
zyxucp
e72a6acd03 fix chat context handling 2024-05-30 13:08:37 +08:00
zyxucp
9bb8ab89fe Update README.zh.md 2024-05-29 22:54:41 +08:00
zyxucp
e78da66d1a Update README.md 2024-05-29 22:54:25 +08:00
zyxucp
9ee21fd5e5 AddServiceDefaults 2024-05-29 21:26:41 +08:00
zyxucp
a22c04c9b2 Merge pull request #91 from AIDotNet/feature_aspire
Feature aspire
2024-05-29 17:29:00 +08:00
zyxucp
3bb5bfaca7 add otel 2024-05-29 16:34:54 +08:00
zyxucp
c4bf5ee7e5 fix: add OTEL 2024-05-29 15:06:16 +08:00
zyxucp
5e1e688f84 fix seq 2024-05-29 14:20:07 +08:00
zyxucp
80d9bf68f3 fix seq 2024-05-29 13:52:47 +08:00
zyxucp
65f2e3e363 add Serilog.Sinks.Seq 2024-05-29 13:20:11 +08:00
zyxucp
68d27ff2bc update Serilog 2024-05-29 13:03:00 +08:00
zyxucp
034da30811 add Serilog 2024-05-29 12:14:12 +08:00
zyxucp
3db0cdcd19 add aspire 2024-05-29 00:01:30 +08:00
zyxucp
42181a6f1d add aspire 2024-05-28 22:23:55 +08:00
zyxucp
ec8cbf2550 add CORS handling 2024-05-27 22:19:22 +08:00
zyxucp
9a1bd079da fix: remove default prompt 2024-05-26 19:41:58 +08:00
zyxucp
4213c4379c update: handle missing systemPrompt in openapi 2024-05-26 19:38:32 +08:00
zyxucp
05cda17e2e style: style adjustments 2024-05-26 00:50:23 +08:00
zyxucp
cda6e54f0b Merge branch 'main' of github.com:AIDotNet/AntSK 2024-05-25 23:11:40 +08:00
zyxucp
51d8ba6408 update km and sk versions 2024-05-25 23:11:33 +08:00
zyxucp
b571c7d22d Update README.md 2024-05-24 22:01:03 +08:00
zyxucp
a0c91f565e fix openapi chat context bug 2024-05-24 21:47:53 +08:00
zyxucp
280c750165 Update README.md 2024-05-23 14:47:20 +08:00
zyxucp
fec9337fda margin 2024-05-23 14:34:46 +08:00
zyxucp
b84f252f2f update readme 2024-05-23 14:17:54 +08:00
zyxucp
5c998ccce2 Update README.en.md 2024-05-23 13:53:36 +08:00
zyxucp
0e3cfd2cfb Update README.md 2024-05-23 13:53:33 +08:00
zyxucp
4040831a23 Update README.md 2024-05-23 13:52:17 +08:00
zyxucp
a3a2308659 Update docker-compose.yml 2024-05-23 13:46:03 +08:00
zyxucp
6d43c71d13 Update docker-compose.simple.yml 2024-05-23 13:45:42 +08:00
zyxucp
8315b6f37f fix: style adjustments 2024-05-23 12:07:37 +08:00
zyxucp
7bc708e6ae margin 2024-05-23 11:33:15 +08:00
zyxucp
e6f2c5c2fe update: upgrade SK and KM versions 2024-05-23 11:29:23 +08:00
zyxucp
5cab781362 Merge pull request #90 from yc-2503/main
fix: first message in the chat window not being passed to the LLM
2024-05-14 22:20:32 +08:00
Chason
02d7994bae fix: first message in the chat window lost 2024-05-14 20:32:11 +08:00
zyxucp
b740957157 fix: adjust KM version 2024-05-12 20:49:13 +08:00
zyxucp
2480ec1272 margin 2024-05-12 19:07:51 +08:00
zyxucp
35c98a0d14 update ant blazor / sk / km 2024-05-12 19:07:27 +08:00
zyxucp
0964a5ad5b Merge pull request #88 from yc-2503/main
bugfix: "jsonbody parameter does not exist" error when calling a function
2024-05-09 23:33:24 +08:00
Chason
a95131efe9 fix: "jsonbody parameter does not exist" error when calling a function
The KernelParameterMetadata constructor specifies the parameter name jsonbody, but the name was later changed to the "json parameter string"
2024-05-09 17:41:43 +08:00
Chason
7783cdf3c4 bugfix: syntax error 2024-05-09 10:55:41 +08:00
zyxucp
7a65f33cb6 Update README.md 2024-05-09 01:33:37 +08:00
zyxucp
6efd01db3f Merge pull request #87 from yc-2503/main
fix: correct the return string in conversation summary
2024-05-08 13:26:15 +08:00
Chason
1e2322b573 Merge pull request #1 from yc-2503/yc-2503-patch-1
fix: correct conversation summary
2024-05-07 19:55:12 +08:00
Chason
2cb2241a66 fix: correct conversation summary 2024-05-07 19:54:29 +08:00
zyxucp
64efdd7881 add logo 2024-05-01 14:09:21 +08:00
zyxucp
be28e32803 update nuget version 2024-05-01 13:05:11 +08:00
zyxucp
468422baee fix async chat issue 2024-04-30 21:53:50 +08:00
zyxucp
7b1c6c8c64 fix: adjust async handling 2024-04-30 17:53:16 +08:00
zyxucp
7ff0ea0bfe Update README.en.md 2024-04-29 21:42:57 +08:00
zyxucp
6bed4356f0 Update README.md 2024-04-29 18:17:06 +08:00
zyxucp
73b65f7305 Merge pull request #84 from AIDotNet/feature_llamasharp
Feature llamasharp
2024-04-28 20:38:01 +08:00
zyxucp
0ea52eced9 fix: change chat to chathistory 2024-04-28 20:37:37 +08:00
zyxucp
498e9ba9f6 Merge branch 'main' into feature_llamasharp 2024-04-28 20:24:30 +08:00
zyxucp
125695665b add: fix watermark affecting styles 2024-04-28 16:53:10 +08:00
zyxucp
0e08b3ae85 add watermark 2024-04-28 15:37:51 +08:00
zyxucp
7cb8f99e7e fix chat conversation handling 2024-04-27 23:03:46 +08:00
zyxucp
d15cb527d0 add tutorial video 2024-04-24 23:56:45 +08:00
zyxucp
9cb36174fd Update README.md 2024-04-23 14:16:50 +08:00
zyxucp
6265f94ef2 fix: use a separate index for document import Q&A 2024-04-23 13:32:57 +08:00
zyxucp
09d90b654c fix document Q&A issue 2024-04-23 13:28:38 +08:00
zyxucp
64e2bca2e6 Merge pull request #80 from AIDotNet/fix_chatkmsbug
fix chat history knowledge base save bug
2024-04-23 12:54:49 +08:00
zyxucp
328ece6d73 fix chat history knowledge base save bug 2024-04-23 12:53:51 +08:00
zyxucp
fabb8c2044 fix merge handling 2024-04-23 11:51:08 +08:00
zyxucp
6ca75df880 fix merge handling 2024-04-23 11:50:46 +08:00
zyxucp
3d4dfaced1 margin 2024-04-23 11:48:43 +08:00
zyxucp
d532bf3bb6 add chat history search 2024-04-22 23:50:08 +08:00
zyxucp
e1fd288875 fix: style adjustments 2024-04-22 23:37:20 +08:00
zyxucp
91eae9cfa8 fix chat history storage 2024-04-22 23:31:08 +08:00
zyxucp
b0059942d3 Merge pull request #79 from AIDotNet/feature_chat
Feature chat
2024-04-22 23:17:58 +08:00
zyxucp
a716982878 add chat history 2024-04-22 23:17:16 +08:00
zyxucp
3d4e48f9f5 fix errors 2024-04-22 22:26:45 +08:00
zyxucp
1f212d3156 update semantic kernel to 1.8.0 2024-04-22 22:19:46 +08:00
zyxucp
7d91ef6ba1 Update LICENSE 2024-04-22 22:14:44 +08:00
zyxucp
2a450b00de add: store chat history in the chats table when a user is logged in, and in localstorage for anonymous access 2024-04-22 22:03:34 +08:00
zyxucp
3a97068248 Update README.md 2024-04-21 12:12:46 +08:00
zeyu xu
1d9d95899a update antsk logo 2024-04-21 11:17:51 +08:00
zyxucp
7ae8e52b57 Merge pull request #77 from AIDotNet/feature_bge
update kernelMemory nuget version
2024-04-21 11:01:06 +08:00
zeyu xu
f5c195a1d0 update kernelMemory nuget version 2024-04-21 11:00:36 +08:00
zyxucp
78a6b662d3 Update docker-compose.simple.yml 2024-04-20 23:30:27 +08:00
zyxucp
5f814eb76c Update docker-compose.yml 2024-04-20 23:30:07 +08:00
zyxucp
d9e5ebb464 Update README.md 2024-04-20 23:29:48 +08:00
zyxucp
bce0e9183c Update README.md 2024-04-20 23:29:26 +08:00
zyxucp
c40a7bcf22 Merge pull request #76 from AIDotNet/feature_bge
Feature bge
2024-04-20 23:19:10 +08:00
zeyu xu
97a7d447ab add rerank kms 2024-04-20 23:18:07 +08:00
zeyu xu
f803b9538b fix: adjust directory structure 2024-04-20 21:17:27 +08:00
zeyu xu
1ac34c1702 add rerank to applications 2024-04-20 21:09:34 +08:00
zeyu xu
e07b480da1 add bgemodel 2024-04-20 21:02:44 +08:00
zeyu xu
9036af57e3 rename 2024-04-20 20:56:55 +08:00
zeyu xu
93288f9b5c add bgererank model download 2024-04-20 20:56:00 +08:00
zyxucp
f40dd8b013 Merge pull request #75 from AIDotNet/feature_menu
add: handle overly long text styling on the model management page
2024-04-20 10:42:23 +08:00
zeyu xu
c6b83d0695 add: handle overly long text styling on the model management page 2024-04-20 10:41:48 +08:00
zyxucp
592c850198 Merge pull request #74 from AIDotNet/feature_menu
add: split the model management menu out separately
2024-04-20 10:31:43 +08:00
zeyu xu
4a3930ac7b add: split the model management menu out separately 2024-04-20 10:31:19 +08:00
zyxucp
c05ba0af3e Update README.md 2024-04-19 23:20:44 +08:00
zyxucp
630ee51df6 Update docker-compose.simple.yml 2024-04-19 23:20:26 +08:00
zyxucp
d0e75e26c3 Update docker-compose.yml 2024-04-19 23:20:04 +08:00
zyxucp
62c36c3072 Merge pull request #73 from AIDotNet/feature_deldimensions
add DelDimensions
2024-04-19 23:09:33 +08:00
zeyu xu
baef309064 add DelDimensions 2024-04-19 23:08:50 +08:00
zeyu xu
d717cbad9c update nuget sqlsugar 2024-04-19 22:12:02 +08:00
zeyu xu
5ef0624605 fix WithLogCallback log output 2024-04-19 22:04:08 +08:00
zyxucp
af2930a371 fix WithLog 2024-04-19 18:37:03 +08:00
zyxucp
98f0f9fe84 Update README.md 2024-04-18 22:15:30 +08:00
zeyu xu
28a23271e9 fix: file name changes 2024-04-18 22:05:23 +08:00
zeyu xu
f1ba0bdf10 add model deletion validation 2024-04-18 21:49:09 +08:00
zeyu xu
0d5513f374 add Directory.Build.props 2024-04-18 21:23:25 +08:00
zyxucp
4812cc308c Update README.md 2024-04-17 22:59:46 +08:00
zyxucp
584f7faded add environment variables 2024-04-17 18:25:58 +08:00
zyxucp
08dcef2d8b Update docker-compose.simple.yml 2024-04-16 21:56:58 +08:00
zyxucp
68218733a2 Update docker-compose.yml 2024-04-16 21:56:40 +08:00
zyxucp
eb64cbf3d4 update: upgrade km nuget version 2024-04-16 17:01:32 +08:00
zyxucp
f0e8a55522 Merge pull request #72 from AIDotNet/feature_qa1
fix: use service injection for chunking
2024-04-16 13:56:39 +08:00
zyxucp
5ec5a0bde4 fix: use service injection for chunking 2024-04-16 13:54:24 +08:00
zyxucp
1cc56dd553 Merge pull request #71 from AIDotNet/feature_qa
Feature qa
2024-04-15 23:27:02 +08:00
zeyu xu
64e949a88b add qa chunking 2024-04-15 23:26:23 +08:00
zeyu xu
a2390a7c97 add qa question answering 2024-04-15 23:21:11 +08:00
zeyu xu
559661bb6c add qa parameters 2024-04-15 21:30:06 +08:00
zyxucp
79326de263 Merge pull request #70 from IntptrMax/Add-stable-diffusion-reference-for-Windows
Add stable diffusion reference for windows
2024-04-15 10:37:03 +08:00
IntptrMax
3815891b28 remove unused stable-diffusion.dll 2024-04-15 08:51:17 +08:00
IntptrMax
42d474382a Merge branch 'AIDotNet:main' into Add-stable-diffusion-reference-for-Windows 2024-04-15 08:36:29 +08:00
IntptrMax
fe691f2d44 1.Update some Stable Diffusion code 2024-04-15 08:34:16 +08:00
IntptrMax
3ee41a8ab1 1. Add references for Stable Diffusion for windows.
2. Update load lib code
2024-04-15 08:22:12 +08:00
zeyu xu
7ca41dff8a add docker file py 2024-04-13 10:56:47 +08:00
zeyu xu
ba2e86993e add dll 2024-04-13 10:55:42 +08:00
zyxucp
13878046a2 ignore files 2024-04-12 12:13:43 +08:00
zeyu xu
49ff8bf54f fix: remove empty references 2024-04-11 23:35:53 +08:00
zeyu xu
e9cc5a3993 add loading 2024-04-11 22:32:00 +08:00
zyxucp
b213964b63 Merge pull request #67 from IntptrMax/UpdateStableDiffusion
Update Stable Diffusion
2024-04-11 21:53:44 +08:00
IntptrMax
bfbed44270 Update Stable Diffusion 2024-04-11 15:11:17 +08:00
zyxucp
9b07d88392 Update Dockerfile-py 2024-04-10 23:31:20 +08:00
zyxucp
3f8ed109f9 fix nuget 2024-04-10 23:23:18 +08:00
zyxucp
3f969627a4 fix ocr runtime 2024-04-10 22:51:37 +08:00
zyxucp
d92970819a Merge pull request #65 from AIDotNet/feature_ocr
add runtime
2024-04-10 22:39:28 +08:00
zyxucp
23e756fa9b add runtime 2024-04-10 22:37:32 +08:00
zyxucp
5f58126fbf Merge pull request #64 from AIDotNet/feature_ocr
Feature ocr
2024-04-10 22:25:36 +08:00
zyxucp
dcfd0ffb8f add ocr 2024-04-10 22:23:59 +08:00
zyxucp
17221d056c add img 2024-04-10 21:57:45 +08:00
zyxucp
4a9dcfada4 fix default key 2024-04-10 21:54:26 +08:00
zyxucp
bb6c2bb020 fix: upgrade LLamaSharp 2024-04-09 12:32:39 +08:00
zyxucp
a8760a34de fix wrong address issue 2024-04-09 12:20:15 +08:00
zeyu xu
1e432a5782 fix: remove pynet 2024-04-08 14:50:34 +08:00
zeyu xu
cb861ef2bb fix OCR 2024-04-08 12:38:14 +08:00
zeyu xu
7cee8fd87a add OCR and document query limit 2024-04-08 12:10:32 +08:00
zeyu xu
8ce0e5d348 add ocr 2024-04-07 22:31:57 +08:00
zyxucp
90bce7c89f Merge branch 'main' of https://github.com/AIDotNet/AntSK 2024-04-07 15:59:03 +08:00
zyxucp
b840d0bcce fix add gpu avx 2024-04-07 15:58:26 +08:00
zyxucp
bfa6d28289 Update README.md 2024-04-07 14:40:47 +08:00
zeyu xu
f6e6ca9747 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-04-07 11:44:15 +08:00
zeyu xu
75f8d39648 fix type 2024-04-07 11:44:05 +08:00
zyxucp
9a939eba5a fix: change async to sync 2024-04-07 11:03:18 +08:00
zyxucp
4e93efe821 fix: change async to sync 2024-04-07 10:49:35 +08:00
zyxucp
8bdbee80a0 fix type 2024-04-07 10:37:16 +08:00
zyxucp
6bdf5dcc03 fix PG field error 2024-04-07 10:05:15 +08:00
zeyu xu
0bf0a9d78a fix chat style 2024-04-06 11:50:15 +08:00
zeyu xu
38e9fea601 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-04-06 11:47:59 +08:00
zeyu xu
d2366b3b46 update Semantic Kernel and fix kmsdetaillist style 2024-04-06 11:47:49 +08:00
zyxucp
3aff93083a Update README.md 2024-04-06 11:04:58 +08:00
zeyu xu
eb998199db fix: remove unneeded Controller 2024-04-06 00:03:53 +08:00
zeyu xu
1dd794af1b fix vector plugin installation 2024-04-06 00:03:17 +08:00
zeyu xu
08c9923e7e update docker-compose.yml 2024-04-05 23:58:45 +08:00
zyxucp
06b109ca87 Merge pull request #62 from AIDotNet/feature_kms
Feature kms
2024-04-05 22:27:21 +08:00
zeyu xu
9b039335c7 fix style 2024-04-05 22:26:37 +08:00
zeyu xu
041378e5fd add document search test 2024-04-05 22:16:44 +08:00
zeyu xu
6dc5ae10e3 add search test layout 2024-04-05 21:19:41 +08:00
zeyu xu
5807f4c283 fix chunk detail styles 2024-04-05 21:09:38 +08:00
zyxucp
8ef4445908 Merge pull request #61 from AIDotNet/feature_kms
Feature kms
2024-04-05 20:23:49 +08:00
zeyu xu
8a0609e970 add excel import 2024-04-05 20:23:03 +08:00
zeyu xu
9f33b5009b add excel import 2024-04-05 19:50:58 +08:00
zeyu xu
50e66db8a1 Merge branch 'feature_kms' of github.com:AIDotNet/AntSK into feature_kms 2024-04-05 19:18:26 +08:00
zeyu xu
c3e83b569a fix: upgrade nuget 2024-04-05 19:18:20 +08:00
zyxucp
85d1c5ea7e add npoi 2024-04-05 19:17:49 +08:00
zeyu xu
ec1d126a02 add excel import 2024-04-05 19:13:30 +08:00
zyxucp
e857695e70 Update docker-compose.simple.yml 2024-04-05 18:45:56 +08:00
zyxucp
fa9b2051fe Update docker-compose.yml 2024-04-05 18:45:41 +08:00
zyxucp
d450efcffe Merge pull request #60 from AIDotNet/feature_kms
fix: adjust prompt length limit
2024-04-05 15:51:22 +08:00
zeyu xu
2a6c84c200 fix: adjust prompt length limit 2024-04-05 15:50:46 +08:00
zyxucp
138a952ace Merge pull request #59 from AIDotNet/feature_kms
Feature kms
2024-04-05 15:41:14 +08:00
zeyu xu
eb6528ecd2 add: change message structure to reduce localstore storage 2024-04-05 15:39:56 +08:00
zeyu xu
2c30bbfa09 fix: detail adjustments 2024-04-05 15:29:39 +08:00
zeyu xu
c5a78c2135 add modeldownchange 2024-04-05 15:12:29 +08:00
zeyu xu
f03362ee41 fix dropdown Trigger.Click 2024-04-05 15:04:44 +08:00
zeyu xu
fad3167d97 add kms settings 2024-04-05 15:00:37 +08:00
zeyu xu
ad949681dd add change 2024-04-05 14:41:58 +08:00
zeyu xu
27999d76b0 fix knowledge base function 2024-04-05 14:26:35 +08:00
zeyu xu
83278352d6 add kms configuration 2024-04-05 14:12:25 +08:00
zeyu xu
fcc56f5fef fix bge embedding chunking failure 2024-04-04 00:37:08 +08:00
zyxucp
4ebe2ecc32 fix initialization and add a completion flag 2024-04-02 13:53:32 +08:00
zeyu xu
e684cba527 Merge branch 'main' of github.com:AIDotNet/AntSK 2024-04-02 13:34:38 +08:00
zeyu xu
888dc19ee0 fix bgeembedding 2024-04-02 13:34:24 +08:00
zyxucp
731aea702f fix prompts 2024-04-02 11:17:53 +08:00
zyxucp
09e22bc76a Update README.md 2024-04-02 00:07:08 +08:00
zyxucp
74406d88a0 Merge pull request #58 from AIDotNet/feature_StableDiffusion
fix: change to a static class
2024-04-01 23:57:12 +08:00
zeyu xu
e5f9d97560 fix: change to a static class 2024-04-01 23:56:44 +08:00
zyxucp
59e768aaea Merge pull request #57 from AIDotNet/feature_StableDiffusion
Feature stable diffusion
2024-04-01 23:39:22 +08:00
zeyu xu
6a7cb24a5b add sd 2024-04-01 23:08:53 +08:00
zeyu xu
1db40d534c add apptype 2024-04-01 22:14:18 +08:00
zeyu xu
11d6e30f7e add sd function 2024-04-01 22:03:00 +08:00
zeyu xu
9d5214aaae add sdmodel 2024-04-01 21:57:18 +08:00
zeyu xu
010b906271 add sd 2024-04-01 21:35:51 +08:00
zeyu xu
16bf944edf add sd 2024-04-01 21:31:15 +08:00
zeyu xu
5bae5a099a margin 2024-04-01 21:01:29 +08:00
zyxucp
f771ea9521 Merge branch 'main' of https://github.com/AIDotNet/AntSK 2024-04-01 13:54:53 +08:00
zyxucp
994efbf37c update nuget 2024-04-01 13:54:20 +08:00
zyxucp
938cd86c88 Update README.md 2024-03-31 13:24:21 +08:00
zeyu xu
1339cbadbc fix menukey 2024-03-31 13:07:30 +08:00
zeyu xu
bd0ad570ad add usage documentation 2024-03-31 13:07:08 +08:00
zeyu xu
234e649a7e fix: optimize some content 2024-03-31 12:38:17 +08:00
zyxucp
c431dbc842 Update README.md 2024-03-31 00:28:16 +08:00
zyxucp
76283060d9 Update docker-compose.simple.yml 2024-03-30 23:28:52 +08:00
zyxucp
75ba506db4 Update docker-compose.yml 2024-03-30 23:28:33 +08:00
zeyu xu
0c8ad5fe8d add loading 2024-03-30 19:50:29 +08:00
zeyu xu
68ce0db011 fix: style adjustments 2024-03-30 17:35:40 +08:00
zeyu xu
c36de1a1e9 add option controls 2024-03-30 17:25:58 +08:00
zeyu xu
44ef759abd fix controls 2024-03-30 14:47:29 +08:00
longdream
0c3d9844be Merge pull request #52 from longdream/main
Add bge embedding model; bge runs on CPU.
2024-03-29 21:51:35 +08:00
longdream
854c62a4ca merge 2024-03-29 21:50:17 +08:00
longdream
5ed4fd5299 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-29 20:00:53 +08:00
longdream
af5ec43571 modify settings page 2024-03-29 20:00:49 +08:00
junlong
d7b56d1590 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-29 15:34:08 +08:00
longdream
b925f8890b modify token length 2024-03-28 23:06:21 +08:00
longdream
5d80ee994a resolve thread conflict issue 2024-03-28 19:04:11 +08:00
longdream
f73bd2dfda add/remove embedding 2024-03-27 22:53:45 +08:00
longdream
f340ee1088 embedding encapsulation 2024-03-26 23:14:55 +08:00
longdream
edad2644aa remove unnecessary py files 2024-03-26 20:48:49 +08:00
longdream
8a56a0393a Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-26 20:48:07 +08:00
junlong
bd5ca06d8f test 2024-03-25 16:55:41 +08:00
junlong
e0985ecec3 Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-25 16:48:21 +08:00
junlong
e56b74d4af remove files other than chat 2024-03-25 16:48:11 +08:00
longdream
849b18f677 Merge branch 'AIDotNet:main' into main 2024-03-22 19:36:20 +08:00
junlong
344128e49d Merge branch 'main' of https://github.com/longdream/AntSK 2024-03-21 19:38:03 +08:00
junlong
56fc9dd517 test 2024-03-21 19:37:56 +08:00
313 changed files with 43228 additions and 6363 deletions

.gitignore (4 lines changed)

@@ -324,10 +324,6 @@ ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
*.dll
*.pdb
# MFractors (Xamarin productivity tool) working folder
.mfractor/
**/bin/

Dockerfile

@@ -22,4 +22,5 @@ WORKDIR /app
COPY --from=build /app/publish .
RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
RUN echo 'Asia/Shanghai' >/etc/timezone
RUN apt update && apt install -y libpugixml-dev libtbb-dev
ENTRYPOINT ["dotnet", "AntSK.dll"]

Dockerfile-py

@@ -1,8 +1,4 @@
# 1. Define the Python image to use for getting pip
FROM pytorch/pytorch AS python-base
# 2. Define the .NET SDK image to build your application
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /src
COPY ["src/AntSK/AntSK.csproj", "AntSK/"]
RUN dotnet restore "AntSK/AntSK.csproj"
@@ -11,18 +7,11 @@ WORKDIR "/src/AntSK"
RUN dotnet build "AntSK.csproj" -c Release -o /app/build
RUN dotnet publish "AntSK.csproj" -c Release -o /app/publish
# 3. Define the final image that will contain both .NET runtime and Python
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS final
# Copy the Python/pip installation from the official Python image
COPY --from=python-base /usr/local /usr/local
COPY --from=python-base /opt/conda/ /opt/conda/
FROM registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk-base:v1.0.0 AS final
WORKDIR /app
COPY --from=build /app/publish .
# Make sure the app and Python directories are in PATH
ENV PATH="/app:/opt/conda/bin:/usr/local/bin:${PATH}"
ENV PATH="/app:/opt/conda/bin:/usr/local/bin:${PATH}"
RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
RUN echo 'Asia/Shanghai' >/etc/timezone
RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
ENTRYPOINT ["dotnet", "AntSK.dll"]
ENTRYPOINT ["dotnet", "AntSK.dll"]

LICENSE (216 lines changed)

@@ -1,201 +1,79 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
## AntSK 用户协议
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
欢迎使用 AntSK 。请仔细阅读以下协议条款,继续使用本软件即表示您同意本协议内容。
1. Definitions.
**许可协议**
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
本软件采用 Apache License 2.0 许可。除 Apache License 2.0 规定的条款外,您在使用 AntSK 时还应遵守以下附加条款:
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
**一. 商用许可**
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
1. **免费商用**:用户在不修改代码的情况下,可以免费用于商业目的。
2. **商业授权**:如果您满足以下任意条件之一,需取得商业授权:
1. 对本软件进行二次修改、开发包括但不限于修改应用名称、logo、代码以及功能
2. 为企业客户提供多租户服务,且该服务支持 10 人或以上的使用。
3. 预装或集成到硬件设备或产品中进行捆绑销售。
4. 政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
**二. 贡献者协议**
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
作为 AntSK 的贡献者,您应当同意以下条款:
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
1. **许可调整**:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。
2. **商业用途**:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
**三. 其他条款**
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
1. 本协议条款的解释权归 AntSK 开发者所有。
2. 本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
如有任何问题或需申请商业授权,请联系 AntSK 开发团队。
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
除上述特定条件外,其他所有权利和限制均遵循 Apache License 2.0。有关 Apache License 2.0 的详细信息,请访问 http://www.apache.org/licenses/LICENSE-2.0。
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
---
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
根据 Apache 许可证 2.0 版(“许可证”)进行许可;除非符合许可证,否则您不得使用此文件。您可以在以下网址获取许可证副本:
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
http://www.apache.org/licenses/LICENSE-2.0
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
除非适用法律要求或书面同意,软件根据许可证分发的内容以“原样”分发,不附带任何明示或暗示的保证或条件。请参阅特定语言管理权限的许可证和许可证下的限制。
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
## AntSK User Agreement
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
Welcome to AntSK, a AI knowledge base. Please read the following agreement carefully. By continuing to use this software, you agree to the terms outlined below.
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
**License Agreement**
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
This software is licensed under the **Apache License 2.0**. In addition to the terms of the Apache License 2.0, the following additional terms apply to the use of AntSK:
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
**I. Commercial Use License**
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
1. **Free Commercial Use**: Users can use the software for commercial purposes without modifying the code.
2. **Commercial License Required**: A commercial license is required if any of the following conditions are met:
1. You modify, develop, or alter the software, including but not limited to changes to the application name, logo, code, or functionality.
2. You provide multi-tenant services to enterprise customers with 10 or more users.
3. You pre-install or integrate the software into hardware devices or products and bundle it for sale.
4. You are engaging in large-scale procurement for government or educational institutions, especially involving security, data privacy, or other sensitive requirements.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
**II. Contributor Agreement**
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
As a contributor to AntSK, you agree to the following:
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
1. **License Adjustment**: The producer reserves the right to adjust the open-source license as needed, making it stricter or more lenient.
2. **Commercial Use**: Any code you contribute may be used for commercial purposes, including but not limited to cloud business operations.
END OF TERMS AND CONDITIONS
**III. Other Terms**
APPENDIX: How to apply the Apache License to your work.
1. The interpretation of these terms is subject to the discretion of AntSK developers.
2. These terms may be updated, and users will be notified through the software when changes occur.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
For any questions or to request a commercial license, please contact the AntSK development team.
Copyright [yyyy] [name of copyright owner]
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
---
http://www.apache.org/licenses/LICENSE-2.0
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

README.en.md (deleted)

@@ -1,214 +0,0 @@
[简体中文](./README.md) | English
# AntSK
## AI Knowledge Base/Intelligent Agent built on .Net8+AntBlazor+SemanticKernel
## ⭐Core Features
- **Semantic Kernel**: Utilizes advanced natural language processing technology to accurately understand, process, and respond to complex semantic queries, providing users with precise information retrieval and recommendation services.
- **Kernel Memory**: Capable of continuous learning and storing knowledge points, AntSK has long-term memory function, accumulates experience, and provides a more personalized interaction experience.
- **Knowledge Base**: Import knowledge base through documents (Word, PDF, Excel, Txt, Markdown, Json, PPT) and perform knowledge base Q&A.
- **GPT Generation**: This platform supports creating personalized GPT models, enabling users to build their own GPT models.
- **API Interface Publishing**: Exposes internal functions in the form of APIs, enabling developers to integrate AntSK into other applications and enhance application intelligence.
- **API Plugin System**: Open API plugin system that allows third-party developers or service providers to easily integrate their services into AntSK, continuously enhancing application functionality.
- **.Net Plugin System**: Open dll plugin system that allows third-party developers or service providers to easily integrate their business functions by generating dll in standard format code, continuously enhancing application functionality.
- **Online Search**: AntSK, real-time access to the latest information, ensuring users receive the most timely and relevant data.
- **Model Management**: Adapts and manages integration of different models from different manufacturers, including gguf types supported by **llama.cpp** and models offline running supported by **llamafactory**.
- **Domestic Innovation**: AntSK supports domestic models and databases and can run under domestic innovation conditions.
- **Model Fine-Tuning**: Planned based on llamafactory for model fine-tuning.
## ⛪Application Scenarios
AntSK is suitable for various business scenarios, such as:
- Enterprise knowledge management system
- Automatic customer service and chatbots
- Enterprise search engine
- Personalized recommendation system
- Intelligent writing assistance
- Education and online learning platforms
- Other interesting AI Apps
## ✏Function Examples
### Online Demo
```
https://antsk.ai-dotnet.com/
```
```
Default account: test
Default password: test
Due to the low configuration of the cloud server, the local model cannot be run, so the system settings permissions have been closed. You can simply view the interface. If you want to use the local model, please download and use it on your own.
```
### Other Function Examples
[Video Demonstration](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
## ❓How to get started?
Here I am using Postgres as the data and vector storage because Semantic Kernel and Kernel Memory support it, but you can also use other options.
The model by default supports the local model of openai, azure openai, and llama. If you need to use other models, you can integrate them using one-api.
The Login configuration in the configuration file is the default login account and password.
The following configuration file needs to be configured
## 1⃣Using docker-compose
Provided the pg version **appsettings.json** and simplified version (Sqlite+disk) **docker-compose.simple.yml**
Download **docker-compose.yml** from the project root directory and place the configuration file **appsettings.json** in the same directory.
The pg image has already been prepared. You can modify the default username and password in docker-compose.yml, and then the database connection in your **appsettings.json** needs to be consistent.
Then you can execute the following command in the directory to start AntSK
```
docker-compose up -d
```
## 2⃣How to mount local models and model download directory in docker
```
# Non-host version, do not use local proxy
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.1.5
ports:
- 5000:5000
networks:
- antsk
depends_on:
- antskpg
restart: always
environment:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # Local configuration file needs to be placed in the same directory
- D://model:/app/model
networks:
antsk:
```
Taking this as an example, it means mounting the local D://model folder of Windows into the container /app/model. If so, the model address in your appsettings.json should be configured as
```
model/xxx.gguf
```
## 3⃣Some meanings of configuration file
```
{
"DBConnection": {
"DbType": "Sqlite",
"ConnectionStrings": "Data Source=AntSK.db;"
},
"KernelMemory": {
"VectorDb": "Disk",
"ConnectionString": "Host=;Port=;Database=antsk;Username=;Password=",
"TableNamePrefix": "km-"
},
"LLamaSharp": {
"RunType": "GPU",
"FileDirectory": "D:\\Code\\AI\\AntBlazor\\model\\"
},
"Login": {
"User": "admin",
"Password": "xuzeyu"
},
"BackgroundTaskBroker": {
"ImportKMSTask": {
"WorkerCount": 1
}
}
}
```
```
// Supports various databases, you can check SqlSugar, MySql, SqlServer, Sqlite, Oracle, PostgreSQL, Dm, Kdbndp, Oscar, MySqlConnector, Access, OpenGauss, QuestDB, HG, ClickHouse, GBase, Odbc, OceanBaseForOracle, TDengine, GaussDB, OceanBase, Tidb, Vastbase, PolarDB, Custom
DBConnection.DbType
// Connection string, need to use the corresponding string according to the different DB types
DBConnection.ConnectionStrings
//The type of vector storage, supporting Postgres, Disk, Memory, Qdrant, Redis, AzureAISearch
//Postgres and Redis require ConnectionString configuration
//The ConnectionString of Qdrant and AzureAISearch uses Endpoint | APIKey
KernelMemory.VectorDb
//Local model execution options: GPU and CPU. When using the online API, any option can be used.
LLamaSharp.RunType
//Local model path, used for quick selection of models under llama, as well as saving downloaded models.
LLamaSharp.FileDirectory
//Default admin account password
Login
//Import asynchronous processing thread count. A higher count can be used for online API, but for local models, 1 is recommended to avoid memory overflow issues.
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```
## ⚠Fixing Style Issues:
Run the following in AntSK/src/AntSK:
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
Then navigate to AntSK/src/AntSK/bin/Release/net8.0/publish and run:
```
dotnet AntSK.dll
```
The styles should now be applied after starting.
I'm using CodeFirst mode for the database, so as long as the database connection is properly configured, the table structure will be created automatically.
## ✔Using llamafactory
```
1. First, ensure that Python and pip are installed in your environment. This step is not necessary if using an image, such as version v0.2.3.2, which already includes the complete Python environment.
2. Go to the model add page and select llamafactory.
3. Click "Initialize" to check whether the 'pip install' environment setup is complete.
4. Choose a model that you like.
5. Click "Start" to begin downloading the model from the tower. This may involve a somewhat lengthy wait.
6. After the model has finished downloading, enter http://localhost:8000/ in the request address. The default port is 8000.
7. Click "Save" and start chatting.
8. Many people ask about the difference between LLamaSharp and llamafactory. In fact, LLamaSharp is a .NET implementation of llama.cpp, but only supports local gguf models, while llamafactory supports a wider variety of models and uses Python implementation. The main difference lies here. Additionally, llamafactory has the ability to fine-tune models, which is an area we will focus on integrating in the future.
```
## 🤝 Contributing
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://github.com/AIDotNet/AntSK/pulls)
If you would like to contribute, feel free to create a [Pull Request](https://github.com/AIDotNet/AntSK/pulls), or give us [Bug Report](https://github.com/AIDotNet/AntSK/issues/new).
## 💕 Contributors
This project exists thanks to all the people who contribute.
<a href="https://github.com/AIDotNet/AntSK/graphs/contributors">
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>
## 🚨 Code of Conduct
This project has adopted the code of conduct defined by the Contributor Covenant to clarify expected behavior in our community.
For more information see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
To learn more or get started with **AntSK**, follow my official WeChat account and join the discussion group.
## ☎Contact Me
If you have any questions or suggestions, please contact me through my official WeChat account. We also have a discussion group where you can send a message to join, and then I will add you to the group.
![Official WeChat Account](https://github.com/AIDotNet/Avalonia-Assistant/blob/main/img/gzh.jpg)
---
We appreciate your interest in **AntSK** and look forward to collaborating with you to create an intelligent future!

README.md (244 lines changed)

@@ -1,91 +1,92 @@
中文|[English](https://github.com/AIDotNet/AntSK/blob/main/README.en.md)
[简体中文](./README.zh.md) | English
# AntSK
## 使用.Net8+Blazor+SemanticKernel 打造的AI知识库/智能体
## AI Knowledge Base/Intelligent Agent built on .Net8+AntBlazor+SemanticKernel
## ⭐核心功能
## ⭐Core Features
- **语义内核 (Semantic Kernel)**:采用领先的自然语言处理技术,准确理解、处理和响应复杂的语义查询,为用户提供精确的信息检索和推荐服务。
- **Semantic Kernel**: Utilizes advanced natural language processing technology to accurately understand, process, and respond to complex semantic queries, providing users with precise information retrieval and recommendation services.
- **内存内核 (Kernel Memory)**具备持续学习和存储知识点的能力AntSK 拥有长期记忆功能,累积经验,提供更个性化的交互体验。
- **Kernel Memory**: Capable of continuous learning and storing knowledge points, AntSK has long-term memory function, accumulates experience, and provides a more personalized interaction experience.
- **知识库**:通过文档(WordPDFExcelTxtMarkdownJsonPPT)等形式导入知识库,可以进行知识库问答。
- **Knowledge Base**: Import knowledge base through documents (Word, PDF, Excel, Txt, Markdown, Json, PPT) and perform knowledge base Q&A.
- **GPTs 生成**此平台支持创建个性化的GPT模型尝试构建您自己的GPT模型。
- **GPT Generation**: This platform supports creating personalized GPT models, enabling users to build their own GPT models.
- **API接口发布**将内部功能以API的形式对外提供便于开发者将AntSK 集成进其他应用,增强应用智慧。
- **API Interface Publishing**: Exposes internal functions in the form of APIs, enabling developers to integrate AntSK into other applications and enhance application intelligence.
- **API插件系统**开放式API插件系统允许第三方开发者或服务商轻松将其服务集成到AntSK不断增强应用功能。
- **API Plugin System**: Open API plugin system that allows third-party developers or service providers to easily integrate their services into AntSK, continuously enhancing application functionality.
- **.Net插件系统**开放式dll插件系统允许第三方开发者或服务商轻松将其业务功能通过标准格式的代码生成dll后集成到AntSK不断增强应用功能。
- **.Net Plugin System**: Open dll plugin system that allows third-party developers or service providers to easily integrate their business functions by generating dll in standard format code, continuously enhancing application functionality.
- **联网搜索**AntSK实时获取最新信息确保用户接受到的资料总是最及时、最相关的。
- **Online Search**: AntSK, real-time access to the latest information, ensuring users receive the most timely and relevant data.
- **模型管理**:适配和管理集成不同厂商的不同模型。并且支持**llama.cpp**所支持的gguf类型以及**llamafactory**所支持的模型离线运行
- **Model Management**: Adapts and manages integration of different models from different manufacturers, models offline running supported by **llamafactory** and **ollama**.
- **国产信创**AntSK支持国产模型和国产数据库可以在信创条件下运行
- **Domestic Innovation**: AntSK supports domestic models and databases and can run under domestic innovation conditions.
- **模型微调**规划中基于llamafactory进行模型微调
- **Model Fine-Tuning**: Planned based on llamafactory for model fine-tuning.
## ⛪应用场景
## ⛪Application Scenarios
AntSK 适用于多种业务场景,例如:
- 企业级知识管理系统
- 自动客服与聊天机器人
- 企业级搜索引擎
- 个性化推荐系统
- 智能辅助写作
- 教育与在线学习平台
- 其他有意思的AI App
AntSK is suitable for various business scenarios, such as:
- Enterprise knowledge management system
- Automatic customer service and chatbots
- Enterprise search engine
- Personalized recommendation system
- Intelligent writing assistance
- Education and online learning platforms
- Other interesting AI Apps
## ✏Function Examples
### Online Demo
[document](http://antsk.cn/)
[demo](https://demo.antsk.cn/)
and
[demo1](https://antsk.ai-dotnet.com/)
## ✏️功能示例
### 在线演示
```
https://antsk.ai-dotnet.com/
```
```
默认账号test
Default account: test
默认密码:test
Default password: test
由于云服务器配置较低,无法运行本地模型,所以把系统设置权限关闭了,大家看看界面即可,要使用本地模型,请下载自行使用
Due to the low configuration of the cloud server, the local model cannot be run, so the system settings permissions have been closed. You can simply view the interface. If you want to use the local model, please download and use it on your own.
```
### 其他功能示例
[视频示例](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
### Other Function Examples
[Video Demonstration](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
## ❓如何开始?
## ❓How to get started?
在这里我使用的是Postgres 作为数据存储和向量存储,因为Semantic KernelKernel Memory都支持他,当然你也可以换成其他的。
Here I am using Postgres as the data and vector storage because Semantic Kernel and Kernel Memory support it, but you can also use other options.
模型默认支持openaiazure openai、讯飞星火、阿里云积、 和llama支持的gguf本地模型 以及llamafactory的本地模型,如果需要使用其他模型,可以使用one-api进行集成。
The model by default supports the local model of openai, azure openai, and llama. If you need to use other models, you can integrate them using one-api.
配置文件中的Login配置是默认的登录账号和密码
The Login configuration in the configuration file is the default login account and password.
需要配置如下的配置文件
The following configuration file needs to be configured
## 1使用docker-compose
## 1Using docker-compose
提供了pg版本 **appsettings.json** 和 简化版本(**Sqlite+disk** **docker-compose.simple.yml**
Provided the pg version **appsettings.json** and simplified version (Sqlite+disk) **docker-compose.simple.yml**
从项目根目录下载**docker-compose.yml**,然后把配置文件**appsettings.json**和它放在统一目录,
Download **docker-compose.yml** from the project root directory and place the configuration file **appsettings.json** in the same directory.
这里已经把pg的镜像做好了。在docker-compose.yml中可以修改默认账号密码然后你的**appsettings.json**的数据库连接需要保持一致。
The pg image has already been prepared. You can modify the default username and password in docker-compose.yml, and then the database connection in your **appsettings.json** needs to be consistent.
然后你可以进入到目录后执行
Then you can execute the following command in the directory to start AntSK
```
docker-compose up -d
```
来启动AntSK
## 2⃣如何在docker中挂载本地模型和模型下载的目录
## 2⃣How to mount local models and the model download directory in docker
```
# 非 host 版本, 不使用本机代理
# Non-host version, do not use local proxy
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.2.3
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.5.0
ports:
- 5000:5000
networks:
@@ -96,31 +97,37 @@ services:
environment:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # 本地配置文件 需要放在同级目录
- ./appsettings.json:/app/appsettings.json # Local configuration file needs to be placed in the same directory
- D://model:/app/model
networks:
antsk:
external: true
```
以这个为示例,意思是把 Windows 本地 D://model 文件夹挂载进容器内 /app/model。如果是这样,你的 appsettings.json 中的模型地址应该配置为
```
model/xxx.gguf
```
Taking this as an example: the local Windows folder D://model is mounted into /app/model inside the container. In that case, the model path in your appsettings.json should be configured as shown above, e.g. `model/xxx.gguf`.
## 3⃣配置文件的一些含义
[LiteDockerCompose](https://github.com/AIDotNet/AntSK/blob/main/docker-compose.simple.yml)
The lite version uses the Sqlite + disk vector mode for one-click, simplified deployment.
[FullDockerCompose](https://github.com/AIDotNet/AntSK/blob/main/docker-compose.yml)
The full version uses pg + aspire with more complete functionality; configure its configuration file according to the meanings below.
## 3⃣Some meanings of configuration file
```
{
"DBConnection": {
"DbType": "Sqlite",
"DbType": "Sqlite",
"ConnectionStrings": "Data Source=AntSK.db;"
},
"KernelMemory": {
"VectorDb": "Disk",
"VectorDb": "Disk",
"ConnectionString": "Host=;Port=;Database=antsk;Username=;Password=",
"TableNamePrefix": "km-"
},
"LLamaSharp": {
"RunType": "GPU",
"FileDirectory": "D:\\Code\\AI\\AntBlazor\\model\\"
"FileDir": {
"DirectoryPath": "D:\\git\\AntBlazor\\model"
},
"Login": {
"User": "admin",
@@ -134,86 +141,95 @@ model/xxx.gguf
}
```
```
//支持多种数据库,具体可以查看SqlSugar、MySql、SqlServer、Sqlite、Oracle、PostgreSQL、Dm、Kdbndp、Oscar、MySqlConnector、Access、OpenGauss、QuestDB、HG、ClickHouse、GBase、Odbc、OceanBaseForOracle、TDengine、GaussDB、OceanBase、Tidb、Vastbase、PolarDB、Custom
// Supports various databases, you can check SqlSugar, MySql, SqlServer, Sqlite, Oracle, PostgreSQL, Dm, Kdbndp, Oscar, MySqlConnector, Access, OpenGauss, QuestDB, HG, ClickHouse, GBase, Odbc, OceanBaseForOracle, TDengine, GaussDB, OceanBase, Tidb, Vastbase, PolarDB, Custom
DBConnection.DbType
//连接字符串需要根据不同DB类型用对应的字符串
// Connection string, need to use the corresponding string according to the different DB types
DBConnection.ConnectionStrings
//向量存储的类型,支持 Postgres、Disk、Memory、Qdrant、Redis、AzureAISearch
//Postgres、Redis需要配置 ConnectionString
//Qdrant 和 AzureAISearch 的 ConnectionString 使用 Endpoint|APIKey
//The type of vector storage, supporting Postgres, Disk, Memory, Qdrant, Redis, AzureAISearch
//Postgres and Redis require ConnectionString configuration
//The ConnectionString of Qdrant and AzureAISearch uses Endpoint | APIKey
KernelMemory.VectorDb
//本地模型使用的运行方式 GPU、CPU,如果用在线API,这个随意使用一个即可
//Running mode for the local model: GPU or CPU; if you use an online API, either value is fine
LLamaSharp.RunType
//Local model path, used for quick selection of models under llama, as well as saving downloaded models.
FileDir.DirectoryPath
//本地模型路径用于在选择llama时可以快速选择目录下的模型以及保存下载的模型
LLamaSharp.FileDirectory
//默认管理员账号密码
//Default admin account password
Login
//导入异步处理的线程数使用在线API可以高一点本地模型建议1 否则容易内存溢出崩掉
//Import asynchronous processing thread count. A higher count can be used for online API, but for local models, 1 is recommended to avoid memory overflow issues.
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```
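For developers, these sections are typically bound to strongly-typed option classes inside the project (for example `AntSK.Domain.Options.DBConnectionOption`). The snippet below is only an illustrative sketch of how the `DBConnection` section could be bound with the standard .NET options pattern; the actual registration code inside AntSK may differ.
```
// Illustrative sketch only -- AntSK's real startup wiring may differ.
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

// Hypothetical POCO mirroring the "DBConnection" section shown above
// (AntSK already ships AntSK.Domain.Options.DBConnectionOption for this purpose).
public class DBConnectionOption
{
    public string DbType { get; set; } = "Sqlite";
    public string ConnectionStrings { get; set; } = "Data Source=AntSK.db;";
}

public static class ConfigBindingSketch
{
    public static void Register(IServiceCollection services, IConfiguration configuration)
    {
        // Binds appsettings.json -> "DBConnection" so that services can inject
        // IOptions<DBConnectionOption> instead of reading raw configuration keys.
        services.Configure<DBConnectionOption>(configuration.GetSection("DBConnection"));
    }
}
```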
## ⚠️找不到样式问题解决:
## ⚠️Fixing Style Issues:
在 AntSK/src/AntSK 下执行:
Run the following in AntSK/src/AntSK:
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
再去 AntSK/src/AntSK/bin/Release/net8.0/publish 下执行:
Then navigate to AntSK/src/AntSK/bin/Release/net8.0/publish and run:
```
dotnet AntSK.dll
```
然后启动就有样式了
The styles should now be applied after starting.
DB我使用的是CodeFirst模式,只要配置好数据库连接,表结构是自动创建的
I'm using CodeFirst mode for the database, so as long as the database connection is properly configured, the table structure will be created automatically.
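To illustrate what CodeFirst means here: with SqlSugar, the tables are generated from the entity classes at startup. The sketch below is only an illustration using the project's `Apps` and `Chats` entities and the Sqlite settings from the sample configuration; the actual initialization in AntSK lives in its own startup extensions.
```
// Minimal CodeFirst sketch with SqlSugar (illustration only; AntSK performs this in its startup extensions).
using SqlSugar;

var db = new SqlSugarClient(new ConnectionConfig
{
    DbType = DbType.Sqlite,                      // matches DBConnection.DbType
    ConnectionString = "Data Source=AntSK.db;",  // matches DBConnection.ConnectionStrings
    IsAutoCloseConnection = true
});

// Creates the tables for the given entity types if they do not exist yet.
db.CodeFirst.InitTables(typeof(AntSK.Domain.Repositories.Apps),
                        typeof(AntSK.Domain.Repositories.Chats));
```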
## ✔️使用llamafactory
## ✔️Using llamafactory
```
1、首先需要确保你的环境已经安装了python和pip如果使用镜像例如v0.2.3.2版本已经包含了 python全套环境则无需此步骤
2、进入模型添加页面选择llamafactory
3、点击初始化可以检查pip install 环境是否完成
4、选择一个喜欢的模型
5、点击启动,这会开始从魔搭(ModelScope)下载模型,你可能需要有一个较为漫长的等待
6、等待模型下载完毕后,在请求地址输入 http://localhost:8000/ 这里默认是使用8000端口
7、点击保存,然后就可以开始聊天了
8、很多人会问 LLamaSharp与llamafactory有什么区别,其实这两者,LLamaSharp是llama.cpp的 dotnet实现,但是只支持本地gguf模型,而llamafactory 支持的模型种类更多,但使用的是python的实现,其主要差异在这里,另外llamafactory具有模型微调的能力,这也是我们下一步需要重点集成的部分。
1. First, ensure that Python and pip are installed in your environment. This step is not necessary if using an image, such as version v0.2.3.2, which already includes the complete Python environment.
2. Go to the model add page and select llamafactory.
3. Click "Initialize" to check whether the 'pip install' environment setup is complete.
4. Choose a model that you like.
5. Click "Start" to begin downloading the model from ModelScope (魔搭). This may involve a somewhat lengthy wait.
6. After the model has finished downloading, enter http://localhost:8000/ in the request address. The default port is 8000.
7. Click "Save" and start chatting.
8. Many people ask about the difference between LLamaSharp and llamafactory. LLamaSharp is a .NET binding of llama.cpp and only supports local gguf models, while llamafactory supports a much wider range of models but is implemented in Python; that is the main difference. In addition, llamafactory can fine-tune models, which is the part we plan to focus on integrating next.
```
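Once the model is running behind http://localhost:8000/, you can sanity-check it outside of AntSK before saving the model configuration. The sketch below assumes the endpoint exposes the standard OpenAI-compatible /v1/chat/completions route and uses a placeholder model name; adjust both to match your llamafactory setup.
```
// Quick sanity check against the local llamafactory endpoint.
// Assumes an OpenAI-compatible /v1/chat/completions route; the model name below is a placeholder.
using System.Text;

using var http = new HttpClient { BaseAddress = new Uri("http://localhost:8000/") };

var body = """
{
  "model": "Qwen/Qwen1.5-7B-Chat",
  "messages": [ { "role": "user", "content": "你好" } ]
}
""";

var response = await http.PostAsync("v1/chat/completions",
    new StringContent(body, Encoding.UTF8, "application/json"));

Console.WriteLine(await response.Content.ReadAsStringAsync());
```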
## 🤝 贡献
## 🤝 Contribution
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://github.com/AIDotNet/AntSK/pulls)

如果你想贡献,可以创建一个[拉取请求](https://github.com/AIDotNet/AntSK/pulls),或给我们提交[错误报告](https://github.com/AIDotNet/AntSK/issues/new)。
If you would like to contribute, you can create a [pull request](https://github.com/AIDotNet/AntSK/pulls) or file a [bug report](https://github.com/AIDotNet/AntSK/issues/new).

## 💕 贡献者
## 💕 Contributors
这个项目的存在要感谢所有的贡献者。
This project exists thanks to all the people who contribute.

<a href="https://github.com/AIDotNet/AntSK/graphs/contributors">
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>

## 🚨 行为准则
## 🚨 Code of Conduct
该项目采用了贡献者公约定义的行为准则,以阐明我们社区的预期行为。有关更多信息,请参见 [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct)。
This project has adopted the code of conduct defined by the Contributor Covenant to clarify the expected behavior in our community. For more information, see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
想了解更多信息或开始使用 **AntSK**,可以关注我的公众号以及加入交流群。
To learn more or get started with **AntSK**, follow my WeChat official account and join the discussion group.
## ☎️联系我
如有任何问题或建议,请通过以下方式关注我的公众号,发消息与我联系,我们也有交流群,可以发送进群等消息,然后我会拉你进交流群
![公众号](https://github.com/AIDotNet/AntSK/blob/main/images/gzh.jpg)
## 🌟 Star History
<a href="https://github.com/AIDotNet/AntSK/stargazers" target="_blank" style="display: block" align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
</picture>
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>
## 🚨 Use Protocol
This repository follows the [AntSK License](https://github.com/AIDotNet/AntSK?tab=Apache-2.0-1-ov-file) open source license.
In addition to the Apache 2.0 license, this project is subject to the following additional terms:
1. This project may be used for commercial purposes, but the authors reserve the right to prohibit your use if you violate the provisions below.
2. Without authorization, you may not modify AntSK's logo and title information.
3. Without authorization, you may not modify the copyright information at the bottom of the page.
4. If you need authorization, you can contact WeChat: **xuzeyu91**
If you plan to use AntSK in commercial projects, you need to ensure that you follow these steps:
1. Include a copyright statement containing the [AntSK License](https://github.com/AIDotNet/AntSK?tab=Apache-2.0-1-ov-file).
2. If you modify the software source code, you must clearly indicate these modifications in the source code.
3. Comply with the requirements above.
## 💕 Special thanks
For enterprise AI application development, we recommend [AntBlazor](https://antblazor.com).
## ☎Contact Me
If you have any questions or suggestions, please contact me through my official WeChat account. We also have a discussion group where you can send a message to join, and then I will add you to the group.
Additionally, you can also contact me via email: antskpro@qq.com
![Official WeChat Account](https://github.com/AIDotNet/AntSK/blob/main/images/gzh.jpg)
---
We appreciate your interest in **AntSK** and look forward to collaborating with you to create an intelligent future!

README.zh.md Normal file
View File

@@ -0,0 +1,253 @@
中文|[English](./README.md)
# AntSK
## 使用.Net8+Blazor+SemanticKernel 打造的AI知识库/智能体
## ⭐核心功能
- **语义内核 (Semantic Kernel)**:采用领先的自然语言处理技术,准确理解、处理和响应复杂的语义查询,为用户提供精确的信息检索和推荐服务。
- **内存内核 (Kernel Memory)**:具备持续学习和存储知识点的能力,AntSK 拥有长期记忆功能,累积经验,提供更个性化的交互体验。
- **知识库**:通过文档(Word、PDF、Excel、Txt、Markdown、Json、PPT)等形式导入知识库,可以进行知识库问答,支持本地bge-embedding 向量模型以及bge-rerank 重排模型。
- **文生图**:集成**StableDiffusion** 本地模型,可以进行文生图。
- **GPTs 生成**:此平台支持创建个性化的GPT模型,尝试构建您自己的GPT模型。
- **API接口发布**:将内部功能以API的形式对外提供,便于开发者将AntSK 集成进其他应用,增强应用智慧。
- **API插件系统**:开放式API插件系统,允许第三方开发者或服务商轻松将其服务集成到AntSK,不断增强应用功能。
- **.Net插件系统**:开放式dll插件系统,允许第三方开发者或服务商轻松将其业务功能通过标准格式的代码生成dll后集成到AntSK,不断增强应用功能。
- **联网搜索**:AntSK实时获取最新信息,确保用户接收到的资料总是最及时、最相关的。
- **模型管理**:适配和管理集成不同厂商的不同模型。并且支持**llama.cpp**所支持的gguf类型,以及**llamafactory** 和 **ollama** 所支持的模型离线运行。
- **国产信创**:AntSK支持国产模型和国产数据库,可以在信创条件下运行。
- **模型微调**:规划中,基于llamafactory进行模型微调。
## ⛪应用场景
AntSK 适用于多种业务场景,例如:
- 企业级知识管理系统
- 自动客服与聊天机器人
- 企业级搜索引擎
- 个性化推荐系统
- 智能辅助写作
- 教育与在线学习平台
- 其他有意思的AI App
## ✏️功能示例
### 在线演示
[体验地址1](https://demo.antsk.cn/)
[体验地址2](https://antsk.ai-dotnet.com/)
```
默认账号test
默认密码test
由于云服务器配置较低,无法运行本地模型,所以把系统设置权限关闭了,大家看看界面即可,要使用本地模型,请下载自行使用
请勿在演示站点上传敏感信息
```
### 其他功能示例
[视频示例](https://www.bilibili.com/video/BV1zH4y1h7Y9/)
[在线文档http://antsk.cn](http://antsk.cn)
## ❓如何开始?
在这里我使用的是Postgres 作为数据存储和向量存储因为Semantic Kernel和Kernel Memory都支持他当然你也可以换成其他的。
模型默认支持openai、azure openai、讯飞星火、阿里云积、 和llama支持的gguf本地模型 以及llamafactory的本地模型,如果需要使用其他模型可以使用one-api进行集成。
配置文件中的Login配置是默认的登录账号和密码
需要配置如下的配置文件
## 为了方便体验,我已经把打包好的程序放进了网盘,你只需要安装.net8环境即可运行。
[.net8环境 ](https://dotnet.microsoft.com/zh-cn/download/dotnet/8.0)
[我用夸克网盘分享了「AntSK」](https://pan.quark.cn/s/9462c849cbad)
## 1⃣使用docker-compose
提供了pg版本 **appsettings.json** 和 简化版本(**Sqlite+disk** **docker-compose.simple.yml**
从项目根目录下载**docker-compose.yml**,然后把配置文件**appsettings.json**和它放在统一目录,
这里已经把pg的镜像做好了。在docker-compose.yml中可以修改默认账号密码然后你的**appsettings.json**的数据库连接需要保持一致。
然后你可以进入到目录后执行
```
docker-compose up -d
```
来启动AntSK
## 2⃣如何在docker中挂载本地模型和模型下载的目录
```
# 非 host 版本, 不使用本机代理
version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/AIDotNet/antsk:v0.3.1
ports:
- 5000:5000
networks:
- antsk
depends_on:
- antskpg
restart: always
environment:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # 本地配置文件 需要放在同级目录
- D://model:/app/model
- D://model:/root/.cache/modelscope/hub/AI-ModelScope #使用Llamafactory时需要挂载 否则初始化的环境重启后会丢失
networks:
antsk:
```
以这个为示例意思是把windows本地D://model的文件夹挂载进 容器内/app/model 如果是这样你的appsettings.json中的模型地址应该配置为
[LiteDockerCompose](https://github.com/AIDotNet/AntSK/blob/main/docker-compose.simple.yml)
精简版使用sqlite+disk向量模式简化部署配置
[FullDockerCompose](https://github.com/AIDotNet/AntSK/blob/main/docker-compose.yml)
完整版使用pg+aspire 功能更完整,配置文件需要参考如下配置含义进行配置
## 3⃣配置文件的一些含义
```
{
"DBConnection": {
"DbType": "Sqlite",
"ConnectionStrings": "Data Source=AntSK.db;"
},
"KernelMemory": {
"VectorDb": "Disk",
"ConnectionString": "Host=;Port=;Database=antsk;Username=;Password=",
"TableNamePrefix": "km-"
},
"FileDir": {
"DirectoryPath": "D:\\git\\AntBlazor\\model"
},
"Login": {
"User": "admin",
"Password": "xuzeyu"
},
"BackgroundTaskBroker": {
"ImportKMSTask": {
"WorkerCount": 1
}
}
}
```
```
//支持多种数据库,具体可以查看SqlSugar、MySql、SqlServer、Sqlite、Oracle、PostgreSQL、Dm、Kdbndp、Oscar、MySqlConnector、Access、OpenGauss、QuestDB、HG、ClickHouse、GBase、Odbc、OceanBaseForOracle、TDengine、GaussDB、OceanBase、Tidb、Vastbase、PolarDB、Custom
DBConnection.DbType
//连接字符串需要根据不同DB类型用对应的字符串
DBConnection.ConnectionStrings
//向量存储的类型,支持 Postgres、Disk、Memory、Qdrant、Redis、AzureAISearch
//Postgres、Redis需要配置 ConnectionString
//Qdrant 和AzureAISearch 的 ConnectionString 使用 Endpoint|APIKey
KernelMemory.VectorDb
//本地模型路径用于在选择llama时可以快速选择目录下的模型以及保存下载的模型
FileDir.DirectoryPath
//默认管理员账号密码
Login
//导入异步处理的线程数使用在线API可以高一点本地模型建议1 否则容易内存溢出崩掉
BackgroundTaskBroker.ImportKMSTask.WorkerCount
```
## ⚠️找不到样式问题解决:
AntSK/src/AntSK下执行:
```
dotnet clean
dotnet build
dotnet publish "AntSK.csproj"
```
再去AntSK/src/AntSK/bin/Release/net8.0/publish下
```
dotnet AntSK.dll
```
然后启动就有样式了
DB我使用的是CodeFirst模式只要配置好数据库链接表结构是自动创建的
## ✔使用llamafactory
```
1、首先需要确保你的环境已经安装了python和pip如果使用镜像例如p0.2.4版本已经包含了 python全套环境则无需此步骤
2、进入模型添加页面选择llamafactory
3、点击初始化可以检查pip install 环境是否完成
4、选择一个喜欢的模型
5、点击启动,这会开始从魔塔下载模型,你可能需要有一个较为漫长的等待
6、等待模型下载完毕后在请求地址输入 http://localhost:8000/ 这里默认是使用8000端口
7、点击保存然后就可以开始聊天了
8、很多人会问 LLamaSharp与llamafactory有什么区别其实这两者LLamaSharp是llama.cpp的 dotnet实现但是只支持本地gguf模型 而llamafactory 支持的模型种类更多但使用的是python的实现其主要差异在这里另外llamafactory具有模型微调的能力这也是我们下一步需要重点集成的部分。
```

## 💕 贡献者
这个项目的存在要感谢所有的贡献者。

<a href="https://github.com/AIDotNet/AntSK/graphs/contributors">
<img src="https://contrib.rocks/image?repo=AIDotNet/AntSK&max=1000&columns=15&anon=1" />
</a>

## 🚨 使用协议
本仓库遵循 [AntSK License](https://github.com/AIDotNet/AntSK?tab=Apache-2.0-1-ov-file) 开源协议。
除以下附加条款外该项目遵循Apache 2.0协议
1. 本项目可以用于商业目的,但如果违反以下规定,它有权禁止您使用
2. 未经授权您不允许修改AntSK的徽标和标题信息
3. 未经授权,您不能修改页面底部的版权信息
4. 如果您需要授权可以联系微信xuzeyu91
如果您打算在商业项目中使用AntSK您需要确保遵守以下步骤
1. 包含AntSK许可证的版权声明。 [AntSK License](https://github.com/AIDotNet/AntSK?tab=Apache-2.0-1-ov-file) 。
2. 如果您修改了软件源代码,您需要在源代码中明确标明这些修改。
3. 满足以上要求
## 💕 特别感谢
助力企业级AI应用开发推荐使用 [AntBlazor](https://antblazor.com)
## ☎️联系我
如有任何问题或建议请通过以下方式关注我的公众号《许泽宇的技术分享》发消息与我联系我们也有AIDotnet交流群可以发送进群等消息然后我会拉你进交流群
另外您也可以通过邮箱与我联系antskpro@qq.com
![公众号](https://github.com/AIDotNet/AntSK/blob/main/images/gzh.jpg)
## 🌟 Star History
<a href="https://github.com/AIDotNet/AntSK/stargazers" target="_blank" style="display: block" align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=AIDotNet/AntSK&type=Date" />
</picture>
</a>

View File

@@ -3,9 +3,9 @@ version: '3.8'
services:
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.3
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.5.5
# 如果需要pytorch环境需要使用下面这个镜像镜像比较大
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.3.2
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:p0.5.5
ports:
- 5000:5000
networks:
@@ -15,5 +15,7 @@ services:
- ASPNETCORE_URLS=http://*:5000
volumes:
- ./appsettings.json:/app/appsettings.json # 本地配置文件 需要放在同级目录
- /AntSK/model:/app/model
- /AntSK/model:/root/.cache/modelscope/hub/AI-ModelScope # LLamaFactory模型文件
networks:
antsk:

View File

@@ -1,6 +1,20 @@
# 非 host 版本, 不使用本机代理
version: '3.8'
services:
aspire-dashboard:
container_name: aspire-dashboard
image: mcr.microsoft.com/dotnet/aspire-dashboard:8.0
networks:
- antsk
environment:
- DOTNET_DASHBOARD_UNSECURED_ALLOW_ANONYMOUS=true
- ASPIRE_ALLOW_UNSECURED_TRANSPORT=true
- DASHBOARD_OTLP_AUTHMODE=ApiKey
- DASHBOARD_OTLP_PRIMARYAPIKEY=antsk
ports:
- 18888:18888
- 18889:18889
restart: unless-stopped
antskpg:
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/pg:v0.5.0
container_name: antskpg
@@ -18,9 +32,9 @@ services:
- ./pg/data:/var/lib/postgresql/data
antsk:
container_name: antsk
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.3
image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.5.5
# 如果需要pytorch环境需要使用下面这个镜像镜像比较大
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:v0.2.3.2
# image: registry.cn-hangzhou.aliyuncs.com/xuzeyu91/antsk:p0.5.5
ports:
- 5000:5000
networks:
@@ -30,7 +44,15 @@ services:
restart: always
environment:
- ASPNETCORE_URLS=http://*:5000
- ASPNETCORE_FORWARDEDHEADERS_ENABLED=true
- OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES=true
- OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES= true
- OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY=in_memory
- OTEL_EXPORTER_OTLP_ENDPOINT=http://aspire-dashboard:18889
- OTEL_SERVICE_NAME=antsk
volumes:
- ./appsettings.json:/app/appsettings.json # 本地配置文件 需要放在同级目录
- /AntSK/model:/app/model
- /AntSK/model:/root/.cache/modelscope/hub/AI-ModelScope # LLamaFactory模型文件
networks:
antsk:

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsAspireHost>true</IsAspireHost>
<UserSecretsId>32ac67c8-178a-4eeb-871d-879023582e06</UserSecretsId>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Aspire.Hosting.AppHost" Version="8.0.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\AntSK\AntSK.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
var builder = DistributedApplication.CreateBuilder(args);
builder.AddProject<Projects.AntSK>("antsk");
builder.Build().Run();

View File

@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning",
"Aspire.Hosting.Dcp": "Warning"
}
}
}

View File

@@ -0,0 +1,26 @@
services:
aspire-dashboard:
container_name: "aspire-dashboard"
image: "mcr.microsoft.com/dotnet/aspire-dashboard:8.0"
environment:
DOTNET_DASHBOARD_UNSECURED_ALLOW_ANONYMOUS: "true"
ports:
- target: 18888
published: 18888
restart: unless-stopped
antsk:
container_name: "antsk"
image: "antsk:latest"
environment:
OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES: "true"
OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES: "true"
OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY: "in_memory"
ASPNETCORE_FORWARDEDHEADERS_ENABLED: "true"
OTEL_EXPORTER_OTLP_ENDPOINT: "http://aspire-dashboard:18889"
OTEL_SERVICE_NAME: "antsk"
ports:
- target: 8080
published: 10000
- target: 8443
published: 10001
restart: unless-stopped

View File

@@ -0,0 +1,17 @@
{
"projectPath": ".",
"outputPath": "aspirate-output",
"containerImageTags": [
"latest"
],
"containerBuilder": "docker",
"outputFormat": "compose",
"privateRegistryEmail": "aspir8@aka.ms",
"includeDashboard": true,
"secrets": {
"salt": "fjamZa3pQbM1UyY4",
"hash": "QR\u002BSEr3p2SwD/w2oPE21vrWh/EerhNyVyTkr0atIREw=",
"secrets": {}
},
"processAllComponents": true
}

View File

@@ -0,0 +1,26 @@
{
"resources": {
"antsk": {
"type": "project.v0",
"path": "../AntSK/AntSK.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http"
},
"https": {
"scheme": "https",
"protocol": "tcp",
"transport": "http"
}
}
}
}
}

View File

@@ -0,0 +1,53 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DocumentationFile>AntSK.Domain.xml</DocumentationFile>
<NoWarn>CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102</NoWarn>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AntDesign.Charts" Version="0.5.1" />
<PackageReference Include="AntDesign.ProLayout" Version="0.18.2" />
<PackageReference Include="BlazorComponents.Terminal" Version="0.6.0" />
<PackageReference Include="Blazored.LocalStorage" Version="4.5.0" />
<PackageReference Include="pythonnet" Version="3.0.3" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
<PackageReference Include="AutoMapper" Version="8.1.0" />
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
<PackageReference Include="Markdig" Version="0.37.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SqlSugarCore" Version="5.1.4.151" />
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.118" />
<PackageReference Include="RestSharp" Version="110.2.0" />
<PackageReference Include="NPOI" Version="2.7.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.7.1" />
<PackageReference Include="Microsoft.SemanticKernel.Core" Version="1.7.1" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.7.1-alpha" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.36.240415.2" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Postgres" Version="0.36.240415.2" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Qdrant" Version="0.36.240415.2" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Redis" Version="0.36.240415.2" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.AzureAISearch" Version="0.36.240415.2" />
<PackageReference Include="LLamaSharp" Version="0.11.2" />
<PackageReference Include="LLamaSharp.Backend.Cpu" Version="0.11.2" />
<PackageReference Include="LLamaSharp.Backend.Cuda12" Version="0.11.2" />
<PackageReference Include="LLamaSharp.kernel-memory" Version="0.11.2" />
<PackageReference Include="LLamaSharp.semantic-kernel" Version="0.11.2" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\AntSK.LLamaFactory\AntSK.LLamaFactory.csproj" />
<ProjectReference Include="..\AntSk.LLM\AntSK.LLM.csproj" />
<ProjectReference Include="..\AntSK.OCR\AntSK.OCR.csproj" />
<ProjectReference Include="..\MiddleWare\AntSK.BackgroundTask\AntSK.BackgroundTask.csproj" />
</ItemGroup>
</Project>

View File

@@ -5,44 +5,48 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DocumentationFile>AntSK.Domain.xml</DocumentationFile>
<NoWarn>CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102</NoWarn>
<NoWarn>CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102,KMEXP00</NoWarn>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AntDesign.Charts" Version="0.5.1" />
<PackageReference Include="AntDesign.ProLayout" Version="0.18.1" />
<PackageReference Include="AntDesign.Charts" Version="0.5.6" />
<PackageReference Include="AntDesign.ProLayout" Version="0.20.3" />
<PackageReference Include="BlazorComponents.Terminal" Version="0.6.0" />
<PackageReference Include="Blazored.LocalStorage" Version="4.5.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
<PackageReference Include="pythonnet" Version="3.0.4" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.9.0" />
<PackageReference Include="AutoMapper" Version="8.1.0" />
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
<PackageReference Include="Markdig" Version="0.36.2" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SqlSugarCore" Version="5.1.4.148" />
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.118" />
<PackageReference Include="RestSharp" Version="110.2.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.3" />
<PackageReference Include="Microsoft.SemanticKernel.Core" Version="1.6.3" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.6.3-alpha" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Postgres" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Qdrant" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Redis" Version="0.35.240321.1" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.AzureAISearch" Version="0.35.240321.1" />
<PackageReference Include="Markdig" Version="0.37.0" />
<PackageReference Include="Newtonsoft.Json" Version="$(NewtonsoftVersion)" />
<PackageReference Include="SqlSugarCore" Version="5.1.4.169" />
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.119" />
<PackageReference Include="RestSharp" Version="$(RestSharpVersion)" />
<PackageReference Include="NPOI" Version="2.7.1" />
<PackageReference Include="LLamaSharp" Version="0.10.0" />
<PackageReference Include="LLamaSharp.Backend.Cpu" Version="0.10.0" />
<PackageReference Include="LLamaSharp.Backend.Cuda12" Version="0.10.0" />
<PackageReference Include="LLamaSharp.kernel-memory" Version="0.10.0" />
<PackageReference Include="LLamaSharp.semantic-kernel" Version="0.10.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="$(SKVersion)" />
<PackageReference Include="Microsoft.SemanticKernel.Core" Version="$(SKVersion)" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="$(SKVersion)-alpha" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="$(KMVersion)" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Postgres" Version="$(KMVersion)" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Qdrant" Version="$(KMVersion)" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.Redis" Version="$(KMVersion)" />
<PackageReference Include="Microsoft.KernelMemory.MemoryDb.AzureAISearch" Version="$(KMVersion)" />
<PackageReference Include="Serilog" Version="4.1.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="6.0.0" />
<PackageReference Include="Serilog.Extensions.Logging" Version="8.0.1-dev-10391" />
<PackageReference Include="Serilog.Settings.Configuration" Version="8.0.4" />
<PackageReference Include="Serilog.Sinks.Seq" Version="8.0.0" />
<PackageReference Include="Serilog.Sinks.OpenTelemetry" Version="4.1.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\AntSK.LLamaFactory\AntSK.LLamaFactory.csproj" />
<ProjectReference Include="..\AntSk.LLM\AntSK.LLM.csproj" />
<ProjectReference Include="..\AntSK.LLM\AntSK.LLM.csproj" />
<ProjectReference Include="..\AntSK.OCR\AntSK.OCR.csproj" />
<ProjectReference Include="..\MiddleWare\AntSK.BackgroundTask\AntSK.BackgroundTask.csproj" />
</ItemGroup>

View File

@@ -69,6 +69,84 @@
<param name="value"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ExcelToDataTable(System.String,System.Boolean)">
<summary>
将excel导入到datatable
</summary>
<param name="filePath">excel路径</param>
<param name="isColumnName">第一行是否是列名</param>
<returns>返回datatable</returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ExcelToDataTable(System.IO.Stream,System.Boolean)">
<summary>
将excel导入到datatable
</summary>
<param name="stream"></param>
<param name="isColumnName">第一行是否是列名</param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ExcelToList``1(System.IO.Stream)">
<summary>
excel转list
</summary>
<typeparam name="TResult"></typeparam>
<param name="stream"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ExcelToList``1(System.IO.Stream,System.String)">
<summary>
excel转list-根据sheetName得到List
</summary>
<typeparam name="TResult"></typeparam>
<param name="stream"></param>
<param name="sheetName"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ListToExcel``1(``0[],System.String)">
<summary>
List导出excel 二进制流
</summary>
<typeparam name="T">实体</typeparam>
<param name="data">List</param>
<param name="sheetName">sheetname 可不填默认Sheet0</param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.DataTableToExcel(System.Data.DataTable,System.String,System.String)">
<summary>
Dt导出excel 二进制流
</summary>
<param name="dt">datatable</param>
<param name="strFile">strFile</param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.ListWriteExcel``1(``0[],System.String,System.String)">
<summary>
List写入excel
</summary>
<typeparam name="T"></typeparam>
<param name="data"></param>
<param name="strFile">路径</param>
<param name="sheetName"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.DataTableWriteExcel(System.Data.DataTable,System.String,System.String)">
<summary>
dt写入excel
</summary>
<param name="dt">datatable</param>
<param name="strFile">路径</param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ExeclHelper.SetCellDropdownList(NPOI.SS.UserModel.IWorkbook,NPOI.SS.UserModel.ISheet,System.Collections.Generic.List{System.String},System.String,System.Int32,System.Int32,System.Int32)">
<summary>
设置单元格下拉框(除去标题行)
</summary>
<param name="workbook"></param>
<param name="sheet"></param>
<param name="ddlList"></param>
<param name="firstcol"></param>
<param name="lastcol"></param>
</member>
<member name="T:AntSK.Domain.Domain.Model.Enum.AIType">
<summary>
AI类型
@@ -79,11 +157,6 @@
模型类型
</summary>
</member>
<member name="P:AntSK.Domain.Domain.Model.MessageInfo.IsSend">
<summary>
发送是true 接收是false
</summary>
</member>
<member name="P:AntSK.Domain.Domain.Model.PageList`1.PageIndex">
<summary>
当前页从1开始
@@ -99,12 +172,29 @@
总数
</summary>
</member>
<member name="F:AntSK.Domain.Domain.Other.LLamaConfig.dicLLamaWeights">
<member name="M:AntSK.Domain.Domain.Other.Bge.BegRerankConfig.LoadModel(System.String,System.String)">
<summary>
避免模型重复加载,本地缓存
模型写死
</summary>
</member>
<member name="M:AntSK.Domain.Domain.Service.ChatService.SendChatByAppAsync(AntSK.Domain.Repositories.Apps,System.String,Microsoft.SemanticKernel.ChatCompletion.ChatHistory)">
<member name="M:AntSK.Domain.Domain.Other.Bge.BgeEmbeddingConfig.LoadModel(System.String,System.String)">
<summary>
模型写死
</summary>
</member>
<member name="P:AntSK.Domain.Domain.Other.KMExcelHandler.StepName">
<inheritdoc />
</member>
<member name="M:AntSK.Domain.Domain.Other.KMExcelHandler.InvokeAsync(Microsoft.KernelMemory.Pipeline.DataPipeline,System.Threading.CancellationToken)">
<inheritdoc />
</member>
<member name="P:AntSK.Domain.Domain.Other.QAHandler.StepName">
<inheritdoc />
</member>
<member name="M:AntSK.Domain.Domain.Other.QAHandler.InvokeAsync(Microsoft.KernelMemory.Pipeline.DataPipeline,System.Threading.CancellationToken)">
<inheritdoc />
</member>
<member name="M:AntSK.Domain.Domain.Service.ChatService.SendChatByAppAsync(AntSK.Domain.Repositories.Apps,Microsoft.SemanticKernel.ChatCompletion.ChatHistory)">
<summary>
发送消息
</summary>
@@ -162,6 +252,53 @@
<param name="history"></param>
<returns></returns>
</member>
<member name="P:AntSK.Domain.Result.Code">
<summary>
错误码0是正常返回异常返回错误码
</summary>
</member>
<member name="P:AntSK.Domain.Result.Data">
<summary>
返回数据
</summary>
</member>
<member name="P:AntSK.Domain.Result.Message">
<summary>
返回信息详情
</summary>
</member>
<member name="M:AntSK.Domain.ResponseResult.Success">
<summary>
执行成功
</summary>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ResponseResult.Success(System.Object,System.String,System.String)">
<summary>
执行成功
</summary>
<param name="data"></param>
<param name="code"></param>
<param name="message"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ResponseResult.Error(System.Object,System.String,System.String)">
<summary>
执行失败
</summary>
<param name="data"></param>
<param name="code"></param>
<param name="message"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.ResponseResult.Error(System.String,System.String)">
<summary>
执行失败
</summary>
<param name="code"></param>
<param name="message"></param>
<returns></returns>
</member>
<member name="P:AntSK.Domain.Options.DBConnectionOption.DbType">
<summary>
sqlite连接字符串
@@ -287,6 +424,56 @@
API调用秘钥
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Apps.Relevance">
<summary>
相似度
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Apps.MaxAskPromptSize">
<summary>
提问最大token数
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Apps.MaxMatchesCount">
<summary>
向量匹配数
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Apps.AnswerTokens">
<summary>
回答最大token数
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.UserName">
<summary>
用户名
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.AppId">
<summary>
应用ID
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.Context">
<summary>
消息内容
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.IsSend">
<summary>
发送是true 接收是false
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.CreateTime">
<summary>
创建时间
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Chats.FileName">
<summary>
文件名
</summary>
</member>
<member name="P:AntSK.Domain.Repositories.Funs.Path">
<summary>
接口描述
@@ -771,6 +958,28 @@
<param name="parameters"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.ConvertUtils.ComparisonIgnoreCase(System.String,System.String)">
<summary>
忽略大小写匹配
</summary>
<param name="s"></param>
<param name="value"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.ConvertUtils.Unescape(System.String)">
<summary>
\uxxxx转中文,保留换行符号
</summary>
<param name="unicodeString"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.ConvertUtils.IsStream(System.String)">
<summary>
是否为流式请求
</summary>
<param name="value"></param>
<returns></returns>
</member>
<member name="M:AntSK.Domain.Utils.RepoFiles.SamplePluginsPath">
<summary>
Scan the local folders from the repo, looking for "samples/plugins" folder.

View File

@@ -1,7 +1,7 @@
namespace AntSK.Domain.Common
{
[AttributeUsage(AttributeTargets.Method)]
public class AntSkFunctionAttribute : Attribute
public class AntSKFunctionAttribute : Attribute
{
// 自定义的ActionAttribute
}

View File

@@ -5,6 +5,7 @@ using DocumentFormat.OpenXml.Office2016.Drawing.ChartDrawing;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.OpenApi.Models;
using SqlSugar;
using Swashbuckle.AspNetCore.SwaggerGen;
@@ -19,6 +20,12 @@ namespace AntSK.Domain.Common.DependencyInjection
{
public static class InitExtensions
{
private static ILogger _logger;
public static void InitLog(ILogger logger)
{
_logger = logger;
}
/// <summary>
/// 使用codefirst创建数据库表
/// </summary>
@@ -50,6 +57,10 @@ namespace AntSK.Domain.Common.DependencyInjection
_repository.GetDB().CodeFirst.InitTables(type);
}
}
//安装向量插件
_repository.GetDB().Ado.ExecuteCommandAsync($"CREATE EXTENSION IF NOT EXISTS vector;");
_logger.LogInformation("初始化表结构完成");
}
return app;
}
@@ -70,7 +81,7 @@ namespace AntSK.Domain.Common.DependencyInjection
llamafactoryStart.Value = "false";
_dic_Repository.Insert(llamafactoryStart);
}
_logger.LogInformation("初始化数据库初始数据完成");
}
return app;
}
@@ -97,7 +108,7 @@ namespace AntSK.Domain.Common.DependencyInjection
}
catch (Exception ex)
{
Console.WriteLine(ex.Message + " ---- " + ex.StackTrace);
_logger.LogError(ex.Message + " ---- " + ex.StackTrace);
}
return app;
}

View File

@@ -0,0 +1,20 @@
using Microsoft.KernelMemory.AI;
using Microsoft.KernelMemory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Common.Embedding
{
public static class BuilderBgeExtensions
{
public static IKernelMemoryBuilder WithBgeTextEmbeddingGeneration(this IKernelMemoryBuilder builder, HuggingfaceTextEmbeddingGenerator textEmbeddingGenerator)
{
builder.AddSingleton((ITextEmbeddingGenerator)textEmbeddingGenerator);
builder.AddIngestionEmbeddingGenerator(textEmbeddingGenerator);
return builder;
}
}
}

View File

@@ -0,0 +1,53 @@
using Microsoft.KernelMemory.AI;
using AntSK.Domain.Domain.Other.Bge;
namespace AntSK.Domain.Common.Embedding
{
public class HuggingfaceTextEmbeddingGenerator : ITextEmbeddingGenerator, ITextTokenizer, IDisposable
{
public int MaxTokens => 1024;
public int MaxTokenTotal => 1024;
private readonly dynamic _embedder;
public HuggingfaceTextEmbeddingGenerator(string pyDllPath,string modelName)
{
_embedder = BgeEmbeddingConfig.LoadModel(pyDllPath, modelName);
}
public void Dispose()
{
BgeEmbeddingConfig.Dispose();
}
//public async Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingAsync(IList<string> data, CancellationToken cancellationToken = default)
//{
// IList<ReadOnlyMemory<float>> results = new List<ReadOnlyMemory<float>>();
// foreach (var d in data)
// {
// var embeddings = await EmbeddingConfig.GetEmbedding(d);
// results.Add(new ReadOnlyMemory<float>(embeddings));
// }
// return results;
//}
public async Task<Microsoft.KernelMemory.Embedding> GenerateEmbeddingAsync(string text, CancellationToken cancellationToken = default)
{
var embeddings = await BgeEmbeddingConfig.GetEmbedding(text);
return new Microsoft.KernelMemory.Embedding(embeddings);
}
public int CountTokens(string text)
{
return BgeEmbeddingConfig.TokenCount(text);
}
public IReadOnlyList<string> GetTokens(string text)
{
return new List<string>();
}
}
}

View File

@@ -0,0 +1,822 @@
using NPOI.HSSF.UserModel;
using NPOI.SS.UserModel;
using NPOI.SS.Util;
using NPOI.XSSF.Streaming;
using NPOI.XSSF.UserModel;
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
namespace AntSK.Domain
{
public class ExeclHelper
{
/// <summary>
/// 将excel导入到datatable
/// </summary>
/// <param name="filePath">excel路径</param>
/// <param name="isColumnName">第一行是否是列名</param>
/// <returns>返回datatable</returns>
public static DataTable ExcelToDataTable(string filePath, bool isColumnName)
{
DataTable dataTable = null;
FileStream fs = null;
DataColumn column = null;
DataRow dataRow = null;
IWorkbook workbook = null;
ISheet sheet = null;
IRow row = null;
ICell cell = null;
int startRow = 0;
try
{
using (fs = File.OpenRead(filePath))
{
// 2007版本
if (filePath.Contains(".xlsx"))
workbook = new XSSFWorkbook(fs);
// 2003版本
else if (filePath.Contains(".xls"))
workbook = new HSSFWorkbook(fs);
if (workbook != null)
{
sheet = workbook.GetSheetAt(0);//读取第一个sheet当然也可以循环读取每个sheet
dataTable = new DataTable();
if (sheet != null)
{
int rowCount = sheet.LastRowNum;//总行数
if (rowCount > 0)
{
IRow firstRow = sheet.GetRow(0);//第一行
int cellCount = firstRow.LastCellNum;//列数
//构建datatable的列
if (isColumnName)
{
startRow = 1;//如果第一行是列名,则从第二行开始读取
for (int i = firstRow.FirstCellNum; i < cellCount; ++i)
{
cell = firstRow.GetCell(i);
if (cell != null)
{
if (cell.StringCellValue != null)
{
column = new DataColumn(cell.StringCellValue);
dataTable.Columns.Add(column);
}
}
}
}
else
{
for (int i = firstRow.FirstCellNum; i < cellCount; ++i)
{
column = new DataColumn("column" + (i + 1));
dataTable.Columns.Add(column);
}
}
//填充行
for (int i = startRow; i <= rowCount; ++i)
{
row = sheet.GetRow(i);
if (row == null) continue;
dataRow = dataTable.NewRow();
for (int j = row.FirstCellNum; j < cellCount; ++j)
{
cell = row.GetCell(j);
if (cell == null)
{
dataRow[j] = "";
}
else
{
//CellType(Unknown = -1,Numeric = 0,String = 1,Formula = 2,Blank = 3,Boolean = 4,Error = 5,)
switch (cell.CellType)
{
case CellType.Blank:
dataRow[j] = "";
break;
case CellType.Numeric:
short format = cell.CellStyle.DataFormat;
//对时间格式2015.12.5、2015/12/5、2015-12-5等的处理
if (format == 14 || format == 31 || format == 57 || format == 58)
dataRow[j] = cell.DateCellValue;
else
dataRow[j] = cell.NumericCellValue;
break;
case CellType.String:
dataRow[j] = cell.StringCellValue;
break;
}
}
}
dataTable.Rows.Add(dataRow);
}
}
}
}
}
return dataTable;
}
catch (Exception)
{
if (fs != null)
{
fs.Close();
}
return null;
}
}
/// <summary>
/// 将excel导入到datatable
/// </summary>
/// <param name="stream">流</param>
/// <param name="isColumnName">第一行是否是列名</param>
/// <returns></returns>
public static DataTable ExcelToDataTable(Stream stream, bool isColumnName)
{
DataTable dataTable = null;
DataColumn column = null;
DataRow dataRow = null;
IWorkbook workbook = new XSSFWorkbook(stream);
ISheet sheet = null;
IRow row = null;
ICell cell = null;
int startRow = 0;
try
{
if (workbook != null)
{
sheet = workbook.GetSheetAt(0);//读取第一个sheet当然也可以循环读取每个sheet
dataTable = new DataTable();
if (sheet != null)
{
int rowCount = sheet.LastRowNum;//总行数
if (rowCount > 0)
{
IRow firstRow = sheet.GetRow(0);//第一行
int cellCount = firstRow.LastCellNum;//列数
//构建datatable的列
if (isColumnName)
{
startRow = 1;//如果第一行是列名,则从第二行开始读取
for (int i = firstRow.FirstCellNum; i < cellCount; ++i)
{
cell = firstRow.GetCell(i);
if (cell != null)
{
if (cell.StringCellValue != null)
{
column = new DataColumn(cell.StringCellValue);
dataTable.Columns.Add(column);
}
}
}
}
else
{
for (int i = firstRow.FirstCellNum; i < cellCount; ++i)
{
column = new DataColumn("column" + (i + 1));
dataTable.Columns.Add(column);
}
}
//填充行
for (int i = startRow; i <= rowCount; ++i)
{
row = sheet.GetRow(i);
if (row == null) continue;
dataRow = dataTable.NewRow();
for (int j = row.FirstCellNum; j < cellCount; ++j)
{
cell = row.GetCell(j);
if (cell == null)
{
dataRow[j] = "";
}
else
{
//CellType(Unknown = -1,Numeric = 0,String = 1,Formula = 2,Blank = 3,Boolean = 4,Error = 5,)
switch (cell.CellType)
{
case CellType.Blank:
dataRow[j] = "";
break;
case CellType.Numeric:
short format = cell.CellStyle.DataFormat;
//对时间格式2015.12.5、2015/12/5、2015-12-5等的处理
if (format == 14 || format == 31 || format == 57 || format == 58)
dataRow[j] = cell.DateCellValue;
else
dataRow[j] = cell.NumericCellValue;
break;
case CellType.String:
dataRow[j] = cell.StringCellValue;
break;
}
}
}
dataTable.Rows.Add(dataRow);
}
}
}
}
return dataTable;
}
catch (Exception)
{
throw;
}
}
/// <summary>
/// excel转list
/// </summary>
/// <typeparam name="TResult"></typeparam>
/// <param name="stream"></param>
/// <returns></returns>
public static IEnumerable<TResult> ExcelToList<TResult>(Stream stream) where TResult : new()
{
var propertyInfos = typeof(TResult).GetProperties(BindingFlags.Public | BindingFlags.Instance).Where(p => p.CustomAttributes.Count() > 0)
.OrderBy(p => p.GetCustomAttribute<ExeclPropertyAttribute>().Order).ToArray();
List<TResult> list = new List<TResult>();
IWorkbook workbook = new XSSFWorkbook(stream);
ISheet sheet = null;
IRow row = null;
ICell cell = null;
int startRow = 1;
try
{
if (workbook != null)
{
sheet = workbook.GetSheetAt(0);//读取第一个sheet当然也可以循环读取每个sheet
if (sheet != null)
{
int rowCount = sheet.LastRowNum;//总行数
if (rowCount > 0)
{
IRow firstRow = sheet.GetRow(0);//第一行
int cellCount = firstRow.LastCellNum;//列数
//填充行
for (int i = startRow; i <= rowCount; ++i)
{
row = sheet.GetRow(i);
if (row == null) continue;
bool emptyRow = true;//是否空行
TResult dataModel = new TResult();
for (int j = row.FirstCellNum; j < cellCount; ++j)
{
var execlPropertyAttribute = propertyInfos[j].GetCustomAttribute<ExeclPropertyAttribute>();
cell = row.GetCell(j);
if (cell == null)
{
propertyInfos[j].SetValue(dataModel, "");
}
else
{
switch (cell.CellType)
{
case CellType.Blank:
propertyInfos[j].SetValue(dataModel, "");
break;
case CellType.Numeric:
short format = cell.CellStyle.DataFormat;
//对时间格式2015.12.5、2015/12/5、2015-12-5等的处理
if (format == 14 || format == 31 || format == 57 || format == 58)
propertyInfos[j].SetValue(dataModel, cell.DateCellValue);
else
{
if (execlPropertyAttribute.CellType == CellType.String)
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue.ToString());
}
else
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue);
}
}
break;
case CellType.String:
propertyInfos[j].SetValue(dataModel, cell.StringCellValue);
break;
}
}
if (cell != null && !string.IsNullOrEmpty(cell.ToString().Trim()))
{
emptyRow = false;
}
}
//非空数据行数据添加到DataTable
if (!emptyRow)
{
list.Add(dataModel);
}
}
}
}
}
return list;
}
catch (Exception)
{
throw;
}
}
public static IEnumerable<TResult> ExcelToListFileName<TResult>(Stream stream, string fileName) where TResult : new()
{
var propertyInfos = typeof(TResult).GetProperties(BindingFlags.Public | BindingFlags.Instance).Where(p => p.CustomAttributes.Count() > 0)
.OrderBy(p => p.GetCustomAttribute<ExeclPropertyAttribute>().Order).ToArray();
List<TResult> list = new List<TResult>();
IWorkbook workbook = null;
if (fileName.Contains(".xlsx"))
workbook = new XSSFWorkbook(stream);
// 2003版本
else if (fileName.Contains(".xls"))
workbook = new HSSFWorkbook(stream);
ISheet sheet = null;
IRow row = null;
ICell cell = null;
int startRow = 1;
try
{
if (workbook != null)
{
sheet = workbook.GetSheetAt(0);//读取第一个sheet当然也可以循环读取每个sheet
if (sheet != null)
{
int rowCount = sheet.LastRowNum;//总行数
if (rowCount > 0)
{
IRow firstRow = sheet.GetRow(0);//第一行
int cellCount = firstRow.LastCellNum;//列数
//填充行
for (int i = startRow; i <= rowCount; ++i)
{
row = sheet.GetRow(i);
if (row == null) continue;
bool emptyRow = true;//是否空行
TResult dataModel = new TResult();
for (int j = row.FirstCellNum; j < cellCount; ++j)
{
var execlPropertyAttribute = propertyInfos[j].GetCustomAttribute<ExeclPropertyAttribute>();
cell = row.GetCell(j);
if (cell == null)
{
propertyInfos[j].SetValue(dataModel, "");
}
else
{
switch (cell.CellType)
{
case CellType.Blank:
propertyInfos[j].SetValue(dataModel, "");
break;
case CellType.Numeric:
short format = cell.CellStyle.DataFormat;
//对时间格式2015.12.5、2015/12/5、2015-12-5等的处理
if (format == 14 || format == 31 || format == 57 || format == 58)
propertyInfos[j].SetValue(dataModel, cell.DateCellValue);
else
{
if (execlPropertyAttribute.CellType == CellType.String)
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue.ToString());
}
else
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue);
}
}
break;
case CellType.String:
propertyInfos[j].SetValue(dataModel, cell.StringCellValue);
break;
}
}
if (cell != null && !string.IsNullOrEmpty(cell.ToString().Trim()))
{
emptyRow = false;
}
}
//非空数据行数据添加到DataTable
if (!emptyRow)
{
list.Add(dataModel);
}
}
}
}
}
return list;
}
catch (Exception)
{
throw;
}
}
/// <summary>
/// excel转list-根据sheetName得到List
/// </summary>
/// <typeparam name="TResult"></typeparam>
/// <param name="stream"></param>
/// <param name="sheetName"></param>
/// <returns></returns>
public static IEnumerable<TResult> ExcelToList<TResult>(Stream stream, string sheetName) where TResult : new()
{
var propertyInfos = typeof(TResult).GetProperties(BindingFlags.Public | BindingFlags.Instance)
.OrderBy(p => p.GetCustomAttribute<ExeclPropertyAttribute>().Order).ToArray();
List<TResult> list = new List<TResult>();
IWorkbook workbook = new XSSFWorkbook(stream);
ISheet sheet = null;
IRow row = null;
ICell cell = null;
int startRow = 1;
try
{
if (workbook != null)
{
sheet = workbook.GetSheet(sheetName);//根据sheet读取对应的DataTable
if (sheet != null)
{
int rowCount = sheet.LastRowNum;//总行数
if (rowCount > 0)
{
IRow firstRow = sheet.GetRow(0);//第一行
int cellCount = firstRow.LastCellNum;//列数
//填充行
for (int i = startRow; i <= rowCount; ++i)
{
row = sheet.GetRow(i);
if (row == null) continue;
bool emptyRow = true;//是否空行
TResult dataModel = new TResult();
for (int j = row.FirstCellNum; j < cellCount; ++j)
{
var execlPropertyAttribute = propertyInfos[j].GetCustomAttribute<ExeclPropertyAttribute>();
cell = row.GetCell(j);
if (cell == null)
{
propertyInfos[j].SetValue(dataModel, "");
}
else
{
switch (cell.CellType)
{
case CellType.Blank:
propertyInfos[j].SetValue(dataModel, "");
break;
case CellType.Numeric:
short format = cell.CellStyle.DataFormat;
//对时间格式2015.12.5、2015/12/5、2015-12-5等的处理
if (format == 14 || format == 31 || format == 57 || format == 58)
propertyInfos[j].SetValue(dataModel, cell.DateCellValue);
else
{
if (execlPropertyAttribute.CellType == CellType.String)
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue.ToString());
}
else
{
propertyInfos[j].SetValue(dataModel, cell.NumericCellValue);
}
}
break;
case CellType.String:
propertyInfos[j].SetValue(dataModel, cell.StringCellValue);
break;
}
}
if (cell != null && !string.IsNullOrEmpty(cell.ToString().Trim()))
{
emptyRow = false;
}
}
//非空数据行数据添加到DataTable
if (!emptyRow)
{
list.Add(dataModel);
}
}
}
}
}
return list;
}
catch (Exception ex)
{
throw;
}
}
/// <summary>
/// List导出excel 二进制流
/// </summary>
/// <typeparam name="T">实体</typeparam>
/// <param name="data">List</param>
/// <param name="sheetName">sheetname 可不填默认Sheet0</param>
/// <returns></returns>
public static byte[] ListToExcel<T>(T[] data, string sheetName = "Sheet0")
{
IWorkbook workbook = null;
IRow row = null;
ISheet sheet = null;
ICell cell = null;
var propertyInfos = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance)
.OrderBy(p => p.GetCustomAttribute<ExeclPropertyAttribute>().Order).ToArray();
workbook = new XSSFWorkbook();
sheet = workbook.CreateSheet(sheetName);//创建一个名称为Sheet0的表
int rowCount = data.Count();//行数
int columnCount = propertyInfos.Length;//列数
//设置列头
row = sheet.CreateRow(0);//excel第一行设为列头
for (int c = 0; c < columnCount; c++)
{
cell = row.CreateCell(c);
cell.SetCellValue(propertyInfos[c].GetCustomAttribute<ExeclPropertyAttribute>().DisplayName);
}
//设置每行每列的单元格,
for (int i = 0; i < rowCount; i++)
{
row = sheet.CreateRow(i + 1);
for (int j = 0; j < columnCount; j++)
{
cell = row.CreateCell(j);//excel第二行开始写入数据
cell.SetCellValue(propertyInfos[j].GetValue(data[i])?.ToString());
}
}
using (MemoryStream memoryStream = new MemoryStream())
{
workbook.Write(memoryStream);//向打开的这个xls文件中写入数据
return memoryStream.ToArray();
}
}
/// <summary>
/// Dt导出excel 二进制流
/// </summary>
/// <param name="dt">datatable</param>
/// <param name="strFile">strFile</param>
/// <returns></returns>
public static byte[] DataTableToExcel(DataTable dt, string strFile, string sheetName = "Sheet0")
{
bool result = false;
IWorkbook workbook = null;
FileStream fs = null;
IRow row = null;
ISheet sheet = null;
ICell cell = null;
if (dt != null && dt.Rows.Count > 0)
{
workbook = new XSSFWorkbook();
sheet = workbook.CreateSheet(sheetName);//创建一个名称为Sheet0的表
int rowCount = dt.Rows.Count;//行数
int columnCount = dt.Columns.Count;//列数
//设置列头
row = sheet.CreateRow(0);//excel第一行设为列头
for (int c = 0; c < columnCount; c++)
{
cell = row.CreateCell(c);
cell.SetCellValue(dt.Columns[c].ColumnName);
}
//设置每行每列的单元格,
for (int i = 0; i < rowCount; i++)
{
row = sheet.CreateRow(i + 1);
for (int j = 0; j < columnCount; j++)
{
cell = row.CreateCell(j);//excel第二行开始写入数据
cell.SetCellValue(dt.Rows[i][j].ToString());
}
}
using (MemoryStream memoryStream = new MemoryStream())
{
workbook.Write(memoryStream);//向打开的这个xls文件中写入数据
return memoryStream.ToArray();
}
}
else
{
return new byte[0];
}
}
/// <summary>
/// List写入excel
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="data"></param>
/// <param name="strFile">路径</param>
/// <param name="sheetName"></param>
/// <returns></returns>
public static bool ListWriteExcel<T>(T[] data, string strFile, string sheetName = "Sheet0")
{
bool result = false;
IWorkbook workbook = null;
FileStream fs = null;
IRow row = null;
ISheet sheet = null;
ICell cell = null;
try
{
var propertyInfos = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance)
.OrderBy(p => p.GetCustomAttribute<ExeclPropertyAttribute>().Order).ToArray();
workbook = new XSSFWorkbook();
sheet = workbook.CreateSheet(sheetName);//创建一个名称为Sheet0的表
int rowCount = data.Count();//行数
int columnCount = propertyInfos.Length;//列数
//设置列头
row = sheet.CreateRow(0);//excel第一行设为列头
for (int c = 0; c < columnCount; c++)
{
cell = row.CreateCell(c);
cell.SetCellValue(propertyInfos[c].GetCustomAttribute<ExeclPropertyAttribute>().DisplayName);
}
//设置每行每列的单元格,
for (int i = 0; i < rowCount; i++)
{
row = sheet.CreateRow(i + 1);
for (int j = 0; j < columnCount; j++)
{
cell = row.CreateCell(j);//excel第二行开始写入数据
cell.SetCellValue(propertyInfos[j].GetValue(data[i])?.ToString());
}
}
using (fs = File.OpenWrite(strFile))
{
workbook.Write(fs);//向打开的这个xls文件中写入数据
result = true;
}
return result;
}
catch (Exception ex)
{
if (fs != null)
{
fs.Close();
}
return false;
}
}
/// <summary>
/// dt写入excel
/// </summary>
/// <param name="dt">datatable</param>
/// <param name="strFile">路径</param>
/// <returns></returns>
public static bool DataTableWriteExcel(DataTable dt, string strFile, string sheetName = "Sheet0")
{
bool result = false;
IWorkbook workbook = null;
FileStream fs = null;
IRow row = null;
ISheet sheet = null;
ICell cell = null;
try
{
if (dt != null && dt.Rows.Count > 0)
{
workbook = new XSSFWorkbook();
sheet = workbook.CreateSheet(sheetName);//创建一个名称为Sheet0的表
int rowCount = dt.Rows.Count;//行数
int columnCount = dt.Columns.Count;//列数
//设置列头
row = sheet.CreateRow(0);//excel第一行设为列头
for (int c = 0; c < columnCount; c++)
{
cell = row.CreateCell(c);
cell.SetCellValue(dt.Columns[c].ColumnName);
}
//设置每行每列的单元格,
for (int i = 0; i < rowCount; i++)
{
row = sheet.CreateRow(i + 1);
for (int j = 0; j < columnCount; j++)
{
cell = row.CreateCell(j);//excel第二行开始写入数据
cell.SetCellValue(dt.Rows[i][j].ToString());
}
}
using (fs = File.OpenWrite(strFile))
{
workbook.Write(fs);//向打开的这个xls文件中写入数据
result = true;
}
}
return result;
}
catch (Exception ex)
{
if (fs != null)
{
fs.Close();
}
return false;
}
}
/// <summary>
/// 设置单元格下拉框(除去标题行)
/// </summary>
/// <param name="workbook"></param>
/// <param name="sheet"></param>
/// <param name="ddlList"></param>
/// <param name="firstcol"></param>
/// <param name="lastcol"></param>
public static void SetCellDropdownList(IWorkbook workbook, ISheet sheet, List<string> ddlList, string sheetname, int sheetIndex, int firstcol, int lastcol)
{
# region ExcelHSSFWorkbook
//ISheet sheet2 = workbook.CreateSheet(sheetname);
////隐藏
//workbook.SetSheetHidden(sheetIndex, 1);
//int rowIndex = 0;
//foreach (var item in ddlList)
//{
// IRow vrow = sheet2.CreateRow(rowIndex);
// vrow.CreateCell(0).SetCellValue(item);
// rowIndex++;
//}
////创建的下拉项的区域:
//var rangeName = sheetname + "Range";
//IName range = workbook.CreateName();
//range.RefersToFormula = sheetname + "!$A$1:$A$" + rowIndex;
//range.NameName = rangeName;
//CellRangeAddressList regions = new CellRangeAddressList(1, 65535, firstcol, lastcol);
//DVConstraint constraint = DVConstraint.CreateFormulaListConstraint(rangeName);
//HSSFDataValidation dataValidate = new HSSFDataValidation(regions, constraint);
//dataValidate.CreateErrorBox("输入不合法", "请输入或选择下拉列表中的值。");
//dataValidate.ShowPromptBox = true;
//sheet.AddValidationData(dataValidate);
#endregion
//高版本excel【XSSFWorkbook】 设置下拉框
XSSFSheet sheetDDL = (XSSFSheet)workbook.CreateSheet(sheetname);
workbook.SetSheetHidden(sheetIndex, 1); //隐藏下拉框数据sheet
String[] datas = ddlList.ToArray(); //下拉框数据源
XSSFDataValidationHelper dvHelper = new XSSFDataValidationHelper(sheetDDL);
XSSFDataValidationConstraint dvConstraint = (XSSFDataValidationConstraint)dvHelper.CreateExplicitListConstraint(datas);
CellRangeAddressList addressList = new CellRangeAddressList(1, 65535, firstcol, lastcol); //下拉设置列
XSSFDataValidation validation = (XSSFDataValidation)dvHelper.CreateValidation(dvConstraint, addressList);
validation.SuppressDropDownArrow = true;
validation.ShowErrorBox = true;
validation.ShowPromptBox = true;
sheet.AddValidationData(validation);
}
}
}

View File

@@ -0,0 +1,28 @@
using NPOI.SS.UserModel;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace AntSK.Domain
{
public class ExeclPropertyAttribute : Attribute
{
public ExeclPropertyAttribute()
{
}
public ExeclPropertyAttribute(string displayName, int order, CellType cellType = CellType.String)
{
DisplayName = displayName;
Order = order;
CellType = cellType;
}
public string DisplayName { get; set; }
public int Order { get; set; }
public CellType CellType { get; set; }
}
}

View File

@@ -1,4 +1,6 @@
using System;

using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
@@ -7,7 +9,7 @@ using System.Threading.Tasks;
namespace AntSK.Domain.Common.LLamaFactory
{
public class ProcessWrapper
public class ProcessWrapper(ILogger<ProcessWrapper> _logger)
{
private Process process;
@@ -41,7 +43,7 @@ namespace AntSK.Domain.Common.LLamaFactory
isProcessComplete = true;
}
}
Console.WriteLine(result);
_logger.LogInformation(result);
}
start.WaitForExit();
}

View File

@@ -5,6 +5,7 @@ using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
@@ -13,10 +14,10 @@ namespace AntSK.Domain.Domain.Interface
{
public interface IChatService
{
IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, ChatHistory history);
IAsyncEnumerable<string> SendChatByAppAsync(Apps app, ChatHistory history);
IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, ChatHistory history, string filePath, List<RelevantSource> relevantSources = null);
Task<ChatHistory> GetChatHistory(List<MessageInfo> MessageList);
Task<string> SendImgByAppAsync(Apps app, string questions);
Task<ChatHistory> GetChatHistory(List<Chats> MessageList, ChatHistory history);
}
}

View File

@@ -7,13 +7,13 @@ namespace AntSK.Domain.Domain.Interface
{
public interface IKMService
{
MemoryServerless GetMemory(Apps app);
MemoryServerless GetMemoryByApp(Apps app);
MemoryServerless GetMemoryByKMS(string kmsID, SearchClientConfig searchClientConfig = null);
MemoryServerless GetMemoryByKMS(string kmsID);
Task<List<KMFile>> GetDocumentByFileID(string kmsId, string fileId);
Task<List<RelevantSource>> GetRelevantSourceList(string kmsIdListStr, string msg);
Task<List<RelevantSource>> GetRelevantSourceList(Apps app, string msg);
List<UploadFileItem> FileList { get; }

View File

@@ -6,6 +6,8 @@ namespace AntSK.Domain.Domain.Interface
public interface IKernelService
{
Kernel GetKernelByApp(Apps app);
Kernel GetKernelByAIModelID(string modelid);
void ImportFunctionsByApp(Apps app, Kernel _kernel);
Task<string> HistorySummarize(Kernel _kernel, string questions, string history);
}

View File

@@ -12,7 +12,9 @@ namespace AntSK.Domain.Domain.Interface
{
public event LogMessageHandler LogMessageReceived;
Task PipInstall();
Task StartLLamaFactory(string modelName, string templateName);
Task PipInstallName(string name);
Task StartLLamaFactory(string modelName);
void KillProcess();

View File

@@ -0,0 +1,15 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static AntSK.Domain.Domain.Service.OllamaService;
namespace AntSK.Domain.Domain.Interface
{
public interface IOllamaService
{
public event LogMessageHandler LogMessageReceived;
Task StartOllama(string modelName);
}
}

View File

@@ -9,7 +9,29 @@ namespace AntSK.Domain.Domain.Model.Constant
public class KmsConstantcs
{
public const string KmsIdTag = "kmsid";
public const string FileIdTag = "fileid";
public const string AppIdTag = "appid";
public const string KmsIndex = "kms";
public const string FileIndex = "kms";
public const string KmsSearchNull="知识库未搜索到相关内容";
public const string KmsPrompt = @"使用<data></data>标记的内容作为你的知识:
<data>
{{$doc}}
</data>
--------------------------
回答要求:
- 如果你不清楚答案,你需要澄清
- 避免提及你是从<data></data>获取的知识
- 保持答案与<data></data>中描述一致
- 使用Markdown语法优化回答格式。
- 如果Markdown有图片则正常显示
--------------------------
历史聊天记录:{{ConversationSummaryPlugin.SummarizeConversation $history}}
--------------------------
用户问题: {{$input}}";
public const string KMExcelSplit = "*&antsk_excel&*";
}
}
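The placeholders in KmsPrompt ({{$doc}}, the summarized {{$history}}, and {{$input}}) are filled through Semantic Kernel prompt templating; ChatService later in this diff does exactly this. A minimal sketch of that call pattern follows; the kernel construction is assumed, and ConversationSummaryPlugin must already be registered on the kernel for the template to resolve.
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using AntSK.Domain.Domain.Model.Constant;
public static class KmsPromptDemo
{
    public static async Task AskAsync(Kernel kernel, string retrievedPassages, string history, string question)
    {
        var settings = new OpenAIPromptExecutionSettings { Temperature = 0.7 };
        KernelFunction askFunc = kernel.CreateFunctionFromPrompt(KmsConstantcs.KmsPrompt, settings);
        var args = new KernelArguments()
        {
            ["doc"] = retrievedPassages, // fills the <data> block
            ["history"] = history,       // summarized by ConversationSummaryPlugin inside the template
            ["input"] = question
        };
        await foreach (var chunk in kernel.InvokeStreamingAsync(function: askFunc, arguments: args))
        {
            Console.Write(chunk); // stream the answer as it is generated
        }
    }
}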

View File

@@ -18,4 +18,11 @@
public List<string> input { get; set; }
}
public class RerankModel
{
public string modelId { get; set; }
public string query { get; set; }
public string document { get; set; }
}
}

View File

@@ -8,6 +8,8 @@ namespace AntSK.Domain.Domain.Model.Dto
public string Text { get; set; }
public float Relevance { get; set; }
public double RerankScore { get; set; }
public override string ToString()
{
return $"[file:{SourceName};Relevance:{(Relevance * 100):F2}%]:{Text}";

View File

@@ -13,20 +13,26 @@ namespace AntSK.Domain.Domain.Model.Enum
[Display(Name = "Azure Open AI")]
AzureOpenAI = 2,
[Display(Name = "LLama本地模型")]
LLamaSharp = 3,
[Display(Name = "星火大模型")]
SparkDesk = 4,
[Display(Name = "灵积大模型")]
DashScope = 5,
[Display(Name = "LLamaFactory")]
LLamaFactory = 6,
[Display(Name = "Bge Embedding")]
BgeEmbedding = 7,
[Display(Name = "Bge Rerank")]
BgeRerank = 8,
[Display(Name = "Ollama")]
Ollama = 10,
[Display(Name = "OllamaEmbedding")]
OllamaEmbedding = 11,
[Display(Name = "模拟输出")]
Mock = 100,
}
/// <summary>
@@ -36,5 +42,6 @@ namespace AntSK.Domain.Domain.Model.Enum
{
Chat = 1,
Embedding = 2,
Rerank=4
}
}

View File

@@ -9,6 +9,7 @@ namespace AntSK.Domain.Domain.Model.Enum
public enum AppType
{
chat = 1,
kms = 2
kms = 2,
img=3
}
}

View File

@@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Domain.Model.Excel
{
public class KMSExcelModel
{
[ExeclProperty("问题",0)]
public string Question { get; set; }
[ExeclProperty("答案", 1)]
public string Answer { get; set; }
}
}

View File

@@ -17,11 +17,14 @@ namespace AntSK.Domain.Domain.Model
public string FilePath { get; set; } = "";
public string FileName { get; set; } = "";
public bool IsQA { get; set; } = false;
}
public class ImportKMSTaskReq : ImportKMSTaskDTO
{
public bool IsQA { get; set; }=false;
public KmsDetails KmsDetail { get; set; } = new KmsDetails();
}
@@ -29,6 +32,13 @@ namespace AntSK.Domain.Domain.Model
{
File = 1,
Url = 2,
Text = 3
Text = 3,
Excel=4
}
public class QAModel
{
public string ChatModelId { get; set; }
public string Context { get; set; }
}
}

View File

@@ -1,20 +0,0 @@
namespace AntSK.Domain.Domain.Model
{
public class MessageInfo
{
public string ID { get; set; } = "";
public string Context { get; set; } = "";
public string HtmlAnswers { get; set; } = "";
/// <summary>
/// true when sent by the user, false when received
/// </summary>
public bool IsSend { get; set; } = false;
public DateTime CreateTime { get; set; }
public string? FilePath { get; set; }
public string? FileName { get; set; }
}
}

View File

@@ -0,0 +1,85 @@
namespace AntSK.Domain
{
public class Result
{
/// <summary>
/// Error code: "0" means success; otherwise an error code is returned
/// </summary>
public string Code { get; set; } = "0";
/// <summary>
/// Returned data
/// </summary>
public object Data { get; set; }
/// <summary>
/// Detailed message
/// </summary>
public string Message { get; set; }
}
public static class ResponseResult
{
/// <summary>
/// Success
/// </summary>
/// <returns></returns>
public static Result Success()
{
return new Result
{
Data = "",
Code = "0",
Message = "ok"
};
}
/// <summary>
/// Success
/// </summary>
/// <param name="data"></param>
/// <param name="code"></param>
/// <param name="message"></param>
/// <returns></returns>
public static Result Success(this object data, string code = "0", string message = "ok")
{
return new Result
{
Data = data,
Code = code,
Message = message
};
}
/// <summary>
/// Failure
/// </summary>
/// <param name="data"></param>
/// <param name="code"></param>
/// <param name="message"></param>
/// <returns></returns>
public static Result Error(this object data, string code, string message)
{
return new Result
{
Data = data,
Code = code,
Message = message
};
}
/// <summary>
/// Failure
/// </summary>
/// <param name="code"></param>
/// <param name="message"></param>
/// <returns></returns>
public static Result Error(string code, string message)
{
return new Result
{
Data = "",
Code = code,
Message = message
};
}
}
}
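A brief usage note for the helpers above. The class, action name, and id lookup are placeholders that only show the intended call pattern; the repository interface is the one injected elsewhere in this diff.
using AntSK.Domain;
using AntSK.Domain.Repositories;
public class ModelEndpointDemo(IAIModels_Repositories _aIModels_Repositories)
{
    // Returns Code "0" with the model on success, or an error payload when the id is unknown.
    public Result GetAIModel(string id)
    {
        var model = _aIModels_Repositories.GetById(id);
        return model == null
            ? ResponseResult.Error("404", "model not found")
            : model.Success();
    }
}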

View File

@@ -2,26 +2,29 @@
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
namespace AntSK.Domain.Domain.Other
{
public class BackGroundTaskHandler : IBackgroundTaskHandler<ImportKMSTaskReq>
{
private readonly IServiceScopeFactory _scopeFactory;
private readonly ILogger<BackGroundTaskHandler> _logger;
public BackGroundTaskHandler(IServiceScopeFactory scopeFactory)
public BackGroundTaskHandler(IServiceScopeFactory scopeFactory, ILogger<BackGroundTaskHandler> logger)
{
_scopeFactory = scopeFactory;
_logger = logger;
}
public async Task ExecuteAsync(ImportKMSTaskReq item)
{
using (var scope = _scopeFactory.CreateScope())
{
Console.WriteLine("ExecuteAsync.开始执行后台任务");
_logger.LogInformation("ExecuteAsync.开始执行后台任务");
var importKMSService = scope.ServiceProvider.GetRequiredService<IImportKMSService>();
//must run synchronously here; do not await
importKMSService.ImportKMSTask(item);
Console.WriteLine("ExecuteAsync.后台任务执行完成");
_logger.LogInformation("ExecuteAsync.后台任务执行完成");
}
}

View File

@@ -0,0 +1,94 @@
using Newtonsoft.Json;
using Python.Runtime;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static Python.Runtime.Py;
using AntSK.Domain.Utils;
namespace AntSK.Domain.Domain.Other.Bge
{
public static class BegRerankConfig
{
public static dynamic model { get; set; }
static object lockobj = new object();
/// <summary>
/// The model is loaded once and cached (hard-coded singleton)
/// </summary>
public static dynamic LoadModel(string pythondllPath, string modelName)
{
lock (lockobj)
{
if (model == null)
{
PyRunTime.InitRunTime(pythondllPath);
try
{
using (GIL()) // acquire the Python Global Interpreter Lock
{
dynamic modelscope = Py.Import("modelscope");
dynamic flagEmbedding = Py.Import("FlagEmbedding");
dynamic model_dir = modelscope.snapshot_download(modelName, revision: "master");
if (modelName == "BAAI/bge-reranker-v2-minicpm-layerwise")
{
dynamic flagReranker = flagEmbedding.LayerWiseFlagLLMReranker(model_dir, use_fp16: true);
model = flagReranker;
}
else
{
dynamic flagReranker = flagEmbedding.FlagReranker(model_dir, use_fp16: true);
model = flagReranker;
}
return model;
}
}
catch (Exception)
{
throw; // rethrow without resetting the stack trace
}
}
else
{
return model;
}
}
}
public static double Rerank(List<string> list)
{
using (GIL())
{
try
{
PyList pyList = new PyList();
foreach (string item in list)
{
pyList.Append(item.ToPython()); // convert each C# string to a Python object and append it to the PyList
}
PyObject result = model.compute_score(pyList, normalize: true);
//BAAI/bge-reranker-v2-minicpm-layerwise
// https://www.modelscope.cn/models/AI-ModelScope/bge-reranker-v2-m3
//PyList cutoffLayers = new PyList();
//cutoffLayers.Append(new PyInt(8));
//dynamic scores = model.compute_score(pyList, cutoff_layers: cutoffLayers);
return result.ConvertToDouble();
}
catch (Exception)
{
throw; // rethrow without resetting the stack trace
}
}
}
}
}
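A short sketch of the call pattern for the reranker above, matching how ChatService uses it later in this diff. The libpython path and model id are placeholders that depend on the host environment (in this codebase, EndPoint stores the python DLL path and ModelName the ModelScope id).
using System;
using System.Collections.Generic;
using AntSK.Domain.Domain.Other.Bge;
public static class RerankDemo
{
    public static void Run()
    {
        BegRerankConfig.LoadModel("/usr/lib/x86_64-linux-gnu/libpython3.10.so", "AI-ModelScope/bge-reranker-v2-m3");
        double score = BegRerankConfig.Rerank(new List<string>
        {
            "what is AntSK?",                                                        // query
            "AntSK is a knowledge base built on Semantic Kernel and Kernel Memory."  // candidate passage
        });
        Console.WriteLine($"rerank score (normalized): {score:F4}");
    }
}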

View File

@@ -0,0 +1,92 @@
using Microsoft.KernelMemory.AI.OpenAI;
using Python.Runtime;
using Serilog;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static Python.Runtime.Py;
namespace AntSK.Domain.Domain.Other.Bge
{
public static class BgeEmbeddingConfig
{
public static dynamic model { get; set; }
static object lockobj = new object();
/// <summary>
/// The model is loaded once and cached (hard-coded singleton)
/// </summary>
public static dynamic LoadModel(string pythondllPath, string modelName)
{
lock (lockobj)
{
if (model == null)
{
PyRunTime.InitRunTime(pythondllPath);
try
{
using (GIL()) // acquire the Python Global Interpreter Lock
{
dynamic modelscope = Import("modelscope");
//dynamic model_dir = modelscope.snapshot_download("AI-ModelScope/bge-large-zh-v1.5", revision: "master");
dynamic model_dir = modelscope.snapshot_download(modelName, revision: "master");
dynamic HuggingFaceBgeEmbeddingstemp = Import("langchain_community.embeddings.huggingface");
dynamic HuggingFaceBgeEmbeddings = HuggingFaceBgeEmbeddingstemp.HuggingFaceBgeEmbeddings;
string model_name = model_dir;
dynamic model_kwargs = new PyDict();
model_kwargs["device"] = new PyString("cpu");
dynamic hugginmodel = HuggingFaceBgeEmbeddings(
model_name: model_dir,
model_kwargs: model_kwargs
);
model = hugginmodel;
return hugginmodel;
}
}
catch (Exception)
{
throw; // rethrow without resetting the stack trace
}
}
else
return model;
}
}
public static Task<float[]> GetEmbedding(string queryStr)
{
using (GIL())
{
PyObject queryResult = model.embed_query(queryStr);
var floatList = queryResult.As<float[]>();
return Task.FromResult(floatList);
}
}
public static int TokenCount(string queryStr)
{
//using (Py.GIL())
//{
// PyObject queryResult = model.client.tokenize(queryStr);
// // use Python's built-in len() to get the token count
// PyObject lenFunc = Py.Import("builtins").GetAttr("len");
// PyObject length = lenFunc.Invoke(queryResult["input_ids"]);
// int len = length.As<int>(); // convert the PyObject to a C# int
// return len;
//}
var tokenCount1 = DefaultGPTTokenizer.StaticCountTokens(queryStr);
return tokenCount1;
}
public static void Dispose()
{
Log.Information("python dispose");
}
}
}
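Similarly, a minimal sketch for the embedding wrapper above; the libpython path is a placeholder, and the model id is the one mentioned in the commented-out line in LoadModel.
using System;
using System.Threading.Tasks;
using AntSK.Domain.Domain.Other.Bge;
public static class EmbeddingDemo
{
    public static async Task RunAsync()
    {
        BgeEmbeddingConfig.LoadModel("/usr/lib/x86_64-linux-gnu/libpython3.10.so", "AI-ModelScope/bge-large-zh-v1.5");
        float[] vector = await BgeEmbeddingConfig.GetEmbedding("什么是AntSK?");
        Console.WriteLine($"dimensions: {vector.Length}, approx tokens: {BgeEmbeddingConfig.TokenCount("什么是AntSK?")}");
    }
}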

View File

@@ -0,0 +1,28 @@
using Python.Runtime;
namespace AntSK.Domain.Domain.Other.Bge
{
public static class PyRunTime
{
static object lockobj = new object();
static bool isInit = false;
public static void InitRunTime(string pythonPath)
{
lock (lockobj)
{
if (!isInit)
{
if (string.IsNullOrEmpty(Runtime.PythonDLL))
{
Runtime.PythonDLL = pythonPath;
}
PythonEngine.Initialize();
PythonEngine.BeginAllowThreads();
isInit = true;
}
}
}
}
}

View File

@@ -0,0 +1,157 @@
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Utils;
using Microsoft.Extensions.Logging;
using Microsoft.KernelMemory.AI.OpenAI;
using Microsoft.KernelMemory.Configuration;
using Microsoft.KernelMemory.DataFormats.Text;
using Microsoft.KernelMemory.Diagnostics;
using Microsoft.KernelMemory.Extensions;
using Microsoft.KernelMemory.Pipeline;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Domain.Other
{
public class KMExcelHandler: IPipelineStepHandler
{
private readonly TextPartitioningOptions _options;
private readonly IPipelineOrchestrator _orchestrator;
private readonly ILogger<KMExcelHandler> _log;
private readonly TextChunker.TokenCounter _tokenCounter;
public KMExcelHandler(
string stepName,
IPipelineOrchestrator orchestrator,
TextPartitioningOptions? options = null,
ILogger<KMExcelHandler>? log = null)
{
this.StepName = stepName;
this._orchestrator = orchestrator;
this._options = options ?? new TextPartitioningOptions();
this._options.Validate();
this._log = log ?? DefaultLogger<KMExcelHandler>.Instance;
this._tokenCounter = DefaultGPTTokenizer.StaticCountTokens;
}
/// <inheritdoc />
public string StepName { get; }
/// <inheritdoc />
public async Task<(bool success, DataPipeline updatedPipeline)> InvokeAsync(
DataPipeline pipeline, CancellationToken cancellationToken = default)
{
this._log.LogDebug("Partitioning text, pipeline '{0}/{1}'", pipeline.Index, pipeline.DocumentId);
if (pipeline.Files.Count == 0)
{
this._log.LogWarning("Pipeline '{0}/{1}': there are no files to process, moving to next pipeline step.", pipeline.Index, pipeline.DocumentId);
return (true, pipeline);
}
foreach (DataPipeline.FileDetails uploadedFile in pipeline.Files)
{
// Track new files being generated (cannot edit originalFile.GeneratedFiles while looping it)
Dictionary<string, DataPipeline.GeneratedFileDetails> newFiles = new();
foreach (KeyValuePair<string, DataPipeline.GeneratedFileDetails> generatedFile in uploadedFile.GeneratedFiles)
{
var file = generatedFile.Value;
if (file.AlreadyProcessedBy(this))
{
this._log.LogTrace("File {0} already processed by this handler", file.Name);
continue;
}
// Partition only the original text
if (file.ArtifactType != DataPipeline.ArtifactTypes.ExtractedText)
{
this._log.LogTrace("Skipping file {0} (not original text)", file.Name);
continue;
}
// Use a different partitioning strategy depending on the file type
List<string> partitions;
List<string> sentences;
BinaryData partitionContent = await this._orchestrator.ReadFileAsync(pipeline, file.Name, cancellationToken).ConfigureAwait(false);
// Skip empty partitions. Also: partitionContent.ToString() throws an exception if there are no bytes.
if (partitionContent.ToArray().Length == 0) { continue; }
switch (file.MimeType)
{
case MimeTypes.PlainText:
{
this._log.LogDebug("Partitioning text file {0}", file.Name);
string content = partitionContent.ToString();
var excelList = content.Split(KmsConstantcs.KMExcelSplit, StringSplitOptions.RemoveEmptyEntries).ToList();
sentences = excelList;
partitions = excelList;
break;
}
case MimeTypes.MarkDown:
{
this._log.LogDebug("Partitioning text file {0}", file.Name);
string content = partitionContent.ToString();
var excelList = content.Split(KmsConstantcs.KMExcelSplit, StringSplitOptions.RemoveEmptyEntries).ToList();
sentences = excelList;
partitions = excelList;
break;
}
default:
this._log.LogWarning("File {0} cannot be partitioned, type '{1}' not supported", file.Name, file.MimeType);
// Don't partition other files
continue;
}
if (partitions.Count == 0) { continue; }
this._log.LogDebug("Saving {0} file partitions", partitions.Count);
for (int partitionNumber = 0; partitionNumber < partitions.Count; partitionNumber++)
{
// TODO: turn partitions in objects with more details, e.g. page number
string text = partitions[partitionNumber];
int sectionNumber = 0; // TODO: use this to store the page number (if any)
BinaryData textData = new(text);
int tokenCount = this._tokenCounter(text);
this._log.LogDebug("Partition size: {0} tokens", tokenCount);
var destFile = uploadedFile.GetPartitionFileName(partitionNumber);
await this._orchestrator.WriteFileAsync(pipeline, destFile, textData, cancellationToken).ConfigureAwait(false);
var destFileDetails = new DataPipeline.GeneratedFileDetails
{
Id = Guid.NewGuid().ToString("N"),
ParentId = uploadedFile.Id,
Name = destFile,
Size = text.Length,
MimeType = MimeTypes.PlainText,
ArtifactType = DataPipeline.ArtifactTypes.TextPartition,
PartitionNumber = partitionNumber,
SectionNumber = sectionNumber,
Tags = pipeline.Tags,
ContentSHA256 = textData.AntSKCalculateSHA256(),
};
newFiles.Add(destFile, destFileDetails);
destFileDetails.MarkProcessedBy(this);
}
file.MarkProcessedBy(this);
}
// Add new files to pipeline status
foreach (var file in newFiles)
{
uploadedFile.GeneratedFiles.Add(file.Key, file.Value);
}
}
return (true, pipeline);
}
}
}
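The handler above splits extracted text only on the KMExcelSplit marker (one Kernel Memory partition per Q/A pair) instead of splitting by token count. A tiny illustration of that split follows; the Q/A strings are made-up sample data.
using System;
using System.Linq;
using AntSK.Domain.Domain.Model.Constant;
public static class ExcelSplitDemo
{
    public static void Run()
    {
        string content =
            "Question:What is AntSK?\nAnswer:An AI knowledge base." + KmsConstantcs.KMExcelSplit +
            "Question:Which runtime does it target?\nAnswer:.NET." + KmsConstantcs.KMExcelSplit;
        var partitions = content
            .Split(KmsConstantcs.KMExcelSplit, StringSplitOptions.RemoveEmptyEntries)
            .ToList();
        Console.WriteLine(partitions.Count); // 2, one partition per Q/A pair
    }
}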

View File

@@ -1,44 +0,0 @@
using LLama;
using LLama.Common;
using LLamaSharp.KernelMemory;
namespace AntSK.Domain.Domain.Other
{
public static class LLamaConfig
{
static object lockobj = new object();
/// <summary>
/// Local cache to avoid reloading the model repeatedly
/// </summary>
static Dictionary<string, (LLamaWeights, ModelParams)> dicLLamaWeights = new Dictionary<string, (LLamaWeights, ModelParams)>();
public static (LLamaWeights, ModelParams) GetLLamaConfig(string modelPath, LLamaSharpConfig config = null)
{
lock (lockobj)
{
if (dicLLamaWeights.ContainsKey(modelPath))
{
return dicLLamaWeights.GetValueOrDefault(modelPath);
}
else
{
InferenceParams infParams = new() { AntiPrompts = ["\n\n"] };
LLamaSharpConfig lsConfig = new(modelPath) { DefaultInferenceParams = infParams };
if (config != null)
{
lsConfig = config;
}
var parameters = new ModelParams(lsConfig.ModelPath)
{
ContextSize = lsConfig?.ContextSize ?? 2048,
Seed = lsConfig?.Seed ?? 0,
GpuLayerCount = lsConfig?.GpuLayerCount ?? 20,
EmbeddingMode = true
};
var weights = LLamaWeights.LoadFromFile(parameters);
dicLLamaWeights.Add(modelPath, (weights, parameters));
return (weights, parameters);
}
}
}
}
}

View File

@@ -0,0 +1,173 @@
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Utils;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.KernelMemory.AI.OpenAI;
using Microsoft.KernelMemory.Configuration;
using Microsoft.KernelMemory.DataFormats.Text;
using Microsoft.KernelMemory.Diagnostics;
using Microsoft.KernelMemory.Extensions;
using Microsoft.KernelMemory.Pipeline;
using Microsoft.SemanticKernel;
using Newtonsoft.Json;
using RestSharp;
using System.Security.Policy;
using System.Text;
using System.Text.RegularExpressions;
namespace AntSK.Domain.Domain.Other
{
public class QAHandler : IPipelineStepHandler
{
private readonly TextPartitioningOptions _options;
private readonly IPipelineOrchestrator _orchestrator;
private readonly ILogger<QAHandler> _log;
private readonly TextChunker.TokenCounter _tokenCounter;
private readonly IKernelService _kernelService;
public QAHandler(
string stepName,
IPipelineOrchestrator orchestrator,
IKernelService kernelService,
TextPartitioningOptions? options = null,
ILogger<QAHandler>? log = null
)
{
this.StepName = stepName;
this._orchestrator = orchestrator;
this._options = options ?? new TextPartitioningOptions();
this._options.Validate();
this._log = log ?? DefaultLogger<QAHandler>.Instance;
this._tokenCounter = DefaultGPTTokenizer.StaticCountTokens;
this._kernelService = kernelService;
}
/// <inheritdoc />
public string StepName { get; }
/// <inheritdoc />
public async Task<(bool success, DataPipeline updatedPipeline)> InvokeAsync(
DataPipeline pipeline, CancellationToken cancellationToken = default)
{
this._log.LogDebug("Partitioning text, pipeline '{0}/{1}'", pipeline.Index, pipeline.DocumentId);
if (pipeline.Files.Count == 0)
{
this._log.LogWarning("Pipeline '{0}/{1}': there are no files to process, moving to next pipeline step.", pipeline.Index, pipeline.DocumentId);
return (true, pipeline);
}
foreach (DataPipeline.FileDetails uploadedFile in pipeline.Files)
{
// Track new files being generated (cannot edit originalFile.GeneratedFiles while looping it)
Dictionary<string, DataPipeline.GeneratedFileDetails> newFiles = new();
foreach (KeyValuePair<string, DataPipeline.GeneratedFileDetails> generatedFile in uploadedFile.GeneratedFiles)
{
var file = generatedFile.Value;
if (file.AlreadyProcessedBy(this))
{
this._log.LogTrace("File {0} already processed by this handler", file.Name);
continue;
}
// Partition only the original text
if (file.ArtifactType != DataPipeline.ArtifactTypes.ExtractedText)
{
this._log.LogTrace("Skipping file {0} (not original text)", file.Name);
continue;
}
// Use a different partitioning strategy depending on the file type
List<string> partitions;
List<string> sentences;
BinaryData partitionContent = await this._orchestrator.ReadFileAsync(pipeline, file.Name, cancellationToken).ConfigureAwait(false);
// Skip empty partitions. Also: partitionContent.ToString() throws an exception if there are no bytes.
if (partitionContent.ToArray().Length == 0) { continue; }
switch (file.MimeType)
{
case MimeTypes.PlainText:
case MimeTypes.MarkDown:
{
this._log.LogDebug("Partitioning text file {0}", file.Name);
string content = partitionContent.ToString();
var kernel = _kernelService.GetKernelByAIModelID(StepName);
var lines = TextChunker.SplitPlainTextLines(content, 299);
var paragraphs = TextChunker.SplitPlainTextParagraphs(lines, 3000);
KernelFunction jsonFun = kernel.Plugins.GetFunction("KMSPlugin", "QA");
List<string> qaList = new List<string>();
foreach (var para in paragraphs)
{
var qaresult = await kernel.InvokeAsync(function: jsonFun, new KernelArguments() { ["input"] = para });
var qaListStr = qaresult.GetValue<string>().ConvertToString();
string pattern = @"Q\d+:.*?A\d+:.*?(?=(Q\d+:|$))";
RegexOptions options = RegexOptions.Singleline;
foreach (Match match in Regex.Matches(qaListStr, pattern, options))
{
qaList.Add(match.Value.Trim()); // Trim removes any leading/trailing whitespace
}
}
sentences = qaList;
partitions = qaList;
break;
}
default:
this._log.LogWarning("File {0} cannot be partitioned, type '{1}' not supported", file.Name, file.MimeType);
// Don't partition other files
continue;
}
if (partitions.Count == 0) { continue; }
this._log.LogDebug("Saving {0} file partitions", partitions.Count);
for (int partitionNumber = 0; partitionNumber < partitions.Count; partitionNumber++)
{
// TODO: turn partitions in objects with more details, e.g. page number
string text = partitions[partitionNumber];
int sectionNumber = 0; // TODO: use this to store the page number (if any)
BinaryData textData = new(text);
int tokenCount = this._tokenCounter(text);
this._log.LogDebug("Partition size: {0} tokens", tokenCount);
var destFile = uploadedFile.GetPartitionFileName(partitionNumber);
await this._orchestrator.WriteFileAsync(pipeline, destFile, textData, cancellationToken).ConfigureAwait(false);
var destFileDetails = new DataPipeline.GeneratedFileDetails
{
Id = Guid.NewGuid().ToString("N"),
ParentId = uploadedFile.Id,
Name = destFile,
Size = text.Length,
MimeType = MimeTypes.PlainText,
ArtifactType = DataPipeline.ArtifactTypes.TextPartition,
PartitionNumber = partitionNumber,
SectionNumber = sectionNumber,
Tags = pipeline.Tags,
ContentSHA256 = textData.AntSKCalculateSHA256(),
};
newFiles.Add(destFile, destFileDetails);
destFileDetails.MarkProcessedBy(this);
}
file.MarkProcessedBy(this);
}
// Add new files to pipeline status
foreach (var file in newFiles)
{
uploadedFile.GeneratedFiles.Add(file.Key, file.Value);
}
}
return (true, pipeline);
}
}
}
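For reference, the Q/A extraction regex above behaves like this on a generated response; the sample text is illustrative only.
using System;
using System.Text.RegularExpressions;
public static class QaRegexDemo
{
    public static void Run()
    {
        string qaListStr =
            "Q1: What does QAHandler do? A1: It turns extracted text into question/answer partitions. " +
            "Q2: When is it used? A2: When a knowledge base import is flagged as QA.";
        string pattern = @"Q\d+:.*?A\d+:.*?(?=(Q\d+:|$))";
        foreach (Match match in Regex.Matches(qaListStr, pattern, RegexOptions.Singleline))
        {
            Console.WriteLine(match.Value.Trim()); // prints one Q/A pair per line, i.e. one partition each
        }
    }
}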

View File

@@ -1,21 +1,23 @@
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Repositories;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel;
using System.Text;
using AntSK.Domain.Utils;
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Domain.Model.Constant;
using DocumentFormat.OpenXml.Drawing;
using System.Reflection.Metadata;
using Microsoft.KernelMemory;
using System.Collections.Generic;
using Markdig;
using ChatHistory = Microsoft.SemanticKernel.ChatCompletion.ChatHistory;
using Microsoft.SemanticKernel.Plugins.Core;
using Azure.Core;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Domain.Other.Bge;
using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using AntSK.LLM.StableDiffusion;
using Markdig;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using ChatHistory = Microsoft.SemanticKernel.ChatCompletion.ChatHistory;
namespace AntSK.Domain.Domain.Service
{
@@ -23,7 +25,8 @@ namespace AntSK.Domain.Domain.Service
public class ChatService(
IKernelService _kernelService,
IKMService _kMService,
IKmsDetails_Repositories _kmsDetails_Repositories
IKmsDetails_Repositories _kmsDetails_Repositories,
IAIModels_Repositories _aIModels_Repositories
) : IChatService
{
/// <summary>
@@ -33,87 +36,190 @@ namespace AntSK.Domain.Domain.Service
/// <param name="questions"></param>
/// <param name="history"></param>
/// <returns></returns>
public async IAsyncEnumerable<StreamingKernelContent> SendChatByAppAsync(Apps app, string questions, ChatHistory history)
public async IAsyncEnumerable<string> SendChatByAppAsync(Apps app, ChatHistory history)
{
if (string.IsNullOrEmpty(app.Prompt) || !app.Prompt.Contains("{{$input}}"))
{
//if the prompt template is empty or lacks {{$input}}, append the default input placeholder
app.Prompt = app.Prompt.ConvertToString() + "{{$input}}";
}
KernelArguments args =new KernelArguments();
if (history.Count > 10)
{
app.Prompt = @"${{ConversationSummaryPlugin.SummarizeConversation $history}}" + app.Prompt;
args = new() {
{ "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) },
{ "input", questions }
};
}
else
{
args=new()
{
{ "input", $"{string.Join("\n", history.Select(x => x.Role + ": " + x.Content))}{Environment.NewLine} user:{questions}" }
};
}
var _kernel = _kernelService.GetKernelByApp(app);
var chat = _kernel.GetRequiredService<IChatCompletionService>();
var temperature = app.Temperature / 100; //stored as 0~100, scale down to 0~1
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
List<string> completionList = new List<string>();
if (!string.IsNullOrEmpty(app.ApiFunctionList) || !string.IsNullOrEmpty(app.NativeFunctionList)) //API function plugins or native (local) plugins are configured
{
_kernelService.ImportFunctionsByApp(app, _kernel);
settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
settings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions;
while (true)
{
ChatMessageContent result = await chat.GetChatMessageContentAsync(history, settings, _kernel);
if (result.Content is not null)
{
string chunkCompletion = result.Content.ConvertToString();
completionList.Add(chunkCompletion);
foreach (var content in completionList)
{
yield return content.ConvertToString();
}
break;
}
history.Add(result);
IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
if (!functionCalls.Any())
{
break;
}
foreach (var functionCall in functionCalls)
{
FunctionResultContent resultContent = await functionCall.InvokeAsync(_kernel);
history.Add(resultContent.ToChatMessage());
}
}
}
var func = _kernel.CreateFunctionFromPrompt(app.Prompt, settings);
var chatResult = _kernel.InvokeStreamingAsync(function: func,
arguments: args);
await foreach (var content in chatResult)
else
{
yield return content;
var chatResult = chat.GetStreamingChatMessageContentsAsync(history, settings, _kernel);
await foreach (var content in chatResult)
{
yield return content.ConvertToString();
}
}
}
public async IAsyncEnumerable<StreamingKernelContent> SendKmsByAppAsync(Apps app, string questions, ChatHistory history, string filePath, List<RelevantSource> relevantSources = null)
{
var relevantSourceList = await _kMService.GetRelevantSourceList(app.KmsIdList, questions);
relevantSources?.Clear();
List<RelevantSource> relevantSourceList = new List<RelevantSource>();
var _kernel = _kernelService.GetKernelByApp(app);
if (!string.IsNullOrWhiteSpace(filePath))
{
var memory = _kMService.GetMemory(app);
var fileId = Guid.NewGuid().ToString();
var result = await memory.ImportDocumentAsync(new Microsoft.KernelMemory.Document(fileId).AddFile(filePath)
.AddTag(KmsConstantcs.KmsIdTag, app.Id)
, index: KmsConstantcs.KmsIndex);
//Q&A over an uploaded file
var memory = _kMService.GetMemoryByApp(app);
var filters = new MemoryFilter().ByTag(KmsConstantcs.KmsIdTag, app.Id);
// regex that matches a GUID
string pattern = @"\b[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\b";
var searchResult = await memory.SearchAsync(questions, index: KmsConstantcs.KmsIndex, filters: [filters]);
relevantSourceList.AddRange(searchResult.Results.SelectMany(item => item.Partitions.Select(part => new RelevantSource()
// use the regex to extract the file id (GUID) from the file path
Match match = Regex.Match(filePath, pattern);
if (match.Success)
{
SourceName = item.SourceName,
Text = Markdown.ToHtml(part.Text),
Relevance = part.Relevance
})));
var fileId = match.Value;
var status = await memory.IsDocumentReadyAsync(fileId, index: KmsConstantcs.KmsIndex);
if (!status)
{
var result = await memory.ImportDocumentAsync(new Document(fileId).AddFile(filePath)
.AddTag(KmsConstantcs.AppIdTag, app.Id)
.AddTag(KmsConstantcs.FileIdTag, fileId)
, index: KmsConstantcs.FileIndex);
}
var filters = new List<MemoryFilter>() {
new MemoryFilter().ByTag(KmsConstantcs.AppIdTag, app.Id),
new MemoryFilter().ByTag(KmsConstantcs.FileIdTag, fileId)
};
var searchResult = await memory.SearchAsync(questions, index: KmsConstantcs.FileIndex, filters: filters);
relevantSourceList.AddRange(searchResult.Results.SelectMany(item => item.Partitions.Select(part => new RelevantSource()
{
SourceName = item.SourceName,
Text = Markdown.ToHtml(part.Text),
Relevance = part.Relevance
})));
app.Prompt = KmsConstantcs.KmsPrompt;
}
}
else
{
//Q&A against the knowledge base
relevantSourceList = await _kMService.GetRelevantSourceList(app, questions);
}
var dataMsg = new StringBuilder();
if (relevantSourceList.Any())
{
relevantSources?.AddRange(relevantSourceList);
if (!string.IsNullOrEmpty(app.RerankModelID))
{
var rerankModel=_aIModels_Repositories.GetById(app.RerankModelID);
BegRerankConfig.LoadModel(rerankModel.EndPoint, rerankModel.ModelName);
//rerank each retrieved passage against the question
foreach (var item in relevantSourceList)
{
List<string> rerank = new List<string>();
rerank.Add(questions);
rerank.Add(item.Text);
item.RerankScore = BegRerankConfig.Rerank(rerank);
}
relevantSourceList = relevantSourceList.OrderByDescending(p => p.RerankScore).Take(app.MaxMatchesCount).ToList();
}
bool isSearch = false;
foreach (var item in relevantSourceList)
{
dataMsg.AppendLine(item.ToString());
if (!string.IsNullOrEmpty(app.RerankModelID))
{
//filter by the reranked similarity score
if (item.RerankScore >= app.Relevance / 100)
{
dataMsg.AppendLine(item.ToString());
isSearch = true;
}
}
else
{
//filter by the vector relevance score
if (item.Relevance >= app.Relevance / 100)
{
dataMsg.AppendLine(item.ToString());
isSearch = true;
}
}
}
KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask1");
var chatResult = _kernel.InvokeStreamingAsync(function: jsonFun,
arguments: new KernelArguments() { ["doc"] = dataMsg, ["history"] = string.Join("\n", history.Select(x => x.Role + ": " + x.Content)), ["questions"] = questions });
await foreach (var content in chatResult)
//prepare the sources for markdown display
relevantSources?.AddRange(relevantSourceList);
Dictionary<string, string> fileDic = new Dictionary<string, string>();
foreach (var item in relevantSourceList)
{
yield return content;
if (fileDic.ContainsKey(item.SourceName))
{
item.SourceName = fileDic[item.SourceName];
}
else
{
var fileDetail = _kmsDetails_Repositories.GetFirst(p => p.FileGuidName == item.SourceName);
if (fileDetail.IsNotNull())
{
string fileName = fileDetail.FileName;
fileDic.Add(item.SourceName, fileName);
item.SourceName = fileName;
}
}
item.Text = Markdown.ToHtml(item.Text);
}
if (isSearch)
{
//KernelFunction jsonFun = _kernel.Plugins.GetFunction("KMSPlugin", "Ask1");
var temperature = app.Temperature / 100; //stored as 0~100, scale down to 0~1
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
var func = _kernel.CreateFunctionFromPrompt(app.Prompt , settings);
var chatResult = _kernel.InvokeStreamingAsync(function: func,
arguments: new KernelArguments() { ["doc"] = dataMsg.ToString(), ["history"] = string.Join("\n", history.Select(x => x.Role + ": " + x.Content)), ["input"] = questions });
await foreach (var content in chatResult)
{
yield return content;
}
}
else
{
yield return new StreamingTextContent(KmsConstantcs.KmsSearchNull);
}
}
else
@@ -122,25 +228,127 @@ namespace AntSK.Domain.Domain.Service
}
}
public async Task<ChatHistory> GetChatHistory(List<MessageInfo> MessageList)
{
ChatHistory history = new ChatHistory();
if (MessageList.Count > 1)
{
foreach (var item in MessageList)
public async Task<string> SendImgByAppAsync(Apps app, string questions)
{
var imageModel = _aIModels_Repositories.GetFirst(p => p.Id == app.ImageModelID);
KernelArguments args = new() {
{ "input", questions }
};
var _kernel = _kernelService.GetKernelByApp(app);
var temperature = app.Temperature / 100; //stored as 0~100, scale down to 0~1
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
var func = _kernel.CreateFunctionFromPrompt("Translate this into English:{{$input}}", settings);
var chatResult = await _kernel.InvokeAsync(function: func, arguments: args);
if (chatResult.IsNotNull())
{
//Can load the stable-diffusion library for different environments
//SDHelper.LoadLibrary()
string versionString = string.Empty;
string extensionString = string.Empty;
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
if (item.IsSend)
extensionString = ".dll";
}
else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
extensionString = ".so";
}
else
{
throw new InvalidOperationException("OS platform not supported");
}
ProcessStartInfo startInfo = new ProcessStartInfo("nvcc", "--version");
startInfo.RedirectStandardOutput = true;
startInfo.UseShellExecute = false;
startInfo.CreateNoWindow = true;
using (Process process = Process.Start(startInfo))
{
if (process != null)
{
history.AddUserMessage(item.Context);
string result = process.StandardOutput.ReadToEnd();
Regex regex = new Regex(@"release (\d+).[\d]");
Match match = regex.Match(result);
if (match.Success)
{
switch (match.Groups[1].Value.ToString())
{
case "11":
versionString = "Cuda11";
break;
case "12":
versionString = "Cuda12";
break;
default:
versionString = "CPU";
break;
}
}
}
else
{
history.AddAssistantMessage(item.Context);
throw new Exception("nvcc returned an error");
}
}
string libraryPath = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "StableDiffusion", "Backend", versionString, "stable-diffusion" + extensionString);
NativeLibrary.TryLoad(libraryPath, out _);
string prompt = chatResult.GetValue<string>();
if (!SDHelper.IsInitialized)
{
Structs.ModelParams modelParams = new Structs.ModelParams
{
ModelPath = imageModel.ModelName,
RngType = Structs.RngType.CUDA_RNG,
//VaePath = vaePath,
//KeepVaeOnCpu = keepVaeOnCpu,
//set false can get a better image, otherwise can use lower vram
VaeTiling = false,
//LoraModelDir = loraModelDir,
};
bool result = SDHelper.Initialize(modelParams);
}
Structs.TextToImageParams textToImageParams = new Structs.TextToImageParams
{
Prompt = prompt,
NegativePrompt = "bad quality, wrong image, worst quality",
SampleMethod = (Structs.SampleMethod)Enum.Parse(typeof(Structs.SampleMethod), "EULER_A"),
//the base image size in SD1.5 is 512x512
Width = 512,
Height = 512,
NormalizeInput = true,
ClipSkip = -1,
CfgScale = 7,
SampleSteps = 20,
Seed = -1,
};
Bitmap[] outputImages = SDHelper.TextToImage(textToImageParams);
var base64 = ImageUtils.BitmapToBase64(outputImages[0]);
return base64;
}
else
{
return "";
}
}
public async Task<ChatHistory> GetChatHistory(List<Chats> MessageList, ChatHistory history)
{
foreach (var item in MessageList)
{
if (item.IsSend)
{
history.AddUserMessage(item.Context);
}
else
{
history.AddAssistantMessage(item.Context);
}
}
return history;
}
}
}
}
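Given the signature change from IAsyncEnumerable<StreamingKernelContent> to IAsyncEnumerable<string>, callers of SendChatByAppAsync now stream plain text chunks and the user question travels inside the ChatHistory. A minimal consumption sketch follows; the Apps construction and using directives are best-effort assumptions.
using System;
using System.Threading.Tasks;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Repositories;
using Microsoft.SemanticKernel.ChatCompletion;
public class ChatStreamDemo(IChatService _chatService)
{
    public async Task RunAsync(Apps app, string question)
    {
        var history = new ChatHistory();
        history.AddUserMessage(question); // the question is carried in the history now
        await foreach (string chunk in _chatService.SendChatByAppAsync(app, history))
        {
            Console.Write(chunk); // render each streamed segment as it arrives
        }
    }
}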

View File

@@ -8,6 +8,7 @@ using System.Text.RegularExpressions;
using Microsoft.SemanticKernel;
using HtmlAgilityPack;
using System.Collections.Generic;
using Serilog;
namespace AntSK.Domain.Domain.Service
{
@@ -115,7 +116,7 @@ namespace AntSK.Domain.Domain.Service
}
catch (Exception ex)
{
Console.WriteLine(ex.Message + " ---- " + ex.StackTrace);
Log.Error(ex.Message + " ---- " + ex.StackTrace);
}
}
}

View File

@@ -2,8 +2,13 @@
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model;
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Domain.Model.Excel;
using AntSK.Domain.Domain.Other;
using AntSK.Domain.Repositories;
using Microsoft.Extensions.Logging;
using Microsoft.KernelMemory;
using Microsoft.KernelMemory.Handlers;
using System.Text;
namespace AntSK.Domain.Domain.Service
{
@@ -11,7 +16,8 @@ namespace AntSK.Domain.Domain.Service
public class ImportKMSService(
IKMService _kMService,
IKmsDetails_Repositories _kmsDetails_Repositories,
IKmss_Repositories _kmss_Repositories
IKmss_Repositories _kmss_Repositories,
ILogger<ImportKMSService> _logger
) : IImportKMSService
{
@@ -20,18 +26,40 @@ namespace AntSK.Domain.Domain.Service
try
{
var km = _kmss_Repositories.GetFirst(p => p.Id == req.KmsId);
var _memory = _kMService.GetMemoryByKMS(km.Id);
string fileid = req.KmsDetail.Id;
List<string> step = new List<string>();
if (req.IsQA)
{
_memory.Orchestrator.AddHandler<TextExtractionHandler>("extract_text");
_memory.Orchestrator.AddHandler<QAHandler>(km.ChatModelID);
_memory.Orchestrator.AddHandler<GenerateEmbeddingsHandler>("generate_embeddings");
_memory.Orchestrator.AddHandler<SaveRecordsHandler>("save_memory_records");
step.Add("extract_text");
step.Add(km.ChatModelID);
step.Add("generate_embeddings");
step.Add("save_memory_records");
}
switch (req.ImportType)
{
case ImportType.File:
//import a file
{
var importResult = _memory.ImportDocumentAsync(new Document(fileid)
.AddFile(req.FilePath)
.AddTag(KmsConstantcs.KmsIdTag, req.KmsId)
, index: KmsConstantcs.KmsIndex).Result;
//import a file (optionally with the QA pipeline steps)
if (req.IsQA)
{
var importResult = _memory.ImportDocumentAsync(new Document(fileid)
.AddFile(req.FilePath)
.AddTag(KmsConstantcs.KmsIdTag, req.KmsId)
,index: KmsConstantcs.KmsIndex ,steps: step.ToArray()).Result;
}
else
{
var importResult = _memory.ImportDocumentAsync(new Document(fileid)
.AddFile(req.FilePath)
.AddTag(KmsConstantcs.KmsIdTag, req.KmsId)
, index: KmsConstantcs.KmsIndex).Result;
}
//query the number of document partitions
var docTextList = _kMService.GetDocumentByFileID(km.Id, fileid).Result;
string fileGuidName = Path.GetFileName(req.FilePath);
@@ -44,8 +72,16 @@ namespace AntSK.Domain.Domain.Service
case ImportType.Url:
{
//import a web page by URL
var importResult = _memory.ImportWebPageAsync(req.Url, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex).Result;
if (req.IsQA)
{
var importResult = _memory.ImportWebPageAsync(req.Url, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex, steps: step.ToArray()).Result;
}
else
{
var importResult = _memory.ImportWebPageAsync(req.Url, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex).Result;
}
//query the number of document partitions
var docTextList = _kMService.GetDocumentByFileID(km.Id, fileid).Result;
req.KmsDetail.Url = req.Url;
@@ -55,8 +91,16 @@ namespace AntSK.Domain.Domain.Service
case ImportType.Text:
//import plain text
{
var importResult = _memory.ImportTextAsync(req.Text, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex).Result;
if (req.IsQA)
{
var importResult = _memory.ImportTextAsync(req.Text, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex, steps: step.ToArray()).Result;
}
else
{
var importResult = _memory.ImportTextAsync(req.Text, fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex).Result;
}
//query the number of document partitions
var docTextList = _kMService.GetDocumentByFileID(km.Id, fileid).Result;
req.KmsDetail.Url = req.Url;
@@ -64,17 +108,47 @@ namespace AntSK.Domain.Domain.Service
}
break;
case ImportType.Excel:
using (var fs = File.OpenRead(req.FilePath))
{
var excelList= ExeclHelper.ExcelToList<KMSExcelModel>(fs);
_memory.Orchestrator.AddHandler<TextExtractionHandler>("extract_text");
_memory.Orchestrator.AddHandler<KMExcelHandler>("antsk_excel_split");
_memory.Orchestrator.AddHandler<GenerateEmbeddingsHandler>("generate_embeddings");
_memory.Orchestrator.AddHandler<SaveRecordsHandler>("save_memory_records");
StringBuilder text = new StringBuilder();
foreach (var item in excelList)
{
text.AppendLine(@$"Question:{item.Question}{Environment.NewLine}Answer:{item.Answer}{KmsConstantcs.KMExcelSplit}");
}
var importResult = _memory.ImportTextAsync(text.ToString(), fileid, new TagCollection() { { KmsConstantcs.KmsIdTag, req.KmsId } }
, index: KmsConstantcs.KmsIndex,
steps: new[]
{
"extract_text",
"antsk_excel_split",
"generate_embeddings",
"save_memory_records"
}
).Result;
req.KmsDetail.FileName = req.FileName;
string fileGuidName = Path.GetFileName(req.FilePath);
req.KmsDetail.FileGuidName = fileGuidName;
req.KmsDetail.DataCount = excelList.Count();
}
break;
}
req.KmsDetail.Status = Model.Enum.ImportKmsStatus.Success;
_kmsDetails_Repositories.Update(req.KmsDetail);
//_kmsDetails_Repositories.GetList(p => p.KmsId == req.KmsId);
Console.WriteLine("后台导入任务成功:" + req.KmsDetail.DataCount);
_logger.LogInformation("后台导入任务成功:" + req.KmsDetail.DataCount);
}
catch (Exception ex)
{
req.KmsDetail.Status = Model.Enum.ImportKmsStatus.Fail;
_kmsDetails_Repositories.Update(req.KmsDetail);
Console.WriteLine("后台导入任务异常:" + ex.Message);
_logger.LogError("后台导入任务异常:" + ex.Message);
}
}
}

View File

@@ -1,5 +1,6 @@
using AntDesign;
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Common.Embedding;
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model.Constant;
using AntSK.Domain.Domain.Model.Dto;
@@ -7,14 +8,14 @@ using AntSK.Domain.Domain.Other;
using AntSK.Domain.Options;
using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using AntSK.OCR;
using DocumentFormat.OpenXml.Drawing.Diagrams;
using LLama;
using LLamaSharp.KernelMemory;
using Markdig;
using Microsoft.AspNetCore.Components;
using Microsoft.Extensions.Configuration;
using Microsoft.KernelMemory;
using Microsoft.KernelMemory.Configuration;
using Microsoft.KernelMemory.DataFormats;
using Microsoft.KernelMemory.FileSystem.DevTools;
using Microsoft.KernelMemory.MemoryStorage;
using Microsoft.KernelMemory.MemoryStorage.DevTools;
@@ -26,7 +27,8 @@ namespace AntSK.Domain.Domain.Service
public class KMService(
IKmss_Repositories _kmss_Repositories,
IAIModels_Repositories _aIModels_Repositories,
IMessageService? _message
IMessageService? _message,
IKernelService _kernelService
) : IKMService
{
private MemoryServerless _memory;
@@ -35,20 +37,36 @@ namespace AntSK.Domain.Domain.Service
public List<UploadFileItem> FileList => _fileList;
public MemoryServerless GetMemory(Apps app)
public MemoryServerless GetMemoryByApp(Apps app)
{
var chatModel = _aIModels_Repositories.GetFirst(p => p.Id == app.ChatModelID);
var embedModel = _aIModels_Repositories.GetFirst(p => p.Id == app.EmbeddingModelID);
var chatHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient(chatModel.EndPoint);
var embeddingHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient(embedModel.EndPoint);
var searchClientConfig = new SearchClientConfig
SearchClientConfig searchClientConfig;
if (string.IsNullOrEmpty(app.RerankModelID))
{
MaxAskPromptSize = 2048,
MaxMatchesCount = 3,
AnswerTokens = 1000,
EmptyAnswer = KmsConstantcs.KmsSearchNull
};
//no rerank: use the configured match count directly
searchClientConfig = new SearchClientConfig
{
MaxAskPromptSize = app.MaxAskPromptSize,
MaxMatchesCount = app.MaxMatchesCount,
AnswerTokens = app.AnswerTokens,
EmptyAnswer = KmsConstantcs.KmsSearchNull
};
}
else
{
//with rerank: fetch RerankCount candidates first
searchClientConfig = new SearchClientConfig
{
MaxAskPromptSize = app.MaxAskPromptSize,
MaxMatchesCount = app.RerankCount,
AnswerTokens = app.AnswerTokens,
EmptyAnswer = KmsConstantcs.KmsSearchNull
};
}
var memoryBuild = new KernelMemoryBuilder()
.WithSearchClientConfig(searchClientConfig)
@@ -70,7 +88,7 @@ namespace AntSK.Domain.Domain.Service
return _memory;
}
public MemoryServerless GetMemoryByKMS(string kmsID, SearchClientConfig searchClientConfig = null)
public MemoryServerless GetMemoryByKMS(string kmsID)
{
//if (_memory.IsNull())
{
@@ -84,33 +102,35 @@ namespace AntSK.Domain.Domain.Service
var embeddingHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient(embedModel.EndPoint);
//search configuration
if (searchClientConfig.IsNull())
{
searchClientConfig = new SearchClientConfig
{
MaxAskPromptSize = 2048,
MaxMatchesCount = 3,
AnswerTokens = 1000,
EmptyAnswer = KmsConstantcs.KmsSearchNull
};
}
//if (searchClientConfig.IsNull())
//{
// searchClientConfig = new SearchClientConfig
// {
// MaxAskPromptSize = 2048,
// MaxMatchesCount = 3,
// AnswerTokens = 1000,
// EmptyAnswer = KmsConstantcs.KmsSearchNull
// };
//}
var memoryBuild = new KernelMemoryBuilder()
.WithSearchClientConfig(searchClientConfig)
//.WithSearchClientConfig(searchClientConfig)
.WithCustomTextPartitioningOptions(new TextPartitioningOptions
{
MaxTokensPerLine = kms.MaxTokensPerLine,
MaxTokensPerParagraph = kms.MaxTokensPerParagraph,
OverlappingTokens = kms.OverlappingTokens
});
//set up OCR
WithOcr(memoryBuild, kms);
//set up the chat model
WithTextGenerationByAIType(memoryBuild, chatModel, chatHttpClient);
//set up the embedding model
WithTextEmbeddingGenerationByAIType(memoryBuild, embedModel, embeddingHttpClient);
//set up the vector store
WithMemoryDbByVectorDB(memoryBuild);
_memory = memoryBuild.Build<MemoryServerless>();
_memory = memoryBuild.AddSingleton<IKernelService>(_kernelService).Build<MemoryServerless>();
return _memory;
}
//else {
@@ -118,6 +138,14 @@ namespace AntSK.Domain.Domain.Service
//}
}
private static void WithOcr(IKernelMemoryBuilder memoryBuild, Kmss kms)
{
if (kms.IsOCR == 1)
{
memoryBuild.WithCustomImageOcr(new AntSKOcrEngine());
}
}
private void WithTextEmbeddingGenerationByAIType(IKernelMemoryBuilder memory, AIModels embedModel,
HttpClient embeddingHttpClient)
{
@@ -141,15 +169,21 @@ namespace AntSK.Domain.Domain.Service
APIType = AzureOpenAIConfig.APITypes.EmbeddingGeneration,
});
break;
case Model.Enum.AIType.LLamaSharp:
var (weights, parameters) = LLamaConfig.GetLLamaConfig(embedModel.ModelName);
var embedder = new LLamaEmbedder(weights, parameters);
memory.WithLLamaSharpTextEmbeddingGeneration(new LLamaSharpTextEmbeddingGenerator(embedder));
case Model.Enum.AIType.BgeEmbedding:
string pyDll = embedModel.EndPoint;
string bgeEmbeddingModelName = embedModel.ModelName;
memory.WithBgeTextEmbeddingGeneration(new HuggingfaceTextEmbeddingGenerator(pyDll,bgeEmbeddingModelName));
break;
case Model.Enum.AIType.DashScope:
memory.WithDashScopeDefaults(embedModel.ModelKey);
break;
case Model.Enum.AIType.OllamaEmbedding:
memory.WithOpenAITextEmbeddingGeneration(new OpenAIConfig()
{
APIKey = "NotNull",
EmbeddingModel = embedModel.ModelName
}, null, false, embeddingHttpClient);
break;
}
}
@@ -176,12 +210,20 @@ namespace AntSK.Domain.Domain.Service
APIType = AzureOpenAIConfig.APITypes.TextCompletion,
});
break;
case Model.Enum.AIType.LLamaFactory:
case Model.Enum.AIType.LLamaSharp:
var (weights, parameters) = LLamaConfig.GetLLamaConfig(chatModel.ModelName);
var context = weights.CreateContext(parameters);
var executor = new StatelessExecutor(weights, parameters);
memory.WithLLamaSharpTextGeneration(new LlamaSharpTextGenerator(weights, context, executor));
memory.WithOpenAITextGeneration(new OpenAIConfig()
{
APIKey = "NotNull",
TextModel = chatModel.ModelName
}, null, chatHttpClient);
break;
case Model.Enum.AIType.Ollama:
memory.WithOpenAITextGeneration(new OpenAIConfig()
{
APIKey = "NotNull",
TextModel = chatModel.ModelName
}, null, chatHttpClient);
break;
case Model.Enum.AIType.DashScope:
memory.WithDashScopeTextGeneration(new Cnblogs.KernelMemory.AI.DashScope.DashScopeConfig
@@ -248,12 +290,12 @@ namespace AntSK.Domain.Domain.Service
{
foreach (var memoryDb in memoryDbs)
{
var items = await memoryDb.GetListAsync(memoryIndex.Name, new List<MemoryFilter>() { new MemoryFilter().ByDocument(fileId) }, 100, true).ToListAsync();
var items = await memoryDb.GetListAsync(memoryIndex.Name, new List<MemoryFilter>() { new MemoryFilter().ByDocument(fileId) }, 1000, true).ToListAsync();
docTextList.AddRange(items.Select(item => new KMFile()
{
DocumentId = item.GetDocumentId(),
Text = item.GetPartitionText(),
Url = item.GetWebPageUrl(),
Url = item.GetWebPageUrl(KmsConstantcs.KmsIndex),
LastUpdate = item.GetLastUpdate().LocalDateTime.ToString("yyyy-MM-dd HH:mm:ss"),
File = item.GetFileName()
}));
@@ -263,15 +305,15 @@ namespace AntSK.Domain.Domain.Service
return docTextList;
}
public async Task<List<RelevantSource>> GetRelevantSourceList(string kmsIdListStr, string msg)
public async Task<List<RelevantSource>> GetRelevantSourceList(Apps app ,string msg)
{
var result = new List<RelevantSource>();
if (string.IsNullOrWhiteSpace(kmsIdListStr))
if (string.IsNullOrWhiteSpace(app.KmsIdList))
return result;
var kmsIdList = kmsIdListStr.Split(",");
var kmsIdList = app.KmsIdList.Split(",");
if (!kmsIdList.Any()) return result;
var memory = GetMemoryByKMS(kmsIdList.FirstOrDefault()!);
var memory = GetMemoryByApp(app);
var filters = kmsIdList.Select(kmsId => new MemoryFilter().ByTag(KmsConstantcs.KmsIdTag, kmsId)).ToList();
@@ -283,7 +325,7 @@ namespace AntSK.Domain.Domain.Service
result.AddRange(item.Partitions.Select(part => new RelevantSource()
{
SourceName = item.SourceName,
Text = Markdown.ToHtml(part.Text),
Text = part.Text,
Relevance = part.Relevance
}));
}
@@ -305,7 +347,10 @@ namespace AntSK.Domain.Domain.Service
"application/pdf",
"application/json",
"text/x-markdown",
"text/markdown"
"text/markdown",
"image/jpeg",
"image/png",
"image/tiff"
};
string[] exceptExts = [".md", ".pdf"];

View File

@@ -4,20 +4,16 @@ using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Other;
using AntSK.Domain.Repositories;
using AntSK.Domain.Utils;
using LLama;
using LLamaSharp.SemanticKernel.TextCompletion;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.Core;
using Microsoft.SemanticKernel.TextGeneration;
using RestSharp;
using System;
using ServiceLifetime = AntSK.Domain.Common.DependencyInjection.ServiceLifetime;
using AntSK.LLM.Mock;
using AntSK.Domain.Domain.Model.Enum;
using AntSK.LLM.LLamaFactory;
using System.Reflection;
using DocumentFormat.OpenXml.Drawing;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.Extensions.Logging;
namespace AntSK.Domain.Domain.Service
{
@@ -29,17 +25,20 @@ namespace AntSK.Domain.Domain.Service
private readonly FunctionService _functionService;
private readonly IServiceProvider _serviceProvider;
private Kernel _kernel;
private readonly ILogger<KernelService> _logger;
public KernelService(
IApis_Repositories apis_Repositories,
IAIModels_Repositories aIModels_Repositories,
FunctionService functionService,
IServiceProvider serviceProvider)
IServiceProvider serviceProvider,
ILogger<KernelService> logger)
{
_apis_Repositories = apis_Repositories;
_aIModels_Repositories = aIModels_Repositories;
_functionService = functionService;
_serviceProvider = serviceProvider;
_logger = logger;
}
/// <summary>
@@ -57,7 +56,7 @@ namespace AntSK.Domain.Domain.Service
var chatHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient(chatModel.EndPoint);
var builder = Kernel.CreateBuilder();
WithTextGenerationByAIType(builder, app, chatModel, chatHttpClient);
WithTextGenerationByAIType(builder, chatModel, chatHttpClient);
_kernel = builder.Build();
RegisterPluginsWithKernel(_kernel);
@@ -69,7 +68,18 @@ namespace AntSK.Domain.Domain.Service
//}
}
private void WithTextGenerationByAIType(IKernelBuilder builder, Apps app, AIModels chatModel, HttpClient chatHttpClient)
public Kernel GetKernelByAIModelID(string modelid)
{
var chatModel = _aIModels_Repositories.GetById(modelid);
var chatHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient(chatModel.EndPoint);
var builder = Kernel.CreateBuilder();
WithTextGenerationByAIType(builder, chatModel, chatHttpClient);
_kernel = builder.Build();
RegisterPluginsWithKernel(_kernel);
return _kernel;
}
private void WithTextGenerationByAIType(IKernelBuilder builder,AIModels chatModel, HttpClient chatHttpClient)
{
switch (chatModel.AIType)
{
@@ -88,15 +98,32 @@ namespace AntSK.Domain.Domain.Service
);
break;
case Model.Enum.AIType.LLamaSharp:
var (weights, parameters) = LLamaConfig.GetLLamaConfig(chatModel.ModelName);
var ex = new StatelessExecutor(weights, parameters);
builder.Services.AddKeyedSingleton<ITextGenerationService>("local-llama", new LLamaSharpTextCompletion(ex));
break;
case Model.Enum.AIType.SparkDesk:
var options = new SparkDeskOptions { AppId = chatModel.EndPoint, ApiSecret = chatModel.ModelKey, ApiKey = chatModel.ModelName, ModelVersion = Sdcb.SparkDesk.ModelVersion.V3_5 };
builder.Services.AddKeyedSingleton<ITextGenerationService>("spark-desk", new SparkDeskTextCompletion(options, app.Id));
var settings = chatModel.ModelKey.Split("|");
Sdcb.SparkDesk.ModelVersion modelVersion = Sdcb.SparkDesk.ModelVersion.V3_5;
switch (chatModel.ModelName)
{
case "V3_5":
modelVersion = Sdcb.SparkDesk.ModelVersion.V3_5;
break;
case "V3":
modelVersion = Sdcb.SparkDesk.ModelVersion.V3;
break;
case "V2":
modelVersion = Sdcb.SparkDesk.ModelVersion.V2;
break;
case "V1_5":
modelVersion = Sdcb.SparkDesk.ModelVersion.V1_5;
break;
}
SparkDeskOptions options = new SparkDeskOptions { AppId = settings[0], ApiSecret = settings[1], ApiKey = settings[2], ModelVersion = modelVersion };
builder.Services.AddKeyedSingleton<ITextGenerationService>("spark-desk", new SparkDeskTextCompletion(options, chatModel.Id));
builder.Services.AddKeyedSingleton<IChatCompletionService>("spark-desk-chat", new SparkDeskChatCompletion(options, chatModel.Id));
break;
case Model.Enum.AIType.DashScope:
@@ -105,11 +132,19 @@ namespace AntSK.Domain.Domain.Service
case Model.Enum.AIType.Mock:
builder.Services.AddKeyedSingleton<ITextGenerationService>("mock", new MockTextCompletion());
builder.Services.AddKeyedSingleton<IChatCompletionService>("mock-chat", new MockChatCompletion());
break;
case Model.Enum.AIType.LLamaFactory:
builder.AddOpenAIChatCompletion(
modelId: chatModel.ModelName,
apiKey: "123",
apiKey: "NotNull",
httpClient: chatHttpClient
);
break;
case AIType.Ollama:
builder.AddOpenAIChatCompletion(
modelId: chatModel.ModelName,
apiKey: "NotNull",
httpClient: chatHttpClient
);
break;
@@ -124,7 +159,7 @@ namespace AntSK.Domain.Domain.Service
public void ImportFunctionsByApp(Apps app, Kernel _kernel)
{
//plugins must not be registered twice, otherwise an exception is thrown
if (_kernel.Plugins.Any(p => p.Name == "AntSkFunctions"))
if (_kernel.Plugins.Any(p => p.Name == "AntSKFunctions"))
{
return;
}
@@ -135,7 +170,7 @@ namespace AntSK.Domain.Domain.Service
//native (local) function plugins
ImportNativeFunction(app, functions);
_kernel.ImportPluginFromFunctions("AntSkFunctions", functions);
_kernel.ImportPluginFromFunctions("AntSKFunctions", functions);
}
/// <summary>
@@ -160,7 +195,6 @@ namespace AntSK.Domain.Domain.Service
var getParametes = new List<KernelParameterMetadata>() {
new KernelParameterMetadata("jsonbody"){
Name="json参数字符串",
ParameterType=typeof(string),
Description=$"背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.Query}"
}
@@ -199,7 +233,6 @@ namespace AntSK.Domain.Domain.Service
//build the JSON request body
var postParametes = new List<KernelParameterMetadata>() {
new KernelParameterMetadata("jsonbody"){
Name="json参数字符串",
ParameterType=typeof(string),
Description=$"背景文档:{Environment.NewLine}{api.InputPrompt} {Environment.NewLine}提取出对应的json格式字符串参考如下格式:{Environment.NewLine}{api.JsonBody}"
}
@@ -208,7 +241,7 @@ namespace AntSK.Domain.Domain.Service
{
try
{
Console.WriteLine(jsonBody);
_logger.LogInformation(jsonBody);
RestClient client = new RestClient();
RestRequest request = new RestRequest(api.Url, Method.Post);
foreach (var header in api.Header.ConvertToString().Split("\n"))
@@ -287,8 +320,8 @@ namespace AntSK.Domain.Domain.Service
KernelFunction sunFun = _kernel.Plugins.GetFunction("ConversationSummaryPlugin", "SummarizeConversation");
var summary = await _kernel.InvokeAsync(sunFun, new() { ["input"] = $"内容是:{history.ToString()} {Environment.NewLine} 请注意用中文总结" });
string his = summary.GetValue<string>();
var msg = $"history{Environment.NewLine}{history.ToString()}{Environment.NewLine} user{questions}{Environment.NewLine}"; ;
var msg = $"history{Environment.NewLine}{his}{Environment.NewLine} user{questions}{Environment.NewLine}";
return msg;
}
}
}
}
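A note on the SparkDesk branch above: the three credentials are now packed into the model's ModelKey separated by "|", and ModelName selects the Sdcb.SparkDesk model version. A minimal, illustrative sketch of a model record under that assumption (only property names visible in this diff are used; the values and the object-initializer form are placeholders):
// Illustrative only: how an AIModels record would line up with the new SparkDesk branch.
var sparkModel = new AIModels
{
    AIType = Model.Enum.AIType.SparkDesk,
    ModelName = "V3_5",                          // mapped to Sdcb.SparkDesk.ModelVersion.V3_5 by the switch above
    ModelKey = "myAppId|myApiSecret|myApiKey"    // split on '|' into AppId, ApiSecret and ApiKey
};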

View File

@@ -2,12 +2,16 @@
using AntSK.Domain.Domain.Interface;
using AntSK.Domain.Domain.Model.Dto;
using AntSK.Domain.Options;
using AntSK.Domain.Utils;
using AntSK.LLamaFactory.Model;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Serilog;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Tracing;
using System.Linq;
using System.Text;
using System.Text.Json;
@@ -16,16 +20,14 @@ using System.Threading.Tasks;
namespace AntSK.Domain.Domain.Service
{
[ServiceDescription(typeof(ILLamaFactoryService), ServiceLifetime.Singleton)]
public class LLamaFactoryService : ILLamaFactoryService
public class LLamaFactoryService(ILogger<LLamaFactoryService> _logger) : ILLamaFactoryService
{
private Process process;
public static bool isProcessComplete = false;
private readonly object _syncLock = new object();
private List<LLamaModel> modelList = new List<LLamaModel>();
public LLamaFactoryService() { }
public delegate Task LogMessageHandler(string message);
public event LogMessageHandler LogMessageReceived;
protected virtual async Task OnLogMessageReceived(string message)
@@ -39,7 +41,6 @@ namespace AntSK.Domain.Domain.Service
var cmdTask = Task.Factory.StartNew(() =>
{
var isProcessComplete = false;
process = new Process
{
@@ -55,58 +56,103 @@ namespace AntSK.Domain.Domain.Service
};
process.OutputDataReceived += (sender, eventArgs) =>
{
Console.WriteLine($"{eventArgs.Data}");
_logger.LogInformation($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.ErrorDataReceived += (sender, eventArgs) =>
{
Console.WriteLine($"{eventArgs.Data}");
_logger.LogInformation($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
process.WaitForExit();
OnLogMessageReceived("--------------------完成--------------------");
}, TaskCreationOptions.LongRunning);
await cmdTask;
}
public async Task StartLLamaFactory(string modelName, string templateName)
public async Task PipInstallName(string name)
{
var cmdTask = Task.Factory.StartNew(() =>
{
var isProcessComplete = false;
process = new Process
{
StartInfo = new ProcessStartInfo
{
FileName = "pip",
Arguments = $"install {name} -i https://pypi.tuna.tsinghua.edu.cn/simple",
UseShellExecute = false,
RedirectStandardOutput = true,
RedirectStandardError = true,
WorkingDirectory = AppDomain.CurrentDomain.BaseDirectory,
}
};
process.OutputDataReceived += (sender, eventArgs) =>
{
Log.Information($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.ErrorDataReceived += (sender, eventArgs) =>
{
Log.Information($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
process.WaitForExit();
OnLogMessageReceived("--------------------完成--------------------");
}, TaskCreationOptions.LongRunning);
await cmdTask;
}
public async Task StartLLamaFactory(string modelName)
{
var cmdTask = Task.Factory.StartNew(() =>
{
string templateName = "default";
var modelList = GetLLamaFactoryModels();
var model = modelList.Where(p => p.ModelScope == modelName).FirstOrDefault();
if (model.IsNotNull() && !string.IsNullOrEmpty(model.Template))
{
templateName = model.Template;
}
process = new Process
{
StartInfo = new ProcessStartInfo
{
FileName = "python",
Arguments = "api_demo.py --model_name_or_path " + modelName + " --template " + templateName + " ",
Arguments = "api_antsk.py --model_name_or_path " + modelName + " --template " + templateName + " ",
UseShellExecute = false,
RedirectStandardOutput = true,
RedirectStandardError=true,
WorkingDirectory = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), "llamafactory"),
}
};
process.StartInfo.Environment["CUDA_VISIBLE_DEVICES"] = "0";
process.StartInfo.Environment["CUDA_VISIBLE_DEVICES"] = Environment.GetEnvironmentVariable("CUDA_VISIBLE_DEVICES") ?? "0";
process.StartInfo.Environment["API_PORT"] = "8000";
process.StartInfo.EnvironmentVariables["USE_MODELSCOPE_HUB"] = "1";
process.StartInfo.EnvironmentVariables["USE_MODELSCOPE_HUB"] = Environment.GetEnvironmentVariable("USE_MODELSCOPE_HUB") ?? "1";
process.OutputDataReceived += (sender, eventArgs) =>
{
Console.WriteLine($"{eventArgs.Data}");
_logger.LogInformation($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.ErrorDataReceived += (sender, eventArgs) =>
{
Console.WriteLine($"{eventArgs.Data}");
_logger.LogInformation($"{eventArgs.Data}");
OnLogMessageReceived(eventArgs.Data);
};
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
process.WaitForExit();
OnLogMessageReceived("--------------------完成--------------------");
}, TaskCreationOptions.LongRunning);
await cmdTask;
}
private void Process_OutputDataReceived(object sender, DataReceivedEventArgs e)
@@ -131,7 +177,7 @@ namespace AntSK.Domain.Domain.Service
if (process1.ProcessName.ToLower() == "python")
{
process1.Kill();
System.Console.WriteLine("kill python");
_logger.LogInformation("kill python");
}
}
}
@@ -154,7 +200,7 @@ namespace AntSK.Domain.Domain.Service
{
foreach (var m in model.Models)
{
modelList.Add(new LLamaModel() { Name=m.Key, ModelScope=m.Value.MODELSCOPE });
modelList.Add(new LLamaModel() { Name = m.Key, ModelScope = m.Value.MODELSCOPE, Template = model.Template });
}
}
}
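For orientation, a rough usage sketch of the reworked service; the model id is a placeholder and AntSK itself resolves the service from DI as ILLamaFactoryService:
using AntSK.Domain.Domain.Service;
using Microsoft.Extensions.Logging.Abstractions;

// Sketch only: surface the pip/python console output, then start a model.
var llamaFactory = new LLamaFactoryService(NullLogger<LLamaFactoryService>.Instance);
llamaFactory.LogMessageReceived += msg => { Console.WriteLine(msg); return Task.CompletedTask; };
await llamaFactory.PipInstallName("llamafactory");              // pip install through the Tsinghua mirror
_ = llamaFactory.StartLLamaFactory("qwen/Qwen1.5-0.5B-Chat");   // runs api_antsk.py until the process exits, so not awaited here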

View File

@@ -0,0 +1,74 @@
using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Domain.Interface;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Serilog;
using AntSK.Domain.Utils;
namespace AntSK.Domain.Domain.Service
{
[ServiceDescription(typeof(IOllamaService), ServiceLifetime.Singleton)]
public class OllamaService : IOllamaService
{
private Process process;
public delegate Task LogMessageHandler(string message);
public event LogMessageHandler LogMessageReceived;
protected virtual async Task OnLogMessageReceived(string message)
{
LogMessageReceived?.Invoke(message);
}
public async Task StartOllama(string modelName)
{
Console.OutputEncoding = Encoding.UTF8;
var cmdTask = Task.Factory.StartNew(() =>
{
var isProcessComplete = false;
process = new Process
{
StartInfo = new ProcessStartInfo
{
FileName = "ollama",
Arguments = "run " + modelName,
UseShellExecute = false,
RedirectStandardOutput = true,
RedirectStandardError = true,
}
};
process.OutputDataReceived += (sender, eventArgs) =>
{
Log.Information($"{eventArgs.Data.ConvertToString()}");
if (!eventArgs.Data.ConvertToString().Contains("The handle is invalid"))
{
OnLogMessageReceived(eventArgs.Data.ConvertToString());
}
};
process.ErrorDataReceived += (sender, eventArgs) =>
{
Log.Error($"{eventArgs.Data.ConvertToString()}");
if (!eventArgs.Data.ConvertToString().Contains("The handle is invalid"))
{
OnLogMessageReceived(eventArgs.Data.ConvertToString());
}
};
process.StartInfo.StandardOutputEncoding = Encoding.UTF8;
process.StartInfo.StandardErrorEncoding = Encoding.UTF8;
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
process.WaitForExit();
OnLogMessageReceived("--------------------完成--------------------");
}, TaskCreationOptions.LongRunning);
await cmdTask;
}
}
}
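A short usage sketch for the new service, assuming the ollama CLI is on PATH; the tag is one of the entries in OllamaModelList.txt:
using AntSK.Domain.Domain.Service;

var ollama = new OllamaService();
ollama.LogMessageReceived += msg => { Console.WriteLine(msg); return Task.CompletedTask; };
_ = ollama.StartOllama("deepseek-r1:7b");   // the task completes only when the ollama process exits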

View File

@@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Options
{
public class FileDirOption
{
public static string DirectoryPath { get; set; } = Directory.GetCurrentDirectory();
}
}

View File

@@ -1,8 +0,0 @@
namespace AntSK.Domain.Options
{
public class LLamaSharpOption
{
public static string RunType { get; set; }
public static string FileDirectory { get; set; } = Directory.GetCurrentDirectory();
}
}

View File

@@ -25,7 +25,7 @@ namespace AntSK.Domain.Repositories
/// Icon
/// </summary>
[Required]
public string Icon { get; set; }
public string Icon { get; set; } = "windows";
/// <summary>
/// Type
@@ -42,8 +42,13 @@ namespace AntSK.Domain.Repositories
/// <summary>
/// Embedding model ID
/// </summary>
[Required]
public string? EmbeddingModelID { get; set; }
public string? RerankModelID { get; set; }
public string? ImageModelID { get; set; }
/// <summary>
/// Temperature
/// </summary>
@@ -53,6 +58,7 @@ namespace AntSK.Domain.Repositories
/// <summary>
/// Prompt
/// </summary>
[SugarColumn(ColumnDataType = "varchar(2000)")]
public string? Prompt { get; set; }
/// <summary>
@@ -70,11 +76,38 @@ namespace AntSK.Domain.Repositories
/// <summary>
/// Knowledge base ID list
/// </summary>
[SugarColumn(ColumnDataType = "varchar(1000)")]
public string? KmsIdList { get; set; }
/// <summary>
/// Secret key for API calls
/// </summary>
public string? SecretKey { get; set; }
/// <summary>
/// Similarity (relevance)
/// </summary>
[SugarColumn(DefaultValue = "60")]
public double Relevance { get; set; } = 60f;
/// <summary>
/// Maximum number of tokens for the question prompt
/// </summary>
[SugarColumn(DefaultValue = "2048")]
public int MaxAskPromptSize { get; set; } = 2048;
/// <summary>
/// Number of vector matches
/// </summary>
[SugarColumn(DefaultValue = "3")]
public int MaxMatchesCount { get; set; } = 3;
[SugarColumn(DefaultValue = "20")]
public int RerankCount { get; set; } = 20;
/// <summary>
/// Maximum number of tokens for the answer
/// </summary>
[SugarColumn(DefaultValue = "2048")]
public int AnswerTokens { get; set; } = 2048;
}
}

View File

@@ -0,0 +1,41 @@
using AntSK.Domain.Domain.Model.Enum;
using SqlSugar;
using System.ComponentModel.DataAnnotations;
namespace AntSK.Domain.Repositories
{
[SugarTable("Chats")]
public partial class Chats
{
[SugarColumn(IsPrimaryKey = true)]
public string Id { get; set; }
/// <summary>
/// User name
/// </summary>
public string UserName { get; set; }
/// <summary>
/// Application ID
/// </summary>
public string AppId { get; set; }
/// <summary>
/// Message content
/// </summary>
[SugarColumn(ColumnDataType = "varchar(4000)")]
public string Context { get; set; } = "";
/// <summary>
/// true when sent, false when received
/// </summary>
public bool IsSend { get; set; } = false;
/// <summary>
/// Creation time
/// </summary>
public DateTime CreateTime { get; set; }
/// <summary>
/// File name
/// </summary>
public string? FileName { get; set; }
}
}

View File

@@ -0,0 +1,11 @@

using AntSK.Domain.Common.DependencyInjection;
using AntSK.Domain.Repositories.Base;
namespace AntSK.Domain.Repositories
{
[ServiceDescription(typeof(IChats_Repositories), ServiceLifetime.Scoped)]
public class Chats_Repositories : Repository<Chats>, IChats_Repositories
{
}
}

View File

@@ -0,0 +1,8 @@
using AntSK.Domain.Repositories.Base;
namespace AntSK.Domain.Repositories
{
public interface IChats_Repositories : IRepository<Chats>
{
}
}

View File

@@ -12,7 +12,7 @@ namespace AntSK.Domain.Repositories
/// Icon
/// </summary>
[Required]
public string Icon { get; set; }
public string Icon { get; set; } = "question-circle";
/// <summary>
/// Name
/// </summary>
@@ -55,6 +55,7 @@ namespace AntSK.Domain.Repositories
[SugarColumn(DefaultValue = "49")]
public int OverlappingTokens { get; set; } = 49;
[SugarColumn(DefaultValue = "0")]
public int IsOCR { get; set; } = 0;
}
}

View File

@@ -1,4 +1,8 @@
using System.Web;
using Newtonsoft.Json;
using Serilog;
using System.Security.Cryptography;
using System.Text.RegularExpressions;
using System.Web;
namespace AntSK.Domain.Utils
{
@@ -250,5 +254,66 @@ namespace AntSK.Domain.Utils
return nameValueCollection.ToString();
}
/// <summary>
/// Case-insensitive comparison
/// </summary>
/// <param name="s"></param>
/// <param name="value"></param>
/// <returns></returns>
public static bool ComparisonIgnoreCase(this string s, string value)
{
return s.Equals(value, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Converts \uxxxx escape sequences back into readable characters while keeping line breaks
/// </summary>
/// <param name="value"></param>
/// <returns></returns>
public static string Unescape(this string value)
{
if (value.IsNull())
{
return "";
}
try
{
Formatting formatting = Formatting.None;
object jsonObj = JsonConvert.DeserializeObject(value);
string unescapeValue = JsonConvert.SerializeObject(jsonObj, formatting);
return unescapeValue;
}
catch (Exception ex)
{
Log.Error(ex.ToString());
return "";
}
}
/// <summary>
/// Whether this is a streaming request
/// </summary>
/// <param name="value"></param>
/// <returns></returns>
public static bool IsStream(this string value)
{
// Regex that tolerates whitespace around the "stream" property
string pattern = @"\s*""stream""\s*:\s*true\s*";
// Match using the regular expression
bool contains = Regex.IsMatch(value, pattern);
return contains;
}
public static string AntSKCalculateSHA256(this BinaryData binaryData)
{
byte[] byteArray = SHA256.HashData(binaryData.ToMemory().Span);
return Convert.ToHexString(byteArray).ToLowerInvariant();
}
}
}
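A few hedged examples of the helpers added above (AntSK.Domain.Utils imported; the commented results are what the implementations above should yield):
using AntSK.Domain.Utils;

bool streaming = "{ \"stream\": true }".IsStream();                     // true: the regex finds "stream": true
string readable = "{\"msg\":\"\\u4f60\\u597d\"}".Unescape();            // JSON re-serialized with \uxxxx sequences decoded
string sha = BinaryData.FromString("hello").AntSKCalculateSHA256();     // lower-case hex SHA-256 of the payload
bool same = "AntSK".ComparisonIgnoreCase("antsk");                      // true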

View File

@@ -0,0 +1,39 @@
using System;
using System.Collections.Generic;
using System.Drawing.Imaging;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.Domain.Utils
{
public class ImageUtils
{
public static string BitmapToBase64(Bitmap bitmap)
{
using (MemoryStream memoryStream = new MemoryStream())
{
// Save as JPEG; PNG, GIF and other formats would also work
bitmap.Save(memoryStream, ImageFormat.Jpeg);
// Get the byte array from the memory stream
byte[] imageBytes = memoryStream.ToArray();
// Convert the bytes to a Base64 string
string base64String = Convert.ToBase64String(imageBytes);
return base64String;
}
}
public static List<string> BitmapListToBase64(Bitmap[] bitmaps)
{
List<string> base64Strings = new List<string>();
foreach (Bitmap bitmap in bitmaps)
{
base64Strings.Add(BitmapToBase64(bitmap));
}
return base64Strings;
}
}
}
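A minimal usage sketch; note that System.Drawing.Common 8.x only runs on Windows by default, and the file path is a placeholder:
using System.Drawing;
using AntSK.Domain.Utils;

using var bmp = new Bitmap("sample.png");                   // any bitmap source
string base64 = ImageUtils.BitmapToBase64(bmp);             // JPEG bytes as a Base64 string
var batch = ImageUtils.BitmapListToBase64(new[] { bmp });   // same conversion for several bitmaps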

View File

@@ -1,4 +1,6 @@
using System.Text.RegularExpressions;
using Serilog;
using System.Text;
using System.Text.RegularExpressions;
namespace AntSK.Domain.Utils
{
@@ -15,12 +17,19 @@ namespace AntSK.Domain.Utils
UriBuilder uriBuilder;
Regex regex = new Regex(@"(https?)://([^/:]+)(:\d+)?/(.*)");
Match match = regex.Match(_endPoint);
if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == "Development" && request.Content != null)
string guid = Guid.NewGuid().ToString();
var mediaType = request.Content.Headers.ContentType.MediaType;
string requestBody = (await request.Content.ReadAsStringAsync()).Unescape();
var uncaseBody = new StringContent(requestBody, Encoding.UTF8, mediaType);
request.Content = uncaseBody;
if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT").ConvertToString() == "Development")
{
string requestBody = await request.Content.ReadAsStringAsync();
// In production, this logging can be switched off via the environment variable
// Useful for inspecting the request prompt while debugging
Console.WriteLine(requestBody);
Log.Information("{Message}", $"【模型服务接口调用-{guid},host:{_endPoint}】:{Environment.NewLine}{requestBody}");
}
if (match.Success)
{
string xieyi = match.Groups[1].Value;
@@ -70,7 +79,11 @@ namespace AntSK.Domain.Utils
// Then call the base class SendAsync to send the modified request
HttpResponseMessage response = await base.SendAsync(request, cancellationToken);
if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT").ConvertToString() == "Development")
{
string responseContent = requestBody.IsStream() ? response.Content.ReadAsStringAsync().Result : response.Content.ReadAsStringAsync().Result.Unescape();
Log.Information("{Message}", $"【模型服务接口返回-{guid},host:{_endPoint}】:{Environment.NewLine}{responseContent}");
}
return response;
}
}
@@ -82,7 +95,7 @@ namespace AntSK.Domain.Utils
{
var handler = new OpenAIHttpClientHandler(endPoint.ConvertToString());
var httpClient = new HttpClient(handler);
httpClient.Timeout = TimeSpan.FromMinutes(5);
httpClient.Timeout = TimeSpan.FromMinutes(10);
return httpClient;
}
}
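The handler exists so that OpenAI-compatible local backends (the LLamaFactory API on port 8000, Ollama, and so on) can be reached through Semantic Kernel's OpenAI connector. A sketch that mirrors the KernelService wiring earlier in this diff, with the endpoint and model id as placeholders:
using Microsoft.SemanticKernel;
using AntSK.Domain.Utils;

var chatHttpClient = OpenAIHttpClientHandlerUtil.GetHttpClient("http://localhost:8000");
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(modelId: "qwen/Qwen1.5-0.5B-Chat", apiKey: "NotNull", httpClient: chatHttpClient);
var kernel = builder.Build();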

View File

@@ -0,0 +1,55 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SemanticKernel" Version="$(SKVersion)" />
<PackageReference Include="Newtonsoft.Json" Version="$(NewtonsoftVersion)" />
<PackageReference Include="RestSharp" Version="$(RestSharpVersion)" />
<PackageReference Include="Cnblogs.KernelMemory.AI.DashScope" Version="0.3.0" />
<PackageReference Include="Cnblogs.SemanticKernel.Connectors.DashScope" Version="0.3.2" />
<PackageReference Include="Sdcb.SparkDesk" Version="3.0.0" />
<PackageReference Include="System.Drawing.Common" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<None Update="OllamaEmbeddingModelList.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="OllamaModelList.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\CPU\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\CPU\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda11\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda11\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda12\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\Cuda12\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\ROCm\stable-diffusion.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusion\Backend\ROCm\stable-diffusion.so">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="StableDiffusionModelList.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,55 @@
using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel;
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using System.Text.Json;
using System.Text.Unicode;
using System.Threading.Tasks;
namespace AntSK.LLM.Mock
{
public class MockChatCompletion : IChatCompletionService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public MockChatCompletion()
{
}
public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{chatHistory.LastOrDefault().ToString()}";
return [new(AuthorRole.Assistant, result.ToString())];
}
public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
string result = $"这是一条Mock数据便于聊天测试你的消息是{chatHistory.LastOrDefault().ToString()}";
foreach (var c in result)
{
yield return new StreamingChatMessageContent(AuthorRole.Assistant, c.ToString());
}
}
}
}
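A quick sketch of exercising the mock directly in a test, with no model backend required:
using AntSK.LLM.Mock;
using Microsoft.SemanticKernel.ChatCompletion;

var mock = new MockChatCompletion();
var history = new ChatHistory();
history.AddUserMessage("ping");
var reply = await mock.GetChatMessageContentsAsync(history);
Console.WriteLine(reply[0].Content);   // the canned mock text, which echoes the last user message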

View File

@@ -0,0 +1,111 @@
gemma2
gemma2:27b
gemma:2b
gemma:7b
llama3
llama3:70b
yi:6b
yi:9B
yi:34B
qwen2:0.5b
qwen2:1.5b
qwen2:7b
qwen2:72b
qwen:0.5b
qwen:1.8b
qwen:4b
qwen:7b
qwen:14b
qwen:32b
qwen:72b
qwen:110b
deepseek-coder:1.3b
deepseek-coder:6.7b
deepseek-coder:33b
deepseek-coder-v2:16b
deepseek-coder-v2:236b
deepseek-r1:1.5b
deepseek-r1:7b
deepseek-r1:8b
deepseek-r1:14b
deepseek-r1:32b
deepseek-r1:70b
phi:2.7b
phi3:mini
phi3:medium
phi3:medium-128k
aya:8b
aya:35b
mistral:7b
mixtral:8x22b
mixtral:8x7b
codegemma:2b
codegemma:7b
command-r:35b
llava
gemma:2b
gemma:7b
llama2:7b
llama2:13b
llama2:70b
llama2-chinese:7b
llama2-chinese:13b
llama3.1:8b
llama3.1:70b
llama3.1:405b
codellama:7b
codellama:13b
codellama:34b
codellama:70b
dolphin-mistral:7b
dolphin-mixtral:8x22b
dolphin-mixtral:8x7b
llama2-uncensored:7b
llama2-uncensored:70b
tinyllama:1.1b
openchat:7b
orca-mini:3b
orca-mini:7b
orca-mini:13b
orca-mini:70b
mistral-openorca:7b
dolphin-llama3:8b
dolphin-llama3:70b
starcoder:1b
starcoder:3b
starcoder:7b
starcoder:15b
starcoder2:3b
starcoder2:7b
starcoder2:15b
zephyr:7b
zephyr:141b
nous-hermes2:10.7b
nous-hermes2:34b
vicuna:7b
vicuna:13b
vicuna:33b
wizard-vicuna-uncensored:7b
wizard-vicuna-uncensored:13b
wizard-vicuna-uncensored:30b
wizardlm2:7b
codestral:22b
tinydolphin:1.1b
openhermes:v2.5
neural-chat:7b
codeqwen:7b
phind-codellama:34b
nous-hermes:7b
nous-hermes:13b
starling-lm:7b
llama3-gradient:8b
llama3-gradient:70b
yarn-llama2:7b
yarn-llama2:13b
llava-llama3:8b
llama-pro:instruct
everythinglm:13b
llava-phi3:3.8b
mistrallite:7b
notus:7b

View File

@@ -0,0 +1,231 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel;
using Sdcb.SparkDesk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using System.Text.Json;
using System.Text.Unicode;
using System.Threading.Tasks;
namespace AntSK.LLM.SparkDesk
{
public class SparkDeskChatCompletion : IChatCompletionService
{
private readonly Dictionary<string, object?> _attributes = new();
private readonly SparkDeskClient _client;
private string _chatId;
private readonly SparkDeskOptions _options;
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
NumberHandling = JsonNumberHandling.AllowReadingFromString,
Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
};
public IReadOnlyDictionary<string, object?> Attributes => _attributes;
public SparkDeskChatCompletion(SparkDeskOptions options, string chatId)
{
_options = options;
_chatId = chatId;
_client = new(options.AppId, options.ApiKey, options.ApiSecret);
}
public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
{
StringBuilder sb = new();
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSKFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
List<ChatMessage> messages = GetSparkMessage(chatHistory);
var result = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken);
if (result.FunctionCall != null)
{
var func = functions.Where(x => x.Name == result.FunctionCall.Name).FirstOrDefault();
if (func == null)
{
return new List<ChatMessageContent> { new(AuthorRole.Assistant, $"插件{result.FunctionCall.Name}未注册") }.AsReadOnly();
}
if (kernel.Plugins.TryGetFunction(func.PluginName, func.Name, out var function))
{
var arguments = new KernelArguments();
var JsonElement = JsonDocument.Parse(result.FunctionCall.Arguments).RootElement;
foreach (var parameter in func.Parameters)
{
var error = "";
try
{
if (JsonElement.TryGetProperty(parameter.Name, out var property))
{
arguments.Add(parameter.Name, property.Deserialize(parameter.ParameterType!, _jsonSerializerOptions));
}
}
catch (Exception ex)
{
error = $"参数{parameter.Name}解析错误:{ex.Message}";
}
if (!string.IsNullOrEmpty(error))
{
return new List<ChatMessageContent> { new(AuthorRole.Assistant, error) }.AsReadOnly();
}
}
var functionResult = await function.InvokeAsync(kernel, arguments, cancellationToken);
messages = [ ChatMessage.FromUser(messages.LastOrDefault().Content),
ChatMessage.FromSystem($@"
执行函数调用成功
函数描述:{func.Description}
函数执行结果:{functionResult}
"),
ChatMessage.FromUser("请根据函数调用结果回答我的问题,不要超出函数调用结果的返回,以及不要有多余描述:")];
var callResult = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, null);
ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, callResult.Text.ToString(), modelId: "SparkDesk");
return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
}
return new List<ChatMessageContent> { new(AuthorRole.Assistant, "未找到插件") }.AsReadOnly();
}
else
{
ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, result.Text.ToString(), modelId: "SparkDesk");
return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
}
}
public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var parameters = new ChatRequestParameters
{
ChatId = _chatId,
};
OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSKFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
List<ChatMessage> messages = GetSparkMessage(chatHistory);
await foreach (StreamedChatResponse msg in _client.ChatAsStreamAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken))
{
yield return new StreamingChatMessageContent(AuthorRole.Assistant, msg);
};
}
private static List<ChatMessage> GetSparkMessage(ChatHistory chatHistory)
{
List<ChatMessage> messages = new List<ChatMessage>();
foreach (var msg in chatHistory.ToList())
{
string role = "";
if (msg.Role == AuthorRole.User)
{
role = "user";
}
else if (msg.Role == AuthorRole.System)
{
role = "system";
}
else
{
role = "assistant";
}
messages.Add(new ChatMessage(role, msg.ToString()));
}
return messages;
}
private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior)
{
if (functionResult is string stringResult)
{
return stringResult;
}
if (functionResult is ChatMessageContent chatMessageContent)
{
return chatMessageContent.ToString();
}
return JsonSerializer.Serialize(functionResult, _jsonSerializerOptions);
}
public static Dictionary<string, object> ParseJsonElement(JsonElement element, string propertyName)
{
Dictionary<string, object> dict = new();
switch (element.ValueKind)
{
case JsonValueKind.Object:
foreach (JsonProperty property in element.EnumerateObject())
{
dict.Add(property.Name, ParseJsonElement(property.Value, property.Name));
}
break;
case JsonValueKind.Array:
List<object> list = new List<object>();
foreach (JsonElement arrayElement in element.EnumerateArray())
{
list.Add(ParseJsonElement(arrayElement, ""));
}
dict.Add(propertyName, list);
break;
case JsonValueKind.String:
dict.Add(propertyName, element.GetString());
break;
case JsonValueKind.Number:
dict.Add(propertyName, element.GetInt32());
break;
case JsonValueKind.True:
case JsonValueKind.False:
dict.Add(propertyName, element.GetBoolean());
break;
default:
dict.Add(propertyName, "Unsupported value type");
break;
}
return dict;
}
}
}
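A streaming usage sketch for the new connector; the credentials are placeholders, and inside AntSK the instance is normally created by KernelService from the stored AIModels record:
using AntSK.LLM.SparkDesk;
using Microsoft.SemanticKernel.ChatCompletion;
using Sdcb.SparkDesk;

var options = new SparkDeskOptions { AppId = "appId", ApiSecret = "apiSecret", ApiKey = "apiKey", ModelVersion = ModelVersion.V3_5 };
var chat = new SparkDeskChatCompletion(options, chatId: Guid.NewGuid().ToString());
var history = new ChatHistory();
history.AddUserMessage("Hello");
await foreach (var chunk in chat.GetStreamingChatMessageContentsAsync(history))
{
    Console.Write(chunk);   // each chunk is one streamed fragment of the answer
}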

View File

@@ -67,7 +67,7 @@ namespace AntSK.LLM.SparkDesk
parameters.Temperature = (float)chatExecutionSettings.Temperature;
parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSKFunctions").ToList() ?? [];
var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
//var messages = GetHistories(prompt);

View File

@@ -0,0 +1,108 @@
using System;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using int32_t = Int32;
using int64_t = Int64;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
internal class Native
{
const string DllName = "stable-diffusion";
internal delegate void SdLogCallback(SdLogLevel level, [MarshalAs(UnmanagedType.LPStr)] string text, IntPtr data);
internal delegate void SdProgressCallback(int step, int steps, float time, IntPtr data);
[DllImport(DllName, EntryPoint = "new_sd_ctx", CallingConvention = CallingConvention.Cdecl)]
internal extern static SdContext new_sd_ctx(string model_path,
string vae_path,
string taesd_path,
string control_net_path_c_str,
string lora_model_dir,
string embed_dir_c_str,
string stacked_id_embed_dir_c_str,
bool vae_decode_only,
bool vae_tiling,
bool free_params_immediately,
int n_threads,
WeightType weightType,
RngType rng_type,
ScheduleType s,
bool keep_clip_on_cpu,
bool keep_control_net_cpu,
bool keep_vae_on_cpu);
[DllImport(DllName, EntryPoint = "txt2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr txt2img(SdContext sd_ctx,
string prompt,
string negative_prompt,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
int64_t seed,
int batch_count,
SDImagePtr control_cond,
float control_strength,
float style_strength,
bool normalize_input,
string input_id_images_path);
[DllImport(DllName, EntryPoint = "img2img", CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImagePtr img2img(SdContext sd_ctx,
SDImage init_image,
string prompt_c_str,
string negative_prompt_c_str,
int clip_skip,
float cfg_scale,
int width,
int height,
SampleMethod sample_method,
int sample_steps,
float strength,
int64_t seed,
int batch_count);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern IntPtr preprocess_canny(IntPtr imgData,
int width,
int height,
float high_threshold,
float low_threshold,
float weak,
float strong,
bool inverse);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern UpscalerContext new_upscaler_ctx(string esrgan_path,
int n_threads,
WeightType wtype);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern int32_t get_num_physical_cores();
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_sd_ctx(SdContext sd_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern void free_upscaler_ctx(UpscalerContext upscaler_ctx);
[DllImport(DllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern SDImage upscale(UpscalerContext upscaler_ctx, SDImage input_image, int upscale_factor);
[DllImport(DllName, EntryPoint = "sd_set_log_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_log_callback(SdLogCallback cb, IntPtr data);
[DllImport(DllName, EntryPoint = "sd_set_progress_callback", CallingConvention = CallingConvention.Cdecl)]
internal static extern void sd_set_progress_callback(SdProgressCallback cb, IntPtr data);
}
}

View File

@@ -0,0 +1,234 @@
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using static AntSK.LLM.StableDiffusion.Structs;
using SdContext = IntPtr;
using SDImagePtr = IntPtr;
using UpscalerContext = IntPtr;
public static class SDHelper
{
public static bool IsInitialized => SdContext.Zero != sd_ctx;
public static bool IsUpscalerInitialized => UpscalerContext.Zero != upscaler_ctx;
private static SdContext sd_ctx = new SdContext();
private static UpscalerContext upscaler_ctx = new UpscalerContext();
public static event EventHandler<StableDiffusionEventArgs.StableDiffusionLogEventArgs> Log;
public static event EventHandler<StableDiffusionEventArgs.StableDiffusionProgressEventArgs> Progress;
static readonly Native.SdLogCallback sd_Log_Cb;
static readonly Native.SdProgressCallback sd_Progress_Cb;
//Hide the code below so that the process can be seen in console.
//static SDHelper()
//{
// sd_Log_Cb = new Native.SdLogCallback(OnNativeLog);
// Native.sd_set_log_callback(sd_Log_Cb, IntPtr.Zero);
// sd_Progress_Cb = new Native.SdProgressCallback(OnProgressRunning);
// Native.sd_set_progress_callback(sd_Progress_Cb, IntPtr.Zero);
//}
public static bool Initialize(ModelParams modelParams)
{
sd_ctx = Native.new_sd_ctx(modelParams.ModelPath,
modelParams.VaePath,
modelParams.TaesdPath,
modelParams.ControlnetPath,
modelParams.LoraModelDir,
modelParams.EmbeddingsPath,
modelParams.StackedIdEmbeddingsPath,
modelParams.VaeDecodeOnly,
modelParams.VaeTiling,
modelParams.FreeParamsImmediately,
modelParams.Threads,
modelParams.SdType,
modelParams.RngType,
modelParams.Schedule,
modelParams.KeepClipOnCpu,
modelParams.KeepControlNetOnCpu,
modelParams.KeepVaeOnCpu);
return SdContext.Zero != sd_ctx;
}
public static bool InitializeUpscaler(UpscalerParams @params)
{
upscaler_ctx = Native.new_upscaler_ctx(@params.ESRGANPath, @params.Threads, @params.SdType);
return UpscalerContext.Zero != upscaler_ctx;
}
public static void FreeSD()
{
if (SdContext.Zero != sd_ctx)
{
Native.free_sd_ctx(sd_ctx);
sd_ctx = SdContext.Zero;
}
}
public static void FreeUpscaler()
{
if (UpscalerContext.Zero != upscaler_ctx)
{
Native.free_upscaler_ctx(upscaler_ctx);
upscaler_ctx = UpscalerContext.Zero;
}
}
public static Bitmap[] TextToImage(TextToImageParams textToImageParams)
{
if (!IsInitialized) throw new ArgumentNullException("Model not loaded!");
IntPtr cnPtr = IntPtr.Zero;
if (textToImageParams.ControlCond != null)
{
if (textToImageParams.ControlCond.Width > 1)
{
SDImage cnImg = GetSDImageFromBitmap(textToImageParams.ControlCond);
cnPtr = GetPtrFromImage(cnImg);
}
}
SDImagePtr sd_Image_ptr = Native.txt2img(sd_ctx,
textToImageParams.Prompt,
textToImageParams.NegativePrompt,
textToImageParams.ClipSkip,
textToImageParams.CfgScale,
textToImageParams.Width,
textToImageParams.Height,
textToImageParams.SampleMethod,
textToImageParams.SampleSteps,
textToImageParams.Seed,
textToImageParams.BatchCount,
cnPtr,
textToImageParams.ControlStrength,
textToImageParams.StyleStrength,
textToImageParams.NormalizeInput,
textToImageParams.InputIdImagesPath);
Bitmap[] images = new Bitmap[textToImageParams.BatchCount];
for (int i = 0; i < textToImageParams.BatchCount; i++)
{
SDImage sd_image = Marshal.PtrToStructure<SDImage>(sd_Image_ptr + i * Marshal.SizeOf<SDImage>());
images[i] = GetBitmapFromSdImage(sd_image);
}
return images;
}
public static Bitmap ImageToImage(ImageToImageParams imageToImageParams)
{
if (!IsInitialized) throw new ArgumentNullException("Model not loaded!");
SDImage input_sd_image = GetSDImageFromBitmap(imageToImageParams.InputImage);
SDImagePtr sdImgPtr = Native.img2img(sd_ctx,
input_sd_image,
imageToImageParams.Prompt,
imageToImageParams.NegativePrompt,
imageToImageParams.ClipSkip,
imageToImageParams.CfgScale,
imageToImageParams.Width,
imageToImageParams.Height,
imageToImageParams.SampleMethod,
imageToImageParams.SampleSteps,
imageToImageParams.Strength,
imageToImageParams.Seed,
imageToImageParams.BatchCount);
SDImage sdImg = Marshal.PtrToStructure<SDImage>(sdImgPtr);
return GetBitmapFromSdImage(sdImg);
}
public static Bitmap UpscaleImage(Bitmap image, int upscaleFactor)
{
if (!IsUpscalerInitialized) throw new ArgumentNullException("Upscaler not loaded!");
SDImage inputSDImg = GetSDImageFromBitmap(image);
SDImage result = Native.upscale(upscaler_ctx, inputSDImg, upscaleFactor);
return GetBitmapFromSdImage(result);
}
private static Bitmap GetBitmapFromSdImage(SDImage sd_Image)
{
int width = (int)sd_Image.Width;
int height = (int)sd_Image.Height;
int channel = (int)sd_Image.Channel;
byte[] bytes = new byte[width * height * channel];
Marshal.Copy(sd_Image.Data, bytes, 0, bytes.Length);
Bitmap bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb);
int stride = bmp.Width * channel;
byte[] des = new byte[bytes.Length];
for (int i = 0; i < height; i++)
{
for (int j = 0; j < width; j++)
{
des[stride * i + channel * j + 0] = bytes[stride * i + channel * j + 2];
des[stride * i + channel * j + 1] = bytes[stride * i + channel * j + 1];
des[stride * i + channel * j + 2] = bytes[stride * i + channel * j + 0];
}
}
BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, bmp.PixelFormat);
Marshal.Copy(des, 0, bitmapData.Scan0, bytes.Length);
bmp.UnlockBits(bitmapData);
return bmp;
}
private static SDImage GetSDImageFromBitmap(Bitmap bmp)
{
int width = bmp.Width;
int height = bmp.Height;
int channel = Bitmap.GetPixelFormatSize(bmp.PixelFormat) / 8;
int stride = width * channel;
byte[] bytes = new byte[width * height * channel];
BitmapData bitmapData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, bmp.PixelFormat);
Marshal.Copy(bitmapData.Scan0, bytes, 0, bytes.Length);
bmp.UnlockBits(bitmapData);
byte[] sdImageBytes = new byte[bytes.Length];
for (int i = 0; i < height; i++)
{
for (int j = 0; j < width; j++)
{
sdImageBytes[stride * i + j * 3 + 0] = bytes[stride * i + j * 3 + 2];
sdImageBytes[stride * i + j * 3 + 1] = bytes[stride * i + j * 3 + 1];
sdImageBytes[stride * i + j * 3 + 2] = bytes[stride * i + j * 3 + 0];
}
}
SDImage sd_Image = new SDImage
{
Width = (uint)width,
Height = (uint)height,
Channel = 3,
Data = Marshal.UnsafeAddrOfPinnedArrayElement(sdImageBytes, 0),
};
return sd_Image;
}
private static IntPtr GetPtrFromImage(SDImage sdImg)
{
IntPtr imgPtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(SDImage)));
Marshal.StructureToPtr(sdImg, imgPtr, false);
return imgPtr;
}
private static void OnNativeLog(SdLogLevel level, string text, IntPtr data)
{
Log?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionLogEventArgs { Level = level, Text = text });
}
private static void OnProgressRunning(int step, int steps, float time, IntPtr data)
{
Progress?.Invoke(null, new StableDiffusionEventArgs.StableDiffusionProgressEventArgs { Step = step, Steps = steps, Time = time });
}
}
}
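A text-to-image sketch against the new helper; the model path and prompt are placeholders, and the matching native stable-diffusion library (CPU/CUDA/ROCm, shipped via the csproj above) must be loadable at runtime:
using System.Drawing;
using System.Drawing.Imaging;
using AntSK.LLM.StableDiffusion;
using static AntSK.LLM.StableDiffusion.Structs;

if (SDHelper.Initialize(new ModelParams { ModelPath = @"D:\models\sd-v1-5.safetensors" }))
{
    Bitmap[] images = SDHelper.TextToImage(new TextToImageParams { Prompt = "a lighthouse at dawn" });
    images[0].Save("out.jpg", ImageFormat.Jpeg);
    SDHelper.FreeSD();   // release the native context
}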

View File

@@ -0,0 +1,33 @@
using System;
using static AntSK.LLM.StableDiffusion.Structs;
namespace AntSK.LLM.StableDiffusion
{
public class StableDiffusionEventArgs
{
public class StableDiffusionProgressEventArgs : EventArgs
{
#region Properties & Fields
public int Step { get; set; }
public int Steps { get; set; }
public float Time { get; set; }
public IntPtr Data { get; set; }
public double Progress => (double)Step / Steps;
public float IterationsPerSecond => 1.0f / Time;
#endregion
}
public class StableDiffusionLogEventArgs : EventArgs
{
#region Properties & Fields
public SdLogLevel Level { get; set; }
public string Text { get; set; }
#endregion
}
}
}

View File

@@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntSK.LLM.StableDiffusion
{
public static class StableDiffusionService
{
}
}

View File

@@ -0,0 +1,154 @@
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace AntSK.LLM.StableDiffusion
{
using int64_t = Int64;
using uint32_t = UInt32;
public class Structs
{
public class ModelParams
{
public string ModelPath = string.Empty;
public string VaePath = string.Empty;
public string TaesdPath = string.Empty;
public string ControlnetPath = string.Empty;
public string LoraModelDir = string.Empty;
public string EmbeddingsPath = string.Empty;
public string StackedIdEmbeddingsPath = string.Empty;
public bool VaeDecodeOnly = false;
public bool VaeTiling = true;
public bool FreeParamsImmediately = false;
public int Threads = Native.get_num_physical_cores();
public WeightType SdType = WeightType.SD_TYPE_COUNT;
public RngType RngType = RngType.CUDA_RNG;
public ScheduleType Schedule = ScheduleType.DEFAULT;
public bool KeepClipOnCpu = false;
public bool KeepControlNetOnCpu = false;
public bool KeepVaeOnCpu = false;
}
public class TextToImageParams
{
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = 0;
public float CfgScale = 7;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
public int64_t Seed = -1;
public int BatchCount = 1;
public Bitmap ControlCond = new Bitmap(1, 1);
public float ControlStrength = 0.9f;
public float StyleStrength = 0.75f;
public bool NormalizeInput = false;
public string InputIdImagesPath = string.Empty;
}
public class ImageToImageParams
{
public Bitmap InputImage;
public string Prompt = string.Empty;
public string NegativePrompt = string.Empty;
public int ClipSkip = -1;
public float CfgScale = 7.0f;
public int Width = 512;
public int Height = 512;
public SampleMethod SampleMethod = SampleMethod.EULER_A;
public int SampleSteps = 20;
public float Strength = 0.75f;
public int64_t Seed = 42;
public int BatchCount = 1;
}
public class UpscalerParams
{
public string ESRGANPath = string.Empty;
public int Threads = Native.get_num_physical_cores();
public WeightType SdType = WeightType.SD_TYPE_COUNT;
}
[StructLayout(LayoutKind.Sequential)]
internal struct SDImage
{
public uint32_t Width;
public uint32_t Height;
public uint32_t Channel;
public IntPtr Data;
}
public enum WeightType
{
SD_TYPE_F32 = 0,
SD_TYPE_F16 = 1,
SD_TYPE_Q4_0 = 2,
SD_TYPE_Q4_1 = 3,
// SD_TYPE_Q4_2 = 4, support has been removed
// SD_TYPE_Q4_3 (5) support has been removed
SD_TYPE_Q5_0 = 6,
SD_TYPE_Q5_1 = 7,
SD_TYPE_Q8_0 = 8,
SD_TYPE_Q8_1 = 9,
// k-quantizations
SD_TYPE_Q2_K = 10,
SD_TYPE_Q3_K = 11,
SD_TYPE_Q4_K = 12,
SD_TYPE_Q5_K = 13,
SD_TYPE_Q6_K = 14,
SD_TYPE_Q8_K = 15,
SD_TYPE_IQ2_XXS = 16,
SD_TYPE_IQ2_XS = 17,
SD_TYPE_IQ3_XXS = 18,
SD_TYPE_IQ1_S = 19,
SD_TYPE_IQ4_NL = 20,
SD_TYPE_IQ3_S = 21,
SD_TYPE_IQ2_S = 22,
SD_TYPE_IQ4_XS = 23,
SD_TYPE_I8,
SD_TYPE_I16,
SD_TYPE_I32,
SD_TYPE_COUNT,
};
public enum RngType
{
STD_DEFAULT_RNG,
CUDA_RNG
};
public enum ScheduleType
{
DEFAULT,
DISCRETE,
KARRAS,
N_SCHEDULES
};
public enum SampleMethod
{
EULER_A,
EULER,
HEUN,
DPM2,
DPMPP2S_A,
DPMPP2M,
DPMPP2Mv2,
LCM,
N_SAMPLE_METHODS
};
public enum SdLogLevel
{
Debug,
Info,
Warn,
Error
}
}
}

View File

@@ -0,0 +1,6 @@
AsAHuman/chilloutmix
GraMpa7/dreamsharper
Airic/Anything-V4.5
liqira/anythingv3
wind1/MoYou
Reuploadingfromcivitai/DosMix

View File

@@ -23,5 +23,7 @@ namespace AntSK.LLamaFactory.Model
{
public string Name { get; set; }
public string ModelScope { get; set; }
public string Template { get; set; }
}
}

View File

@@ -0,0 +1,19 @@
import os
import uvicorn
from llamafactory.api.app import create_app
from llamafactory.chat import ChatModel
def main():
chat_model = ChatModel()
app = create_app(chat_model)
api_host = os.environ.get("API_HOST", "0.0.0.0")
api_port = int(os.environ.get("API_PORT", "8000"))
print("Visit http://localhost:{}/docs for API document.".format(api_port))
uvicorn.run(app, host=api_host, port=api_port)
if __name__ == "__main__":
main()

View File

@@ -1,16 +0,0 @@
import os
import uvicorn
from llmtuner import ChatModel, create_app
def main():
chat_model = ChatModel()
app = create_app(chat_model)
print("Visit http://localhost:{}/docs for API document.".format(os.environ.get("API_PORT", 8000)))
uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("API_PORT", 8000)), workers=1)
if __name__ == "__main__":
main()

View File

@@ -1,27 +0,0 @@
import subprocess
import shlex
import os
class Start(object):
def __init__(self,model_name_or_path):
self.model_name_or_path=model_name_or_path
def StartCommand(self):
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
os.environ['API_PORT'] = '8000'
# Build the command to execute
command = (
'python api_demo.py'
' --model_name_or_path E:/model/Qwen1.5-0.5B-Chat_back'
' --template default '
)
# Use shlex.split() to split the command string safely
command = shlex.split(command)
# Run the command
subprocess.run(command, shell=True)
if __name__ == "__main__":
star= Start('model_name_or_path')
star.StartCommand()

View File

@@ -1,49 +0,0 @@
from llmtuner import ChatModel
from llmtuner.extras.misc import torch_gc
try:
import platform
if platform.system() != "Windows":
import readline # noqa: F401
except ImportError:
print("Install `readline` for a better experience.")
def main():
chat_model = ChatModel()
messages = []
print("Welcome to the CLI application, use `clear` to remove the history, use `exit` to exit the application.")
while True:
try:
query = input("\nUser: ")
except UnicodeDecodeError:
print("Detected decoding error at the inputs, please set the terminal encoding to utf-8.")
continue
except Exception:
raise
if query.strip() == "exit":
break
if query.strip() == "clear":
messages = []
torch_gc()
print("History has been removed.")
continue
messages.append({"role": "user", "content": query})
print("Assistant: ", end="", flush=True)
response = ""
for new_text in chat_model.stream_chat(messages):
print(new_text, end="", flush=True)
response += new_text
print()
messages.append({"role": "assistant", "content": response})
if __name__ == "__main__":
main()

View File

@@ -1,10 +0,0 @@
from llmtuner import Evaluator
def main():
evaluator = Evaluator()
evaluator.eval()
if __name__ == "__main__":
main()

View File

@@ -1,9 +0,0 @@
from llmtuner import export_model
def main():
export_model()
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,6 @@
# Level: api, webui > chat, eval, train > data, model > hparams > extras
from .cli import VERSION
__version__ = VERSION

View File

@@ -0,0 +1,108 @@
import os
from contextlib import asynccontextmanager
from typing import Optional
from typing_extensions import Annotated
from ..chat import ChatModel
from ..extras.misc import torch_gc
from ..extras.packages import is_fastapi_available, is_starlette_available, is_uvicorn_available
from .chat import (
create_chat_completion_response,
create_score_evaluation_response,
create_stream_chat_completion_response,
)
from .protocol import (
ChatCompletionRequest,
ChatCompletionResponse,
ModelCard,
ModelList,
ScoreEvaluationRequest,
ScoreEvaluationResponse,
)
if is_fastapi_available():
from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.security.http import HTTPAuthorizationCredentials, HTTPBearer
if is_starlette_available():
from sse_starlette import EventSourceResponse
if is_uvicorn_available():
import uvicorn
@asynccontextmanager
async def lifespan(app: "FastAPI"): # collects GPU memory
yield
torch_gc()
def create_app(chat_model: "ChatModel") -> "FastAPI":
app = FastAPI(lifespan=lifespan)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
api_key = os.environ.get("API_KEY")
security = HTTPBearer(auto_error=False)
async def verify_api_key(auth: Annotated[Optional[HTTPAuthorizationCredentials], Depends(security)]):
if api_key and (auth is None or auth.credentials != api_key):
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key.")
@app.get(
"/v1/models",
response_model=ModelList,
status_code=status.HTTP_200_OK,
dependencies=[Depends(verify_api_key)],
)
async def list_models():
model_card = ModelCard(id="gpt-3.5-turbo")
return ModelList(data=[model_card])
@app.post(
"/v1/chat/completions",
response_model=ChatCompletionResponse,
status_code=status.HTTP_200_OK,
dependencies=[Depends(verify_api_key)],
)
async def create_chat_completion(request: ChatCompletionRequest):
if not chat_model.engine.can_generate:
raise HTTPException(status_code=status.HTTP_405_METHOD_NOT_ALLOWED, detail="Not allowed")
if request.stream:
generate = create_stream_chat_completion_response(request, chat_model)
return EventSourceResponse(generate, media_type="text/event-stream")
else:
return await create_chat_completion_response(request, chat_model)
@app.post(
"/v1/score/evaluation",
response_model=ScoreEvaluationResponse,
status_code=status.HTTP_200_OK,
dependencies=[Depends(verify_api_key)],
)
async def create_score_evaluation(request: ScoreEvaluationRequest):
if chat_model.engine.can_generate:
raise HTTPException(status_code=status.HTTP_405_METHOD_NOT_ALLOWED, detail="Not allowed")
return await create_score_evaluation_response(request, chat_model)
return app
def run_api() -> None:
chat_model = ChatModel()
app = create_app(chat_model)
api_host = os.environ.get("API_HOST", "0.0.0.0")
api_port = int(os.environ.get("API_PORT", "8000"))
print("Visit http://localhost:{}/docs for API document.".format(api_port))
uvicorn.run(app, host=api_host, port=api_port)

View File

@@ -0,0 +1,219 @@
import base64
import io
import json
import os
import uuid
from typing import TYPE_CHECKING, AsyncGenerator, Dict, List, Optional, Tuple
from ..data import Role as DataRole
from ..extras.logging import get_logger
from ..extras.packages import is_fastapi_available, is_pillow_available, is_requests_available
from .common import dictify, jsonify
from .protocol import (
ChatCompletionMessage,
ChatCompletionResponse,
ChatCompletionResponseChoice,
ChatCompletionResponseUsage,
ChatCompletionStreamResponse,
ChatCompletionStreamResponseChoice,
Finish,
Function,
FunctionCall,
Role,
ScoreEvaluationResponse,
)
if is_fastapi_available():
from fastapi import HTTPException, status
if is_pillow_available():
from PIL import Image
if is_requests_available():
import requests
if TYPE_CHECKING:
from numpy.typing import NDArray
from ..chat import ChatModel
from .protocol import ChatCompletionRequest, ScoreEvaluationRequest
logger = get_logger(__name__)
ROLE_MAPPING = {
Role.USER: DataRole.USER.value,
Role.ASSISTANT: DataRole.ASSISTANT.value,
Role.SYSTEM: DataRole.SYSTEM.value,
Role.FUNCTION: DataRole.FUNCTION.value,
Role.TOOL: DataRole.OBSERVATION.value,
}
def _process_request(
request: "ChatCompletionRequest",
) -> Tuple[List[Dict[str, str]], Optional[str], Optional[str], Optional["NDArray"]]:
logger.info("==== request ====\n{}".format(json.dumps(dictify(request), indent=2, ensure_ascii=False)))
if len(request.messages) == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid length")
if request.messages[0].role == Role.SYSTEM:
system = request.messages.pop(0).content
else:
system = None
if len(request.messages) % 2 == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Only supports u/a/u/a/u...")
input_messages = []
image = None
for i, message in enumerate(request.messages):
if i % 2 == 0 and message.role not in [Role.USER, Role.TOOL]:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid role")
elif i % 2 == 1 and message.role not in [Role.ASSISTANT, Role.FUNCTION]:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid role")
if message.role == Role.ASSISTANT and isinstance(message.tool_calls, list) and len(message.tool_calls):
name = message.tool_calls[0].function.name
arguments = message.tool_calls[0].function.arguments
content = json.dumps({"name": name, "argument": arguments}, ensure_ascii=False)
input_messages.append({"role": ROLE_MAPPING[Role.FUNCTION], "content": content})
elif isinstance(message.content, list):
for input_item in message.content:
if input_item.type == "text":
input_messages.append({"role": ROLE_MAPPING[message.role], "content": input_item.text})
else:
image_url = input_item.image_url.url
if image_url.startswith("data:image"): # base64 image
image_data = base64.b64decode(image_url.split(",", maxsplit=1)[1])
image_path = io.BytesIO(image_data)
elif os.path.isfile(image_url): # local file
image_path = open(image_url, "rb")
else: # web uri
image_path = requests.get(image_url, stream=True).raw
image = Image.open(image_path).convert("RGB")
else:
input_messages.append({"role": ROLE_MAPPING[message.role], "content": message.content})
tool_list = request.tools
if isinstance(tool_list, list) and len(tool_list):
try:
tools = json.dumps([dictify(tool.function) for tool in tool_list], ensure_ascii=False)
except Exception:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid tools")
else:
tools = None
return input_messages, system, tools, image
def _create_stream_chat_completion_chunk(
completion_id: str,
model: str,
delta: "ChatCompletionMessage",
index: Optional[int] = 0,
finish_reason: Optional["Finish"] = None,
) -> str:
choice_data = ChatCompletionStreamResponseChoice(index=index, delta=delta, finish_reason=finish_reason)
chunk = ChatCompletionStreamResponse(id=completion_id, model=model, choices=[choice_data])
return jsonify(chunk)
async def create_chat_completion_response(
request: "ChatCompletionRequest", chat_model: "ChatModel"
) -> "ChatCompletionResponse":
completion_id = "chatcmpl-{}".format(uuid.uuid4().hex)
input_messages, system, tools, image = _process_request(request)
responses = await chat_model.achat(
input_messages,
system,
tools,
image,
do_sample=request.do_sample,
temperature=request.temperature,
top_p=request.top_p,
max_new_tokens=request.max_tokens,
num_return_sequences=request.n,
stop=request.stop,
)
prompt_length, response_length = 0, 0
choices = []
for i, response in enumerate(responses):
if tools:
result = chat_model.engine.template.format_tools.extract(response.response_text)
else:
result = response.response_text
if isinstance(result, tuple):
name, arguments = result
function = Function(name=name, arguments=arguments)
tool_call = FunctionCall(id="call_{}".format(uuid.uuid4().hex), function=function)
response_message = ChatCompletionMessage(role=Role.ASSISTANT, tool_calls=[tool_call])
finish_reason = Finish.TOOL
else:
response_message = ChatCompletionMessage(role=Role.ASSISTANT, content=result)
finish_reason = Finish.STOP if response.finish_reason == "stop" else Finish.LENGTH
choices.append(ChatCompletionResponseChoice(index=i, message=response_message, finish_reason=finish_reason))
prompt_length = response.prompt_length
response_length += response.response_length
usage = ChatCompletionResponseUsage(
prompt_tokens=prompt_length,
completion_tokens=response_length,
total_tokens=prompt_length + response_length,
)
return ChatCompletionResponse(id=completion_id, model=request.model, choices=choices, usage=usage)
async def create_stream_chat_completion_response(
    request: "ChatCompletionRequest", chat_model: "ChatModel"
) -> AsyncGenerator[str, None]:
    completion_id = "chatcmpl-{}".format(uuid.uuid4().hex)
    input_messages, system, tools, image = _process_request(request)
    if tools:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Cannot stream function calls.")

    if request.n > 1:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Cannot stream multiple responses.")

    yield _create_stream_chat_completion_chunk(
        completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(role=Role.ASSISTANT, content="")
    )
    async for new_token in chat_model.astream_chat(
        input_messages,
        system,
        tools,
        image,
        do_sample=request.do_sample,
        temperature=request.temperature,
        top_p=request.top_p,
        max_new_tokens=request.max_tokens,
        stop=request.stop,
    ):
        if len(new_token) != 0:
            yield _create_stream_chat_completion_chunk(
                completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(content=new_token)
            )

    yield _create_stream_chat_completion_chunk(
        completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(), finish_reason=Finish.STOP
    )
    yield "[DONE]"

async def create_score_evaluation_response(
    request: "ScoreEvaluationRequest", chat_model: "ChatModel"
) -> "ScoreEvaluationResponse":
    if len(request.messages) == 0:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid request")

    scores = await chat_model.aget_scores(request.messages, max_length=request.max_length)
    return ScoreEvaluationResponse(model=request.model, scores=scores)

View File

@@ -0,0 +1,20 @@
import json
from typing import TYPE_CHECKING, Any, Dict

if TYPE_CHECKING:
    from pydantic import BaseModel


def dictify(data: "BaseModel") -> Dict[str, Any]:
    try:  # pydantic v2
        return data.model_dump(exclude_unset=True)
    except AttributeError:  # pydantic v1
        return data.dict(exclude_unset=True)


def jsonify(data: "BaseModel") -> str:
    try:  # pydantic v2
        return json.dumps(data.model_dump(exclude_unset=True), ensure_ascii=False)
    except AttributeError:  # pydantic v1
        return data.json(exclude_unset=True, ensure_ascii=False)
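A small usage sketch of the two helpers (the Msg model below is illustrative, not part of the diff): with exclude_unset=True, fields the caller never set are dropped from the output, and the same calls work on both pydantic v1 and v2 thanks to the AttributeError fallback.

# Illustrative demo of dictify/jsonify; Msg is a hypothetical model.
from typing import Optional
from pydantic import BaseModel

class Msg(BaseModel):
    role: str
    content: Optional[str] = None

print(dictify(Msg(role="user")))                # {'role': 'user'} -- unset 'content' is omitted
print(jsonify(Msg(role="user", content="hi")))  # {"role": "user", "content": "hi"}
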

View File

@@ -1,6 +1,6 @@
 import time
 from enum import Enum, unique
-from typing import List, Optional
+from typing import Any, Dict, List, Optional, Union
 from pydantic import BaseModel, Field
 from typing_extensions import Literal
@@ -39,15 +39,37 @@ class Function(BaseModel):
     arguments: str
+class FunctionDefinition(BaseModel):
+    name: str
+    description: str
+    parameters: Dict[str, Any]
+class FunctionAvailable(BaseModel):
+    type: Literal["function", "code_interpreter"] = "function"
+    function: Optional[FunctionDefinition] = None
 class FunctionCall(BaseModel):
-    id: Literal["call_default"] = "call_default"
+    id: str
+    type: Literal["function"] = "function"
     function: Function
+class ImageURL(BaseModel):
+    url: str
+class MultimodalInputItem(BaseModel):
+    type: Literal["text", "image_url"]
+    text: Optional[str] = None
+    image_url: Optional[ImageURL] = None
 class ChatMessage(BaseModel):
     role: Role
-    content: str
+    content: Optional[Union[str, List[MultimodalInputItem]]] = None
     tool_calls: Optional[List[FunctionCall]] = None
 class ChatCompletionMessage(BaseModel):
@@ -59,12 +81,13 @@ class ChatCompletionMessage(BaseModel):
 class ChatCompletionRequest(BaseModel):
     model: str
     messages: List[ChatMessage]
-    tools: list = []
+    tools: Optional[List[FunctionAvailable]] = None
     do_sample: bool = True
     temperature: Optional[float] = None
     top_p: Optional[float] = None
     n: int = 1
     max_tokens: Optional[int] = None
+    stop: Optional[Union[str, List[str]]] = None
     stream: bool = False
@@ -74,7 +97,7 @@ class ChatCompletionResponseChoice(BaseModel):
     finish_reason: Finish
-class ChatCompletionResponseStreamChoice(BaseModel):
+class ChatCompletionStreamResponseChoice(BaseModel):
     index: int
     delta: ChatCompletionMessage
     finish_reason: Optional[Finish] = None
@@ -87,7 +110,7 @@ class ChatCompletionResponseUsage(BaseModel):
 class ChatCompletionResponse(BaseModel):
-    id: Literal["chatcmpl-default"] = "chatcmpl-default"
+    id: str
     object: Literal["chat.completion"] = "chat.completion"
     created: int = Field(default_factory=lambda: int(time.time()))
     model: str
@@ -96,11 +119,11 @@ class ChatCompletionResponse(BaseModel):
 class ChatCompletionStreamResponse(BaseModel):
-    id: Literal["chatcmpl-default"] = "chatcmpl-default"
+    id: str
     object: Literal["chat.completion.chunk"] = "chat.completion.chunk"
     created: int = Field(default_factory=lambda: int(time.time()))
     model: str
-    choices: List[ChatCompletionResponseStreamChoice]
+    choices: List[ChatCompletionStreamResponseChoice]
 class ScoreEvaluationRequest(BaseModel):
@@ -110,7 +133,7 @@ class ScoreEvaluationRequest(BaseModel):
 class ScoreEvaluationResponse(BaseModel):
-    id: Literal["scoreeval-default"] = "scoreeval-default"
+    id: str
     object: Literal["score.evaluation"] = "score.evaluation"
     model: str
     scores: List[float]
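Taken together, the widened ChatMessage.content and the new FunctionAvailable/FunctionDefinition models let a single request body carry both a multimodal message and a tool declaration. A hedged example of such a body, expressed as a Python dict (model name, URL, and tool are illustrative):

# Illustrative request body matching the updated ChatCompletionRequest schema.
example_request = {
    "model": "llama3",  # hypothetical model name
    "messages": [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this picture?"},
                {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
            ],
        }
    ],
    "tools": [
        {
            "type": "function",
            "function": {
                "name": "get_weather",  # hypothetical tool
                "description": "Look up the current weather for a city",
                "parameters": {"type": "object", "properties": {"city": {"type": "string"}}},
            },
        }
    ],
    "stream": False,
}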

Some files were not shown because too many files have changed in this diff.