160 Commits

Author SHA1 Message Date
c6cdacc48e feat(ai-web): 增加本地配置文件 2025-06-29 14:28:44 +08:00
8884495a89 fix(ai-web): 恢复配置文件 2025-06-29 14:27:42 +08:00
d08a6babbe feat(ai-web): 增加任务和任务模板 2025-06-28 21:08:31 +08:00
9a3375bd03 fix(ai-web): 修复跨域错误 2025-06-28 17:23:19 +08:00
v-zhangjc9
2c808a5bc9 feat(web): 增加节点的参数展示 2025-06-27 11:31:02 +08:00
v-zhangjc9
6e667c45e1 feat(web): 节点增加入参变量 2025-06-27 10:04:48 +08:00
v-zhangjc9
635c6537ed feat(web): 实现EL表达式的解析 2025-06-27 10:04:00 +08:00
v-zhangjc9
d6b70b1750 feat(web): 尝试增加并行节点解决解析问题 2025-06-26 20:48:49 +08:00
v-zhangjc9
c92a374591 feat(web): 优化UI展示 2025-06-26 20:48:20 +08:00
v-zhangjc9
a2aba82b6e feat(web): 调整节点入参形式 2025-06-26 14:38:09 +08:00
873c1a1d20 feat(web): 增加EL表达式转换 2025-06-26 00:31:04 +08:00
v-zhangjc9
f6bd7e52e1 feat(ai-web): 尝试解析流程图 2025-06-25 17:55:05 +08:00
v-zhangjc9
6f7f7cea67 feat(web): 增加代码节点 2025-06-25 17:54:43 +08:00
v-zhangjc9
33df256863 feat(web): 增加知识库节点 2025-06-25 17:25:50 +08:00
v-zhangjc9
3a51d1e33f feat(web): 升级依赖 2025-06-25 12:36:43 +08:00
v-zhangjc9
d3c7457889 fix(web): 调整样式 2025-06-25 10:43:26 +08:00
2d2eaafcd4 feat(web): 优化样式 2025-06-25 00:04:27 +08:00
v-zhangjc9
566dfef208 feat(web): 增加流程图连线限制 2025-06-24 14:07:07 +08:00
1cba0f4422 feat(web): 完成流程图外部加载 2025-06-24 10:44:18 +08:00
ab56385c8a feat(web): 增加节点输出编辑 2025-06-24 10:15:08 +08:00
b58c34443f feat(web): 增加跨站导航列表 2025-06-24 09:29:48 +08:00
53638a8a6d feat(web): 优化流程界面显示 2025-06-23 23:55:31 +08:00
dc55605c99 feat(web): 正式提取流程设计能力到AI目录下,做代码拆分 2025-06-23 22:51:40 +08:00
7345774258 feat(web): 增加编辑侧边栏关闭时保存节点数据 2025-06-23 22:27:23 +08:00
fcf5f8ad18 feat(web): 优化编辑界面 2025-06-23 22:27:23 +08:00
v-zhangjc9
b53ee57dc3 feat(web): 替换amis渲染,amis渲染太慢,导致卡顿 2025-06-23 22:27:23 +08:00
v-zhangjc9
b916acb1c3 feat(web): 增加流程定义基本能力 2025-06-23 22:27:23 +08:00
v-zhangjc9
c9616eb63a fix(web): 提交遗漏文件 2025-06-23 16:55:36 +08:00
v-zhangjc9
5b3c27ea48 feat(ai-web): 完成JPA存储适配 2025-06-23 16:53:34 +08:00
e48d7e8649 feat(ai-web): 尝试使用jpa作为通用数据库后端 2025-06-23 00:26:31 +08:00
v-zhangjc9
306c20aa7f fix(web): 修复删除按钮hover不是红色 2025-06-19 11:31:07 +08:00
v-zhangjc9
24d5d10ecb fix(web): 优化字号 2025-06-19 11:14:59 +08:00
v-zhangjc9
4a9a9ec238 fix(ai-web): 优化qa嵌入提示词 2025-06-19 11:10:14 +08:00
v-zhangjc9
08aa1d8382 fix(ai-web): 修复chartjs工具获取bean错误 2025-06-19 11:09:52 +08:00
v-zhangjc9
1b3045dfd4 feat(web): 统一字体展示 2025-06-19 11:09:05 +08:00
v-zhangjc9
0f5ae1c4d4 fix(ai-web): 修复超时时间设置过短导致反复重连 2025-06-18 16:09:37 +08:00
v-zhangjc9
48e42ee99a fix(ai-web): 升级部分依赖版本 2025-06-18 14:53:45 +08:00
v-zhangjc9
0914b458d3 fix(ai-web): 修复页面失去焦点的时候没有断开对话的连接 2025-06-18 10:34:56 +08:00
v-zhangjc9
368c30676e feat(ai-web): 尝试优化对话连接的稳定性 2025-06-18 10:33:44 +08:00
v-zhangjc9
60477f99f5 fix(ai-web): 修复错别字 2025-06-17 17:43:18 +08:00
v-zhangjc9
565c530dd5 feat(ai-web): 知识库增加描述 2025-06-17 17:19:04 +08:00
v-zhangjc9
5130885033 fix(ai-web): 改正包名 2025-06-17 16:18:38 +08:00
v-zhangjc9
8e6463845b feat(ai-web): 开启gzip 2025-06-17 16:16:01 +08:00
v-zhangjc9
e89bffe289 feat(ai-web): 增加Feedback详情展示和处理情况确认 2025-06-17 16:15:42 +08:00
v-zhangjc9
1dd00d329c refactor(ai-web): 优化流程编排 2025-06-17 10:35:39 +08:00
e470a87372 feat(web): 修复yarn页面查看资源队列错误 2025-06-16 23:51:36 +08:00
v-zhangjc9
45da452f18 fix(ai-web): 完成feedback AI流程 2025-06-16 20:37:14 +08:00
v-zhangjc9
e6a1bc5383 fix(ai-web): 修复取值错误 2025-06-16 17:13:16 +08:00
v-zhangjc9
c5916703cd fix(web): 修复base url错误 2025-06-16 16:12:39 +08:00
v-zhangjc9
807ddbe5cb feat(ai-web): 完成图片上传和显示 2025-06-16 13:38:42 +08:00
v-zhangjc9
13de694e37 fix(all): 修复配置错误 2025-06-16 12:13:42 +08:00
v-zhangjc9
1962dd586c feat(ai-web): 增加辅助插件 2025-06-16 11:07:56 +08:00
138ee140e1 feat(ai-web): 增加feedback 2025-06-15 23:42:26 +08:00
e2d69bc6e8 refactor(ai-web): 优化id的生成 2025-06-15 22:19:05 +08:00
b9d707dc8f refactor(gateway): 适配ai web的路由 2025-06-15 20:10:54 +08:00
44d1473c6b refactor(ai): 移除chat,合并chat和knowledge为web
以后有需要再拆分
2025-06-15 20:08:51 +08:00
9c658afbd7 refactor(ai): 迁移chat到知识库中 2025-06-15 19:59:35 +08:00
e3f86e6497 refactor(knowledge): 优化配置文件 2025-06-15 19:15:40 +08:00
256c8c6bd5 refactor(knowledge): 修改代码结构,增加多环境配置文件方便本地开发 2025-06-15 19:10:38 +08:00
b627c91acb refactor(knowledge): 重构大模型配置
Spring AI默认大模型配置不支持同时配置两个文本大模型,比如一个文本大模型和一个图像大模型,改用自定义的配置
2025-06-15 17:57:09 +08:00
7fb490778a refactor(knowledge): 优化接口结构,统一到一个路径下,为合并做准备 2025-06-15 17:02:53 +08:00
d4d5aede31 feat(ai): restClient和webClient提供给其他类使用 2025-06-14 17:45:56 +08:00
v-zhangjc9
f11f5e7656 feat(ai): 调整模型 2025-06-13 19:07:13 +08:00
v-zhangjc9
bc32a89fea fix(ai): 移除不支持的模型 2025-06-13 16:11:37 +08:00
v-zhangjc9
2e24bdb90b feat(ai): 增加大模型对话测试 2025-06-12 20:29:37 +08:00
v-zhangjc9
5160c59ab0 feat(ai): 增加llama-swap配置文件生成工具 2025-06-12 20:24:46 +08:00
v-zhangjc9
506e28c9f7 feat(chat): 优化提示词,增加mermaid图表生成 2025-06-11 19:45:57 +08:00
9076bd4c09 fix(web): 修复vite类型没有引入导致import出现问题 2025-06-08 19:17:32 +08:00
69f0bed9a1 fix(web): 提交遗漏的文件 2025-06-08 02:42:58 +08:00
c04269c3fa feat(web): AI对话框增加图表显示
支持Mermaid、EChart、Chart.js
2025-06-08 02:42:05 +08:00
4fe21f3d8b refactor(chat): 提示词汇总,方便使用 2025-06-08 00:13:17 +08:00
cb42376e46 feat(bin): 优化脚本 2025-06-06 22:34:11 +08:00
34bdb59501 feat(bin): 优化脚本 2025-06-06 22:32:14 +08:00
v-zhangjc9
72c23d916a feat(chat): 优化提示词,增加外部调用方法 2025-06-06 19:56:52 +08:00
v-zhangjc9
e01a883d37 feat(chat): 优化提示词,增加外部调用方法 2025-06-06 17:20:15 +08:00
v-zhangjc9
951075fc9f feat(chat): 优化提示词和知识库查询 2025-06-06 10:57:27 +08:00
v-zhangjc9
90fea22de5 feat(chat): 增加数据库SQL访问接口 2025-06-05 19:53:25 +08:00
v-zhangjc9
a35980a5f4 perf(knowledge): 暂时关闭rerank提升性能 2025-06-05 15:21:03 +08:00
v-zhangjc9
e359bed97c feat(knowledge): 实现外部直接插入知识库 2025-06-05 15:20:33 +08:00
v-zhangjc9
8b4827b164 feat(web): 增加代码混淆 2025-06-04 19:19:37 +08:00
v-zhangjc9
577834568b feat(ai): 优化UI 2025-06-04 18:52:28 +08:00
v-zhangjc9
c4d5a7b300 feat(knowledge): 增加rerank模型适配 2025-06-04 17:43:22 +08:00
v-zhangjc9
4124a8a851 feat(chat): 优化提示词,时间参数直接放在提示词中 2025-06-04 17:42:46 +08:00
v-zhangjc9
6d4dedc3f4 fix(web): 增加访问超时时间 2025-06-04 17:40:54 +08:00
v-zhangjc9
b8aea3bdf0 feat(web): 优化界面,移除思考开关 2025-06-04 17:40:21 +08:00
v-zhangjc9
d36ad95a85 fix(web): 修复知识库id没有及时添加到http请求中 2025-06-04 17:39:55 +08:00
v-zhangjc9
fdec62b56e fix(bin): 优化打包脚本 2025-06-04 10:02:25 +08:00
1217d114bd feat(bin): 尝试合并编译脚本 2025-06-03 23:53:19 +08:00
2d7b30bb7a fix(bin): 适配在Windows下的打包 2025-06-03 23:22:01 +08:00
v-zhangjc9
c2af2d6365 feat(chat): 尝试在对话中加入知识库 2025-06-03 20:23:52 +08:00
v-zhangjc9
536c4e9cab feat(bin): 增加forest发布脚本 2025-06-03 20:22:29 +08:00
v-zhangjc9
c9a1ea2be5 feat(web): 用markdown显示思考过程 2025-06-03 16:12:23 +08:00
v-zhangjc9
602a337923 fix(chat): 修复上传文件选择错误 2025-05-30 17:35:59 +08:00
v-zhangjc9
fe9e185a9a feat(web): 增加根据环境切换debug状态 2025-05-30 17:35:39 +08:00
v-zhangjc9
3901a47da0 feat(web): 增加知识库选择 2025-05-30 17:35:24 +08:00
v-zhangjc9
dc5998cf72 feat(web): 增加命中测试按钮 2025-05-30 17:35:15 +08:00
v-zhangjc9
993940e810 feat(web): 升级前端依赖 2025-05-30 17:34:41 +08:00
v-zhangjc9
b8cc8fee67 fix(knowledge): 修复文件上传失败 2025-05-30 11:03:26 +08:00
v-zhangjc9
2cac589b0f fix(web): 优化代码 2025-05-30 10:31:32 +08:00
v-zhangjc9
29859664e3 fix(ai): 优化cors配置 2025-05-30 10:30:27 +08:00
v-zhangjc9
0ceb5d7fc3 feat(bin): 增加上传文件md5显示 2025-05-30 10:29:59 +08:00
v-zhangjc9
947c831609 fix(all): 优化忽略名单 2025-05-30 10:28:43 +08:00
v-zhangjc9
ce95ec7444 fix(gateway): 优化cors配置,网关统一处理 2025-05-30 10:28:16 +08:00
v-zhangjc9
95214f7af3 fix(gateway): 优化security配置 2025-05-29 18:36:50 +08:00
v-zhangjc9
0f49c91fde feat(bin): 优化上传命令 2025-05-29 11:15:04 +08:00
v-zhangjc9
0262c573ae fix(bin): 修复编译脚本获取jar包的匹配式 2025-05-29 10:36:14 +08:00
v-zhangjc9
8c9cb6f21d feat(knowledge): 完成知识库部署相关脚本调整 2025-05-28 18:46:20 +08:00
v-zhangjc9
e6e24dff27 fix(knowledge): 修复没有指定数据库名称导致无法查询 2025-05-28 17:11:13 +08:00
v-zhangjc9
fc2ea107d2 feat(knowledge): 补充建表语句和脚本 2025-05-28 16:06:07 +08:00
v-zhangjc9
6f9c898d51 fix(knowledge): 移除多余的日志打印 2025-05-28 15:41:36 +08:00
v-zhangjc9
7fd484eeab fix(knowledge): 优化多文件上传体验 2025-05-28 15:40:57 +08:00
v-zhangjc9
3ee6303cf5 feat(knowledge): 完成知识库基本功能开发 2025-05-28 15:06:30 +08:00
v-zhangjc9
f7ed3bd270 feat(core): 改用amis推荐的返回结构 2025-05-27 09:35:11 +08:00
v-zhangjc9
e57c81ce75 feat(knowledge): 初步完成知识库分片预览 2025-05-23 19:12:41 +08:00
v-zhangjc9
fce4816880 feat(all): 移除远程日志集中 2025-05-23 09:49:37 +08:00
v-zhangjc9
79f792b6cf feat(web): 增加预览 2025-05-23 09:48:16 +08:00
v-zhangjc9
0d7d009be2 refactor(knowledge): 加入数据库,优化代码结构 2025-05-22 18:10:44 +08:00
v-zhangjc9
907d2826a4 feat(bin): 增加打包耗时 2025-05-22 10:31:44 +08:00
v-zhangjc9
d190c59e57 fix(bin): 优化忽略列表 2025-05-21 17:58:10 +08:00
v-zhangjc9
bd2205a5b9 fix(bin): 提交未提交的文件 2025-05-21 17:57:48 +08:00
v-zhangjc9
42aab784c2 feat(ai): 完成基础版本功能 2025-05-21 17:57:11 +08:00
v-zhangjc9
8c2b94f6c9 feat(bin): 优化打包脚本到跨平台 2025-05-21 17:56:10 +08:00
v-zhangjc9
6e7cef6170 fix(ai): 移除多余的git跟踪 2025-05-20 17:06:31 +08:00
v-zhangjc9
0156a12b3b feat(gateway): 增加AI相关端口 2025-05-20 16:36:36 +08:00
v-zhangjc9
fe58cee730 feat(bin): 修改仓库到私有仓库中 2025-05-20 14:43:56 +08:00
v-zhangjc9
778a6df984 feat(cli): 增加多jdk的支持 2025-05-20 14:42:51 +08:00
v-zhangjc9
5d49c82190 feat(ai): 完善知识库接口 2025-05-16 19:00:55 +08:00
v-zhangjc9
be976290b6 feat(ai): 完善AI对话 2025-05-16 19:00:26 +08:00
v-zhangjc9
8fbc665abf feat(ai): 增加ai相关子项目 2025-05-15 17:15:18 +08:00
v-zhangjc9
a129caf5f4 fix(web): 修复思考开关无效 2025-05-14 09:14:35 +08:00
v-zhangjc9
aea8a7ed59 feat(web): 优化对话界面显示 2025-05-13 16:13:07 +08:00
v-zhangjc9
dd2e56e27b feat(web): 增加AI对话的能力 2025-05-13 16:03:08 +08:00
v-zhangjc9
819d56fbe3 feat(web): 优化图表展示 2025-05-13 12:06:21 +08:00
v-zhangjc9
255aad4987 feat(web): 优化图标显示 2025-05-12 19:28:23 +08:00
v-zhangjc9
f23de7c959 fix(web): 修复页面显示不正确、logo显示异常 2025-05-12 18:15:10 +08:00
v-zhangjc9
b0603d10bc feat(web): 优化页面跳转和菜单展现 2025-05-12 15:59:46 +08:00
v-zhangjc9
1e7b195f9f feat(web): 更换页面框架为pro-layout 2025-05-12 10:42:59 +08:00
v-zhangjc9
aa93b52dd9 refactor(web): 优化部署打包方案 2025-05-09 17:23:26 +08:00
v-zhangjc9
121f6688c6 refactor(web): 更换client代码的目录 2025-05-09 12:14:06 +08:00
v-zhangjc9
8a7ad32df9 fix(web): 修复切换路由不刷新页面的问题 2025-05-09 12:13:34 +08:00
v-zhangjc9
de445d7061 feat(web): 完成基本适配 2025-05-09 12:13:18 +08:00
v-zhangjc9
fa295b15c6 feat(launcher): 增加批量执行ssh命令脚本 2025-04-24 15:56:52 +08:00
v-zhangjc9
51c9e71b0d fix(launcher): 修复空值环境变量错误 2025-04-23 14:08:10 +08:00
v-zhangjc9
224115e938 refactor(all): 移除juice-fs依赖 2025-04-21 12:28:18 +08:00
v-zhangjc9
2f3eaa9e1a feat(scheduler): 恢复调度时间点 2025-02-12 10:13:52 +08:00
v-zhangjc9
f791b60fd5 feat(sync): 增加日志输出 2025-02-12 10:13:52 +08:00
v-zhangjc9
e6a03122a6 feat(cli): 增加主机是否启用的能力 2025-02-12 10:13:52 +08:00
v-zhangjc9
7249419624 feat(scheduler): 微调b12集群的资源限制 2025-02-12 10:13:52 +08:00
v-zhangjc9
58140fa0e8 feat(bin): 移除ytp传输 2025-02-12 10:13:52 +08:00
v-zhangjc9
b3ccbce16e feat(all): 移除b5集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
6dbad6825d feat(scheduler): 调整b12的资源限制
反正也没有备用集群的需求,一个集群用到头就好
2025-02-12 10:13:52 +08:00
v-zhangjc9
686c523274 fix(scheduler): 修复闲时压缩crm重点表调度到A4集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
1e88c62987 feat(scheduler): 禁止b5、a4在闲时调度期间使用 2025-02-12 10:13:52 +08:00
v-zhangjc9
fb79468eee feat(web): 增加指标采集进度显示 2025-02-12 10:13:52 +08:00
v-zhangjc9
7efd9129c2 feat(monitor): 增加关于hudi表文件数的监控指标 2025-02-12 10:13:52 +08:00
v-zhangjc9
e30a720cea fix(hudi-query): 修复接口调用错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
28b3fd9ca1 feat(hudi-query): 增加关于hdfs文件数相关的接口 2025-02-12 10:13:52 +08:00
v-zhangjc9
70c2442ff1 fix(forest): 修复接口类型错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
3c971e1438 feat(scheduler): 修复调度 2025-02-12 10:13:52 +08:00
v-zhangjc9
2c7d72bdb8 feat(scheduler): 调整日常调度的时间点
停止11、14点的全表压缩调度
2025-02-12 10:13:52 +08:00
417 changed files with 23410 additions and 121908 deletions

3
.gitignore vendored
View File

@@ -32,7 +32,7 @@ buildNumber.properties
!.vscode/*.code-snippets
.history/
*.vsix
.idea/**
**/.idea/**
cmake-build-*/
.idea/**/mongoSettings.xml
*.iws
@@ -109,3 +109,4 @@ Icon
Network Trash Folder
Temporary Items
.apdisk
**/temp/

View File

@@ -1,3 +1,168 @@
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s20.hdp.dc:19521/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T154854.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154825.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154754.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154616.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154529.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151839.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151753.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151727.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151704.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151540.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151442.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151417.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151409.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151340.404.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:31719/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_sz/acct_item_755&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
@@ -358,163 +523,3 @@ Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164901.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164758.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164303.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164220.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164107.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=C5D2666661F27F68E53223FE5B74AF35
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T163410.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/queue/queue/clear?name=compaction-queue-pre
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=8516C92140B5118AF9AA61025D0F8C93
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_scheduler/schedule/all
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=7A4C34E0240A98C1186F3A2551BC5E80
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_web/cloud/list
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-09T170723.200.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/hudi_api/api/message_id?flink_job_id=1542097984132706304&alias=crm_cfguse_mkt_cam_strategy_rel
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Type: text/plain
Content-Length: 738
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=E6FF5447C8553BA4268979B8C5779363
Accept-Encoding: br,deflate,gzip,x-gzip
\#Properties saved on 2023-12-26T09:18:39.583Z
\#Tue Dec 26 17:18:39 CST 2023
hoodie.table.precombine.field=update_ts
hoodie.datasource.write.drop.partition.columns=false
hoodie.table.partition.fields=CITY_ID
hoodie.table.type=MERGE_ON_READ
hoodie.archivelog.folder=archived
hoodie.compaction.payload.class=org.apache.hudi.common.model.OverwriteWithLatestAvroPayload
hoodie.timeline.layout.version=1
hoodie.table.version=4
hoodie.table.recordkey.fields=_key
hoodie.datasource.write.partitionpath.urlencode=false
hoodie.table.name=dws_account
hoodie.table.keygenerator.class=org.apache.hudi.keygen.SimpleKeyGenerator
hoodie.table.timeline.timezone=LOCAL
hoodie.datasource.write.hive_style_partitioning=false
hoodie.table.checksum=989688289
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
<> 2024-05-08T095641.500.txt
###

102
bin/.gitignore vendored Normal file
View File

@@ -0,0 +1,102 @@
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
.history/
*.vsix
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

54
bin/build-all.js Normal file
View File

@@ -0,0 +1,54 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_package_batch, run_upload} from "./library.js";
// Full build pipeline: deploy shared modules, package every service, then
// upload the resulting jars. Replaces the legacy build-all.sh.
// Switch to the repository root (the parent of this script's directory).
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
    // Install the root POM first so child modules can resolve their parent.
    await run_deploy_root()
    // Shared libraries/parents other modules depend on — must be deployed
    // before the packaging step below.
    await run_deploy_batch([
        'service-common',
        'service-dependencies',
        'service-configuration',
        'service-forest',
        'service-cli',
        'service-cli/service-cli-core',
        'service-executor',
        'service-executor/service-executor-core',
        'utils/executor',
    ])
    // Leaf services: packaged into runnable jars.
    await run_package_batch([
        'service-api',
        'service-check',
        'service-cli/service-cli-runner',
        'service-cloud-query',
        'service-executor/service-executor-manager',
        'service-executor/service-executor-task',
        'service-command',
        'service-command-pro',
        'service-exporter',
        'service-flink-query',
        'service-gateway',
        'service-hudi-query',
        'service-info-query',
        'service-monitor',
        'service-loki-query',
        'service-pulsar-query',
        'service-queue',
        'service-scheduler',
        'service-uploader',
        'service-web',
        'service-yarn-query',
        'service-zookeeper-query',
        'utils/patch',
        'utils/sync',
    ])
    // service-launcher is built once per cluster profile and each profile
    // jar is uploaded individually.
    for (const profile of ['b2a4', 'b2b1', 'b2b12']) {
        await run_package('service-launcher', profile)
        await run_upload(`**/service-launcher-${profile}-1.0.0-SNAPSHOT.jar`)
    }
    // Upload all remaining snapshot jars in one pass.
    await run_upload('**/target/*-1.0.0-SNAPSHOT.jar')
} catch (e) {
    console.error(e)
    // Mark the process as failed so CI / wrapper scripts do not treat a
    // broken build as success (previously the script always exited 0).
    process.exitCode = 1
}

View File

@@ -1,38 +0,0 @@
#!/bin/bash
# Legacy full-build script (this diff shows it being removed in favour of
# bin/build-all.js). Deploys shared modules, packages all services, then
# uploads the resulting jars.
# Resolve the repository root: parent directory of this script's directory.
root_path=$(dirname $(cd $(dirname $0);pwd))
# Shared helper functions: deploy / package / upload / upload_ytp.
source $root_path/bin/library.sh
# Install only the root POM (-N = non-recursive) so modules can resolve it.
mvn install -N -D skipTests
# Deploy shared libraries/parents that the packaged services depend on.
deploy service-common service-dependencies service-configuration service-forest service-cli service-cli/service-cli-core service-executor service-executor/service-executor-core utils/executor
# Package every leaf service into a runnable jar.
package service-api service-check service-cli/service-cli-runner service-cloud-query service-executor/service-executor-manager service-executor/service-executor-task service-command service-command-pro service-exporter service-flink-query service-gateway service-hudi-query service-info-query service-monitor service-loki-query service-pulsar-query service-queue service-scheduler service-uploader service-web service-yarn-query service-zookeeper-query utils/patch utils/sync
# service-launcher is built once per cluster profile and uploaded per profile.
configs=(b2a4 b2b1 b2b5 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config
upload $root_path/service-launcher/target/service-launcher-$config-1.0.0-SNAPSHOT.jar
done
# Upload each remaining service jar explicitly.
upload $root_path/service-api/target/service-api-1.0.0-SNAPSHOT.jar
upload $root_path/service-check/target/service-check-1.0.0-SNAPSHOT.jar
upload $root_path/service-cloud-query/target/service-cloud-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-command/target/service-command-1.0.0-SNAPSHOT.jar
upload $root_path/service-command-pro/target/service-command-pro-1.0.0-SNAPSHOT.jar
upload $root_path/service-executor/service-executor-manager/target/service-executor-manager-1.0.0-SNAPSHOT.jar
upload $root_path/service-executor/service-executor-task/target/service-executor-task-1.0.0-SNAPSHOT.jar
upload $root_path/service-exporter/target/service-exporter-1.0.0-SNAPSHOT.jar
upload $root_path/service-flink-query/target/service-flink-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-gateway/target/service-gateway-1.0.0-SNAPSHOT.jar
upload $root_path/service-hudi-query/target/service-hudi-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-info-query/target/service-info-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-loki-query/target/service-loki-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-monitor/target/service-monitor-1.0.0-SNAPSHOT.jar
upload $root_path/service-pulsar-query/target/service-pulsar-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-queue/target/service-queue-1.0.0-SNAPSHOT.jar
upload $root_path/service-scheduler/target/service-scheduler-1.0.0-SNAPSHOT.jar
upload $root_path/service-web/target/service-web-1.0.0-SNAPSHOT.jar
upload $root_path/service-yarn-query/target/service-yarn-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-zookeeper-query/target/service-zookeeper-query-1.0.0-SNAPSHOT.jar
upload $root_path/utils/sync/target/sync-1.0.0-SNAPSHOT.jar
# These two go through the ytp transfer channel instead of the normal upload.
upload_ytp $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar
upload_ytp $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar

15
bin/build-api.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-api')
await run_upload_normal('service-api')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-api
upload $root_path/service-api/target/service-api-1.0.0-SNAPSHOT.jar

15
bin/build-check.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-check')
await run_upload_normal('service-check')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-check
upload $root_path/service-check/target/service-check-1.0.0-SNAPSHOT.jar

15
bin/build-cli.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-cli', 'service-cli/service-cli-core'])
await run_package('service-cli/service-cli-runner')
await run_upload('**/service-cli-runner-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-cli service-cli/service-cli-core
package service-cli/service-cli-runner
ytp-transfer2 $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar

15
bin/build-cloud-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-cloud-query')
await run_upload_normal('service-cloud-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-cloud-query
upload $root_path/service-cloud-query/target/service-cloud-query-1.0.0-SNAPSHOT.jar

15
bin/build-command-pro.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-command-pro')
await run_upload_normal('service-command-pro')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-command-pro
upload $root_path/service-command-pro/target/service-command-pro-1.0.0-SNAPSHOT.jar

15
bin/build-command.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-command')
await run_upload_normal('service-command')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-command
upload $root_path/service-command/target/service-command-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest', 'service-executor', 'service-executor/service-executor-core'])
await run_package('service-executor/service-executor-manager')
await run_upload('**/service-executor-manager-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest service-executor service-executor/service-executor-core
package service-executor/service-executor-manager
upload $root_path/service-executor/service-executor-manager/target/service-executor-manager-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest', 'service-executor', 'service-executor/service-executor-core'])
await run_package('service-executor/service-executor-task')
await run_upload('**/service-executor-task-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest service-executor service-executor/service-executor-core
package service-executor/service-executor-task
upload $root_path/service-executor/service-executor-task/target/service-executor-task-1.0.0-SNAPSHOT.jar

15
bin/build-exporter.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-exporter')
await run_upload_normal('service-exporter')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-exporter
upload $root_path/service-exporter/target/service-exporter-1.0.0-SNAPSHOT.jar

15
bin/build-flink-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-flink-query')
await run_upload_normal('service-flink-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-flink-query
upload $root_path/service-flink-query/target/service-flink-query-1.0.0-SNAPSHOT.jar

13
bin/build-forest.js Normal file
View File

@@ -0,0 +1,13 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
} catch (e) {
console.error(e)
}

15
bin/build-gateway.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-gateway')
await run_upload_normal('service-gateway')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-gateway
upload $root_path/service-gateway/target/service-gateway-1.0.0-SNAPSHOT.jar

15
bin/build-hudi-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-hudi-query')
await run_upload_normal('service-hudi-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-hudi-query
upload $root_path/service-hudi-query/target/service-hudi-query-1.0.0-SNAPSHOT.jar

15
bin/build-info-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-info-query')
await run_upload_normal('service-info-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-info-query
upload $root_path/service-info-query/target/service-info-query-1.0.0-SNAPSHOT.jar

17
bin/build-launcher.js Normal file
View File

@@ -0,0 +1,17 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
for (const profile of ['b2a4', 'b2b1', 'b2b12']) {
await run_package('service-launcher', profile)
await run_upload(`**/service-launcher-${profile}-1.0.0-SNAPSHOT.jar`)
}
} catch (e) {
console.error(e)
}

View File

@@ -1,11 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
configs=(b2a4 b2b1 b2b5 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config
upload $root_path/service-launcher/target/service-launcher-$config-1.0.0-SNAPSHOT.jar
done

15
bin/build-loki-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-loki-query')
await run_upload_normal('service-loki-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-loki-query
upload $root_path/service-loki-query/target/service-loki-query-1.0.0-SNAPSHOT.jar

15
bin/build-monitor.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-monitor')
await run_upload_normal('service-monitor')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-monitor
upload $root_path/service-monitor/target/service-monitor-1.0.0-SNAPSHOT.jar

15
bin/build-patch.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common'])
await run_package('utils/patch')
await run_upload('**/patch-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package utils/patch
upload $root_path/utils/patch/target/patch-1.0.0-SNAPSHOT.jar

15
bin/build-pulsar-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-pulsar-query')
await run_upload_normal('service-pulsar-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-pulsar-query
upload $root_path/service-pulsar-query/target/service-pulsar-query-1.0.0-SNAPSHOT.jar

15
bin/build-queue.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-queue')
await run_upload_normal('service-queue')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-queue
upload $root_path/service-queue/target/service-queue-1.0.0-SNAPSHOT.jar

15
bin/build-scheduler.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-scheduler')
await run_upload_normal('service-scheduler')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-scheduler
upload $root_path/service-scheduler/target/service-scheduler-1.0.0-SNAPSHOT.jar

15
bin/build-sync.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'utils/patch'])
await run_package('utils/sync')
await run_upload('**/sync-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest utils/patch
package utils/sync
upload $root_path/utils/sync/target/sync-1.0.0-SNAPSHOT.jar

15
bin/build-uploader.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-uploader')
await run_upload_normal('service-uploader')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-uploader
ytp-transfer2 $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar

15
bin/build-web.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-web')
await run_upload_normal('service-web')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-web
upload $root_path/service-web/target/service-web-1.0.0-SNAPSHOT.jar

15
bin/build-yarn-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-yarn-query')
await run_upload_normal('service-yarn-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-yarn-query
upload $root_path/service-yarn-query/target/service-yarn-query-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-zookeeper-query')
await run_upload_normal('service-zookeeper-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-zookeeper-query
upload $root_path/service-zookeeper-query/target/service-zookeeper-query-1.0.0-SNAPSHOT.jar

View File

@@ -3,5 +3,5 @@
root_path=/apps/zone_scfp/hudi/cloud
jdk_path=/opt/jdk8u252-b09/bin/java
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar
curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar
${jdk_path} -jar ${root_path}/service-check.jar

View File

@@ -4,8 +4,8 @@ jars_path=/data/datalake/jars
jdk_path=/opt/jdk1.8.0_162/bin/java
arguments=$@
# 手动上传jar包则注释掉这行
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar
# 手动上传jar包则注释掉这行
curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar
${jdk_path} -jar ${jars_path}/service-cli-runner.jar \
--spring.profiles.active=b12 \
--deploy.generate.command=true \

114
bin/library.js Normal file
View File

@@ -0,0 +1,114 @@
import {$, fetch, fs, glob, os, path, spinner, syncProcessCwd, usePowerShell} from 'zx'
import {isEqual, trim, fileSize} from "licia";
import md5file from 'md5-file'
syncProcessCwd(true)
if (isEqual(os.platform(), 'win32')) {
usePowerShell()
}
const maven_setting = path.join(os.homedir(), '.m2', 'settings-nas.xml')
const upload_url = 'http://132.126.207.124:36800'
const upload_username = 'AxhEbscwsJDbYMH2'
const upload_password = 'cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4'
/**
* 时间戳转自然语言
*
* @param timestamp 时间戳
* @returns {string} 自然语言描述的时间
*/
const millisecondToString = (timestamp) => {
const totalSeconds = Math.floor(parseFloat(timestamp) / 1000)
if (isNaN(totalSeconds) || totalSeconds < 0) {
return "0秒";
}
const days = Math.floor(totalSeconds / 86400)
let remaining = totalSeconds % 86400
const hours = Math.floor(remaining / 3600)
remaining %= 3600
const minutes = Math.floor(remaining / 60)
const seconds = remaining % 60
const parts = []
if (days > 0) parts.push(`${days}`)
if (days > 0 || hours > 0) parts.push(`${hours}小时`)
if (days > 0 || hours > 0 || minutes > 0) parts.push(`${minutes}分钟`)
parts.push(`${seconds}`)
return parts.join('')
}
/**
 * Deploy a single Maven module with a spinner, then log the elapsed time.
 *
 * @param project relative path of the Maven module (e.g. 'service-common')
 */
export const run_deploy = async (project) => {
    const result = await spinner(
        `Deploying project ${project}`,
        () => $`mvn -pl ${project} clean deploy -D skipTests -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finished deploy ${project} (${elapsed})`)
}
/**
 * Deploy only the root POM (`mvn -N`, non-recursive) and log the elapsed time.
 */
export const run_deploy_root = async () => {
    const result = await spinner(
        `Deploying root`,
        () => $`mvn clean deploy -N -D skipTests -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finished deploy root (${elapsed})`)
}
/**
 * Deploy a list of Maven modules one at a time, in the given order.
 *
 * @param projects array of module paths, deployed sequentially
 */
export const run_deploy_batch = async (projects) => {
    for (let i = 0; i < projects.length; i++) {
        await run_deploy(projects[i])
    }
}
/**
 * Package a Maven module under the given build profile and log the elapsed time.
 *
 * @param project relative path of the Maven module
 * @param profile Maven profile id; 'b2b12' is the default and is omitted from logs
 */
export const run_package = async (project, profile = 'b2b12') => {
    // Only show the profile in messages when it differs from the default.
    const profile_suffix = isEqual(profile, 'b2b12') ? '' : ` ${profile}`
    const result = await spinner(
        `Packaging project ${project}${profile_suffix}`,
        () => $`mvn -pl ${project} clean package -D skipTests -P ${profile} -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finished package ${project}${profile_suffix} (${elapsed})`)
}
/**
 * Package a list of Maven modules sequentially using the default profile.
 *
 * @param projects array of module paths
 */
export const run_package_batch = async (projects) => {
    for (let i = 0; i < projects.length; i++) {
        await run_package(projects[i])
    }
}
/**
 * Upload a built artifact to the internal file server, print its size, MD5
 * and a ready-to-paste download command, then delete the local artifact.
 *
 * @param file_path absolute path of the jar to upload (removed on success)
 * @throws the raw fetch Response when the server answers with a non-200 status
 */
export const upload = async (file_path) => {
    const start = Date.now()
    const basename = path.basename(file_path)
    const response = await spinner(
        `Uploading project ${file_path}`,
        () => fetch(`${upload_url}/file/upload/${basename}`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/octet-stream',
                'Authorization': `Basic ${Buffer.from(`${upload_username}:${upload_password}`).toString('base64')}`,
            },
            body: fs.createReadStream(file_path),
            // Node fetch requires duplex for a streaming request body.
            duplex: 'half',
        })
    )
    if (!isEqual(response.status, 200)) {
        throw response
    }
    console.log(`✅ Finished upload ${file_path} (${millisecondToString(Date.now() - start)})`)
    console.log(`📘 Uploaded ${fileSize(fs.statSync(file_path).size)}`)
    console.log(`📘 MD5 ${md5file.sync(file_path)}`)
    // Build the download hint from the shared constants instead of a second
    // hard-coded copy of the credentials and host, so the printed command
    // cannot drift when the constants change.
    const download_base = upload_url.replace('://', `://${upload_username}:${upload_password}@`)
    console.log(`📘 Download curl ${download_base}/file/download/${basename} -o ${basename}`)
    fs.rmSync(file_path)
}
/**
 * Upload every file matching a glob pattern, resolved against the shell's
 * current working directory.
 *
 * @param pattern glob pattern (e.g. '**\/foo-1.0.0-SNAPSHOT.jar')
 */
export const run_upload = async (pattern) => {
    for (const match of glob.sync(pattern)) {
        const cwd = trim($.sync`pwd`.text())
        await upload(path.join(cwd, match))
    }
}
/**
 * Upload the conventionally-named jar of a module: <project>/target/<project>-1.0.0-SNAPSHOT.jar.
 *
 * @param project relative path of the Maven module
 */
export const run_upload_normal = async (project) => {
    const jar_pattern = `${project}/target/${project}-1.0.0-SNAPSHOT.jar`
    await run_upload(jar_pattern)
}

View File

@@ -1,61 +0,0 @@
#!/bin/bash
build_profile=b2b12
iap_username=iap
iap_password=IAPAb123456!
iap_url=$iap_username@132.122.1.162
ytp_username=yyy
ytp_password='QeY\!68\)4nH1'
ytp_url=ftp://$ytp_username:$ytp_password@132.121.122.15:2222
upload_username=AxhEbscwsJDbYMH2
upload_password=cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
upload_url=http://$upload_username:$upload_password@132.126.207.124:36800
root_path=$(dirname $(cd $(dirname $0);pwd))
function upload() {
source_file_path=$(realpath $1)
file_name=$(basename $source_file_path)
echo "↪ Source md5: $(md5sum $source_file_path | awk '{print $1}')"
echo "↪ Uploading $source_file_path"
curl $upload_url/file/upload/$file_name -T $source_file_path
echo "↪ Upload ytp success"
echo "↪ Download: curl $upload_url/file/download/$file_name -o $file_name"
echo "↪ Delete source"
rm $source_file_path
}
function upload_ytp() {
source_file_path=$(realpath $1)
file_name=$(basename $source_file_path)
echo "↪ Source md5: $(md5sum $source_file_path | awk '{print $1}')"
echo "↪ Uploading $source_file_path ↪ /tmp/$file_name"
sshpass -p $iap_password scp $source_file_path $iap_url:/tmp
echo "↪ Upload 162 success"
target_md5=$(sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "md5sum /tmp/$file_name | awk '{print \$1}'")
echo "↪ Target md5: $target_md5"
echo "↪ Command: sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url \"curl --retry 5 $ytp_url -T /tmp/$file_name\""
sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "curl --retry 5 $ytp_url -T /tmp/$file_name"
echo "↪ Upload ytp success"
echo "↪ Download: curl $ytp_url/$file_name -o $file_name"
echo "↪ Delete source"
rm $source_file_path
}
function joining {
local d=${1-} f=${2-}
if shift 2; then
printf %s "$f" "${@/#/$d}"
fi
}
function deploy() {
mvn -pl $(joining , $@) clean install -D skipTests
}
function package() {
mvn -pl $(joining , $@) clean package -D skipTests -P $build_profile
}

19
bin/package.json Normal file
View File

@@ -0,0 +1,19 @@
{
"name": "bin",
"version": "1.0.0",
"type": "module",
"dependencies": {
"@webpod/ps": "^0.1.1",
"chalk": "^5.4.1",
"envapi": "^0.2.3",
"fs-extra": "^11.3.0",
"globby": "^14.1.0",
"licia": "^1.48.0",
"md5-file": "^5.0.0",
"minimist": "^1.2.8",
"node-fetch-native": "^1.6.6",
"which": "^5.0.0",
"yaml": "^2.8.0",
"zx": "^8.5.4"
}
}

337
bin/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,337 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
'@webpod/ps':
specifier: ^0.1.1
version: 0.1.1
chalk:
specifier: ^5.4.1
version: 5.4.1
envapi:
specifier: ^0.2.3
version: 0.2.3
fs-extra:
specifier: ^11.3.0
version: 11.3.0
globby:
specifier: ^14.1.0
version: 14.1.0
licia:
specifier: ^1.48.0
version: 1.48.0
md5-file:
specifier: ^5.0.0
version: 5.0.0
minimist:
specifier: ^1.2.8
version: 1.2.8
node-fetch-native:
specifier: ^1.6.6
version: 1.6.6
which:
specifier: ^5.0.0
version: 5.0.0
yaml:
specifier: ^2.8.0
version: 2.8.0
zx:
specifier: ^8.5.4
version: 8.5.4
packages:
'@nodelib/fs.scandir@2.1.5':
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
engines: {node: '>= 8'}
'@nodelib/fs.stat@2.0.5':
resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
engines: {node: '>= 8'}
'@nodelib/fs.walk@1.2.8':
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@sindresorhus/merge-streams@2.3.0':
resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==}
engines: {node: '>=18'}
'@webpod/ingrid@0.0.0-beta.3':
resolution: {integrity: sha512-PkorwT+q/MiIF+It47ORX0wCYHumOeMKwp5KX5WbUvbCeOtSB6b5UUC5FvzlijdwK/YPR+sOitQzyVSsRrMmJA==}
'@webpod/ps@0.1.1':
resolution: {integrity: sha512-SIgb4wWEVlKgdRByMMz9c3y1hpKfNm2sbretCPD49O9LG6itibULMkiRISdkpMdGRiUpbGHp8tiN3ZLYRDHj1g==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
chalk@5.4.1:
resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
envapi@0.2.3:
resolution: {integrity: sha512-kSPSecU+/eH0IajEYZ/LndeBjzSBmLyp/SZFgx8Zgyeu0SoGioHkICOOVJgJLaX/rqZrCrQ+eDxiaYNVcyCsbQ==}
fast-glob@3.3.3:
resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==}
engines: {node: '>=8.6.0'}
fastq@1.19.1:
resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fs-extra@11.3.0:
resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==}
engines: {node: '>=14.14'}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
globby@14.1.0:
resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==}
engines: {node: '>=18'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
ignore@7.0.4:
resolution: {integrity: sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A==}
engines: {node: '>= 4'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
isexe@3.1.1:
resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
engines: {node: '>=16'}
jsonfile@6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
licia@1.48.0:
resolution: {integrity: sha512-bBWiT5CSdEtwuAHiYTJ74yItCjIFdHi4xiFk6BRDfKa+sdCpkUHp69YKb5udNOJlHDzFjNjcMgNZ/+wQIHrB8A==}
md5-file@5.0.0:
resolution: {integrity: sha512-xbEFXCYVWrSx/gEKS1VPlg84h/4L20znVIulKw6kMfmBUAZNAnF00eczz9ICMl+/hjQGo5KSXRxbL/47X3rmMw==}
engines: {node: '>=10.13.0'}
hasBin: true
merge2@1.4.1:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
micromatch@4.0.8:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
node-fetch-native@1.6.6:
resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==}
path-type@6.0.0:
resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==}
engines: {node: '>=18'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
reusify@1.1.0:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
slash@5.1.0:
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
engines: {node: '>=14.16'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
unicorn-magic@0.3.0:
resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==}
engines: {node: '>=18'}
universalify@2.0.1:
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
engines: {node: '>= 10.0.0'}
which@5.0.0:
resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==}
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
yaml@2.8.0:
resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==}
engines: {node: '>= 14.6'}
hasBin: true
zurk@0.11.2:
resolution: {integrity: sha512-OKUQsmG588B18hzO4ThzOU0NUwr4C8aKl9NjGQfXUv5fskLfS6Sj3XGNbTzKj3d2+jWvmnqS2cgrwYX6bIkDyA==}
zx@8.5.4:
resolution: {integrity: sha512-44oKea9Sa8ZnOkTnS6fRJpg3quzgnbB43nLrVfYnqE86J4sxgZMUDLezzKET/FdOAVkF4X+Alm9Bume+W+RW9Q==}
engines: {node: '>= 12.17.0'}
hasBin: true
snapshots:
'@nodelib/fs.scandir@2.1.5':
dependencies:
'@nodelib/fs.stat': 2.0.5
run-parallel: 1.2.0
'@nodelib/fs.stat@2.0.5': {}
'@nodelib/fs.walk@1.2.8':
dependencies:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.19.1
'@sindresorhus/merge-streams@2.3.0': {}
'@webpod/ingrid@0.0.0-beta.3': {}
'@webpod/ps@0.1.1':
dependencies:
'@webpod/ingrid': 0.0.0-beta.3
zurk: 0.11.2
braces@3.0.3:
dependencies:
fill-range: 7.1.1
chalk@5.4.1: {}
envapi@0.2.3: {}
fast-glob@3.3.3:
dependencies:
'@nodelib/fs.stat': 2.0.5
'@nodelib/fs.walk': 1.2.8
glob-parent: 5.1.2
merge2: 1.4.1
micromatch: 4.0.8
fastq@1.19.1:
dependencies:
reusify: 1.1.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fs-extra@11.3.0:
dependencies:
graceful-fs: 4.2.11
jsonfile: 6.1.0
universalify: 2.0.1
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
globby@14.1.0:
dependencies:
'@sindresorhus/merge-streams': 2.3.0
fast-glob: 3.3.3
ignore: 7.0.4
path-type: 6.0.0
slash: 5.1.0
unicorn-magic: 0.3.0
graceful-fs@4.2.11: {}
ignore@7.0.4: {}
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
isexe@3.1.1: {}
jsonfile@6.1.0:
dependencies:
universalify: 2.0.1
optionalDependencies:
graceful-fs: 4.2.11
licia@1.48.0: {}
md5-file@5.0.0: {}
merge2@1.4.1: {}
micromatch@4.0.8:
dependencies:
braces: 3.0.3
picomatch: 2.3.1
minimist@1.2.8: {}
node-fetch-native@1.6.6: {}
path-type@6.0.0: {}
picomatch@2.3.1: {}
queue-microtask@1.2.3: {}
reusify@1.1.0: {}
run-parallel@1.2.0:
dependencies:
queue-microtask: 1.2.3
slash@5.1.0: {}
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
unicorn-magic@0.3.0: {}
universalify@2.0.1: {}
which@5.0.0:
dependencies:
isexe: 3.1.1
yaml@2.8.0: {}
zurk@0.11.2: {}
zx@8.5.4: {}

View File

@@ -1,265 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>fs.azure.user.agent.prefix</name>
<value>User-Agent: APN/1.0 Hortonworks/1.0 HDP/</value>
</property>
<property>
<name>fs.defaultFS</name>
<value>hdfs://b2</value>
<final>true</final>
</property>
<property>
<name>fs.s3a.fast.upload</name>
<value>true</value>
</property>
<property>
<name>fs.s3a.fast.upload.buffer</name>
<value>disk</value>
</property>
<property>
<name>fs.s3a.multipart.size</name>
<value>67108864</value>
</property>
<property>
<name>fs.trash.interval</name>
<value>4320</value>
</property>
<property>
<name>fs.trash.checkpoint.interval</name>
<value>360</value>
</property>
<property>
<name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
<value>120</value>
</property>
<property>
<name>ha.zookeeper.acl</name>
<value>sasl:nn:rwcda</value>
</property>
<property>
<name>ha.zookeeper.quorum</name>
<value>b5m1.hdp.dc:2181,b5m2.hdp.dc:2181,b5m3.hdp.dc:2181</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>hadoop.http.authentication.signature.secret.file</name>
<value>/etc/security/http_secret</value>
</property>
<property>
<name>hadoop.http.authentication.simple.anonymous.allowed</name>
<value>true</value>
</property>
<property>
<name>hadoop.http.authentication.type</name>
<value>simple</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-headers</name>
<value>X-Requested-With,Content-Type,Accept,Origin,WWW-Authenticate,Accept-Encoding,Transfer-Encoding</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-methods</name>
<value>GET,PUT,POST,OPTIONS,HEAD,DELETE</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-origins</name>
<value>*</value>
</property>
<property>
<name>hadoop.http.cross-origin.max-age</name>
<value>1800</value>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.security.AuthenticationFilterInitializer,org.apache.hadoop.security.HttpCrossOriginFilterInitializer</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.rpc.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>hadoop.security.auth_to_local</name>
<value>RULE:[1:$1@$0](hbase-b5@ECLD.COM)s/.*/hbase/
RULE:[1:$1@$0](hdfs-b5@ECLD.COM)s/.*/hdfs/
RULE:[1:$1@$0](spark-b5@ECLD.COM)s/.*/spark/
RULE:[1:$1@$0](yarn-ats-b5@ECLD.COM)s/.*/yarn-ats/
RULE:[1:$1@$0](.*@ECLD.COM)s/@.*//
RULE:[2:$1@$0](dn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](hbase@ECLD.COM)s/.*/hbase/
RULE:[2:$1@$0](hive@ECLD.COM)s/.*/hive/
RULE:[2:$1@$0](jhs@ECLD.COM)s/.*/mapred/
RULE:[2:$1@$0](jn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](livy@ECLD.COM)s/.*/livy/
RULE:[2:$1@$0](nm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](nn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](rangeradmin@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangerlookup@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangertagsync@ECLD.COM)s/.*/rangertagsync/
RULE:[2:$1@$0](rangerusersync@ECLD.COM)s/.*/rangerusersync/
RULE:[2:$1@$0](rm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](spark@ECLD.COM)s/.*/spark/
RULE:[2:$1@$0](yarn@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](yarn-ats-hbase@ECLD.COM)s/.*/yarn-ats/
DEFAULT</value>
</property>
<property>
<name>hadoop.security.authentication</name>
<value>kerberos</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>true</value>
</property>
<property>
<name>hadoop.security.instrumentation.requires.admin</name>
<value>false</value>
</property>
<property>
<name>io.compression.codec.lzo.class</name>
<value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
</property>
<property>
<name>ipc.client.connect.max.retries</name>
<value>50</value>
</property>
<property>
<name>ipc.client.connection.maxidletime</name>
<value>30000</value>
</property>
<property>
<name>ipc.client.idlethreshold</name>
<value>8000</value>
</property>
<property>
<name>ipc.server.tcpnodelay</name>
<value>true</value>
</property>
<property>
<name>mapreduce.jobtracker.webinterface.trusted</name>
<value>false</value>
</property>
<property>
<name>ipc.client.fallback-to-simple-auth-allowed</name>
<value>true</value>
</property>
<property>
<name>fs.hdfs.impl.disable.cache</name>
<value>true</value>
</property>
</configuration>

View File

@@ -1,713 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>dfs.block.access.token.enable</name>
<value>true</value>
</property>
<property>
<name>dfs.blockreport.initialDelay</name>
<value>120</value>
</property>
<property>
<name>dfs.blocksize</name>
<value>134217728</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b5</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.read.shortcircuit</name>
<value>true</value>
</property>
<property>
<name>dfs.client.read.shortcircuit.streams.cache.size</name>
<value>4096</value>
</property>
<property>
<name>dfs.client.retry.policy.enabled</name>
<value>false</value>
</property>
<property>
<name>dfs.cluster.administrators</name>
<value> hdfs</value>
</property>
<property>
<name>dfs.content-summary.limit</name>
<value>5000</value>
</property>
<property>
<name>dfs.data.transfer.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:1019</value>
</property>
<property>
<name>dfs.datanode.balance.bandwidthPerSec</name>
<value>6250000</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>[DISK]file:///data1/hadoop/hdfs/data,[DISK]file:///data2/hadoop/hdfs/data,[DISK]file:///data3/hadoop/hdfs/data,[DISK]file:///data4/hadoop/hdfs/data,[DISK]file:///data5/hadoop/hdfs/data,[DISK]file:///data6/hadoop/hdfs/data,[DISK]file:///data7/hadoop/hdfs/data,[DISK]file:///data8/hadoop/hdfs/data,[DISK]file:///data9/hadoop/hdfs/data,[DISK]file:///data10/hadoop/hdfs/data,[DISK]file:///data11/hadoop/hdfs/data,[DISK]file:///data12/hadoop/hdfs/data,[DISK]file:///data13/hadoop/hdfs/data,[DISK]file:///data14/hadoop/hdfs/data,[DISK]file:///data15/hadoop/hdfs/data,[DISK]file:///data16/hadoop/hdfs/data,[DISK]file:///data17/hadoop/hdfs/data,[DISK]file:///data18/hadoop/hdfs/data,[DISK]file:///data19/hadoop/hdfs/data,[DISK]file:///data20/hadoop/hdfs/data,[DISK]file:///data21/hadoop/hdfs/data,[DISK]file:///data22/hadoop/hdfs/data,[DISK]file:///data23/hadoop/hdfs/data,[DISK]file:///data24/hadoop/hdfs/data</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.data.dir.perm</name>
<value>750</value>
</property>
<property>
<name>dfs.datanode.du.reserved</name>
<value>26405499904</value>
</property>
<property>
<name>dfs.datanode.failed.volumes.tolerated</name>
<value>2</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.http.address</name>
<value>0.0.0.0:1022</value>
</property>
<property>
<name>dfs.datanode.https.address</name>
<value>0.0.0.0:50475</value>
</property>
<property>
<name>dfs.datanode.ipc.address</name>
<value>0.0.0.0:8010</value>
</property>
<property>
<name>dfs.datanode.kerberos.principal</name>
<value>dn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.datanode.keytab.file</name>
<value>/etc/security/keytabs/dn.service.keytab</value>
</property>
<property>
<name>dfs.datanode.max.transfer.threads</name>
<value>16384</value>
</property>
<property>
<name>dfs.domain.socket.path</name>
<value>/var/lib/hadoop-hdfs/dn_socket</value>
</property>
<property>
<name>dfs.encrypt.data.transfer.cipher.suites</name>
<value>AES/CTR/NoPadding</value>
</property>
<property>
<name>dfs.ha.automatic-failover.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.ha.fencing.methods</name>
<value>shell(/bin/true)</value>
</property>
<property>
<name>dfs.ha.namenodes.b5</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.heartbeat.interval</name>
<value>3</value>
</property>
<property>
<name>dfs.hosts.exclude</name>
<value>/etc/hadoop/conf/dfs.exclude</value>
</property>
<property>
<name>dfs.http.policy</name>
<value>HTTP_ONLY</value>
</property>
<property>
<name>dfs.https.port</name>
<value>50470</value>
</property>
<property>
<name>dfs.internal.nameservices</name>
<value>b5</value>
</property>
<property>
<name>dfs.journalnode.edits.dir.b5</name>
<value>/data2/hadoop/hdfs/journal</value>
</property>
<property>
<name>dfs.journalnode.http-address</name>
<value>0.0.0.0:8480</value>
</property>
<property>
<name>dfs.journalnode.https-address</name>
<value>0.0.0.0:8481</value>
</property>
<property>
<name>dfs.journalnode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.kerberos.principal</name>
<value>jn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.keytab.file</name>
<value>/etc/security/keytabs/jn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.accesstime.precision</name>
<value>0</value>
</property>
<property>
<name>dfs.namenode.acls.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.audit.log.async</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.read.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.write.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.checkpoint.dir</name>
<value>/data/hadoop/hdfs/namesecondary</value>
</property>
<property>
<name>dfs.namenode.checkpoint.edits.dir</name>
<value>${dfs.namenode.checkpoint.dir}</value>
</property>
<property>
<name>dfs.namenode.checkpoint.period</name>
<value>21600</value>
</property>
<property>
<name>dfs.namenode.checkpoint.txns</name>
<value>1000000</value>
</property>
<property>
<name>dfs.namenode.fslock.fair</name>
<value>false</value>
</property>
<property>
<name>dfs.namenode.handler.count</name>
<value>100</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn1</name>
<value>b5m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn2</name>
<value>b5m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn1</name>
<value>b5m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn2</name>
<value>b5m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.kerberos.principal</name>
<value>nn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.keytab.file</name>
<value>/etc/security/keytabs/nn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.max.extra.edits.segments.retained</name>
<value>180</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/data1/hadoop/hdfs/namenode,/data2/hadoop/hdfs/namenode</value>
<final>true</final>
</property>
<property>
<name>dfs.namenode.name.dir.restore</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.num.extra.edits.retained</name>
<value>18000</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn1</name>
<value>b5m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn2</name>
<value>b5m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.safemode.threshold-pct</name>
<value>0.99</value>
</property>
<property>
<name>dfs.namenode.shared.edits.dir.b5</name>
<value>qjournal://b5m1.hdp.dc:8485;b5m2.hdp.dc:8485;b5m3.hdp.dc:8485/b5</value>
</property>
<property>
<name>dfs.namenode.stale.datanode.interval</name>
<value>30000</value>
</property>
<property>
<name>dfs.namenode.startup.delay.block.deletion.sec</name>
<value>3600</value>
</property>
<property>
<name>dfs.namenode.write.stale.datanode.ratio</name>
<value>1.0f</value>
</property>
<property>
<name>dfs.nameservices</name>
<value>b5,b1,b2,b3,b4,a3,a4,f1,e1,d2</value>
</property>
<property>
<name>dfs.permissions.ContentSummary.subAccess</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.superusergroup</name>
<value>hdfs</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
<property>
<name>dfs.replication.max</name>
<value>50</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
<final>true</final>
</property>
<property>
<name>fs.permissions.umask-mode</name>
<value>022</value>
</property>
<property>
<name>hadoop.caller.context.enabled</name>
<value>true</value>
</property>
<property>
<name>manage.include.files</name>
<value>false</value>
</property>
<property>
<name>nfs.exports.allowed.hosts</name>
<value>* rw</value>
</property>
<property>
<name>nfs.file.dump.dir</name>
<value>/tmp/.hdfs-nfs</value>
</property>
<property>
<name>dfs.client.datanode-restart.timeout</name>
<value>30</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn1</name>
<value>a4m1.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn2</name>
<value>a4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn1</name>
<value>a4m1.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn2</name>
<value>a4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn1</name>
<value>a4m1.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn2</name>
<value>a4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn1</name>
<value>a3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn2</name>
<value>a3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn1</name>
<value>a3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn2</name>
<value>a3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn1</name>
<value>a3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn2</name>
<value>a3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn1</name>
<value>b3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn2</name>
<value>b3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn1</name>
<value>b3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn2</name>
<value>b3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn1</name>
<value>b3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn2</name>
<value>b3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.ha.namenodes.b2</name>
<value>nn3,nn4</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn1</name>
<value>b1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn2</name>
<value>b1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn1</name>
<value>b1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn2</name>
<value>b1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn1</name>
<value>b1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn2</name>
<value>b1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn3</name>
<value>b1m5.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn4</name>
<value>b1m6.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn3</name>
<value>b1m5.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn4</name>
<value>b1m6.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn3</name>
<value>b1m5.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn4</name>
<value>b1m6.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.f1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.f1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn1</name>
<value>f1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn2</name>
<value>f1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn1</name>
<value>f1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn2</name>
<value>f1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn1</name>
<value>f1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn2</name>
<value>f1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.d2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.d2</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn1</name>
<value>d2m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn2</name>
<value>d2m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn1</name>
<value>d2m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn2</name>
<value>d2m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn1</name>
<value>d2m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn2</name>
<value>d2m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.e1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.e1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn1</name>
<value>e1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn2</name>
<value>e1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn1</name>
<value>e1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn2</name>
<value>e1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn1</name>
<value>e1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn2</name>
<value>e1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn1</name>
<value>b4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn2</name>
<value>b4m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn1</name>
<value>b4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn2</name>
<value>b4m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn1</name>
<value>b4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn2</name>
<value>b4m3.hdp.dc:8020</value>
</property>
</configuration>

File diff suppressed because it is too large Load Diff

16
pom.xml
View File

@@ -74,12 +74,6 @@
<build-tag>b2b1</build-tag>
</properties>
</profile>
<profile>
<id>b2b5</id>
<properties>
<build-tag>b2b5</build-tag>
</properties>
</profile>
<profile>
<id>b2b12</id>
<properties>
@@ -423,6 +417,16 @@
<artifactId>maven-shade-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.4.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-invoker-plugin</artifactId>
<version>3.9.0</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>

102
service-ai/bin/.gitignore vendored Normal file
View File

@@ -0,0 +1,102 @@
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
.history/
*.vsix
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

View File

@@ -0,0 +1,12 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy} from '../../bin/library.js'
// Move to the module root (the parent of this script's directory).
cd(trim(path.dirname(import.meta.dirname)))
// Run the deploy flow. On failure, log the error AND set a non-zero exit
// code — previously the error was swallowed and CI would see success.
try {
    await run_deploy('service-ai-core')
} catch (e) {
    console.error(e)
    process.exitCode = 1
}

View File

@@ -0,0 +1,21 @@
import {
    cd,
    path,
} from 'zx'
import {trim} from "licia";
import {
    run_deploy,
    run_package,
    run_upload_normal,
} from '../../bin/library.js'
// Move to the module root (the parent of this script's directory).
cd(trim(path.dirname(import.meta.dirname)))
// Deploy core, then package and upload the web module, in order.
// On any failure, log the error AND set a non-zero exit code — previously
// the error was swallowed and the pipeline reported success.
try {
    await run_deploy('service-ai-core')
    await run_package('service-ai-web')
    await run_upload_normal('service-ai-web')
} catch (e) {
    console.error(e)
    process.exitCode = 1
}

View File

@@ -0,0 +1,19 @@
{
"name": "bin",
"version": "1.0.0",
"type": "module",
"dependencies": {
"@webpod/ps": "^0.1.1",
"chalk": "^5.4.1",
"envapi": "^0.2.3",
"fs-extra": "^11.3.0",
"globby": "^14.1.0",
"licia": "^1.48.0",
"md5-file": "^5.0.0",
"minimist": "^1.2.8",
"node-fetch-native": "^1.6.6",
"which": "^5.0.0",
"yaml": "^2.8.0",
"zx": "^8.5.4"
}
}

337
service-ai/bin/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,337 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
'@webpod/ps':
specifier: ^0.1.1
version: 0.1.1
chalk:
specifier: ^5.4.1
version: 5.4.1
envapi:
specifier: ^0.2.3
version: 0.2.3
fs-extra:
specifier: ^11.3.0
version: 11.3.0
globby:
specifier: ^14.1.0
version: 14.1.0
licia:
specifier: ^1.48.0
version: 1.48.0
md5-file:
specifier: ^5.0.0
version: 5.0.0
minimist:
specifier: ^1.2.8
version: 1.2.8
node-fetch-native:
specifier: ^1.6.6
version: 1.6.6
which:
specifier: ^5.0.0
version: 5.0.0
yaml:
specifier: ^2.8.0
version: 2.8.0
zx:
specifier: ^8.5.4
version: 8.5.4
packages:
'@nodelib/fs.scandir@2.1.5':
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
engines: {node: '>= 8'}
'@nodelib/fs.stat@2.0.5':
resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
engines: {node: '>= 8'}
'@nodelib/fs.walk@1.2.8':
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@sindresorhus/merge-streams@2.3.0':
resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==}
engines: {node: '>=18'}
'@webpod/ingrid@0.0.0-beta.3':
resolution: {integrity: sha512-PkorwT+q/MiIF+It47ORX0wCYHumOeMKwp5KX5WbUvbCeOtSB6b5UUC5FvzlijdwK/YPR+sOitQzyVSsRrMmJA==}
'@webpod/ps@0.1.1':
resolution: {integrity: sha512-SIgb4wWEVlKgdRByMMz9c3y1hpKfNm2sbretCPD49O9LG6itibULMkiRISdkpMdGRiUpbGHp8tiN3ZLYRDHj1g==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
chalk@5.4.1:
resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
envapi@0.2.3:
resolution: {integrity: sha512-kSPSecU+/eH0IajEYZ/LndeBjzSBmLyp/SZFgx8Zgyeu0SoGioHkICOOVJgJLaX/rqZrCrQ+eDxiaYNVcyCsbQ==}
fast-glob@3.3.3:
resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==}
engines: {node: '>=8.6.0'}
fastq@1.19.1:
resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fs-extra@11.3.0:
resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==}
engines: {node: '>=14.14'}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
globby@14.1.0:
resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==}
engines: {node: '>=18'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
ignore@7.0.4:
resolution: {integrity: sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A==}
engines: {node: '>= 4'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
isexe@3.1.1:
resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
engines: {node: '>=16'}
jsonfile@6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
licia@1.48.0:
resolution: {integrity: sha512-bBWiT5CSdEtwuAHiYTJ74yItCjIFdHi4xiFk6BRDfKa+sdCpkUHp69YKb5udNOJlHDzFjNjcMgNZ/+wQIHrB8A==}
md5-file@5.0.0:
resolution: {integrity: sha512-xbEFXCYVWrSx/gEKS1VPlg84h/4L20znVIulKw6kMfmBUAZNAnF00eczz9ICMl+/hjQGo5KSXRxbL/47X3rmMw==}
engines: {node: '>=10.13.0'}
hasBin: true
merge2@1.4.1:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
micromatch@4.0.8:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
node-fetch-native@1.6.6:
resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==}
path-type@6.0.0:
resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==}
engines: {node: '>=18'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
reusify@1.1.0:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
slash@5.1.0:
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
engines: {node: '>=14.16'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
unicorn-magic@0.3.0:
resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==}
engines: {node: '>=18'}
universalify@2.0.1:
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
engines: {node: '>= 10.0.0'}
which@5.0.0:
resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==}
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
yaml@2.8.0:
resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==}
engines: {node: '>= 14.6'}
hasBin: true
zurk@0.11.2:
resolution: {integrity: sha512-OKUQsmG588B18hzO4ThzOU0NUwr4C8aKl9NjGQfXUv5fskLfS6Sj3XGNbTzKj3d2+jWvmnqS2cgrwYX6bIkDyA==}
zx@8.5.4:
resolution: {integrity: sha512-44oKea9Sa8ZnOkTnS6fRJpg3quzgnbB43nLrVfYnqE86J4sxgZMUDLezzKET/FdOAVkF4X+Alm9Bume+W+RW9Q==}
engines: {node: '>= 12.17.0'}
hasBin: true
snapshots:
'@nodelib/fs.scandir@2.1.5':
dependencies:
'@nodelib/fs.stat': 2.0.5
run-parallel: 1.2.0
'@nodelib/fs.stat@2.0.5': {}
'@nodelib/fs.walk@1.2.8':
dependencies:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.19.1
'@sindresorhus/merge-streams@2.3.0': {}
'@webpod/ingrid@0.0.0-beta.3': {}
'@webpod/ps@0.1.1':
dependencies:
'@webpod/ingrid': 0.0.0-beta.3
zurk: 0.11.2
braces@3.0.3:
dependencies:
fill-range: 7.1.1
chalk@5.4.1: {}
envapi@0.2.3: {}
fast-glob@3.3.3:
dependencies:
'@nodelib/fs.stat': 2.0.5
'@nodelib/fs.walk': 1.2.8
glob-parent: 5.1.2
merge2: 1.4.1
micromatch: 4.0.8
fastq@1.19.1:
dependencies:
reusify: 1.1.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fs-extra@11.3.0:
dependencies:
graceful-fs: 4.2.11
jsonfile: 6.1.0
universalify: 2.0.1
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
globby@14.1.0:
dependencies:
'@sindresorhus/merge-streams': 2.3.0
fast-glob: 3.3.3
ignore: 7.0.4
path-type: 6.0.0
slash: 5.1.0
unicorn-magic: 0.3.0
graceful-fs@4.2.11: {}
ignore@7.0.4: {}
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
isexe@3.1.1: {}
jsonfile@6.1.0:
dependencies:
universalify: 2.0.1
optionalDependencies:
graceful-fs: 4.2.11
licia@1.48.0: {}
md5-file@5.0.0: {}
merge2@1.4.1: {}
micromatch@4.0.8:
dependencies:
braces: 3.0.3
picomatch: 2.3.1
minimist@1.2.8: {}
node-fetch-native@1.6.6: {}
path-type@6.0.0: {}
picomatch@2.3.1: {}
queue-microtask@1.2.3: {}
reusify@1.1.0: {}
run-parallel@1.2.0:
dependencies:
queue-microtask: 1.2.3
slash@5.1.0: {}
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
unicorn-magic@0.3.0: {}
universalify@2.0.1: {}
which@5.0.0:
dependencies:
isexe: 3.1.1
yaml@2.8.0: {}
zurk@0.11.2: {}
zx@8.5.4: {}

0
service-ai/bin/test.js Normal file
View File

View File

@@ -0,0 +1,18 @@
-- Feedback submitted to the AI service: the raw source text plus the
-- generated analysis/conclusion and a processing-status flag.
CREATE TABLE `service_ai_feedback`
(
    `id` bigint NOT NULL,
    `created_time` datetime(6) DEFAULT NULL,
    `modified_time` datetime(6) DEFAULT NULL,
    `analysis` longtext,
    `conclusion` longtext,
    `source` longtext NOT NULL, -- raw feedback content; required
    `status` tinyint NOT NULL, -- status code; value semantics defined in application code
    PRIMARY KEY (`id`)
) DEFAULT CHARSET = utf8mb4;
-- Join table linking feedback rows to picture ids (many-to-many).
CREATE TABLE `service_ai_feedback_pictures`
(
    `feedback_id` bigint NOT NULL,
    `pictures_id` bigint NOT NULL,
    PRIMARY KEY (`feedback_id`, `pictures_id`)
) DEFAULT CHARSET = utf8mb4;

View File

@@ -0,0 +1,12 @@
-- Stored file metadata: original name, md5, storage path, byte size and type.
CREATE TABLE `service_ai_file`
(
    `id` bigint NOT NULL,
    `created_time` datetime(6) DEFAULT NULL,
    `modified_time` datetime(6) DEFAULT NULL,
    `filename` varchar(255) DEFAULT NULL,
    `md5` varchar(255) DEFAULT NULL, -- presumably a content checksum; confirm against the upload code
    `path` varchar(255) DEFAULT NULL,
    `size` bigint DEFAULT NULL,
    `type` varchar(255) DEFAULT NULL,
    PRIMARY KEY (`id`)
) DEFAULT CHARSET = utf8mb4;

View File

@@ -0,0 +1,10 @@
-- Named group belonging to a knowledge base (see service_ai_knowledge).
CREATE TABLE `service_ai_group`
(
    `id` bigint NOT NULL,
    `created_time` datetime(6) DEFAULT NULL,
    `modified_time` datetime(6) DEFAULT NULL,
    `name` varchar(255) NOT NULL,
    `status` tinyint NOT NULL, -- status code; value semantics defined in application code
    `knowledge_id` bigint NOT NULL, -- owning service_ai_knowledge row
    PRIMARY KEY (`id`)
) DEFAULT CHARSET=utf8mb4;

View File

@@ -0,0 +1,11 @@
-- Knowledge base definition: name, description, a strategy code and the
-- id of the vector source backing it.
CREATE TABLE `service_ai_knowledge`
(
    `id` bigint NOT NULL,
    `created_time` datetime(6) DEFAULT NULL,
    `modified_time` datetime(6) DEFAULT NULL,
    `description` longtext NOT NULL,
    `name` varchar(255) NOT NULL,
    `strategy` tinyint NOT NULL, -- strategy code; value semantics defined in application code
    `vector_source_id` bigint NOT NULL,
    PRIMARY KEY (`id`)
) DEFAULT CHARSET = utf8mb4;

221
service-ai/pom.xml Normal file
View File

@@ -0,0 +1,221 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>pom</packaging>
<description>Hudi AI服务集合</description>
<modules>
<module>service-ai-core</module>
<module>service-ai-web</module>
<module>service-ai-cli</module>
</modules>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<build-tag>b2b12</build-tag>
<spring-boot.version>3.4.3</spring-boot.version>
<spring-cloud.version>2024.0.1</spring-cloud.version>
<spring-ai.version>1.0.0</spring-ai.version>
<solon-ai.version>3.3.1</solon-ai.version>
<eclipse-collections.version>11.1.0</eclipse-collections.version>
<curator.version>5.1.0</curator.version>
<hutool.version>5.8.27</hutool.version>
<mapstruct.version>1.6.3</mapstruct.version>
</properties>
<dependencies>
<dependency>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<!-- 当前项目依赖 -->
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-configuration</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-dependencies</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<version>${project.version}</version>
</dependency>
<!-- spring boot 相关依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>${spring-cloud.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-bom</artifactId>
<version>${spring-ai.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<version>1.5.36</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-eclipse-collections</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.github.ulisesbocchio</groupId>
<artifactId>jasypt-spring-boot-starter</artifactId>
<version>3.0.5</version>
</dependency>
<dependency>
<groupId>com.blinkfox</groupId>
<artifactId>fenix-spring-boot-starter</artifactId>
<version>3.0.0</version>
</dependency>
<!-- 日志相关 -->
<dependency>
<groupId>pl.tkowalcz.tjahzi</groupId>
<artifactId>logback-appender</artifactId>
<version>0.9.23</version>
</dependency>
<dependency>
<groupId>com.github.loki4j</groupId>
<artifactId>loki-logback-appender-jdk8</artifactId>
<version>1.4.2</version>
</dependency>
<!-- 其他 -->
<dependency>
<groupId>dev.failsafe</groupId>
<artifactId>failsafe</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections</artifactId>
<version>${eclipse-collections.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections-api</artifactId>
<version>${eclipse-collections.version}</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
</dependency>
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-spring-boot-starter</artifactId>
<version>2.13.2</version>
</dependency>
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-el-builder</artifactId>
<version>2.13.2</version>
</dependency>
<dependency>
<groupId>org.noear</groupId>
<artifactId>solon-ai</artifactId>
<version>${solon-ai.version}</version>
</dependency>
<dependency>
<groupId>org.noear</groupId>
<artifactId>solon-ai-dialect-openai</artifactId>
<version>${solon-ai.version}</version>
</dependency>
<dependency>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct</artifactId>
<version>${mapstruct.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.14.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.6.0</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${spring-boot.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<distributionManagement>
<repository>
<id>${releases.id}</id>
<name>${releases.name}</name>
<url>${releases.url}</url>
</repository>
<snapshotRepository>
<id>${snapshots.id}</id>
<name>${snapshots.name}</name>
<url>${snapshots.url}</url>
</snapshotRepository>
</distributionManagement>
</project>

View File

@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-cli</artifactId>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,22 @@
package com.lanyuanxiaoyao.service.ai.cli;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * CLI entry point: boots a Spring context and, once startup completes,
 * triggers the llama-swap configuration generator.
 *
 * @author lanyuanxiaoyao
 * @version 20250612
 */
@SpringBootApplication
public class Generator implements ApplicationRunner {
    public static void main(String[] args) {
        SpringApplication.run(Generator.class, args);
    }

    /**
     * Invoked by Spring Boot after the application has started; delegates
     * the actual generation work to {@link LlamaSwapTool}.
     */
    @Override
    public void run(ApplicationArguments args) throws Exception {
        final LlamaSwapTool tool = new LlamaSwapTool();
        tool.generate();
    }
}

View File

@@ -0,0 +1,26 @@
package com.lanyuanxiaoyao.service.ai.cli;
import cn.hutool.core.io.FileUtil;
import cn.hutool.extra.template.Template;
import cn.hutool.extra.template.TemplateConfig;
import cn.hutool.extra.template.TemplateEngine;
import cn.hutool.extra.template.TemplateUtil;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
 * Base class for template-driven generators: renders a classpath template
 * with a supplied data model and writes the result to a target file.
 *
 * @author lanyuanxiaoyao
 * @version 20250612
 */
public abstract class GeneratorTool {
    // Templates are resolved from the "template" directory on the classpath.
    private final TemplateEngine engine =
            TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH));

    /**
     * Renders {@code templatePath} with {@code data} and writes the output
     * to {@code targetScriptPath}, deleting any stale copy first so the
     * file is always regenerated from scratch.
     */
    protected void generateTemplate(String templatePath, Map<?, ?> data, String targetScriptPath) {
        final Template tpl = engine.getTemplate(templatePath);
        final String rendered = tpl.render(data);
        FileUtil.del(targetScriptPath);
        FileUtil.writeString(rendered, targetScriptPath, StandardCharsets.UTF_8);
    }

    /** Produces this tool's output; implementations decide what to generate. */
    public abstract void generate() throws Exception;
}

View File

@@ -0,0 +1,244 @@
package com.lanyuanxiaoyao.service.ai.cli;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.util.StrUtil;
import java.util.List;
import java.util.Map;
/**
 * Generates the llama-swap {@code config.yaml}: for every model it builds the
 * Docker command line that launches either a llama.cpp server or a vLLM
 * server container, plus the proxy URL llama-swap should route requests to.
 *
 * @author lanyuanxiaoyao
 * @version 20250612
 */
public class LlamaSwapTool extends GeneratorTool {
    // API key injected into every backend's command line; clients must present it.
    private static final String API_KEY = "*XMySqV%>hR&v>>g*NwCs3tpQ5FVMFEF2VHVTj<MYQd$&@$sY7CgqNyea4giJi4";

    /**
     * Normalises a model name into a container-name-friendly identifier:
     * whitespace and dots become underscores, and the result is lower-cased.
     */
    public static String displayName(String name) {
        return name.replaceAll("\\s+", "_")
                .replaceAll("\\.", "_")
                .toLowerCase();
    }

    /**
     * Renders llama-swap.ftl with the full model catalogue and writes the
     * result to config.yaml.
     */
    @Override
    public void generate() {
        generateTemplate(
                "llama-swap.ftl",
                Map.of(
                        "models", List.of(
                                llamaCppEmbeddingCmd("BGE/bge-m3-q4km", "bge-m3-Q4_K_M.gguf", 20),
                                vllmEmbeddingCmd("BGE/bge-m3", "bge-m3", 5),
                                llamaCppRerankerCmd("BGE/beg-reranker-v2-q4km", "bge-reranker-v2-m3-Q4_K_M.gguf", 20),
                                vllmRerankerCmd("BGE/beg-reranker-v2", "bge-reranker-v2-m3", 5),
                                vllmCmd("Qwen3/qwen3-0.6b", "Qwen3-0.6B", 5, true),
                                vllmCmd("Qwen3/qwen3-1.7b", "Qwen3-1.7B", 5, true),
                                vllmCmd("Qwen3/qwen3-4b", "Qwen3-4B", 8, true),
                                llamaCppCmd("Qwen3/qwen3-4b-q4km", "Qwen3-4B-Q4_K_M.gguf", 35),
                                llamaCppCmd("Qwen3/qwen3-8b-q4km", "Qwen3-8B-Q4_K_M.gguf", 35),
                                vllmEmbeddingCmd("Qwen3/qwen3-embedding-0.6b", "Qwen3-Embedding-0.6B", 5),
                                vllmEmbeddingCmd("Qwen3/qwen3-embedding-4b", "Qwen3-Embedding-4B", 8),
                                llamaCppEmbeddingCmd("Qwen3/qwen3-embedding-4b-q4km", "Qwen3-Embedding-4B-Q4_K_M.gguf", 35),
                                llamaCppEmbeddingCmd("Qwen3/qwen3-embedding-8b-q4km", "Qwen3-Embedding-8B-Q4_K_M.gguf", 35),
                                // Not yet supported as of vLLM 0.9.1
                                // vllmRerankerCmd("Qwen3/qwen3-reranker-0.6b", "Qwen3-Reranker-0.6B", 5),
                                // vllmRerankerCmd("Qwen3/qwen3-reranker-4b", "Qwen3-Reranker-4B", 8),
                                llamaCppVisualCmd("Qwen2.5/qwen2.5-vl-7b", "Qwen2.5-VL-7B-Instruct-BF16.gguf", 35),
                                llamaCppVisualCmd("Qwen2.5/qwen2.5-vl-7b-q4km", "Qwen2.5-VL-7B-Instruct-Q4_K_M.gguf", 35),
                                vllmCmd("Qwen2.5/qwen2.5-vl-3b-instruct", "Qwen2.5-VL-3B-Instruct", 8, false),
                                vllmCmd("Qwen2.5/qwen2.5-vl-7b-instruct", "Qwen2.5-VL-7B-Instruct", 8, false),
                                llamaCppVisualCmd("MiniCPM/minicpm-o-2.6-7.6b-q4km", "MiniCPM-o-2_6-7.6B-Q4_K_M.gguf", 35),
                                vllmCmd("MiniCPM/minicpm-o-2.6-7.6b", "MiniCPM-o-2_6", 10, false)
                        )
                ),
                "config.yaml"
        );
    }

    /** Plain llama.cpp chat model (gets --jinja chat templating). */
    private DockerCmd llamaCppCmd(String name, String model, Integer thread) {
        return llamaCppCmd(name, model, thread, false, false, false);
    }

    /** llama.cpp embedding-server variant. */
    private DockerCmd llamaCppEmbeddingCmd(String name, String model, Integer thread) {
        return llamaCppCmd(name, model, thread, true, false, false);
    }

    /** llama.cpp reranker-server variant. */
    private DockerCmd llamaCppRerankerCmd(String name, String model, Integer thread) {
        return llamaCppCmd(name, model, thread, false, true, false);
    }

    /** llama.cpp multimodal (vision) variant; expects a matching .mmproj file. */
    private DockerCmd llamaCppVisualCmd(String name, String model, Integer thread) {
        return llamaCppCmd(name, model, thread, false, false, true);
    }

    /**
     * Builds the Docker command for a llama.cpp server container.
     * The embedding/reranker/visual flags are treated as mutually exclusive.
     *
     * @param thread value passed to llama.cpp's -t (thread count)
     */
    private DockerCmd llamaCppCmd(String name, String model, Integer thread, Boolean isEmbedding, Boolean isReranker, Boolean isVisual) {
        // ListUtil.list(false, ...) builds a mutable ArrayList (false = not linked).
        List<String> arguments = ListUtil.list(
                false,
                StrUtil.format("-m /models/{}", model),
                "--port ${PORT}", // ${PORT} is substituted by llama-swap at launch time
                StrUtil.format("--api-key {}", API_KEY),
                "-c 0",
                "-b 4096",
                StrUtil.format("-t {}", thread),
                "-np 5",
                "--log-disable",
                "--no-webui"
        );
        if (isEmbedding) {
            arguments.add("--embedding");
            arguments.add("-ub 8192");
            arguments.add("--pooling mean");
        } else if (isReranker) {
            arguments.add("--reranking");
        } else if (isVisual) {
            arguments.add(StrUtil.format("--mmproj /models/{}.mmproj", model));
        } else {
            // Plain chat model: enable jinja chat templating.
            arguments.add("--jinja");
        }
        return new DockerCmd(
                "ghcr.io/ggml-org/llama.cpp:server",
                name,
                model,
                StrUtil.format("http://llamacpp-{}:${PORT}", displayName(model)),
                List.of(StrUtil.format("--name llamacpp-{}", displayName(model))),
                arguments
        );
    }

    /** Plain vLLM chat model; isReasonable toggles tool-call/reasoning parsing. */
    private DockerCmd vllmCmd(String name, String model, Integer cache, Boolean isReasonable) {
        return vllmCmd(name, model, cache, false, false, isReasonable);
    }

    /** vLLM embedding variant (--task embedding). */
    private DockerCmd vllmEmbeddingCmd(String name, String model, Integer cache) {
        return vllmCmd(name, model, cache, true, false, false);
    }

    /** vLLM reranker variant (--task score). */
    private DockerCmd vllmRerankerCmd(String name, String model, Integer cache) {
        return vllmCmd(name, model, cache, false, true, false);
    }

    // NOTE(review): currently unused — kept for symmetry with llamaCppVisualCmd.
    private DockerCmd vllmVisualCmd(String name, String model, Integer cache, Boolean isReasonable) {
        return vllmCmd(name, model, cache, false, false, isReasonable);
    }

    /**
     * Builds the Docker command for a vLLM (CPU build) server container.
     *
     * @param cache value for the VLLM_CPU_KVCACHE_SPACE environment variable
     */
    private DockerCmd vllmCmd(String name, String model, Integer cache, Boolean isEmbedding, Boolean isReranker, Boolean isReasonable) {
        List<String> arguments = ListUtil.list(
                false,
                StrUtil.format("--model /models/{}", model),
                StrUtil.format("--served-model-name {}", name),
                "--port ${PORT}", // ${PORT} is substituted by llama-swap at launch time
                StrUtil.format("--api-key {}", API_KEY),
                "--disable-log-requests",
                "--uvicorn-log-level error",
                "--trust-remote-code"
        );
        if (isEmbedding) {
            arguments.add("--task embedding");
        } else if (isReranker) {
            arguments.add("--task score");
        } else if (isReasonable) {
            // Reasoning-capable chat model: enable tool calling and
            // DeepSeek-R1-style reasoning output parsing.
            arguments.add("--enable-auto-tool-choice");
            arguments.add("--tool-call-parser hermes");
            arguments.add("--enable-reasoning");
            arguments.add("--reasoning-parser deepseek_r1");
        }
        return new DockerCmd(
                "vllm-server-cpu:0.8.5.post1",
                name,
                model,
                StrUtil.format("http://vllm-{}:${PORT}", displayName(model)),
                List.of(
                        StrUtil.format("--name vllm-{}", displayName(model)),
                        "--privileged=true",
                        "--shm-size=4g",
                        StrUtil.format("-e VLLM_CPU_KVCACHE_SPACE={}", cache)
                ),
                arguments
        );
    }

    /**
     * Value object consumed by llama-swap.ftl: the Docker image, the model it
     * serves, the proxy URL, and the docker-run option / server-argument lists.
     */
    public static class DockerCmd {
        private String image; // Docker image to run
        private String name; // model name exposed through llama-swap
        private String model; // model file or directory under /models
        private String proxy; // URL llama-swap forwards requests to
        // Baseline docker-run options shared by every container; per-model
        // options are appended in the constructor.
        private List<String> options = ListUtil.list(
                false,
                "--rm",
                "--network llama",
                "-v /data/models:/models"
        );
        private List<String> arguments = ListUtil.list(false);
        public DockerCmd(String image, String name, String model, String proxy, List<String> options, List<String> arguments) {
            this.image = image;
            this.name = name;
            this.model = model;
            this.proxy = proxy;
            // Append to the per-instance defaults rather than replacing them.
            this.options.addAll(options);
            this.arguments.addAll(arguments);
        }
        public String getImage() {
            return image;
        }
        public void setImage(String image) {
            this.image = image;
        }
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        public String getModel() {
            return model;
        }
        public void setModel(String model) {
            this.model = model;
        }
        public String getProxy() {
            return proxy;
        }
        public void setProxy(String proxy) {
            this.proxy = proxy;
        }
        public List<String> getOptions() {
            return options;
        }
        public void setOptions(List<String> options) {
            this.options = options;
        }
        public List<String> getArguments() {
            return arguments;
        }
        public void setArguments(List<String> arguments) {
            this.arguments = arguments;
        }
        @Override
        public String toString() {
            return "DockerCmd{" +
                    "image='" + image + '\'' +
                    ", name='" + name + '\'' +
                    ", model='" + model + '\'' +
                    ", proxy='" + proxy + '\'' +
                    ", options=" + options +
                    ", arguments=" + arguments +
                    '}';
        }
    }
}

View File

@@ -0,0 +1,25 @@
healthCheckTimeout: 600
logLevel: warn
models:
<#list models as model>
"${model.name}":
proxy: ${model.proxy}
ttl: 86400
cmd: |
docker run
<#list model.options as option>
${option}
</#list>
${model.image}
<#list model.arguments as arg>
${arg}
</#list>
</#list>
groups:
"persistent":
swap: false
exclusive: false
members:
<#list models as model>
- "${model.name}"
</#list>

View File

@@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-core</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<exclusions>
<exclusion>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot-starter</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-sleuth</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,47 @@
package com.lanyuanxiaoyao.service.ai.core.configuration;
import java.net.http.HttpClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.http.client.JdkClientHttpRequestFactory;
import org.springframework.http.client.reactive.JdkClientHttpConnector;
import org.springframework.web.client.RestClient;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Pins Spring's HTTP clients to a JDK {@link HttpClient} restricted to
 * HTTP/1.1, because the vLLM backend cannot handle newer protocol versions.
 * (The original note said "http 1.0", but the code pins HTTP/1.1 —
 * presumably "old HTTP" was meant; confirm against the vLLM deployment.)
 *
 * @author lanyuanxiaoyao
 * @version 20250519
 */
@Configuration
public class WebClientConfiguration {
    /** Primary {@link RestClient.Builder} backed by the HTTP/1.1 JDK client. */
    @Bean
    @Primary
    public RestClient.Builder restClientBuilder() {
        return generateRestClientBuilder();
    }

    /** Primary {@link WebClient.Builder} backed by the HTTP/1.1 JDK client. */
    @Bean
    @Primary
    public WebClient.Builder webClientBuilder() {
        return generateWebClientBuilder();
    }

    /** Shared factory for a JDK client forced onto HTTP/1.1. */
    private static HttpClient httpClient() {
        return HttpClient.newBuilder()
                .version(HttpClient.Version.HTTP_1_1)
                .build();
    }

    /** Builds a blocking {@link RestClient.Builder} on the HTTP/1.1 client. */
    public static RestClient.Builder generateRestClientBuilder() {
        final HttpClient client = httpClient();
        return RestClient.builder()
                .requestFactory(new JdkClientHttpRequestFactory(client));
    }

    /** Builds a reactive {@link WebClient.Builder} on the HTTP/1.1 client. */
    public static WebClient.Builder generateWebClientBuilder() {
        final HttpClient client = httpClient();
        return WebClient.builder()
                .clientConnector(new JdkClientHttpConnector(client));
    }
}

View File

@@ -0,0 +1,30 @@
package com.lanyuanxiaoyao.service.ai.core.entity.amis;
/**
 * Response shape for amis CRUD components: the payload map carries the row
 * list under the "items" key and the total row count under "total".
 *
 * @author lanyuanxiaoyao
 * @date 2023-07-06
 */
public class AmisCrudResponse extends AmisMapResponse {
    /** Stores the page rows under the "items" key. */
    public void setData(Iterable<?> list) {
        this.getData().put("items", list);
    }

    /** Stores the total row count under the "total" key. */
    public void setTotal(Long total) {
        this.getData().put("total", total);
    }

    /** Integer convenience overload; widens to {@code long}. */
    public void setTotal(Integer total) {
        this.setTotal(total.longValue());
    }

    /** Sets both the rows and the total count in one call. */
    public void setData(Iterable<?> list, Long total) {
        this.setData(list);
        this.setTotal(total);
    }

    /** Integer convenience overload of {@link #setData(Iterable, Long)}. */
    public void setData(Iterable<?> list, Integer total) {
        this.setData(list, total.longValue());
    }
}

View File

@@ -0,0 +1,13 @@
package com.lanyuanxiaoyao.service.ai.core.entity.amis;
/**
 * Detail response: carries a single record under the "detail" key.
 *
 * @author lanyuanxiaoyao
 * @date 2023-07-06
 */
public class AmisDetailResponse extends AmisMapResponse {

    /** Stores the record under the "detail" key. */
    public void setDetail(Object detail) {
        setData("detail", detail);
    }
}

View File

@@ -0,0 +1,21 @@
package com.lanyuanxiaoyao.service.ai.core.entity.amis;
import java.util.HashMap;
import java.util.Map;
/**
 * Response whose payload is a mutable key/value map, pre-initialized
 * so callers can put entries without a null check.
 *
 * @author lanyuanxiaoyao
 * @date 2023-07-06
 */
public class AmisMapResponse extends AmisResponse<Map<String, Object>> {

    public AmisMapResponse() {
        setData(new HashMap<>());
    }

    /** Puts one key/value pair into the payload map; returns this for chaining. */
    public AmisMapResponse setData(String key, Object value) {
        Map<String, Object> payload = getData();
        payload.put(key, value);
        return this;
    }
}

View File

@@ -0,0 +1,138 @@
package com.lanyuanxiaoyao.service.ai.core.entity.amis;
import java.util.Map;
/**
 * Structured response envelope for amis components.
 *
 * <p>amis treats {@code status == 0} as success; {@code message} carries the
 * human-readable result and {@code data} the payload.
 *
 * @param <T> payload type carried in {@code data}
 * @author lanyuanxiaoyao
 * @date 2022-09-21
 */
public class AmisResponse<T> {

    private static final int SUCCESS_STATUS = 0;
    private static final int ERROR_STATUS = 500;
    private static final String SUCCESS_MESSAGE = "OK";
    private static final String ERROR_MESSAGE = "ERROR";

    private Integer status;
    private String message;
    private T data;

    /** Error response with the default error message and no payload. */
    public static AmisResponse<Object> responseError() {
        return responseError(ERROR_MESSAGE);
    }

    /** Error response carrying a caller-supplied message and no payload. */
    public static AmisResponse<Object> responseError(String message) {
        AmisResponse<Object> response = new AmisResponse<>();
        response.setStatus(ERROR_STATUS);
        response.setMessage(message);
        return response;
    }

    /** Success response with the default message and no payload. */
    public static AmisResponse<Object> responseSuccess() {
        AmisResponse<Object> response = new AmisResponse<>();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(SUCCESS_MESSAGE);
        return response;
    }

    /** Success response carrying a caller-supplied message and no payload. */
    public static AmisResponse<Object> responseSuccess(String message) {
        AmisResponse<Object> response = new AmisResponse<>();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(message);
        return response;
    }

    /** Success response with a caller-supplied message and payload. */
    public static <E> AmisResponse<E> responseSuccess(String message, E data) {
        AmisResponse<E> response = new AmisResponse<>();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(message);
        response.setData(data);
        return response;
    }

    /** Success response with the default message and a payload. */
    public static <E> AmisResponse<E> responseSuccess(E data) {
        AmisResponse<E> response = new AmisResponse<>();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(SUCCESS_MESSAGE);
        response.setData(data);
        return response;
    }

    /** Successful map-payload response with an empty map. */
    public static AmisMapResponse responseMapData() {
        AmisMapResponse response = new AmisMapResponse();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(SUCCESS_MESSAGE);
        return response;
    }

    /** Successful map-payload response wrapping the given map. */
    public static AmisMapResponse responseMapData(Map<String, Object> data) {
        AmisMapResponse response = responseMapData();
        response.setData(data);
        return response;
    }

    /** Successful map-payload response with a single entry. */
    public static AmisMapResponse responseMapData(String key, Object value) {
        AmisMapResponse response = responseMapData();
        response.setData(key, value);
        return response;
    }

    /** Successful CRUD response carrying the row list. */
    public static AmisCrudResponse responseCrudData(Iterable<?> data) {
        AmisCrudResponse response = new AmisCrudResponse();
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(SUCCESS_MESSAGE);
        response.setData(data);
        return response;
    }

    /** Successful CRUD response carrying the row list and total count. */
    public static AmisCrudResponse responseCrudData(Iterable<?> data, Integer total) {
        AmisCrudResponse response = responseCrudData(data);
        response.setTotal(total);
        return response;
    }

    /** Successful CRUD response carrying the row list and total count. */
    public static AmisCrudResponse responseCrudData(Iterable<?> data, Long total) {
        AmisCrudResponse response = responseCrudData(data);
        response.setTotal(total);
        return response;
    }

    /** Successful detail response carrying a single record. */
    public static AmisDetailResponse responseDetailData(Object detail) {
        AmisDetailResponse response = new AmisDetailResponse();
        // FIX: mark the response as successful, consistent with every other
        // success factory. Previously status/message were left null, which
        // amis does not accept as a success envelope.
        response.setStatus(SUCCESS_STATUS);
        response.setMessage(SUCCESS_MESSAGE);
        response.setDetail(detail);
        return response;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    @Override
    public String toString() {
        return "AmisResponse{" +
                "status=" + status +
                ", message='" + message + '\'' +
                ", data=" + data +
                '}';
    }
}

View File

@@ -0,0 +1,64 @@
package com.lanyuanxiaoyao.service.configuration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
/**
 * Web security configuration: wide-open CORS plus HTTP Basic authentication
 * against a single in-memory account sourced from {@code SecurityProperties}.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@Configuration
@EnableWebSecurity
public class SecurityConfig {

    /**
     * CORS filter allowing any origin pattern, header and method, with
     * credentials and private-network requests permitted on all routes.
     */
    @Bean
    public CorsFilter corsFilter() {
        CorsConfiguration cors = new CorsConfiguration();
        cors.setAllowCredentials(true);
        cors.addAllowedOriginPattern("*");
        cors.addAllowedHeader("*");
        cors.addAllowedMethod("*");
        cors.setMaxAge(7200L);
        cors.setAllowPrivateNetwork(true);

        UrlBasedCorsConfigurationSource routes = new UrlBasedCorsConfigurationSource();
        routes.registerCorsConfiguration("/**", cors);
        return new CorsFilter(routes);
    }

    /**
     * Every request except CORS preflight (OPTIONS) must authenticate;
     * HTTP Basic is on, CSRF protection and form login are off.
     */
    @Bean
    public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {
        http.authorizeHttpRequests(registry -> registry
                .requestMatchers(HttpMethod.OPTIONS, "/**").permitAll()
                .anyRequest().authenticated());
        http.httpBasic(Customizer.withDefaults());
        http.cors(Customizer.withDefaults());
        http.csrf(AbstractHttpConfigurer::disable);
        http.formLogin(AbstractHttpConfigurer::disable);
        return http.build();
    }

    /**
     * Single in-memory user built from externalized security properties.
     * The "{noop}" prefix stores the password unencoded.
     */
    @Bean
    public InMemoryUserDetailsManager userDetailsService(SecurityProperties securityProperties) {
        UserDetails account = User.builder()
                .username(securityProperties.getUsername())
                .password("{noop}" + securityProperties.getDarkcode())
                .authorities(securityProperties.getAuthority())
                .build();
        return new InMemoryUserDetailsManager(account);
    }
}

View File

@@ -0,0 +1,134 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module: service-ai-web — the Spring Boot web application of the AI service. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-web</artifactId>
<dependencies>
<!-- Core module. Tomcat is excluded because Jetty is used as the servlet
     container below; protobuf-java is excluded — presumably to avoid a
     version clash with a transitive dependency. TODO confirm. -->
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jetty replaces the default Tomcat servlet container. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jetty</artifactId>
</dependency>
<!-- Spring AI: model providers, vector store and document readers. -->
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-openai</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-deepseek</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-vector-store-qdrant</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-markdown-document-reader</artifactId>
</dependency>
<!-- Persistence: JPA, Fenix dynamic-SQL extension, MySQL driver. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>com.blinkfox</groupId>
<artifactId>fenix-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<!-- LiteFlow rule/flow engine plus its EL builder. -->
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-el-builder</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-tika-document-reader</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-pdf-document-reader</artifactId>
</dependency>
<!-- Solon AI client with the OpenAI dialect. -->
<dependency>
<groupId>org.noear</groupId>
<artifactId>solon-ai</artifactId>
</dependency>
<dependency>
<groupId>org.noear</groupId>
<artifactId>solon-ai-dialect-openai</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Annotation processing: MapStruct + Lombok (with the binding that lets
     them cooperate) and the Hibernate JPA static metamodel generator.
     Processor order matters: Lombok must run before the MapStruct binding. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<annotationProcessorPaths>
<path>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct-processor</artifactId>
<version>${mapstruct.version}</version>
</path>
<path>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</path>
<path>
<groupId>org.projectlombok</groupId>
<artifactId>lombok-mapstruct-binding</artifactId>
<version>0.2.0</version>
</path>
<path>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
<version>6.6.8.Final</version>
</path>
</annotationProcessorPaths>
<!-- Generated mappers are Spring beans using constructor injection. -->
<compilerArgs>
<arg>-Amapstruct.defaultComponentModel=spring</arg>
<arg>-Amapstruct.defaultInjectionStrategy=constructor</arg>
</compilerArgs>
</configuration>
</plugin>
<!-- Repackage the jar as an executable Spring Boot fat jar at package time. -->
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,53 @@
package com.lanyuanxiaoyao.service.ai.web;
import com.blinkfox.fenix.EnableFenix;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import org.springframework.beans.BeansException;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import org.springframework.retry.annotation.EnableRetry;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
 * Application entry point. Enables service discovery, encryptable properties,
 * retry, scheduling, Fenix repositories and JPA auditing, and scans the whole
 * {@code com.lanyuanxiaoyao.service} package tree for components.
 *
 * @author lanyuanxiaoyao
 * @version 20250515
 */
@SpringBootApplication(scanBasePackages = "com.lanyuanxiaoyao.service")
@EnableDiscoveryClient
@EnableConfigurationProperties
@EnableEncryptableProperties
@EnableRetry
@EnableScheduling
@EnableFenix
@EnableJpaAuditing
public class WebApplication implements ApplicationRunner, ApplicationContextAware {

    // Static handle to the Spring context so non-managed code can look up beans.
    // NOTE(review): mutable static written from an instance callback — safe only
    // because Spring instantiates this class once during startup.
    private static ApplicationContext context;

    public static void main(String[] args) {
        SpringApplication.run(WebApplication.class, args);
    }

    /** Looks up a bean by type from the captured application context. */
    public static <T> T getBean(Class<T> clazz) {
        return context.getBean(clazz);
    }

    /** Looks up a bean by name and type from the captured application context. */
    public static <T> T getBean(String name, Class<T> clazz) {
        return context.getBean(name, clazz);
    }

    // Intentionally empty: nothing to do after startup, but implementing
    // ApplicationRunner requires this hook.
    @Override
    public void run(ApplicationArguments args) {
    }

    // Captures the context into the static holder used by the getBean helpers.
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        WebApplication.context = context;
    }
}

View File

@@ -0,0 +1,13 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisResponse;
/**
 * Read side of the generic CRUD contract: fetch one record by id.
 *
 * @param <DETAIL_ITEM> transport type returned for a single record
 * @author lanyuanxiaoyao
 * @date 2024-11-28
 */
public interface DetailController<DETAIL_ITEM> {

    // Route template shared by all implementing controllers.
    String DETAIL = "/detail/{id}";

    /** Returns the detail view of the record with the given id. */
    AmisResponse<DETAIL_ITEM> detail(Long id) throws Exception;
}

View File

@@ -0,0 +1,16 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisCrudResponse;
import com.lanyuanxiaoyao.service.ai.web.base.controller.query.Query;
/**
 * Listing side of the generic CRUD contract: an unfiltered listing and a
 * filtered/paged listing driven by a {@link Query} payload.
 *
 * @author lanyuanxiaoyao
 * @date 2024-11-28
 */
public interface ListController {

    // Route shared by both listing variants (implementations differ by HTTP method).
    String LIST = "/list";

    /** Returns all records without filtering or paging. */
    AmisCrudResponse list() throws Exception;

    /** Returns records matching the given query (filters, sort, paging). */
    AmisCrudResponse list(Query query) throws Exception;
}

View File

@@ -0,0 +1,13 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisResponse;
/**
 * Deletion side of the generic CRUD contract: remove one record by id.
 *
 * @author lanyuanxiaoyao
 * @date 2024-11-28
 */
public interface RemoveController {

    // Route template shared by all implementing controllers.
    String REMOVE = "/remove/{id}";

    /** Deletes the record with the given id; the response carries no payload. */
    AmisResponse<Object> remove(Long id) throws Exception;
}

View File

@@ -0,0 +1,13 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisResponse;
/**
 * Write side of the generic CRUD contract: create or update one record.
 *
 * @param <SAVE_ITEM> transport type accepted as the save payload
 * @author lanyuanxiaoyao
 * @date 2024-11-28
 */
public interface SaveController<SAVE_ITEM> {

    // Route template shared by all implementing controllers.
    String SAVE = "/save";

    /** Persists the given item and returns the id of the saved record. */
    AmisResponse<Long> save(SAVE_ITEM item) throws Exception;
}

View File

@@ -0,0 +1,8 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
/**
 * Convenience aggregate of the four CRUD contracts (save, list, detail,
 * remove) so a controller can implement the full set in one declaration.
 *
 * @param <SAVE_ITEM>   transport type accepted as the save payload
 * @param <LIST_ITEM>   transport type used for list rows
 * @param <DETAIL_ITEM> transport type returned for a single record
 * @author lanyuanxiaoyao
 * @date 2024-11-28
 */
public interface SimpleController<SAVE_ITEM, LIST_ITEM, DETAIL_ITEM> extends SaveController<SAVE_ITEM>, ListController, DetailController<DETAIL_ITEM>, RemoveController {
}

View File

@@ -0,0 +1,105 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller;
import cn.hutool.core.util.ObjectUtil;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisCrudResponse;
import com.lanyuanxiaoyao.service.ai.core.entity.amis.AmisResponse;
import com.lanyuanxiaoyao.service.ai.web.base.controller.query.Query;
import com.lanyuanxiaoyao.service.ai.web.base.entity.SimpleEntity;
import com.lanyuanxiaoyao.service.ai.web.base.service.SimpleServiceSupport;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
/**
 * Generic CRUD controller base: wires the standard save/list/detail/remove
 * endpoints to a {@link SimpleServiceSupport}. Subclasses supply the mappers
 * translating between the persisted ENTITY and the transport item types.
 *
 * @author lanyuanxiaoyao
 * @date 2024-11-26
 */
@Slf4j
public abstract class SimpleControllerSupport<ENTITY extends SimpleEntity, SAVE_ITEM, LIST_ITEM, DETAIL_ITEM> implements SimpleController<SAVE_ITEM, LIST_ITEM, DETAIL_ITEM> {

    // Backing service performing the actual persistence operations.
    protected final SimpleServiceSupport<ENTITY> service;

    public SimpleControllerSupport(SimpleServiceSupport<ENTITY> service) {
        this.service = service;
    }

    /** Maps the request body to an entity, persists it and returns the new id. */
    @PostMapping(SAVE)
    @Override
    public AmisResponse<Long> save(@RequestBody SAVE_ITEM item) throws Exception {
        SaveItemMapper<ENTITY, SAVE_ITEM> mapper = saveItemMapper();
        return AmisResponse.responseSuccess(service.save(mapper.from(item)));
    }

    /**
     * Unpaged listing of all entities mapped to list rows.
     * NOTE(review): {@code service.list().collect(...)} looks like an Eclipse
     * Collections iterable ({@code collect} == map) — confirm against the
     * service's return type.
     */
    @GetMapping(LIST)
    @Override
    public AmisCrudResponse list() throws Exception {
        ListItemMapper<ENTITY, LIST_ITEM> mapper = listItemMapper();
        return AmisCrudResponse.responseCrudData(
                service.list()
                        .collect(entity -> {
                            try {
                                return mapper.from(entity);
                            } catch (Exception e) {
                                // Mapper declares checked exceptions; a lambda
                                // cannot throw them, so wrap and rethrow unchecked.
                                throw new RuntimeException(e);
                            }
                        })
        );
    }

    /** Filtered/paged listing; a missing query body yields an empty page. */
    @PostMapping(LIST)
    @Override
    public AmisCrudResponse list(@RequestBody Query query) throws Exception {
        if (ObjectUtil.isNull(query)) {
            return AmisCrudResponse.responseCrudData(List.of(), 0);
        }
        ListItemMapper<ENTITY, LIST_ITEM> mapper = listItemMapper();
        Page<ENTITY> result = service.list(query);
        return AmisCrudResponse.responseCrudData(
                result.get()
                        .map(entity -> {
                            try {
                                return mapper.from(entity);
                            } catch (Exception e) {
                                // Same checked-exception wrapping as above.
                                throw new RuntimeException(e);
                            }
                        })
                        .toList(),
                result.getTotalElements()
        );
    }

    /** Loads one entity by id (service throws when absent) and maps it to the detail view. */
    @GetMapping(DETAIL)
    @Override
    public AmisResponse<DETAIL_ITEM> detail(@PathVariable("id") Long id) throws Exception {
        DetailItemMapper<ENTITY, DETAIL_ITEM> mapper = detailItemMapper();
        return AmisResponse.responseSuccess(mapper.from(service.detailOrThrow(id)));
    }

    /**
     * Deletes the entity with the given id.
     * NOTE(review): mapped to GET although it mutates state — the amis frontend
     * may depend on this; confirm before changing to DELETE/POST.
     */
    @GetMapping(REMOVE)
    @Override
    public AmisResponse<Object> remove(@PathVariable("id") Long id) throws Exception {
        service.remove(id);
        return AmisResponse.responseSuccess();
    }

    /** Supplies the mapper turning a SAVE_ITEM payload into a persistable entity. */
    protected abstract SaveItemMapper<ENTITY, SAVE_ITEM> saveItemMapper();

    /** Supplies the mapper turning an entity into a LIST_ITEM row. */
    protected abstract ListItemMapper<ENTITY, LIST_ITEM> listItemMapper();

    /** Supplies the mapper turning an entity into a DETAIL_ITEM view. */
    protected abstract DetailItemMapper<ENTITY, DETAIL_ITEM> detailItemMapper();

    /** Converts an inbound save payload into a persistable entity. */
    public interface SaveItemMapper<ENTITY, SAVE_ITEM> {
        ENTITY from(SAVE_ITEM item) throws Exception;
    }

    /** Converts an entity into its list-row representation. */
    public interface ListItemMapper<ENTITY, LIST_ITEM> {
        LIST_ITEM from(ENTITY entity) throws Exception;
    }

    /** Converts an entity into its detail representation. */
    public interface DetailItemMapper<ENTITY, DETAIL_ITEM> {
        DETAIL_ITEM from(ENTITY entity) throws Exception;
    }
}

View File

@@ -0,0 +1,61 @@
package com.lanyuanxiaoyao.service.ai.web.base.controller.query;
import lombok.Data;
import org.eclipse.collections.api.list.ImmutableList;
import org.eclipse.collections.api.map.ImmutableMap;
/**
 * Unified query payload for the POST /list endpoint: filter criteria,
 * sort orders and paging in a single structure.
 *
 * @author lanyuanxiaoyao
 * @date 2024-12-03
 */
@Data
public class Query {

    // Filter criteria; null presumably means "no filtering" — confirm in the service layer.
    private Queryable query;
    // Sort orders, applied in list order.
    private ImmutableList<Sortable> sort;
    // Page request; null presumably means "unpaged" — confirm in the service layer.
    private Pageable page;

    /** Filter criteria keyed by column/property name. */
    @Data
    public static class Queryable {
        // Columns required to be NULL / NOT NULL.
        private ImmutableList<String> nullEqual;
        private ImmutableList<String> notNullEqual;
        // Columns required to be empty / not empty.
        private ImmutableList<String> empty;
        private ImmutableList<String> notEmpty;
        // column -> value equality / inequality.
        private ImmutableMap<String, ?> equal;
        private ImmutableMap<String, ?> notEqual;
        // column -> pattern (LIKE / NOT LIKE).
        private ImmutableMap<String, String> like;
        private ImmutableMap<String, String> notLike;
        // column -> bound for >, <, >=, <= comparisons.
        private ImmutableMap<String, ?> great;
        private ImmutableMap<String, ?> less;
        private ImmutableMap<String, ?> greatEqual;
        private ImmutableMap<String, ?> lessEqual;
        // column -> allowed / forbidden value sets (IN / NOT IN).
        private ImmutableMap<String, ImmutableList<?>> in;
        private ImmutableMap<String, ImmutableList<?>> notIn;
        // column -> range (BETWEEN / NOT BETWEEN); inclusivity is decided by the service layer.
        private ImmutableMap<String, Between> between;
        private ImmutableMap<String, Between> notBetween;

        /** Range endpoints for BETWEEN filters; values arrive as raw strings. */
        @Data
        public static class Between {
            private String start;
            private String end;
        }
    }

    /** One sort order: the target column and its direction. */
    @Data
    public static class Sortable {
        private String column;
        private Direction direction;

        public enum Direction {
            ASC,
            DESC,
        }
    }

    /** Page request; whether index is 0- or 1-based depends on the service layer — TODO confirm. */
    @Data
    public static class Pageable {
        private Integer index;
        private Integer size;
    }
}

View File

@@ -0,0 +1,29 @@
package com.lanyuanxiaoyao.service.ai.web.base.entity;
import jakarta.persistence.EntityListeners;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import jakarta.persistence.MappedSuperclass;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.hibernate.annotations.GenericGenerator;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
/**
 * Mapped superclass contributing only the primary key, generated by the
 * project's custom snowflake id generator.
 *
 * @author lanyuanxiaoyao
 * @date 2024-11-20
 */
@Getter
@Setter
@ToString
@MappedSuperclass
@EntityListeners(AuditingEntityListener.class)
public class IdOnlyEntity {

    // Snowflake-generated primary key.
    // NOTE(review): @GenericGenerator(strategy = "...") is deprecated in recent
    // Hibernate 6 releases in favor of @IdGeneratorType — confirm before upgrading.
    @Id
    @GeneratedValue(generator = "snowflake")
    @GenericGenerator(name = "snowflake", strategy = "com.lanyuanxiaoyao.service.ai.web.configuration.SnowflakeIdGenerator")
    private Long id;
}

View File

@@ -0,0 +1,29 @@
package com.lanyuanxiaoyao.service.ai.web.base.entity;
import jakarta.persistence.EntityListeners;
import jakarta.persistence.MappedSuperclass;
import java.time.LocalDateTime;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
/**
 * Mapped superclass adding audited creation/modification timestamps on top
 * of the snowflake id from {@code IdOnlyEntity}. The timestamps are filled
 * in by Spring Data JPA auditing via {@link AuditingEntityListener}.
 *
 * @author lanyuanxiaoyao
 * @date 2024-11-20
 */
@Getter
@Setter
@ToString(callSuper = true)
@MappedSuperclass
@EntityListeners(AuditingEntityListener.class)
public class SimpleEntity extends IdOnlyEntity {

    // Set once when the row is first persisted.
    @CreatedDate
    private LocalDateTime createdTime;

    // Updated on every persist of the row.
    @LastModifiedDate
    private LocalDateTime modifiedTime;
}

Some files were not shown because too many files have changed in this diff Show More