46 Commits

Author SHA1 Message Date
v-zhangjc9
fce4816880 feat(all): 移除远程日志集中 2025-05-23 09:49:37 +08:00
v-zhangjc9
79f792b6cf feat(web): 增加预览 2025-05-23 09:48:16 +08:00
v-zhangjc9
0d7d009be2 refactor(knowledge): 加入数据库,优化代码结构 2025-05-22 18:10:44 +08:00
v-zhangjc9
907d2826a4 feat(bin): 增加打包耗时 2025-05-22 10:31:44 +08:00
v-zhangjc9
d190c59e57 fix(bin): 优化忽略列表 2025-05-21 17:58:10 +08:00
v-zhangjc9
bd2205a5b9 fix(bin): 提交未提交的文件 2025-05-21 17:57:48 +08:00
v-zhangjc9
42aab784c2 feat(ai): 完成基础版本功能 2025-05-21 17:57:11 +08:00
v-zhangjc9
8c2b94f6c9 feat(bin): 优化打包脚本到跨平台 2025-05-21 17:56:10 +08:00
v-zhangjc9
6e7cef6170 fix(ai): 移除多余的git跟踪 2025-05-20 17:06:31 +08:00
v-zhangjc9
0156a12b3b feat(gateway): 增加AI相关端口 2025-05-20 16:36:36 +08:00
v-zhangjc9
fe58cee730 feat(bin): 修改仓库到私有仓库中 2025-05-20 14:43:56 +08:00
v-zhangjc9
778a6df984 feat(cli): 增加多jdk的支持 2025-05-20 14:42:51 +08:00
v-zhangjc9
5d49c82190 feat(ai): 完善知识库接口 2025-05-16 19:00:55 +08:00
v-zhangjc9
be976290b6 feat(ai): 完善AI对话 2025-05-16 19:00:26 +08:00
v-zhangjc9
8fbc665abf feat(ai): 增加ai相关子项目 2025-05-15 17:15:18 +08:00
v-zhangjc9
a129caf5f4 fix(web): 修复思考开关无效 2025-05-14 09:14:35 +08:00
v-zhangjc9
aea8a7ed59 feat(web): 优化对话界面显示 2025-05-13 16:13:07 +08:00
v-zhangjc9
dd2e56e27b feat(web): 增加AI对话的能力 2025-05-13 16:03:08 +08:00
v-zhangjc9
819d56fbe3 feat(web): 优化图表展示 2025-05-13 12:06:21 +08:00
v-zhangjc9
255aad4987 feat(web): 优化图标显示 2025-05-12 19:28:23 +08:00
v-zhangjc9
f23de7c959 fix(web): 修复页面显示不正确、logo显示异常 2025-05-12 18:15:10 +08:00
v-zhangjc9
b0603d10bc feat(web): 优化页面跳转和菜单展现 2025-05-12 15:59:46 +08:00
v-zhangjc9
1e7b195f9f feat(web): 更换页面框架为pro-layout 2025-05-12 10:42:59 +08:00
v-zhangjc9
aa93b52dd9 refactor(web): 优化部署打包方案 2025-05-09 17:23:26 +08:00
v-zhangjc9
121f6688c6 refactor(web): 更换client代码的目录 2025-05-09 12:14:06 +08:00
v-zhangjc9
8a7ad32df9 fix(web): 修复切换路由不刷新页面的问题 2025-05-09 12:13:34 +08:00
v-zhangjc9
de445d7061 feat(web): 完成基本适配 2025-05-09 12:13:18 +08:00
v-zhangjc9
fa295b15c6 feat(launcher): 增加批量执行ssh命令脚本 2025-04-24 15:56:52 +08:00
v-zhangjc9
51c9e71b0d fix(launcher): 修复空值环境变量错误 2025-04-23 14:08:10 +08:00
v-zhangjc9
224115e938 refactor(all): 移除juice-fs依赖 2025-04-21 12:28:18 +08:00
v-zhangjc9
2f3eaa9e1a feat(scheduler): 恢复调度时间点 2025-02-12 10:13:52 +08:00
v-zhangjc9
f791b60fd5 feat(sync): 增加日志输出 2025-02-12 10:13:52 +08:00
v-zhangjc9
e6a03122a6 feat(cli): 增加主机是否启用的能力 2025-02-12 10:13:52 +08:00
v-zhangjc9
7249419624 feat(scheduler): 微调b12集群的资源限制 2025-02-12 10:13:52 +08:00
v-zhangjc9
58140fa0e8 feat(bin): 移除ytp传输 2025-02-12 10:13:52 +08:00
v-zhangjc9
b3ccbce16e feat(all): 移除b5集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
6dbad6825d feat(scheduler): 调整b12的资源限制
反正也没有备用集群的需求,一个集群用到头就好
2025-02-12 10:13:52 +08:00
v-zhangjc9
686c523274 fix(scheduler): 修复闲时压缩crm重点表调度到A4集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
1e88c62987 feat(scheduler): 禁止b5、a4在闲时调度期间使用 2025-02-12 10:13:52 +08:00
v-zhangjc9
fb79468eee feat(web): 增加指标采集进度显示 2025-02-12 10:13:52 +08:00
v-zhangjc9
7efd9129c2 feat(monitor): 增加关于hudi表文件数的监控指标 2025-02-12 10:13:52 +08:00
v-zhangjc9
e30a720cea fix(hudi-query): 修复接口调用错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
28b3fd9ca1 feat(hudi-query): 增加关于hdfs文件数相关的接口 2025-02-12 10:13:52 +08:00
v-zhangjc9
70c2442ff1 fix(forest): 修复接口类型错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
3c971e1438 feat(scheduler): 修复调度 2025-02-12 10:13:52 +08:00
v-zhangjc9
2c7d72bdb8 feat(scheduler): 调整日常调度的时间点
停止11、14点的全表压缩调度
2025-02-12 10:13:52 +08:00
322 changed files with 16231 additions and 121841 deletions

2
.gitignore vendored
View File

@@ -32,7 +32,7 @@ buildNumber.properties
!.vscode/*.code-snippets !.vscode/*.code-snippets
.history/ .history/
*.vsix *.vsix
.idea/** **/.idea/**
cmake-build-*/ cmake-build-*/
.idea/**/mongoSettings.xml .idea/**/mongoSettings.xml
*.iws *.iws

View File

@@ -1,3 +1,168 @@
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s20.hdp.dc:19521/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T154854.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154825.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154754.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154616.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154529.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151839.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151753.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151727.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151704.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151540.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151442.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151417.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151409.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151340.404.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:31719/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_sz/acct_item_755&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:31719/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_sz/acct_item_755&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10) User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
@@ -358,163 +523,3 @@ Accept-Encoding: br,deflate,gzip,x-gzip
### ###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164901.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164758.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164303.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164220.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164107.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=C5D2666661F27F68E53223FE5B74AF35
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T163410.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/queue/queue/clear?name=compaction-queue-pre
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=8516C92140B5118AF9AA61025D0F8C93
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_scheduler/schedule/all
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=7A4C34E0240A98C1186F3A2551BC5E80
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_web/cloud/list
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-09T170723.200.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/hudi_api/api/message_id?flink_job_id=1542097984132706304&alias=crm_cfguse_mkt_cam_strategy_rel
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Type: text/plain
Content-Length: 738
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=E6FF5447C8553BA4268979B8C5779363
Accept-Encoding: br,deflate,gzip,x-gzip
\#Properties saved on 2023-12-26T09:18:39.583Z
\#Tue Dec 26 17:18:39 CST 2023
hoodie.table.precombine.field=update_ts
hoodie.datasource.write.drop.partition.columns=false
hoodie.table.partition.fields=CITY_ID
hoodie.table.type=MERGE_ON_READ
hoodie.archivelog.folder=archived
hoodie.compaction.payload.class=org.apache.hudi.common.model.OverwriteWithLatestAvroPayload
hoodie.timeline.layout.version=1
hoodie.table.version=4
hoodie.table.recordkey.fields=_key
hoodie.datasource.write.partitionpath.urlencode=false
hoodie.table.name=dws_account
hoodie.table.keygenerator.class=org.apache.hudi.keygen.SimpleKeyGenerator
hoodie.table.timeline.timezone=LOCAL
hoodie.datasource.write.hive_style_partitioning=false
hoodie.table.checksum=989688289
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
<> 2024-05-08T095641.500.txt
###

102
bin/.gitignore vendored Normal file
View File

@@ -0,0 +1,102 @@
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
.history/
*.vsix
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

54
bin/build-all.js Normal file
View File

@@ -0,0 +1,54 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_package_batch, run_upload} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch([
'service-common',
'service-dependencies',
'service-configuration',
'service-forest',
'service-cli',
'service-cli/service-cli-core',
'service-executor',
'service-executor/service-executor-core',
'utils/executor',
])
await run_package_batch([
'service-api',
'service-check',
'service-cli/service-cli-runner',
'service-cloud-query',
'service-executor/service-executor-manager',
'service-executor/service-executor-task',
'service-command',
'service-command-pro',
'service-exporter',
'service-flink-query',
'service-gateway',
'service-hudi-query',
'service-info-query',
'service-monitor',
'service-loki-query',
'service-pulsar-query',
'service-queue',
'service-scheduler',
'service-uploader',
'service-web',
'service-yarn-query',
'service-zookeeper-query',
'utils/patch',
'utils/sync',
])
for (const profile of ['b2a4', 'b2b1', 'b2b12']) {
await run_package('service-launcher', profile)
await run_upload(`**/service-launcher-${profile}-1.0.0-SNAPSHOT.jar`)
}
await run_upload('**/target/*-1.0.0-SNAPSHOT.jar')
} catch (e) {
console.error(e)
}

View File

@@ -1,38 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
mvn install -N -D skipTests
deploy service-common service-dependencies service-configuration service-forest service-cli service-cli/service-cli-core service-executor service-executor/service-executor-core utils/executor
package service-api service-check service-cli/service-cli-runner service-cloud-query service-executor/service-executor-manager service-executor/service-executor-task service-command service-command-pro service-exporter service-flink-query service-gateway service-hudi-query service-info-query service-monitor service-loki-query service-pulsar-query service-queue service-scheduler service-uploader service-web service-yarn-query service-zookeeper-query utils/patch utils/sync
configs=(b2a4 b2b1 b2b5 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config
upload $root_path/service-launcher/target/service-launcher-$config-1.0.0-SNAPSHOT.jar
done
upload $root_path/service-api/target/service-api-1.0.0-SNAPSHOT.jar
upload $root_path/service-check/target/service-check-1.0.0-SNAPSHOT.jar
upload $root_path/service-cloud-query/target/service-cloud-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-command/target/service-command-1.0.0-SNAPSHOT.jar
upload $root_path/service-command-pro/target/service-command-pro-1.0.0-SNAPSHOT.jar
upload $root_path/service-executor/service-executor-manager/target/service-executor-manager-1.0.0-SNAPSHOT.jar
upload $root_path/service-executor/service-executor-task/target/service-executor-task-1.0.0-SNAPSHOT.jar
upload $root_path/service-exporter/target/service-exporter-1.0.0-SNAPSHOT.jar
upload $root_path/service-flink-query/target/service-flink-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-gateway/target/service-gateway-1.0.0-SNAPSHOT.jar
upload $root_path/service-hudi-query/target/service-hudi-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-info-query/target/service-info-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-loki-query/target/service-loki-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-monitor/target/service-monitor-1.0.0-SNAPSHOT.jar
upload $root_path/service-pulsar-query/target/service-pulsar-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-queue/target/service-queue-1.0.0-SNAPSHOT.jar
upload $root_path/service-scheduler/target/service-scheduler-1.0.0-SNAPSHOT.jar
upload $root_path/service-web/target/service-web-1.0.0-SNAPSHOT.jar
upload $root_path/service-yarn-query/target/service-yarn-query-1.0.0-SNAPSHOT.jar
upload $root_path/service-zookeeper-query/target/service-zookeeper-query-1.0.0-SNAPSHOT.jar
upload $root_path/utils/sync/target/sync-1.0.0-SNAPSHOT.jar
upload_ytp $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar
upload_ytp $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar

15
bin/build-api.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-api')
await run_upload_normal('service-api')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-api
upload $root_path/service-api/target/service-api-1.0.0-SNAPSHOT.jar

15
bin/build-check.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-check')
await run_upload_normal('service-check')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-check
upload $root_path/service-check/target/service-check-1.0.0-SNAPSHOT.jar

15
bin/build-cli.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-cli', 'service-cli/service-cli-core'])
await run_package('service-cli/service-cli-runner')
await run_upload_normal('service-cli-runner', 'service-cli')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-cli service-cli/service-cli-core
package service-cli/service-cli-runner
ytp-transfer2 $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar

15
bin/build-cloud-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-cloud-query')
await run_upload_normal('service-cloud-query')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-cloud-query
upload $root_path/service-cloud-query/target/service-cloud-query-1.0.0-SNAPSHOT.jar

15
bin/build-command-pro.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-command-pro')
await run_upload_normal('service-command-pro')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-command-pro
upload $root_path/service-command-pro/target/service-command-pro-1.0.0-SNAPSHOT.jar

15
bin/build-command.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-command')
await run_upload_normal('service-command')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-command
upload $root_path/service-command/target/service-command-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest', 'service-executor', 'service-executor/service-executor-core'])
await run_package('service-executor/service-executor-manager')
await run_upload_normal('service-executor-manager', 'service-executor')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest service-executor service-executor/service-executor-core
package service-executor/service-executor-manager
upload $root_path/service-executor/service-executor-manager/target/service-executor-manager-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest', 'service-executor', 'service-executor/service-executor-core'])
await run_package('service-executor/service-executor-task')
await run_upload_normal('service-executor-task', 'service-executor')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest service-executor service-executor/service-executor-core
package service-executor/service-executor-task
upload $root_path/service-executor/service-executor-task/target/service-executor-task-1.0.0-SNAPSHOT.jar

15
bin/build-exporter.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";
// 切换目录
cd(trim(path.dirname(import.meta.dirname)))
// 执行流程
try {
await run_deploy_root()
await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
await run_package('service-exporter')
await run_upload_normal('service-exporter')
} catch (e) {
console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-exporter
upload $root_path/service-exporter/target/service-exporter-1.0.0-SNAPSHOT.jar

15
bin/build-flink-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package flink-query, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-flink-query')
    await run_upload_normal('service-flink-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-flink-query
upload $root_path/service-flink-query/target/service-flink-query-1.0.0-SNAPSHOT.jar

15
bin/build-gateway.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package the gateway, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-gateway')
    await run_upload_normal('service-gateway')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-gateway
upload $root_path/service-gateway/target/service-gateway-1.0.0-SNAPSHOT.jar

15
bin/build-hudi-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package hudi-query, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-hudi-query')
    await run_upload_normal('service-hudi-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-hudi-query
upload $root_path/service-hudi-query/target/service-hudi-query-1.0.0-SNAPSHOT.jar

15
bin/build-info-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package info-query, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-info-query')
    await run_upload_normal('service-info-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-info-query
upload $root_path/service-info-query/target/service-info-query-1.0.0-SNAPSHOT.jar

17
bin/build-launcher.js Normal file
View File

@@ -0,0 +1,17 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build the launcher once per deployment profile and upload each profile's jar.
const profiles = ['b2a4', 'b2b1', 'b2b12']
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
    for (const profile of profiles) {
        await run_package('service-launcher', profile)
        await run_upload(`**/service-launcher-${profile}-1.0.0-SNAPSHOT.jar`)
    }
} catch (e) {
    console.error(e)
}

View File

@@ -1,11 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
configs=(b2a4 b2b1 b2b5 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config
upload $root_path/service-launcher/target/service-launcher-$config-1.0.0-SNAPSHOT.jar
done

15
bin/build-loki-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package loki-query, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-loki-query')
    await run_upload_normal('service-loki-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-loki-query
upload $root_path/service-loki-query/target/service-loki-query-1.0.0-SNAPSHOT.jar

15
bin/build-monitor.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package the monitor, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-monitor')
    await run_upload_normal('service-monitor')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-monitor
upload $root_path/service-monitor/target/service-monitor-1.0.0-SNAPSHOT.jar

15
bin/build-patch.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy service-common, package utils/patch, upload the jar.
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common'])
    await run_package('utils/patch')
    // Fix: the argument is already a glob for the built jar, so it must go
    // through run_upload. run_upload_normal would wrongly append another
    // "/target/<name>-1.0.0-SNAPSHOT.jar" segment to it (cf. build-sync.js,
    // which uses run_upload for the same "**/<name>.jar" pattern).
    await run_upload('**/patch-1.0.0-SNAPSHOT.jar')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package utils/patch
upload $root_path/utils/patch/target/patch-1.0.0-SNAPSHOT.jar

15
bin/build-pulsar-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package pulsar-query, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-pulsar-query')
    await run_upload_normal('service-pulsar-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-pulsar-query
upload $root_path/service-pulsar-query/target/service-pulsar-query-1.0.0-SNAPSHOT.jar

15
bin/build-queue.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package the queue, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-queue')
    await run_upload_normal('service-queue')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-queue
upload $root_path/service-queue/target/service-queue-1.0.0-SNAPSHOT.jar

15
bin/build-scheduler.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package the scheduler, upload the jar.
const shared_modules = ['service-common', 'service-dependencies', 'service-configuration', 'service-forest']
try {
    await run_deploy_root()
    await run_deploy_batch(shared_modules)
    await run_package('service-scheduler')
    await run_upload_normal('service-scheduler')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-scheduler
upload $root_path/service-scheduler/target/service-scheduler-1.0.0-SNAPSHOT.jar

15
bin/build-sync.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy_batch, run_deploy_root, run_package, run_upload} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy dependencies, package utils/sync, upload by glob
// (the jar name "sync-1.0.0-SNAPSHOT.jar" does not follow the
// <module>/target/<module>-… convention run_upload_normal expects).
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'utils/patch'])
    await run_package('utils/sync')
    await run_upload('**/sync-1.0.0-SNAPSHOT.jar')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest utils/patch
package utils/sync
upload $root_path/utils/sync/target/sync-1.0.0-SNAPSHOT.jar

15
bin/build-uploader.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
// Fix: dropped the unused run_upload import — only run_upload_normal is called.
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package the uploader, upload the jar.
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
    await run_package('service-uploader')
    await run_upload_normal('service-uploader')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-uploader
ytp-transfer2 $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar

15
bin/build-web.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
// Fix: dropped the unused run_upload import — only run_upload_normal is called.
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package service-web, upload the jar.
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
    await run_package('service-web')
    await run_upload_normal('service-web')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-web
upload $root_path/service-web/target/service-web-1.0.0-SNAPSHOT.jar

15
bin/build-yarn-query.js Normal file
View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
// Fix: dropped the unused run_upload import — only run_upload_normal is called.
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package yarn-query, upload the jar.
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
    await run_package('service-yarn-query')
    await run_upload_normal('service-yarn-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-yarn-query
upload $root_path/service-yarn-query/target/service-yarn-query-1.0.0-SNAPSHOT.jar

View File

@@ -0,0 +1,15 @@
import {cd, path} from 'zx'
import {trim} from "licia";
// Fix: dropped the unused run_upload import — only run_upload_normal is called.
import {run_deploy_batch, run_deploy_root, run_package, run_upload_normal} from "./library.js";

// Switch to the repository root (the parent directory of bin/).
cd(trim(path.dirname(import.meta.dirname)))

// Build pipeline: deploy shared modules, package zookeeper-query, upload the jar.
try {
    await run_deploy_root()
    await run_deploy_batch(['service-common', 'service-dependencies', 'service-configuration', 'service-forest'])
    await run_package('service-zookeeper-query')
    await run_upload_normal('service-zookeeper-query')
} catch (e) {
    console.error(e)
}

View File

@@ -1,6 +0,0 @@
#!/bin/bash
root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
package service-zookeeper-query
upload $root_path/service-zookeeper-query/target/service-zookeeper-query-1.0.0-SNAPSHOT.jar

View File

@@ -3,5 +3,5 @@
root_path=/apps/zone_scfp/hudi/cloud root_path=/apps/zone_scfp/hudi/cloud
jdk_path=/opt/jdk8u252-b09/bin/java jdk_path=/opt/jdk8u252-b09/bin/java
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar
${jdk_path} -jar ${root_path}/service-check.jar ${jdk_path} -jar ${root_path}/service-check.jar

View File

@@ -5,7 +5,7 @@ jdk_path=/opt/jdk1.8.0_162/bin/java
arguments=$@ arguments=$@
# 手动上传jar包则注释掉这行更显神通吧反正是 # 手动上传jar包则注释掉这行更显神通吧反正是
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar
${jdk_path} -jar ${jars_path}/service-cli-runner.jar \ ${jdk_path} -jar ${jars_path}/service-cli-runner.jar \
--spring.profiles.active=b12 \ --spring.profiles.active=b12 \
--deploy.generate.command=true \ --deploy.generate.command=true \

102
bin/library.js Normal file
View File

@@ -0,0 +1,102 @@
import {$, fetch, fs, glob, os, path, spinner, syncProcessCwd, usePowerShell} from 'zx'
import {isEqual, trim} from "licia";
// Keep process.cwd() in sync with zx's internal cwd so that cd() in the
// build scripts also affects glob resolution and child processes.
syncProcessCwd(true)
// On Windows, route shell commands through PowerShell instead of cmd.exe.
if (isEqual(os.platform(), 'win32')) {
    usePowerShell()
}
// Maven settings file used for every deploy/package invocation.
// NOTE(review): this is a machine-specific absolute path — consider making it
// configurable (env var / CLI flag) so other developers can run these scripts.
const maven_setting = '/Users/lanyuanxiaoyao/.m2/settings-nas.xml'
// Artifact upload endpoint and its HTTP Basic credentials.
// NOTE(review): credentials are hard-coded in source (and repeated in the
// shell scripts); consider moving them to environment variables.
const upload_url = 'http://132.126.207.124:36800'
const upload_username = 'AxhEbscwsJDbYMH2'
const upload_password = 'cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4'
const millisecondToString = (timestamp) => {
const totalSeconds = Math.floor(parseFloat(timestamp) / 1000)
if (isNaN(totalSeconds) || totalSeconds < 0) {
return "0秒";
}
const days = Math.floor(totalSeconds / 86400)
let remaining = totalSeconds % 86400
const hours = Math.floor(remaining / 3600)
remaining %= 3600
const minutes = Math.floor(remaining / 60)
const seconds = remaining % 60
const parts = []
if (days > 0) parts.push(`${days}`)
if (days > 0 || hours > 0) parts.push(`${hours}小时`)
if (days > 0 || hours > 0 || minutes > 0) parts.push(`${minutes}分钟`)
parts.push(`${seconds}`)
return parts.join('')
}
// Runs `mvn clean deploy` for a single module, showing a spinner while Maven
// works, then logs the elapsed time reported by zx.
export const run_deploy = async (project) => {
    const result = await spinner(
        `Deploying project ${project}`,
        () => $`mvn -pl ${project} clean deploy -D skipTests -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finish deploy ${project} (${elapsed})`)
}
// Deploys only the root (aggregator) POM — `-N` skips all child modules.
export const run_deploy_root = async () => {
    const result = await spinner(
        `Deploying root`,
        () => $`mvn clean deploy -N -D skipTests -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finish deploy root (${elapsed})`)
}
// Deploys each module one at a time, preserving the caller's order
// (modules later in the list may depend on earlier ones).
export const run_deploy_batch = async (projects) => {
    for (let i = 0; i < projects.length; i++) {
        await run_deploy(projects[i])
    }
}
// Builds one module with `mvn clean package` under the given Maven profile.
// The default profile 'b2b12' is omitted from the log messages.
export const run_package = async (project, profile = 'b2b12') => {
    const profileSuffix = isEqual(profile, 'b2b12') ? '' : ` ${profile}`
    const result = await spinner(
        `Packaging project ${project}${profileSuffix}`,
        () => $`mvn -pl ${project} clean package -D skipTests -P ${profile} -s ${maven_setting}`
    )
    const elapsed = millisecondToString(result['duration'])
    console.log(`✅ Finish package ${project}${profileSuffix} (${elapsed})`)
}
// Packages each module sequentially with the default profile.
export const run_package_batch = async (projects) => {
    for (let i = 0; i < projects.length; i++) {
        await run_package(projects[i])
    }
}
// Uploads a single jar to the file service via HTTP POST with Basic auth and,
// on success, deletes the local file so a stale artifact cannot be re-uploaded.
// Throws on any non-200 response (the local file is kept in that case).
const upload = async (file_path) => {
    const credentials = Buffer.from(`${upload_username}:${upload_password}`).toString('base64')
    let response = await spinner(
        `Uploading project ${file_path}`,
        () => fetch(`${upload_url}/file/upload/${path.basename(file_path)}`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/octet-stream',
                'Authorization': `Basic ${credentials}`,
            },
            body: fs.createReadStream(file_path),
            // Required by Node's fetch when the request body is a stream.
            duplex: 'half',
        })
    )
    if (!isEqual(response.status, 200)) {
        // Fix: throw a real Error carrying the HTTP status instead of the raw
        // Response object — callers do console.error(e), and logging a
        // Response prints nothing actionable.
        throw new Error(`Upload failed (HTTP ${response.status}): ${file_path}`)
    }
    console.log(`✅ Finish upload ${file_path}`)
    // Remove the local artifact only after a confirmed successful upload.
    fs.rmSync(file_path)
}
// Uploads every file matching the glob pattern, resolved against the
// shell's current working directory.
export const run_upload = async (pattern) => {
    const cwd = trim($.sync`pwd`.text())
    const matches = glob.sync(pattern)
    for (const match of matches) {
        await upload(path.join(cwd, match))
    }
}
// Uploads the conventional jar produced by `mvn package` for a module:
//   [parent/]<project>/target/<project>-1.0.0-SNAPSHOT.jar
// The optional `parent` argument covers nested modules (e.g.
// service-executor/service-executor-task). Fix: build-executor-task.js already
// calls this with a second argument, which the previous single-parameter
// version silently ignored, so the jar was looked up under the wrong path.
export const run_upload_normal = async (project, parent = '') => {
    const prefix = parent ? `${parent}/` : ''
    await run_upload(`${prefix}${project}/target/${project}-1.0.0-SNAPSHOT.jar`)
}

View File

@@ -1,15 +1,9 @@
#!/bin/bash #!/bin/bash
maven_setting=/Users/lanyuanxiaoyao/.m2/settings-nas.xml
build_profile=b2b12 build_profile=b2b12
iap_username=iap
iap_password=IAPAb123456!
iap_url=$iap_username@132.122.1.162
ytp_username=yyy
ytp_password='QeY\!68\)4nH1'
ytp_url=ftp://$ytp_username:$ytp_password@132.121.122.15:2222
upload_username=AxhEbscwsJDbYMH2 upload_username=AxhEbscwsJDbYMH2
upload_password=cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4 upload_password=cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
upload_url=http://$upload_username:$upload_password@132.126.207.124:36800 upload_url=http://$upload_username:$upload_password@132.126.207.124:36800
@@ -28,23 +22,6 @@ function upload() {
rm $source_file_path rm $source_file_path
} }
function upload_ytp() {
source_file_path=$(realpath $1)
file_name=$(basename $source_file_path)
echo "↪ Source md5: $(md5sum $source_file_path | awk '{print $1}')"
echo "↪ Uploading $source_file_path ↪ /tmp/$file_name"
sshpass -p $iap_password scp $source_file_path $iap_url:/tmp
echo "↪ Upload 162 success"
target_md5=$(sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "md5sum /tmp/$file_name | awk '{print \$1}'")
echo "↪ Target md5: $target_md5"
echo "↪ Command: sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url \"curl --retry 5 $ytp_url -T /tmp/$file_name\""
sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "curl --retry 5 $ytp_url -T /tmp/$file_name"
echo "↪ Upload ytp success"
echo "↪ Download: curl $ytp_url/$file_name -o $file_name"
echo "↪ Delete source"
rm $source_file_path
}
function joining { function joining {
local d=${1-} f=${2-} local d=${1-} f=${2-}
if shift 2; then if shift 2; then
@@ -52,10 +29,14 @@ function joining {
fi fi
} }
function deploy_root() {
mvn deploy -N -D skipTests -s $maven_setting
}
function deploy() { function deploy() {
mvn -pl $(joining , $@) clean install -D skipTests mvn -pl $(joining , $@) clean deploy -D skipTests -s $maven_setting
} }
function package() { function package() {
mvn -pl $(joining , $@) clean package -D skipTests -P $build_profile mvn -pl $(joining , $@) clean package -D skipTests -P $build_profile -s $maven_setting
} }

18
bin/package.json Normal file
View File

@@ -0,0 +1,18 @@
{
"name": "bin",
"version": "1.0.0",
"type": "module",
"dependencies": {
"@webpod/ps": "^0.1.1",
"chalk": "^5.4.1",
"envapi": "^0.2.3",
"fs-extra": "^11.3.0",
"globby": "^14.1.0",
"licia": "^1.48.0",
"minimist": "^1.2.8",
"node-fetch-native": "^1.6.6",
"which": "^5.0.0",
"yaml": "^2.8.0",
"zx": "^8.5.4"
}
}

327
bin/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,327 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
'@webpod/ps':
specifier: ^0.1.1
version: 0.1.1
chalk:
specifier: ^5.4.1
version: 5.4.1
envapi:
specifier: ^0.2.3
version: 0.2.3
fs-extra:
specifier: ^11.3.0
version: 11.3.0
globby:
specifier: ^14.1.0
version: 14.1.0
licia:
specifier: ^1.48.0
version: 1.48.0
minimist:
specifier: ^1.2.8
version: 1.2.8
node-fetch-native:
specifier: ^1.6.6
version: 1.6.6
which:
specifier: ^5.0.0
version: 5.0.0
yaml:
specifier: ^2.8.0
version: 2.8.0
zx:
specifier: ^8.5.4
version: 8.5.4
packages:
'@nodelib/fs.scandir@2.1.5':
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
engines: {node: '>= 8'}
'@nodelib/fs.stat@2.0.5':
resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
engines: {node: '>= 8'}
'@nodelib/fs.walk@1.2.8':
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@sindresorhus/merge-streams@2.3.0':
resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==}
engines: {node: '>=18'}
'@webpod/ingrid@0.0.0-beta.3':
resolution: {integrity: sha512-PkorwT+q/MiIF+It47ORX0wCYHumOeMKwp5KX5WbUvbCeOtSB6b5UUC5FvzlijdwK/YPR+sOitQzyVSsRrMmJA==}
'@webpod/ps@0.1.1':
resolution: {integrity: sha512-SIgb4wWEVlKgdRByMMz9c3y1hpKfNm2sbretCPD49O9LG6itibULMkiRISdkpMdGRiUpbGHp8tiN3ZLYRDHj1g==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
chalk@5.4.1:
resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
envapi@0.2.3:
resolution: {integrity: sha512-kSPSecU+/eH0IajEYZ/LndeBjzSBmLyp/SZFgx8Zgyeu0SoGioHkICOOVJgJLaX/rqZrCrQ+eDxiaYNVcyCsbQ==}
fast-glob@3.3.3:
resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==}
engines: {node: '>=8.6.0'}
fastq@1.19.1:
resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fs-extra@11.3.0:
resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==}
engines: {node: '>=14.14'}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
globby@14.1.0:
resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==}
engines: {node: '>=18'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
ignore@7.0.4:
resolution: {integrity: sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A==}
engines: {node: '>= 4'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
isexe@3.1.1:
resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
engines: {node: '>=16'}
jsonfile@6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
licia@1.48.0:
resolution: {integrity: sha512-bBWiT5CSdEtwuAHiYTJ74yItCjIFdHi4xiFk6BRDfKa+sdCpkUHp69YKb5udNOJlHDzFjNjcMgNZ/+wQIHrB8A==}
merge2@1.4.1:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
micromatch@4.0.8:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
node-fetch-native@1.6.6:
resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==}
path-type@6.0.0:
resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==}
engines: {node: '>=18'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
reusify@1.1.0:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
slash@5.1.0:
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
engines: {node: '>=14.16'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
unicorn-magic@0.3.0:
resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==}
engines: {node: '>=18'}
universalify@2.0.1:
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
engines: {node: '>= 10.0.0'}
which@5.0.0:
resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==}
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
yaml@2.8.0:
resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==}
engines: {node: '>= 14.6'}
hasBin: true
zurk@0.11.2:
resolution: {integrity: sha512-OKUQsmG588B18hzO4ThzOU0NUwr4C8aKl9NjGQfXUv5fskLfS6Sj3XGNbTzKj3d2+jWvmnqS2cgrwYX6bIkDyA==}
zx@8.5.4:
resolution: {integrity: sha512-44oKea9Sa8ZnOkTnS6fRJpg3quzgnbB43nLrVfYnqE86J4sxgZMUDLezzKET/FdOAVkF4X+Alm9Bume+W+RW9Q==}
engines: {node: '>= 12.17.0'}
hasBin: true
snapshots:
'@nodelib/fs.scandir@2.1.5':
dependencies:
'@nodelib/fs.stat': 2.0.5
run-parallel: 1.2.0
'@nodelib/fs.stat@2.0.5': {}
'@nodelib/fs.walk@1.2.8':
dependencies:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.19.1
'@sindresorhus/merge-streams@2.3.0': {}
'@webpod/ingrid@0.0.0-beta.3': {}
'@webpod/ps@0.1.1':
dependencies:
'@webpod/ingrid': 0.0.0-beta.3
zurk: 0.11.2
braces@3.0.3:
dependencies:
fill-range: 7.1.1
chalk@5.4.1: {}
envapi@0.2.3: {}
fast-glob@3.3.3:
dependencies:
'@nodelib/fs.stat': 2.0.5
'@nodelib/fs.walk': 1.2.8
glob-parent: 5.1.2
merge2: 1.4.1
micromatch: 4.0.8
fastq@1.19.1:
dependencies:
reusify: 1.1.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fs-extra@11.3.0:
dependencies:
graceful-fs: 4.2.11
jsonfile: 6.1.0
universalify: 2.0.1
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
globby@14.1.0:
dependencies:
'@sindresorhus/merge-streams': 2.3.0
fast-glob: 3.3.3
ignore: 7.0.4
path-type: 6.0.0
slash: 5.1.0
unicorn-magic: 0.3.0
graceful-fs@4.2.11: {}
ignore@7.0.4: {}
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
isexe@3.1.1: {}
jsonfile@6.1.0:
dependencies:
universalify: 2.0.1
optionalDependencies:
graceful-fs: 4.2.11
licia@1.48.0: {}
merge2@1.4.1: {}
micromatch@4.0.8:
dependencies:
braces: 3.0.3
picomatch: 2.3.1
minimist@1.2.8: {}
node-fetch-native@1.6.6: {}
path-type@6.0.0: {}
picomatch@2.3.1: {}
queue-microtask@1.2.3: {}
reusify@1.1.0: {}
run-parallel@1.2.0:
dependencies:
queue-microtask: 1.2.3
slash@5.1.0: {}
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
unicorn-magic@0.3.0: {}
universalify@2.0.1: {}
which@5.0.0:
dependencies:
isexe: 3.1.1
yaml@2.8.0: {}
zurk@0.11.2: {}
zx@8.5.4: {}

View File

@@ -1,265 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>fs.azure.user.agent.prefix</name>
<value>User-Agent: APN/1.0 Hortonworks/1.0 HDP/</value>
</property>
<property>
<name>fs.defaultFS</name>
<value>hdfs://b2</value>
<final>true</final>
</property>
<property>
<name>fs.s3a.fast.upload</name>
<value>true</value>
</property>
<property>
<name>fs.s3a.fast.upload.buffer</name>
<value>disk</value>
</property>
<property>
<name>fs.s3a.multipart.size</name>
<value>67108864</value>
</property>
<property>
<name>fs.trash.interval</name>
<value>4320</value>
</property>
<property>
<name>fs.trash.checkpoint.interval</name>
<value>360</value>
</property>
<property>
<name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
<value>120</value>
</property>
<property>
<name>ha.zookeeper.acl</name>
<value>sasl:nn:rwcda</value>
</property>
<property>
<name>ha.zookeeper.quorum</name>
<value>b5m1.hdp.dc:2181,b5m2.hdp.dc:2181,b5m3.hdp.dc:2181</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>hadoop.http.authentication.signature.secret.file</name>
<value>/etc/security/http_secret</value>
</property>
<property>
<name>hadoop.http.authentication.simple.anonymous.allowed</name>
<value>true</value>
</property>
<property>
<name>hadoop.http.authentication.type</name>
<value>simple</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-headers</name>
<value>X-Requested-With,Content-Type,Accept,Origin,WWW-Authenticate,Accept-Encoding,Transfer-Encoding</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-methods</name>
<value>GET,PUT,POST,OPTIONS,HEAD,DELETE</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-origins</name>
<value>*</value>
</property>
<property>
<name>hadoop.http.cross-origin.max-age</name>
<value>1800</value>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.security.AuthenticationFilterInitializer,org.apache.hadoop.security.HttpCrossOriginFilterInitializer</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.rpc.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>hadoop.security.auth_to_local</name>
<value>RULE:[1:$1@$0](hbase-b5@ECLD.COM)s/.*/hbase/
RULE:[1:$1@$0](hdfs-b5@ECLD.COM)s/.*/hdfs/
RULE:[1:$1@$0](spark-b5@ECLD.COM)s/.*/spark/
RULE:[1:$1@$0](yarn-ats-b5@ECLD.COM)s/.*/yarn-ats/
RULE:[1:$1@$0](.*@ECLD.COM)s/@.*//
RULE:[2:$1@$0](dn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](hbase@ECLD.COM)s/.*/hbase/
RULE:[2:$1@$0](hive@ECLD.COM)s/.*/hive/
RULE:[2:$1@$0](jhs@ECLD.COM)s/.*/mapred/
RULE:[2:$1@$0](jn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](livy@ECLD.COM)s/.*/livy/
RULE:[2:$1@$0](nm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](nn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](rangeradmin@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangerlookup@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangertagsync@ECLD.COM)s/.*/rangertagsync/
RULE:[2:$1@$0](rangerusersync@ECLD.COM)s/.*/rangerusersync/
RULE:[2:$1@$0](rm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](spark@ECLD.COM)s/.*/spark/
RULE:[2:$1@$0](yarn@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](yarn-ats-hbase@ECLD.COM)s/.*/yarn-ats/
DEFAULT</value>
</property>
<property>
<name>hadoop.security.authentication</name>
<value>kerberos</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>true</value>
</property>
<property>
<name>hadoop.security.instrumentation.requires.admin</name>
<value>false</value>
</property>
<property>
<name>io.compression.codec.lzo.class</name>
<value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
</property>
<property>
<name>ipc.client.connect.max.retries</name>
<value>50</value>
</property>
<property>
<name>ipc.client.connection.maxidletime</name>
<value>30000</value>
</property>
<property>
<name>ipc.client.idlethreshold</name>
<value>8000</value>
</property>
<property>
<name>ipc.server.tcpnodelay</name>
<value>true</value>
</property>
<property>
<name>mapreduce.jobtracker.webinterface.trusted</name>
<value>false</value>
</property>
<property>
<name>ipc.client.fallback-to-simple-auth-allowed</name>
<value>true</value>
</property>
<property>
<name>fs.hdfs.impl.disable.cache</name>
<value>true</value>
</property>
</configuration>

View File

@@ -1,713 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>dfs.block.access.token.enable</name>
<value>true</value>
</property>
<property>
<name>dfs.blockreport.initialDelay</name>
<value>120</value>
</property>
<property>
<name>dfs.blocksize</name>
<value>134217728</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b5</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.read.shortcircuit</name>
<value>true</value>
</property>
<property>
<name>dfs.client.read.shortcircuit.streams.cache.size</name>
<value>4096</value>
</property>
<property>
<name>dfs.client.retry.policy.enabled</name>
<value>false</value>
</property>
<property>
<name>dfs.cluster.administrators</name>
<value> hdfs</value>
</property>
<property>
<name>dfs.content-summary.limit</name>
<value>5000</value>
</property>
<property>
<name>dfs.data.transfer.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:1019</value>
</property>
<property>
<name>dfs.datanode.balance.bandwidthPerSec</name>
<value>6250000</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>[DISK]file:///data1/hadoop/hdfs/data,[DISK]file:///data2/hadoop/hdfs/data,[DISK]file:///data3/hadoop/hdfs/data,[DISK]file:///data4/hadoop/hdfs/data,[DISK]file:///data5/hadoop/hdfs/data,[DISK]file:///data6/hadoop/hdfs/data,[DISK]file:///data7/hadoop/hdfs/data,[DISK]file:///data8/hadoop/hdfs/data,[DISK]file:///data9/hadoop/hdfs/data,[DISK]file:///data10/hadoop/hdfs/data,[DISK]file:///data11/hadoop/hdfs/data,[DISK]file:///data12/hadoop/hdfs/data,[DISK]file:///data13/hadoop/hdfs/data,[DISK]file:///data14/hadoop/hdfs/data,[DISK]file:///data15/hadoop/hdfs/data,[DISK]file:///data16/hadoop/hdfs/data,[DISK]file:///data17/hadoop/hdfs/data,[DISK]file:///data18/hadoop/hdfs/data,[DISK]file:///data19/hadoop/hdfs/data,[DISK]file:///data20/hadoop/hdfs/data,[DISK]file:///data21/hadoop/hdfs/data,[DISK]file:///data22/hadoop/hdfs/data,[DISK]file:///data23/hadoop/hdfs/data,[DISK]file:///data24/hadoop/hdfs/data</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.data.dir.perm</name>
<value>750</value>
</property>
<property>
<name>dfs.datanode.du.reserved</name>
<value>26405499904</value>
</property>
<property>
<name>dfs.datanode.failed.volumes.tolerated</name>
<value>2</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.http.address</name>
<value>0.0.0.0:1022</value>
</property>
<property>
<name>dfs.datanode.https.address</name>
<value>0.0.0.0:50475</value>
</property>
<property>
<name>dfs.datanode.ipc.address</name>
<value>0.0.0.0:8010</value>
</property>
<property>
<name>dfs.datanode.kerberos.principal</name>
<value>dn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.datanode.keytab.file</name>
<value>/etc/security/keytabs/dn.service.keytab</value>
</property>
<property>
<name>dfs.datanode.max.transfer.threads</name>
<value>16384</value>
</property>
<property>
<name>dfs.domain.socket.path</name>
<value>/var/lib/hadoop-hdfs/dn_socket</value>
</property>
<property>
<name>dfs.encrypt.data.transfer.cipher.suites</name>
<value>AES/CTR/NoPadding</value>
</property>
<property>
<name>dfs.ha.automatic-failover.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.ha.fencing.methods</name>
<value>shell(/bin/true)</value>
</property>
<property>
<name>dfs.ha.namenodes.b5</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.heartbeat.interval</name>
<value>3</value>
</property>
<property>
<name>dfs.hosts.exclude</name>
<value>/etc/hadoop/conf/dfs.exclude</value>
</property>
<property>
<name>dfs.http.policy</name>
<value>HTTP_ONLY</value>
</property>
<property>
<name>dfs.https.port</name>
<value>50470</value>
</property>
<property>
<name>dfs.internal.nameservices</name>
<value>b5</value>
</property>
<property>
<name>dfs.journalnode.edits.dir.b5</name>
<value>/data2/hadoop/hdfs/journal</value>
</property>
<property>
<name>dfs.journalnode.http-address</name>
<value>0.0.0.0:8480</value>
</property>
<property>
<name>dfs.journalnode.https-address</name>
<value>0.0.0.0:8481</value>
</property>
<property>
<name>dfs.journalnode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.kerberos.principal</name>
<value>jn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.keytab.file</name>
<value>/etc/security/keytabs/jn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.accesstime.precision</name>
<value>0</value>
</property>
<property>
<name>dfs.namenode.acls.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.audit.log.async</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.read.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.write.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.checkpoint.dir</name>
<value>/data/hadoop/hdfs/namesecondary</value>
</property>
<property>
<name>dfs.namenode.checkpoint.edits.dir</name>
<value>${dfs.namenode.checkpoint.dir}</value>
</property>
<property>
<name>dfs.namenode.checkpoint.period</name>
<value>21600</value>
</property>
<property>
<name>dfs.namenode.checkpoint.txns</name>
<value>1000000</value>
</property>
<property>
<name>dfs.namenode.fslock.fair</name>
<value>false</value>
</property>
<property>
<name>dfs.namenode.handler.count</name>
<value>100</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn1</name>
<value>b5m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn2</name>
<value>b5m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn1</name>
<value>b5m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn2</name>
<value>b5m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.kerberos.principal</name>
<value>nn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.keytab.file</name>
<value>/etc/security/keytabs/nn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.max.extra.edits.segments.retained</name>
<value>180</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/data1/hadoop/hdfs/namenode,/data2/hadoop/hdfs/namenode</value>
<final>true</final>
</property>
<property>
<name>dfs.namenode.name.dir.restore</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.num.extra.edits.retained</name>
<value>18000</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn1</name>
<value>b5m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn2</name>
<value>b5m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.safemode.threshold-pct</name>
<value>0.99</value>
</property>
<property>
<name>dfs.namenode.shared.edits.dir.b5</name>
<value>qjournal://b5m1.hdp.dc:8485;b5m2.hdp.dc:8485;b5m3.hdp.dc:8485/b5</value>
</property>
<property>
<name>dfs.namenode.stale.datanode.interval</name>
<value>30000</value>
</property>
<property>
<name>dfs.namenode.startup.delay.block.deletion.sec</name>
<value>3600</value>
</property>
<property>
<name>dfs.namenode.write.stale.datanode.ratio</name>
<value>1.0f</value>
</property>
<property>
<name>dfs.nameservices</name>
<value>b5,b1,b2,b3,b4,a3,a4,f1,e1,d2</value>
</property>
<property>
<name>dfs.permissions.ContentSummary.subAccess</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.superusergroup</name>
<value>hdfs</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
<property>
<name>dfs.replication.max</name>
<value>50</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
<final>true</final>
</property>
<property>
<name>fs.permissions.umask-mode</name>
<value>022</value>
</property>
<property>
<name>hadoop.caller.context.enabled</name>
<value>true</value>
</property>
<property>
<name>manage.include.files</name>
<value>false</value>
</property>
<property>
<name>nfs.exports.allowed.hosts</name>
<value>* rw</value>
</property>
<property>
<name>nfs.file.dump.dir</name>
<value>/tmp/.hdfs-nfs</value>
</property>
<property>
<name>dfs.client.datanode-restart.timeout</name>
<value>30</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn1</name>
<value>a4m1.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn2</name>
<value>a4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn1</name>
<value>a4m1.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn2</name>
<value>a4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn1</name>
<value>a4m1.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn2</name>
<value>a4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn1</name>
<value>a3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn2</name>
<value>a3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn1</name>
<value>a3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn2</name>
<value>a3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn1</name>
<value>a3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn2</name>
<value>a3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn1</name>
<value>b3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn2</name>
<value>b3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn1</name>
<value>b3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn2</name>
<value>b3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn1</name>
<value>b3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn2</name>
<value>b3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.ha.namenodes.b2</name>
<value>nn3,nn4</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn1</name>
<value>b1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn2</name>
<value>b1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn1</name>
<value>b1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn2</name>
<value>b1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn1</name>
<value>b1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn2</name>
<value>b1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn3</name>
<value>b1m5.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn4</name>
<value>b1m6.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn3</name>
<value>b1m5.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn4</name>
<value>b1m6.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn3</name>
<value>b1m5.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn4</name>
<value>b1m6.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.f1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.f1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn1</name>
<value>f1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn2</name>
<value>f1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn1</name>
<value>f1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn2</name>
<value>f1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn1</name>
<value>f1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn2</name>
<value>f1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.d2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.d2</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn1</name>
<value>d2m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn2</name>
<value>d2m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn1</name>
<value>d2m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn2</name>
<value>d2m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn1</name>
<value>d2m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn2</name>
<value>d2m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.e1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.e1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn1</name>
<value>e1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn2</name>
<value>e1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn1</name>
<value>e1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn2</name>
<value>e1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn1</name>
<value>e1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn2</name>
<value>e1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn1</name>
<value>b4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn2</name>
<value>b4m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn1</name>
<value>b4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn2</name>
<value>b4m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn1</name>
<value>b4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn2</name>
<value>b4m3.hdp.dc:8020</value>
</property>
</configuration>

File diff suppressed because it is too large Load Diff

View File

@@ -74,12 +74,6 @@
<build-tag>b2b1</build-tag> <build-tag>b2b1</build-tag>
</properties> </properties>
</profile> </profile>
<profile>
<id>b2b5</id>
<properties>
<build-tag>b2b5</build-tag>
</properties>
</profile>
<profile> <profile>
<id>b2b12</id> <id>b2b12</id>
<properties> <properties>

102
service-ai/bin/.gitignore vendored Normal file
View File

@@ -0,0 +1,102 @@
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
.history/
*.vsix
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

View File

@@ -0,0 +1,14 @@
import {cd, path} from 'zx'
import {trim} from "licia";
import {run_deploy, run_package, run_upload} from '../../bin/library.js'
// Switch the working directory to the project root (parent of this script's directory).
cd(trim(path.dirname(import.meta.dirname)))
// Build pipeline: deploy the shared core module, then package and upload the chat service.
try {
    await run_deploy('service-ai-core')
    await run_package('service-ai-chat')
    await run_upload('service-ai-chat')
} catch (e) {
    console.error(e)
    // Fix: previously the script swallowed the failure and exited 0, so CI and
    // calling shells could not detect a broken deploy. Propagate a non-zero
    // exit status without cutting off pending stdio (exitCode, not process.exit).
    process.exitCode = 1
}

View File

@@ -0,0 +1,18 @@
{
"name": "bin",
"version": "1.0.0",
"type": "module",
"dependencies": {
"@webpod/ps": "^0.1.1",
"chalk": "^5.4.1",
"envapi": "^0.2.3",
"fs-extra": "^11.3.0",
"globby": "^14.1.0",
"licia": "^1.48.0",
"minimist": "^1.2.8",
"node-fetch-native": "^1.6.6",
"which": "^5.0.0",
"yaml": "^2.8.0",
"zx": "^8.5.4"
}
}

327
service-ai/bin/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,327 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
'@webpod/ps':
specifier: ^0.1.1
version: 0.1.1
chalk:
specifier: ^5.4.1
version: 5.4.1
envapi:
specifier: ^0.2.3
version: 0.2.3
fs-extra:
specifier: ^11.3.0
version: 11.3.0
globby:
specifier: ^14.1.0
version: 14.1.0
licia:
specifier: ^1.48.0
version: 1.48.0
minimist:
specifier: ^1.2.8
version: 1.2.8
node-fetch-native:
specifier: ^1.6.6
version: 1.6.6
which:
specifier: ^5.0.0
version: 5.0.0
yaml:
specifier: ^2.8.0
version: 2.8.0
zx:
specifier: ^8.5.4
version: 8.5.4
packages:
'@nodelib/fs.scandir@2.1.5':
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
engines: {node: '>= 8'}
'@nodelib/fs.stat@2.0.5':
resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
engines: {node: '>= 8'}
'@nodelib/fs.walk@1.2.8':
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@sindresorhus/merge-streams@2.3.0':
resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==}
engines: {node: '>=18'}
'@webpod/ingrid@0.0.0-beta.3':
resolution: {integrity: sha512-PkorwT+q/MiIF+It47ORX0wCYHumOeMKwp5KX5WbUvbCeOtSB6b5UUC5FvzlijdwK/YPR+sOitQzyVSsRrMmJA==}
'@webpod/ps@0.1.1':
resolution: {integrity: sha512-SIgb4wWEVlKgdRByMMz9c3y1hpKfNm2sbretCPD49O9LG6itibULMkiRISdkpMdGRiUpbGHp8tiN3ZLYRDHj1g==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
chalk@5.4.1:
resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
envapi@0.2.3:
resolution: {integrity: sha512-kSPSecU+/eH0IajEYZ/LndeBjzSBmLyp/SZFgx8Zgyeu0SoGioHkICOOVJgJLaX/rqZrCrQ+eDxiaYNVcyCsbQ==}
fast-glob@3.3.3:
resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==}
engines: {node: '>=8.6.0'}
fastq@1.19.1:
resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fs-extra@11.3.0:
resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==}
engines: {node: '>=14.14'}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
globby@14.1.0:
resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==}
engines: {node: '>=18'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
ignore@7.0.4:
resolution: {integrity: sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A==}
engines: {node: '>= 4'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
isexe@3.1.1:
resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
engines: {node: '>=16'}
jsonfile@6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
licia@1.48.0:
resolution: {integrity: sha512-bBWiT5CSdEtwuAHiYTJ74yItCjIFdHi4xiFk6BRDfKa+sdCpkUHp69YKb5udNOJlHDzFjNjcMgNZ/+wQIHrB8A==}
merge2@1.4.1:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
micromatch@4.0.8:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
node-fetch-native@1.6.6:
resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==}
path-type@6.0.0:
resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==}
engines: {node: '>=18'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
reusify@1.1.0:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
slash@5.1.0:
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
engines: {node: '>=14.16'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
unicorn-magic@0.3.0:
resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==}
engines: {node: '>=18'}
universalify@2.0.1:
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
engines: {node: '>= 10.0.0'}
which@5.0.0:
resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==}
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
yaml@2.8.0:
resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==}
engines: {node: '>= 14.6'}
hasBin: true
zurk@0.11.2:
resolution: {integrity: sha512-OKUQsmG588B18hzO4ThzOU0NUwr4C8aKl9NjGQfXUv5fskLfS6Sj3XGNbTzKj3d2+jWvmnqS2cgrwYX6bIkDyA==}
zx@8.5.4:
resolution: {integrity: sha512-44oKea9Sa8ZnOkTnS6fRJpg3quzgnbB43nLrVfYnqE86J4sxgZMUDLezzKET/FdOAVkF4X+Alm9Bume+W+RW9Q==}
engines: {node: '>= 12.17.0'}
hasBin: true
snapshots:
'@nodelib/fs.scandir@2.1.5':
dependencies:
'@nodelib/fs.stat': 2.0.5
run-parallel: 1.2.0
'@nodelib/fs.stat@2.0.5': {}
'@nodelib/fs.walk@1.2.8':
dependencies:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.19.1
'@sindresorhus/merge-streams@2.3.0': {}
'@webpod/ingrid@0.0.0-beta.3': {}
'@webpod/ps@0.1.1':
dependencies:
'@webpod/ingrid': 0.0.0-beta.3
zurk: 0.11.2
braces@3.0.3:
dependencies:
fill-range: 7.1.1
chalk@5.4.1: {}
envapi@0.2.3: {}
fast-glob@3.3.3:
dependencies:
'@nodelib/fs.stat': 2.0.5
'@nodelib/fs.walk': 1.2.8
glob-parent: 5.1.2
merge2: 1.4.1
micromatch: 4.0.8
fastq@1.19.1:
dependencies:
reusify: 1.1.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fs-extra@11.3.0:
dependencies:
graceful-fs: 4.2.11
jsonfile: 6.1.0
universalify: 2.0.1
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
globby@14.1.0:
dependencies:
'@sindresorhus/merge-streams': 2.3.0
fast-glob: 3.3.3
ignore: 7.0.4
path-type: 6.0.0
slash: 5.1.0
unicorn-magic: 0.3.0
graceful-fs@4.2.11: {}
ignore@7.0.4: {}
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
isexe@3.1.1: {}
jsonfile@6.1.0:
dependencies:
universalify: 2.0.1
optionalDependencies:
graceful-fs: 4.2.11
licia@1.48.0: {}
merge2@1.4.1: {}
micromatch@4.0.8:
dependencies:
braces: 3.0.3
picomatch: 2.3.1
minimist@1.2.8: {}
node-fetch-native@1.6.6: {}
path-type@6.0.0: {}
picomatch@2.3.1: {}
queue-microtask@1.2.3: {}
reusify@1.1.0: {}
run-parallel@1.2.0:
dependencies:
queue-microtask: 1.2.3
slash@5.1.0: {}
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
unicorn-magic@0.3.0: {}
universalify@2.0.1: {}
which@5.0.0:
dependencies:
isexe: 3.1.1
yaml@2.8.0: {}
zurk@0.11.2: {}
zx@8.5.4: {}

0
service-ai/bin/test.js Normal file
View File

177
service-ai/pom.xml Normal file
View File

@@ -0,0 +1,177 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>pom</packaging>
<description>Hudi AI服务集合</description>
<modules>
<module>service-ai-core</module>
<module>service-ai-chat</module>
<module>service-ai-knowledge</module>
</modules>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<build-tag>b2b12</build-tag>
<spring-boot.version>3.4.3</spring-boot.version>
<spring-cloud.version>2024.0.1</spring-cloud.version>
<spring-ai.version>1.0.0-RC1</spring-ai.version>
<eclipse-collections.version>11.1.0</eclipse-collections.version>
<curator.version>5.1.0</curator.version>
<hutool.version>5.8.27</hutool.version>
</properties>
<dependencyManagement>
<dependencies>
<!-- 当前项目依赖 -->
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-configuration</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-dependencies</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<version>${project.version}</version>
</dependency>
<!-- spring boot 相关依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>${spring-cloud.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-bom</artifactId>
<version>${spring-ai.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<version>1.5.36</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-eclipse-collections</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.github.ulisesbocchio</groupId>
<artifactId>jasypt-spring-boot-starter</artifactId>
<version>3.0.5</version>
</dependency>
<!-- 日志相关 -->
<dependency>
<groupId>pl.tkowalcz.tjahzi</groupId>
<artifactId>logback-appender</artifactId>
<version>0.9.23</version>
</dependency>
<dependency>
<groupId>com.github.loki4j</groupId>
<artifactId>loki-logback-appender-jdk8</artifactId>
<version>1.4.2</version>
</dependency>
<!-- 其他 -->
<dependency>
<groupId>dev.failsafe</groupId>
<artifactId>failsafe</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections</artifactId>
<version>${eclipse-collections.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections-api</artifactId>
<version>${eclipse-collections.version}</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
</dependency>
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-spring-boot-starter</artifactId>
<version>2.13.2</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${spring-boot.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<distributionManagement>
<repository>
<id>${releases.id}</id>
<name>${releases.name}</name>
<url>${releases.url}</url>
</repository>
<snapshotRepository>
<id>${snapshots.id}</id>
<name>${snapshots.name}</name>
<url>${snapshots.url}</url>
</snapshotRepository>
</distributionManagement>
</project>

View File

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-chat</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-openai</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,23 @@
package com.lanyuanxiaoyao.service.ai.chat;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.retry.annotation.EnableRetry;
/**
 * Spring Boot entry point for the AI chat service.
 *
 * <p>Scans the whole {@code com.lanyuanxiaoyao.service} package tree so shared
 * configuration (security, web client, forest) from sibling modules is picked up.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@EnableRetry
@EnableEncryptableProperties
@EnableConfigurationProperties
@EnableDiscoveryClient
@SpringBootApplication(scanBasePackages = "com.lanyuanxiaoyao.service")
public class AiChatApplication {
    public static void main(String[] arguments) {
        SpringApplication.run(AiChatApplication.class, arguments);
    }
}

View File

@@ -0,0 +1,87 @@
package com.lanyuanxiaoyao.service.ai.chat.controller;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.ai.chat.entity.MessageVO;
import com.lanyuanxiaoyao.service.ai.chat.tools.DatetimeTools;
import java.io.IOException;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
/**
 * Chat endpoints backed by the configured Spring AI {@link ChatClient}.
 *
 * <p>Both endpoints accept the full conversation history as a list of
 * {@link MessageVO} and replay it to the model behind a fixed system prompt.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@Controller
@RequestMapping("chat")
public class ChatController {
    private static final Logger logger = LoggerFactory.getLogger(ChatController.class);
    private final ChatClient chatClient;

    public ChatController(ChatClient.Builder builder) {
        this.chatClient = builder.build();
    }

    /**
     * Builds a request spec with the fixed system prompt, the datetime tool,
     * and the caller-supplied history converted to Spring AI messages.
     * Any role other than "assistant" is treated as a user message.
     */
    private ChatClient.ChatClientRequestSpec buildRequest(ImmutableList<MessageVO> messages) {
        return chatClient.prompt()
                .system("""
                        你是一名专业的AI运维助手负责“Hudi数据同步服务平台”的运维工作
                        你将会友好地帮助用户解答关于该平台运维工作的问题,你会尽可能通过各种方式获取知识和数据来解答;
                        对于无法通过已有知识回答的问题,你会提示用户你无法解答该问题,而不是虚构不存在的数据或答案;
                        对于与该平台无关的问题,你会委婉地拒绝用户,并提示无法回答;
                        你将始终在中文语境下进行对话。
                        """)
                .tools(new DatetimeTools())
                .messages(
                        messages
                                // Single pass: map role to message type and upcast in one step
                                // (previously two collect() passes were used).
                                .collect(message -> (Message) (StrUtil.equals(message.getRole(), "assistant")
                                        ? new AssistantMessage(message.getContent())
                                        : new UserMessage(message.getContent())))
                                .toList()
                );
    }

    /**
     * Blocking chat: returns the complete model answer as plain text.
     *
     * @return the trimmed answer, never {@code null}
     */
    @PostMapping("sync")
    @ResponseBody
    public String chatSync(@RequestBody ImmutableList<MessageVO> messages) {
        String content = buildRequest(messages)
                .call()
                .content();
        return StrUtil.trimToEmpty(content);
    }

    /**
     * Streaming chat: forwards each model token to the client via SSE.
     *
     * @return an emitter completed when the stream ends or errors
     */
    @PostMapping("async")
    public SseEmitter chatAsync(@RequestBody ImmutableList<MessageVO> messages) {
        SseEmitter emitter = new SseEmitter();
        buildRequest(messages)
                .stream()
                .content()
                .subscribe(
                        content -> {
                            try {
                                emitter.send(content);
                            } catch (IOException e) {
                                // Rethrow so Reactor cancels the upstream flux and routes the
                                // failure to the error consumer below. Previously
                                // completeWithError(e) was also called here, completing the
                                // emitter twice for a single failure.
                                throw new RuntimeException(e);
                            }
                        },
                        emitter::completeWithError,
                        emitter::complete
                );
        return emitter;
    }
}

View File

@@ -0,0 +1,34 @@
package com.lanyuanxiaoyao.service.ai.chat.entity;
/**
 * Transport object for a single chat message: a role ("user"/"assistant")
 * plus its plain-text content.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class MessageVO {
    // Conversation role, e.g. "user" or "assistant".
    private String role;
    // Plain-text body of the message.
    private String content;

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("MessageVO{");
        text.append("role='").append(role).append('\'');
        text.append(", content='").append(content).append('\'');
        text.append('}');
        return text.toString();
    }
}

View File

@@ -0,0 +1,18 @@
package com.lanyuanxiaoyao.service.ai.chat.tools;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.springframework.ai.tool.annotation.Tool;
/**
 * Tool callbacks exposing date/time information to the chat model.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class DatetimeTools {
    // DateTimeFormatter is immutable and thread-safe, so a shared constant is fine.
    // (Was `private final static formatter`; renamed and reordered per Java convention.)
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * @return the current local date-time formatted as {@code yyyy-MM-dd HH:mm:ss}
     */
    @Tool(description = "获取当前日期和时间")
    public String getCurrentDateTime() {
        return LocalDateTime.now().format(FORMATTER);
    }
}

View File

@@ -0,0 +1,15 @@
spring:
application:
name: service-ai-chat
profiles:
include: random-port,common,discovery,metrics,forest
ai:
openai:
base-url: http://132.121.206.65:10086
api-key: ENC(K+Hff9QGC+fcyi510VIDd9CaeK/IN5WBJ9rlkUsHEdDgIidW+stHHJlsK0lLPUXXREha+ToQZqqDXJrqSE+GUKCXklFhelD8bRHFXBIeP/ZzT2cxhzgKUXgjw3S0Qw2R)
chat:
options:
model: 'Qwen3-1.7-vllm'
mvc:
async:
request-timeout: 300000

View File

@@ -0,0 +1,34 @@
<configuration>
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<springProperty scope="context" name="LOKI_PUSH_URL" source="loki.url"/>
<springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
<springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
<appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern>
</encoder>
</appender>
<appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOGGING_PARENT:-.}/${APP_NAME:-run}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGGING_PARENT:-.}/archive/${APP_NAME:-run}-%d{yyyy-MM-dd}.gz</fileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %p [${HOSTNAME}] [%t] %logger #@# %m%n%wEx</pattern>
</encoder>
</appender>
<logger name="com.zaxxer.hikari" level="ERROR"/>
<logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
<root level="INFO">
<appender-ref ref="Console"/>
<appender-ref ref="RollingFile"/>
</root>
</configuration>

View File

@@ -0,0 +1,37 @@
package com.lanyuanxiaoyao.service.ai.chat;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
/**
 * Manual smoke test: sends a single greeting to the OpenAI-compatible vLLM
 * endpoint and prints the reply. Not wired into any test framework.
 *
 * <p>Removed the unused {@code reactor.core.Disposable} import.
 *
 * <p>NOTE(review): the endpoint URL and API key are hard-coded in plain text
 * here, while the runtime configs keep them Jasypt-encrypted — avoid committing
 * live credentials in source.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
public class TestChat {
    public static void main(String[] args) {
        ChatClient client = ChatClient.builder(
                        OpenAiChatModel.builder()
                                .openAiApi(
                                        OpenAiApi.builder()
                                                .baseUrl("http://132.121.206.65:10086")
                                                .apiKey("*XMySqV%>hR&v>>g*NwCs3tpQ5FVMFEF2VHVTj<MYQd$&@$sY7CgqNyea4giJi4")
                                                .build()
                                )
                                .defaultOptions(
                                        OpenAiChatOptions.builder()
                                                .model("Qwen3-1.7")
                                                .build()
                                )
                                .build()
                )
                .build();
        String content = client.prompt()
                .user("你好")
                .call()
                .content();
        System.out.println(content);
    }
}

View File

@@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-core</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<exclusions>
<exclusion>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot-starter</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-sleuth</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,39 @@
package com.lanyuanxiaoyao.service.ai.core.configuration;
import java.net.http.HttpClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.http.client.JdkClientHttpRequestFactory;
import org.springframework.http.client.reactive.JdkClientHttpConnector;
import org.springframework.web.client.RestClient;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Pins Spring's outbound HTTP clients to the JDK {@link HttpClient} restricted
 * to HTTP/1.1, because the vLLM backend cannot negotiate newer protocols.
 *
 * <p>NOTE(review): the original note said "http1.0", but the code pins
 * {@code HTTP_1_1} — confirm which version the backend actually requires.
 *
 * @author lanyuanxiaoyao
 * @version 20250519
 */
@Configuration
public class WebClientConfiguration {
    /** Builds a fresh JDK client limited to HTTP/1.1; each bean gets its own instance. */
    private HttpClient newHttp11Client() {
        return HttpClient.newBuilder()
                .version(HttpClient.Version.HTTP_1_1)
                .build();
    }

    /** Primary blocking-client builder, routed through the HTTP/1.1 JDK client. */
    @Bean
    @Primary
    public RestClient.Builder restClientBuilder() {
        return RestClient.builder()
                .requestFactory(new JdkClientHttpRequestFactory(newHttp11Client()));
    }

    /** Primary reactive-client builder, routed through the HTTP/1.1 JDK client. */
    @Bean
    @Primary
    public WebClient.Builder webClientBuilder() {
        return WebClient.builder()
                .clientConnector(new JdkClientHttpConnector(newHttp11Client()));
    }
}

View File

@@ -0,0 +1,58 @@
package com.lanyuanxiaoyao.service.configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.CorsConfigurationSource;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
/**
 * HTTP-Basic security with permissive CORS for the AI services.
 *
 * <p>Every request must authenticate against a single in-memory user whose
 * credentials come from (Jasypt-encrypted) configuration. CSRF and form login
 * are disabled because clients authenticate per-request via Basic auth.
 * (Removed an unused {@code Logger} field.)
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@Configuration
@EnableWebSecurity
public class SecurityConfig {
    /**
     * Secures every endpoint with HTTP Basic authentication.
     *
     * @param http builder injected by Spring Security
     * @return the assembled filter chain
     * @throws Exception if the chain cannot be built
     */
    @Bean
    public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {
        return http.authorizeHttpRequests(registry -> registry.anyRequest().authenticated())
                .httpBasic(Customizer.withDefaults())
                // Basic auth is re-sent on every request, so CSRF tokens add nothing here.
                .csrf(AbstractHttpConfigurer::disable)
                .cors(configurer -> configurer.configurationSource(corsConfigurationSource()))
                .formLogin(AbstractHttpConfigurer::disable)
                .build();
    }

    // NOTE(review): allowCredentials(true) combined with a wildcard origin
    // pattern lets any site issue authenticated cross-origin requests; tighten
    // the origin list if these services are ever exposed beyond the intranet.
    private CorsConfigurationSource corsConfigurationSource() {
        CorsConfiguration configuration = new CorsConfiguration();
        configuration.setAllowCredentials(true);
        configuration.addAllowedHeader("*");
        configuration.addAllowedMethod("*");
        configuration.addAllowedOriginPattern("*");
        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
        source.registerCorsConfiguration("/**", configuration);
        return source;
    }

    /**
     * Registers the single in-memory user backing Basic auth.
     * The {@code {noop}} prefix stores the password un-hashed in memory;
     * the configured value itself is encrypted at rest.
     */
    @Bean
    public InMemoryUserDetailsManager userDetailsService(SecurityProperties securityProperties) {
        UserDetails user = User.builder()
                .username(securityProperties.getUsername())
                .password("{noop}" + securityProperties.getDarkcode())
                .authorities(securityProperties.getAuthority())
                .build();
        return new InMemoryUserDetailsManager(user);
    }
}

View File

@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-knowledge</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-openai</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-vector-store-qdrant</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-markdown-document-reader</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>liteflow-spring-boot-starter</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,29 @@
package com.lanyuanxiaoyao.service.ai.knowledge;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.retry.annotation.EnableRetry;
/**
 * Spring Boot entry point for the knowledge-base service.
 *
 * @author lanyuanxiaoyao
 * @version 20250515
 */
@SpringBootApplication(scanBasePackages = "com.lanyuanxiaoyao.service")
@EnableDiscoveryClient
@EnableConfigurationProperties
@EnableEncryptableProperties
@EnableRetry
public class KnowledgeApplication implements ApplicationRunner {
    public static void main(String[] args) {
        SpringApplication.run(KnowledgeApplication.class, args);
    }

    // Intentionally empty: ApplicationRunner is kept as a hook for startup
    // tasks, but nothing runs here yet. NOTE(review): if no startup work is
    // planned, the ApplicationRunner interface could be dropped.
    @Override
    public void run(ApplicationArguments args) {
    }
}

View File

@@ -0,0 +1,129 @@
package com.lanyuanxiaoyao.service.ai.knowledge.controller;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.vo.KnowledgeVO;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.vo.PointVO;
import com.lanyuanxiaoyao.service.ai.knowledge.reader.TextLineReader;
import com.lanyuanxiaoyao.service.ai.knowledge.service.KnowledgeService;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Points;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.reader.TextReader;
import org.springframework.ai.reader.markdown.MarkdownDocumentReader;
import org.springframework.ai.reader.markdown.config.MarkdownDocumentReaderConfig;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.ai.vectorstore.qdrant.QdrantVectorStore;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for managing knowledge bases and inspecting their vector points.
 *
 * @author lanyuanxiaoyao
 * @version 20250515
 */
@RestController
@RequestMapping("knowledge")
public class KnowledgeController {
    private static final Logger logger = LoggerFactory.getLogger(KnowledgeController.class);
    private final KnowledgeService knowledgeService;
    // Native Qdrant client unwrapped from the Spring AI VectorStore abstraction.
    private final QdrantClient client;
    private final EmbeddingModel embeddingModel;

    public KnowledgeController(KnowledgeService knowledgeService, VectorStore vectorStore, EmbeddingModel embeddingModel) {
        this.knowledgeService = knowledgeService;
        // orElseThrow: fail fast at startup if the store exposes no native client.
        client = (QdrantClient) vectorStore.getNativeClient().orElseThrow();
        this.embeddingModel = embeddingModel;
    }

    /** Creates a new knowledge base with the given name and distance strategy. */
    @PostMapping("add")
    public void add(
            @RequestParam("name") String name,
            @RequestParam("strategy") String strategy
    ) throws ExecutionException, InterruptedException {
        knowledgeService.add(name, strategy);
    }

    /** Lists all knowledge bases with their vector-collection statistics. */
    @GetMapping("list")
    public ImmutableList<KnowledgeVO> list() {
        return knowledgeService.list();
    }

    /**
     * Scrolls all points of the named Qdrant collection, returning payload text
     * only (vectors excluded). NOTE(review): no scroll limit is set, so very
     * large collections are returned in one response.
     */
    @GetMapping("list_points")
    public ImmutableList<PointVO> listPoints(@RequestParam("name") String name) throws ExecutionException, InterruptedException {
        Points.ScrollResponse response = client.scrollAsync(
                        Points.ScrollPoints.newBuilder()
                                .setCollectionName(name)
                                // .setLimit(2)
                                .setWithPayload(Points.WithPayloadSelector.newBuilder().setEnable(true).build())
                                .setWithVectors(Points.WithVectorsSelector.newBuilder().setEnable(false).build())
                                .build()
                )
                .get();
        return response.getResultList()
                .stream()
                .collect(Collectors.toCollection(Lists.mutable::empty))
                .collect(point -> {
                    PointVO vo = new PointVO();
                    vo.setId(point.getId().getUuid());
                    // "doc_content" — presumably the payload key the Qdrant vector
                    // store writes document text under; verify against the store config.
                    vo.setText(point.getPayloadMap().get("doc_content").getStringValue());
                    return vo;
                })
                .toImmutable();
    }

    /**
     * Deletes a knowledge base and its backing collection.
     * NOTE(review): destructive operation mapped to GET — consider DELETE/POST.
     */
    @GetMapping("delete")
    public void delete(@RequestParam("name") String name) throws ExecutionException, InterruptedException {
        knowledgeService.remove(name);
    }

    /**
     * Splits raw text into the documents that WOULD be stored, without persisting.
     * NOTE(review): {@code name}, {@code mode} and {@code type} are currently
     * unused — presumably reserved for future splitting strategies; confirm
     * before relying on them.
     */
    @PostMapping("preview_text")
    public ImmutableList<PointVO> previewText(
            @RequestParam("name") String name,
            @RequestParam(value = "mode", defaultValue = "normal") String mode,
            @RequestParam(value = "type", defaultValue = "text") String type,
            @RequestParam("content") String content
    ) {
        TextReader reader = new TextLineReader(new ByteArrayResource(content.getBytes(StandardCharsets.UTF_8)));
        return reader.get()
                .stream()
                .collect(Collectors.toCollection(Lists.mutable::empty))
                .collect(doc -> {
                    PointVO vo = new PointVO();
                    vo.setId(doc.getId());
                    vo.setText(doc.getText());
                    return vo;
                })
                .toImmutable();
    }

    /**
     * Parses the request body as Markdown (splitting on horizontal rules,
     * skipping code blocks and blockquotes) and stores the resulting documents
     * in the collection named {@code name}, creating it if missing.
     * NOTE(review): this uses the raw name as the collection name, while
     * KnowledgeService creates collections named by vector_source_id — confirm
     * the two naming schemes are meant to coexist.
     */
    @PostMapping(value = "process_text", consumes = "text/plain;charset=utf-8")
    public void processText(
            @RequestParam("name") String name,
            @RequestBody String text
    ) {
        VectorStore source = QdrantVectorStore.builder(client, embeddingModel)
                .collectionName(name)
                .initializeSchema(true)
                .build();
        MarkdownDocumentReader reader = new MarkdownDocumentReader(
                new ByteArrayResource(text.getBytes(StandardCharsets.UTF_8)),
                MarkdownDocumentReaderConfig.builder()
                        .withHorizontalRuleCreateDocument(true)
                        .withIncludeCodeBlock(false)
                        .withIncludeBlockquote(false)
                        .build()
        );
        source.add(reader.get());
    }
}

View File

@@ -0,0 +1,54 @@
package com.lanyuanxiaoyao.service.ai.knowledge.entity;
/**
 * Database entity for a knowledge base: a row in service_ai_knowledge that
 * links a display name and distance strategy to its vector-collection id.
 *
 * @author lanyuanxiaoyao
 * @version 20250522
 */
public class Knowledge {
    // Primary key (snowflake id).
    private Long id;
    // Id of the backing vector collection.
    private Long vectorSourceId;
    // Human-readable unique name.
    private String name;
    // Distance strategy, e.g. a Qdrant Distance enum name.
    private String strategy;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getVectorSourceId() {
        return vectorSourceId;
    }

    public void setVectorSourceId(Long vectorSourceId) {
        this.vectorSourceId = vectorSourceId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getStrategy() {
        return strategy;
    }

    public void setStrategy(String strategy) {
        this.strategy = strategy;
    }

    @Override
    public String toString() {
        return new StringBuilder("Knowledge{")
                .append("id=").append(id)
                .append(", vectorSourceId=").append(vectorSourceId)
                .append(", name='").append(name).append('\'')
                .append(", strategy='").append(strategy).append('\'')
                .append('}')
                .toString();
    }
}

View File

@@ -0,0 +1,74 @@
package com.lanyuanxiaoyao.service.ai.knowledge.entity.vo;
/**
 * View object describing a knowledge base together with statistics of its
 * backing vector collection (point/segment counts, vector size, status).
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class KnowledgeVO {
    // Knowledge-base display name.
    private String name;
    // Distance strategy name.
    private String strategy;
    // Vector dimensionality.
    private Long size;
    // Number of points stored in the collection.
    private Long points;
    // Number of storage segments in the collection.
    private Long segments;
    // Collection status name.
    private String status;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getStrategy() {
        return strategy;
    }

    public void setStrategy(String strategy) {
        this.strategy = strategy;
    }

    public Long getSize() {
        return size;
    }

    public void setSize(Long size) {
        this.size = size;
    }

    public Long getPoints() {
        return points;
    }

    public void setPoints(Long points) {
        this.points = points;
    }

    public Long getSegments() {
        return segments;
    }

    public void setSegments(Long segments) {
        this.segments = segments;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        // Fixed: previously printed "CollectionVO{" — a copy-paste leftover
        // from another class.
        return "KnowledgeVO{" +
                "name='" + name + '\'' +
                ", strategy='" + strategy + '\'' +
                ", size=" + size +
                ", points=" + points +
                ", segments=" + segments +
                ", status='" + status + '\'' +
                '}';
    }
}

View File

@@ -0,0 +1,34 @@
package com.lanyuanxiaoyao.service.ai.knowledge.entity.vo;
/**
 * View object for a single vector-store point: its id plus the stored text.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class PointVO {
    // Point id (UUID string).
    private String id;
    // Text payload of the point.
    private String text;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    @Override
    public String toString() {
        return String.format("PointVO{id='%s', text='%s'}", id, text);
    }
}

View File

@@ -0,0 +1,34 @@
package com.lanyuanxiaoyao.service.ai.knowledge.reader;
import cn.hutool.core.util.StrUtil;
import java.util.List;
import java.util.stream.Stream;
import org.springframework.ai.document.Document;
import org.springframework.ai.reader.TextReader;
import org.springframework.core.io.Resource;
/**
 * Text reader that splits each document produced by {@link TextReader} into
 * one document per paragraph (blocks separated by a blank line), keeping the
 * original metadata on every fragment.
 *
 * @author lanyuanxiaoyao
 * @version 20250522
 */
public class TextLineReader extends TextReader {
    public TextLineReader(Resource resource) {
        super(resource);
    }

    @Override
    public List<Document> get() {
        return super.get()
                .stream()
                .flatMap(this::explode)
                .toList();
    }

    /** Splits one document on blank lines; blank documents pass through unchanged. */
    private Stream<Document> explode(Document source) {
        String body = source.getText();
        if (StrUtil.isBlank(body)) {
            return Stream.of(source);
        }
        return Stream.of(body.split("\n\n"))
                .filter(StrUtil::isNotBlank)
                .map(chunk -> new Document(chunk, source.getMetadata()));
    }
}

View File

@@ -0,0 +1,16 @@
package com.lanyuanxiaoyao.service.ai.knowledge.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
 * Placeholder service for embedding pipelines.
 *
 * <p>Currently empty — presumably the future home of the liteflow "embedding"
 * chain components (embedding_start / *_embedding / embedding_finish) declared
 * in config/flow.xml; confirm before removing.
 *
 * @author lanyuanxiaoyao
 * @version 20250522
 */
@Service
public class EmbeddingService {
    private static final Logger logger = LoggerFactory.getLogger(EmbeddingService.class);
}

View File

@@ -0,0 +1,58 @@
package com.lanyuanxiaoyao.service.ai.knowledge.service;
import club.kingon.sql.builder.SqlBuilder;
import cn.hutool.core.util.IdUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Persistence operations for knowledge groups (table {@code service_ai_group}),
 * which link group rows to a parent knowledge entry.
 *
 * @author lanyuanxiaoyao
 * @version 20250522
 */
@Service
public class KnowledgeGroupService {
    private static final Logger logger = LoggerFactory.getLogger(KnowledgeGroupService.class);
    private static final String GROUP_TABLE_NAME = "service_ai_group";

    private final JdbcTemplate template;

    public KnowledgeGroupService(JdbcTemplate template) {
        this.template = template;
    }

    /**
     * Inserts a new group row linked to the given knowledge entry.
     * The primary key is generated locally via snowflake, no DB round-trip needed.
     */
    @Transactional(rollbackFor = Exception.class)
    public void add(Long knowledgeId, String name) {
        String insertSql = SqlBuilder.insertInto(GROUP_TABLE_NAME, "id", "knowledge_id", "name")
                .values()
                .addValue("?", "?", "?")
                .precompileSql();
        template.update(insertSql, IdUtil.getSnowflakeNextId(), knowledgeId, name);
    }

    /** Deletes a single group by its primary key. */
    @Transactional(rollbackFor = Exception.class)
    public void remove(Long groupId) {
        String deleteSql = SqlBuilder.delete(GROUP_TABLE_NAME)
                .whereEq("id", "?")
                .precompileSql();
        template.update(deleteSql, groupId);
    }

    /** Deletes every group belonging to the given knowledge entry. */
    @Transactional(rollbackFor = Exception.class)
    public void removeByKnowledgeId(Long knowledgeId) {
        String deleteSql = SqlBuilder.delete(GROUP_TABLE_NAME)
                .whereEq("knowledge_id", "?")
                .precompileSql();
        template.update(deleteSql, knowledgeId);
    }
}

View File

@@ -0,0 +1,151 @@
package com.lanyuanxiaoyao.service.ai.knowledge.service;
import club.kingon.sql.builder.SqlBuilder;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.Knowledge;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.vo.KnowledgeVO;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * CRUD and vector-collection lifecycle for knowledge bases.
 *
 * <p>A knowledge base is a row in {@code service_ai_knowledge} plus a Qdrant
 * collection named after the row's {@code vector_source_id}.
 *
 * @author lanyuanxiaoyao
 * @version 20250522
 */
@Service
public class KnowledgeService {
    private static final Logger logger = LoggerFactory.getLogger(KnowledgeService.class);
    private static final String KNOWLEDGE_TABLE_NAME = "service_ai_knowledge";
    private final JdbcTemplate template;
    private final EmbeddingModel embeddingModel;
    private final QdrantClient client;
    private final KnowledgeGroupService knowledgeGroupService;

    public KnowledgeService(JdbcTemplate template, EmbeddingModel embeddingModel, VectorStore vectorStore, KnowledgeGroupService knowledgeGroupService) {
        this.template = template;
        this.embeddingModel = embeddingModel;
        // Unwrap the native Qdrant client so collection-level operations are available.
        this.client = (QdrantClient) vectorStore.getNativeClient().orElseThrow();
        this.knowledgeGroupService = knowledgeGroupService;
    }

    // Maps one (id, vector_source_id, name, strategy) row to a Knowledge bean.
    // Fully-qualified JDBC types keep the file's import list unchanged.
    private Knowledge readRow(java.sql.ResultSet rs, int rowNum) throws java.sql.SQLException {
        Knowledge knowledge = new Knowledge();
        knowledge.setId(rs.getLong(1));
        knowledge.setVectorSourceId(rs.getLong(2));
        knowledge.setName(rs.getString(3));
        knowledge.setStrategy(rs.getString(4));
        return knowledge;
    }

    /**
     * Looks up a knowledge base by primary key.
     * Fixed: queryForObject(sql, Knowledge.class, ...) maps single-column
     * results only and threw on this 4-column select; an explicit row mapper
     * is required, and absence now yields {@code null} instead of an exception.
     *
     * @return the matching row, or {@code null} if none exists
     */
    public Knowledge get(Long id) {
        return template.query(
                        SqlBuilder.select("id", "vector_source_id", "name", "strategy")
                                .from(KNOWLEDGE_TABLE_NAME)
                                .whereEq("id", "?")
                                .precompileSql(),
                        this::readRow,
                        id
                )
                .stream()
                .findFirst()
                .orElse(null);
    }

    /**
     * Looks up a knowledge base by its unique name.
     *
     * @return the matching row, or {@code null} if none exists
     */
    public Knowledge get(String name) {
        return template.query(
                        SqlBuilder.select("id", "vector_source_id", "name", "strategy")
                                .from(KNOWLEDGE_TABLE_NAME)
                                .whereEq("name", "?")
                                .precompileSql(),
                        this::readRow,
                        name
                )
                .stream()
                .findFirst()
                .orElse(null);
    }

    /**
     * Creates a knowledge base row and its backing Qdrant collection.
     *
     * @param name     unique display name; rejected if already taken
     * @param strategy Qdrant distance name (Distance.valueOf throws
     *                 IllegalArgumentException for unknown values)
     * @throws RuntimeException if the name already exists
     */
    @Transactional(rollbackFor = Exception.class)
    public void add(String name, String strategy) throws ExecutionException, InterruptedException {
        Integer count = template.queryForObject(
                SqlBuilder.select("count(*)")
                        .from(KNOWLEDGE_TABLE_NAME)
                        .whereEq("name", "?")
                        .precompileSql(),
                Integer.class,
                name
        );
        // Null-safe unboxing: queryForObject returns a boxed Integer.
        if (count != null && count > 0) {
            throw new RuntimeException("名称已存在");
        }
        long id = IdUtil.getSnowflakeNextId();
        long vectorSourceId = IdUtil.getSnowflakeNextId();
        template.update(
                SqlBuilder.insertInto(KNOWLEDGE_TABLE_NAME, "id", "vector_source_id", "name", "strategy")
                        .values()
                        .addValue("?", "?", "?", "?")
                        .precompileSql(),
                id,
                vectorSourceId,
                name,
                strategy
        );
        // The collection is named after vector_source_id, not the display name.
        client.createCollectionAsync(
                String.valueOf(vectorSourceId),
                Collections.VectorParams.newBuilder()
                        .setDistance(Collections.Distance.valueOf(strategy))
                        .setSize(embeddingModel.dimensions())
                        .build()
        ).get();
    }

    /**
     * Lists all knowledge bases enriched with their collection statistics.
     * NOTE: one Qdrant round-trip per row (N+1); acceptable for small catalogs.
     */
    public ImmutableList<KnowledgeVO> list() {
        return template.query(
                        SqlBuilder.select("id", "vector_source_id", "name", "strategy")
                                .from(KNOWLEDGE_TABLE_NAME)
                                .build(),
                        this::readRow
                )
                .stream()
                .map(knowledge -> {
                    try {
                        Collections.CollectionInfo info = client.getCollectionInfoAsync(String.valueOf(knowledge.getVectorSourceId())).get();
                        KnowledgeVO vo = new KnowledgeVO();
                        vo.setName(knowledge.getName());
                        vo.setPoints(info.getPointsCount());
                        vo.setSegments(info.getSegmentsCount());
                        vo.setStatus(info.getStatus().name());
                        Collections.VectorParams vectorParams = info.getConfig().getParams().getVectorsConfig().getParams();
                        vo.setStrategy(vectorParams.getDistance().name());
                        vo.setSize(vectorParams.getSize());
                        return vo;
                    } catch (InterruptedException | ExecutionException e) {
                        throw new RuntimeException(e);
                    }
                })
                .collect(Collectors.toCollection(Lists.mutable::empty))
                .toImmutable();
    }

    /**
     * Deletes a knowledge base: the DB row, its groups, and its collection.
     *
     * @throws RuntimeException if no knowledge base with that name exists
     */
    @Transactional(rollbackFor = Exception.class)
    public void remove(String name) throws ExecutionException, InterruptedException {
        Knowledge knowledge = get(name);
        if (ObjectUtil.isNull(knowledge)) {
            // Fixed: the format argument was missing, producing "{} 不存在" verbatim.
            throw new RuntimeException(StrUtil.format("{} 不存在", name));
        }
        template.update(
                SqlBuilder.delete(KNOWLEDGE_TABLE_NAME)
                        .whereEq("id", "?")
                        .precompileSql(),
                knowledge.getId()
        );
        knowledgeGroupService.removeByKnowledgeId(knowledge.getId());
        client.deleteCollectionAsync(String.valueOf(knowledge.getVectorSourceId())).get();
    }
}

View File

@@ -0,0 +1,51 @@
spring:
application:
name: service-ai-knowledge
profiles:
include: common,metrics,forest
cloud:
zookeeper:
enabled: true
connect-string: b1m2.hdp.dc:2181,b1m3.hdp.dc:2181,b1m4.hdp.dc:2181,b1m5.hdp.dc:2181,b1m6.hdp.dc:2181
discovery:
enabled: ${spring.cloud.zookeeper.enabled}
root: /hudi-services
instance-id: ${spring.application.name}-127.0.0.1-${random.uuid}-20250514
metadata:
discovery: zookeeper
ip: 127.0.0.1
hostname: localhost
hostname_full: localhost
start_time: 20250514112750
datasource:
url: jdbc:mysql://localhost:3307/ai?useSSL=false
username: test
password: test
driver-class-name: com.mysql.cj.jdbc.Driver
security:
meta:
authority: ENC(GXKnbq1LS11U2HaONspvH+D/TkIx13aWTaokdkzaF7HSvq6Z0Rv1+JUWFnYopVXu)
username: ENC(moIO5mO39V1Z+RDwROK9JXY4GfM8ZjDgM6Si7wRZ1MPVjbhTpmLz3lz28rAiw7c2LeCmizfJzHkEXIwGlB280g==)
darkcode: ENC(0jzpQ7T6S+P7bZrENgYsUoLhlqGvw7DA2MN3BRqEOwq7plhtg72vuuiPQNnr3DaYz0CpyTvxInhpx11W3VZ1trD6NINh7O3LN70ZqO5pWXk=)
ai:
openai:
base-url: http://132.121.206.65:10086
api-key: ENC(K+Hff9QGC+fcyi510VIDd9CaeK/IN5WBJ9rlkUsHEdDgIidW+stHHJlsK0lLPUXXREha+ToQZqqDXJrqSE+GUKCXklFhelD8bRHFXBIeP/ZzT2cxhzgKUXgjw3S0Qw2R)
chat:
options:
model: 'Qwen3-1.7'
embedding:
options:
model: 'Bge-m3'
vectorstore:
qdrant:
api-key: lanyuanxiaoyao
jasypt:
encryptor:
password: 'r#(R,P"Dp^A47>WSn:Wn].gs/+"v:q_Q*An~zF*g-@j@jtSTv5H/,S-3:R?r9R}.'
server:
port: 8080
liteflow:
rule-source: config/flow.xml
print-banner: false
check-node-exists: false

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  LiteFlow rule file defining the "embedding" chain:
  start -> mode switch (normal / llm / qa embedding) -> finish.
  NOTE(review): comments are kept outside the <chain> element because
  LiteFlow parses the chain's text content as an EL expression.
-->
<flow>
    <chain name="embedding">
        SER(
        embedding_start,
        SWITCH(embedding_mode_switch).TO(
        normal_embedding,
        llm_embedding,
        qa_embedding
        ),
        embedding_finish
        );
    </chain>
</flow>

View File

@@ -0,0 +1,34 @@
<!-- Logback configuration: colored console output plus an (currently disabled)
     daily-rolling gzip file appender. -->
<configuration>
    <!-- Spring Boot's color/whitespace converters for the console pattern. -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
    <!-- Values bridged from the Spring Environment into logback's context.
         NOTE(review): LOKI_PUSH_URL is declared but no longer referenced by
         any appender (remote Loki logging was removed) — candidate for cleanup. -->
    <springProperty scope="context" name="LOKI_PUSH_URL" source="loki.url"/>
    <springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
    <springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
    <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern>
        </encoder>
    </appender>
    <!-- Daily-rolled file log, gzip-compressed on rollover (the .gz suffix in
         fileNamePattern enables compression). Defaults to the working
         directory when LOGGING_PARENT/APP_NAME are unset. -->
    <appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOGGING_PARENT:-.}/${APP_NAME:-run}.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOGGING_PARENT:-.}/archive/${APP_NAME:-run}-%d{yyyy-MM-dd}.gz</fileNamePattern>
            <!-- NOTE(review): logback's property is conventionally <maxHistory>;
                 the capitalized form appears to be tolerated by Joran — confirm. -->
            <MaxHistory>7</MaxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %p [${HOSTNAME}] [%t] %logger #@# %m%n%wEx</pattern>
        </encoder>
    </appender>
    <!-- Quiet noisy third-party loggers. -->
    <logger name="com.zaxxer.hikari" level="ERROR"/>
    <logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
    <root level="INFO">
        <appender-ref ref="Console"/>
        <!-- File appender intentionally disabled; console only. -->
        <!-- <appender-ref ref="RollingFile"/>-->
    </root>
</configuration>

View File

@@ -7,23 +7,6 @@
<springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/> <springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
<springProperty scope="context" name="APP_NAME" source="spring.application.name"/> <springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
<appender name="Loki" class="com.github.loki4j.logback.Loki4jAppender">
<metricsEnabled>true</metricsEnabled>
<http class="com.github.loki4j.logback.ApacheHttpSender">
<url>${LOKI_PUSH_URL:-http://localhost/loki/api/v1/push}</url>
</http>
<format>
<label>
<pattern>app=${APP_NAME:-none},host=${HOSTNAME:-none},level=%level</pattern>
<readMarkers>true</readMarkers>
</label>
<message>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %p [${HOSTNAME}] [%t] %logger #@# %m%n%wEx</pattern>
</message>
<sortByTime>true</sortByTime>
</format>
</appender>
<appender name="Console" class="ch.qos.logback.core.ConsoleAppender"> <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
<encoder> <encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern>
@@ -45,7 +28,6 @@
<logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/> <logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
<root level="INFO"> <root level="INFO">
<appender-ref ref="Loki"/>
<appender-ref ref="Console"/> <appender-ref ref="Console"/>
<appender-ref ref="RollingFile"/> <appender-ref ref="RollingFile"/>
</root> </root>

View File

@@ -60,10 +60,6 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>io.juicefs</groupId>
<artifactId>juicefs-hadoop</artifactId>
</dependency>
<dependency> <dependency>
<groupId>com.lanyuanxiaoyao</groupId> <groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-configuration</artifactId> <artifactId>service-configuration</artifactId>

View File

@@ -8,6 +8,7 @@ package com.lanyuanxiaoyao.service.cli.core;
*/ */
public class HostInfo { public class HostInfo {
private String ip; private String ip;
private Boolean enabled = true;
private Boolean useAuthority = false; private Boolean useAuthority = false;
private String username; private String username;
private String password; private String password;
@@ -20,6 +21,14 @@ public class HostInfo {
this.ip = ip; this.ip = ip;
} }
public Boolean getEnabled() {
return enabled;
}
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
public Boolean getUseAuthority() { public Boolean getUseAuthority() {
return useAuthority; return useAuthority;
} }
@@ -47,7 +56,8 @@ public class HostInfo {
@Override @Override
public String toString() { public String toString() {
return "HostInfo{" + return "HostInfo{" +
"ip='" + ip + '\'' + "enabled=" + enabled +
", ip='" + ip + '\'' +
", useAuthority=" + useAuthority + ", useAuthority=" + useAuthority +
", username='" + username + '\'' + ", username='" + username + '\'' +
", password='" + password + '\'' + ", password='" + password + '\'' +

View File

@@ -1,5 +1,7 @@
package com.lanyuanxiaoyao.service.cli.core; package com.lanyuanxiaoyao.service.cli.core;
import java.util.Map;
/** /**
* 运行配置 * 运行配置
* *
@@ -10,7 +12,8 @@ public class RuntimeInfo {
private String signature; private String signature;
private String user; private String user;
private String jarPath; private String jarPath;
private String jdkPath; private String defaultJdk;
private Map<String, String> jdkPath;
private String logPath; private String logPath;
private String confPath; private String confPath;
private String dataPath; private String dataPath;
@@ -47,11 +50,19 @@ public class RuntimeInfo {
this.jarPath = jarPath; this.jarPath = jarPath;
} }
public String getJdkPath() { public String getDefaultJdk() {
return defaultJdk;
}
public void setDefaultJdk(String defaultJdk) {
this.defaultJdk = defaultJdk;
}
public Map<String, String> getJdkPath() {
return jdkPath; return jdkPath;
} }
public void setJdkPath(String jdkPath) { public void setJdkPath(Map<String, String> jdkPath) {
this.jdkPath = jdkPath; this.jdkPath = jdkPath;
} }
@@ -146,22 +157,23 @@ public class RuntimeInfo {
@Override @Override
public String toString() { public String toString() {
return "RuntimeInfo{" + return "RuntimeInfo{" +
"signature='" + signature + '\'' + "signature='" + signature + '\'' +
", user='" + user + '\'' + ", user='" + user + '\'' +
", jarPath='" + jarPath + '\'' + ", jarPath='" + jarPath + '\'' +
", jdkPath='" + jdkPath + '\'' + ", defaultJdk='" + defaultJdk + '\'' +
", logPath='" + logPath + '\'' + ", jdkPath=" + jdkPath +
", confPath='" + confPath + '\'' + ", logPath='" + logPath + '\'' +
", dataPath='" + dataPath + '\'' + ", confPath='" + confPath + '\'' +
", downloadUrl='" + downloadUrl + '\'' + ", dataPath='" + dataPath + '\'' +
", kerberosKeytabPath='" + kerberosKeytabPath + '\'' + ", downloadUrl='" + downloadUrl + '\'' +
", loki=" + loki + ", kerberosKeytabPath='" + kerberosKeytabPath + '\'' +
", zkUrl='" + zkUrl + '\'' + ", loki=" + loki +
", connectorZkUrl='" + connectorZkUrl + '\'' + ", zkUrl='" + zkUrl + '\'' +
", hudi=" + hudi + ", connectorZkUrl='" + connectorZkUrl + '\'' +
", security=" + security + ", hudi=" + hudi +
", yarn=" + yarn + ", security=" + security +
'}'; ", yarn=" + yarn +
'}';
} }
public static final class LokiInfo { public static final class LokiInfo {

View File

@@ -17,6 +17,7 @@ public class ServiceInfo {
private List<String> groups = new ArrayList<>(); private List<String> groups = new ArrayList<>();
private Integer replicas = 0; private Integer replicas = 0;
private String sourceJar; private String sourceJar;
private String jdk;
private List<String> classpath = new ArrayList<>(); private List<String> classpath = new ArrayList<>();
private Map<String, Object> environments = new HashMap<>(); private Map<String, Object> environments = new HashMap<>();
private Map<String, Object> arguments = new HashMap<>(); private Map<String, Object> arguments = new HashMap<>();
@@ -61,6 +62,14 @@ public class ServiceInfo {
this.sourceJar = sourceJar; this.sourceJar = sourceJar;
} }
public String getJdk() {
return jdk;
}
public void setJdk(String jdk) {
this.jdk = jdk;
}
public List<String> getClasspath() { public List<String> getClasspath() {
return classpath; return classpath;
} }
@@ -88,14 +97,15 @@ public class ServiceInfo {
@Override @Override
public String toString() { public String toString() {
return "ServiceInfo{" + return "ServiceInfo{" +
"enabled=" + enabled + "enabled=" + enabled +
", order=" + order + ", order=" + order +
", groups=" + groups + ", groups=" + groups +
", replicas=" + replicas + ", replicas=" + replicas +
", sourceJar='" + sourceJar + '\'' + ", sourceJar='" + sourceJar + '\'' +
", classpath=" + classpath + ", jdk=" + jdk +
", environments=" + environments + ", classpath=" + classpath +
", arguments=" + arguments + ", environments=" + environments +
'}'; ", arguments=" + arguments +
'}';
} }
} }

Some files were not shown because too many files have changed in this diff Show More