34 Commits

Author SHA1 Message Date
v-zhangjc9
5d49c82190 feat(ai): 完善知识库接口 2025-05-16 19:00:55 +08:00
v-zhangjc9
be976290b6 feat(ai): 完善AI对话 2025-05-16 19:00:26 +08:00
v-zhangjc9
8fbc665abf feat(ai): 增加ai相关子项目 2025-05-15 17:15:18 +08:00
v-zhangjc9
a129caf5f4 fix(web): 修复思考开关无效 2025-05-14 09:14:35 +08:00
v-zhangjc9
aea8a7ed59 feat(web): 优化对话界面显示 2025-05-13 16:13:07 +08:00
v-zhangjc9
dd2e56e27b feat(web): 增加AI对话的能力 2025-05-13 16:03:08 +08:00
v-zhangjc9
819d56fbe3 feat(web): 优化图表展示 2025-05-13 12:06:21 +08:00
v-zhangjc9
255aad4987 feat(web): 优化图标显示 2025-05-12 19:28:23 +08:00
v-zhangjc9
f23de7c959 fix(web): 修复页面显示不正确、logo显示异常 2025-05-12 18:15:10 +08:00
v-zhangjc9
b0603d10bc feat(web): 优化页面跳转和菜单展现 2025-05-12 15:59:46 +08:00
v-zhangjc9
1e7b195f9f feat(web): 更换页面框架为pro-layout 2025-05-12 10:42:59 +08:00
v-zhangjc9
aa93b52dd9 refactor(web): 优化部署打包方案 2025-05-09 17:23:26 +08:00
v-zhangjc9
121f6688c6 refactor(web): 更换client代码的目录 2025-05-09 12:14:06 +08:00
v-zhangjc9
8a7ad32df9 fix(web): 修复切换路由不刷新页面的问题 2025-05-09 12:13:34 +08:00
v-zhangjc9
de445d7061 feat(web): 完成基本适配 2025-05-09 12:13:18 +08:00
v-zhangjc9
fa295b15c6 feat(launcher): 增加批量执行ssh命令脚本 2025-04-24 15:56:52 +08:00
v-zhangjc9
51c9e71b0d fix(launcher): 修复空值环境变量错误 2025-04-23 14:08:10 +08:00
v-zhangjc9
224115e938 refactor(all): 移除juice-fs依赖 2025-04-21 12:28:18 +08:00
v-zhangjc9
2f3eaa9e1a feat(scheduler): 恢复调度时间点 2025-02-12 10:13:52 +08:00
v-zhangjc9
f791b60fd5 feat(sync): 增加日志输出 2025-02-12 10:13:52 +08:00
v-zhangjc9
e6a03122a6 feat(cli): 增加主机是否启用的能力 2025-02-12 10:13:52 +08:00
v-zhangjc9
7249419624 feat(scheduler): 微调b12集群的资源限制 2025-02-12 10:13:52 +08:00
v-zhangjc9
58140fa0e8 feat(bin): 移除ytp传输 2025-02-12 10:13:52 +08:00
v-zhangjc9
b3ccbce16e feat(all): 移除b5集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
6dbad6825d feat(scheduler): 调整b12的资源限制
反正也没有备用集群的需求,一个集群用到头就好
2025-02-12 10:13:52 +08:00
v-zhangjc9
686c523274 fix(scheduler): 修复闲时压缩crm重点表调度到A4集群 2025-02-12 10:13:52 +08:00
v-zhangjc9
1e88c62987 feat(scheduler): 禁止b5、a4在闲时调度期间使用 2025-02-12 10:13:52 +08:00
v-zhangjc9
fb79468eee feat(web): 增加指标采集进度显示 2025-02-12 10:13:52 +08:00
v-zhangjc9
7efd9129c2 feat(monitor): 增加关于hudi表文件数的监控指标 2025-02-12 10:13:52 +08:00
v-zhangjc9
e30a720cea fix(hudi-query): 修复接口调用错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
28b3fd9ca1 feat(hudi-query): 增加关于hdfs文件数相关的接口 2025-02-12 10:13:52 +08:00
v-zhangjc9
70c2442ff1 fix(forest): 修复接口类型错误 2025-02-12 10:13:52 +08:00
v-zhangjc9
3c971e1438 feat(scheduler): 修复调度 2025-02-12 10:13:52 +08:00
v-zhangjc9
2c7d72bdb8 feat(scheduler): 调整日常调度的时间点
停止11、14点的全表压缩调度
2025-02-12 10:13:52 +08:00
239 changed files with 14468 additions and 121198 deletions

View File

@@ -1,3 +1,168 @@
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s20.hdp.dc:19521/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T154854.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154825.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154754.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154616.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T154529.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/list?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151839.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151753.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/file_count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=EE0952421D19909D0A80BB5A1216DE93
content-length: 0
<> 2024-10-12T151727.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s12.hdp.dc:25961/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151704.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151540.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151442.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?root=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151417.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs/count?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
Cookie: JSESSIONID=D4B48AD7708DF28D7AFA0A74B26CF45A
content-length: 0
<> 2024-10-12T151409.500.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s22.hdp.dc:13241/hdfs?hdfs=hdfs://b2/apps/datalake/hive/dws_wsyyt/external_table_hudi/dws_tb_jt_servuser_data
Authorization: Basic AxhEbscwsJDbYMH2 cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
User-Agent: IntelliJ HTTP Client/IntelliJ IDEA 2024.1.4
Accept-Encoding: br, deflate, gzip, x-gzip
Accept: */*
content-length: 0
<> 2024-10-12T151340.404.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:31719/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_sz/acct_item_755&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
@@ -358,163 +523,3 @@ Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164901.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164758.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164303.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164220.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=08E78CE8926806AAB5D110D0FE9B05F7
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T164107.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s10.hdp.dc:33535/task/law_enforcement?pulsar_url=pulsar://132.122.115.158:16650,132.122.115.159:16650,132.122.115.160:16650,132.122.115.161:16650,132.122.115.167:16650,132.122.115.168:16650&pulsar_topic=persistent://odcp/acct_dg/acct_item_760&start_time=1716858000000&end_time=1716861600000&primary_keys=ACCT_ITEM_ID&partition_keys=ACCT_ID
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=C5D2666661F27F68E53223FE5B74AF35
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-28T163410.200.txt
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/queue/queue/clear?name=compaction-queue-pre
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=8516C92140B5118AF9AA61025D0F8C93
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_scheduler/schedule/all
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=7A4C34E0240A98C1186F3A2551BC5E80
Accept-Encoding: br,deflate,gzip,x-gzip
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/service_web/cloud/list
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
<> 2024-05-09T170723.200.json
###
GET http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.130:35690/hudi_services/hudi_api/api/message_id?flink_job_id=1542097984132706304&alias=crm_cfguse_mkt_cam_strategy_rel
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=F5F155198FAF72435339CC2E21B873CC
Accept-Encoding: br,deflate,gzip,x-gzip
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Type: text/plain
Content-Length: 738
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=E6FF5447C8553BA4268979B8C5779363
Accept-Encoding: br,deflate,gzip,x-gzip
\#Properties saved on 2023-12-26T09:18:39.583Z
\#Tue Dec 26 17:18:39 CST 2023
hoodie.table.precombine.field=update_ts
hoodie.datasource.write.drop.partition.columns=false
hoodie.table.partition.fields=CITY_ID
hoodie.table.type=MERGE_ON_READ
hoodie.archivelog.folder=archived
hoodie.compaction.payload.class=org.apache.hudi.common.model.OverwriteWithLatestAvroPayload
hoodie.timeline.layout.version=1
hoodie.table.version=4
hoodie.table.recordkey.fields=_key
hoodie.datasource.write.partitionpath.urlencode=false
hoodie.table.name=dws_account
hoodie.table.keygenerator.class=org.apache.hudi.keygen.SimpleKeyGenerator
hoodie.table.timeline.timezone=LOCAL
hoodie.datasource.write.hive_style_partitioning=false
hoodie.table.checksum=989688289
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:15391/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt&overwrite=true
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
###
POST http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@b12s8.hdp.dc:34469/hdfs/write?root=hdfs://b2/apps/datalake/test/test.txt
Content-Length: 11
Content-Type: */*; charset=UTF-8
Connection: Keep-Alive
User-Agent: Apache-HttpClient/4.5.14 (Java/17.0.10)
Cookie: JSESSIONID=D12E206603C453F1429C0B7DF1519A4B
Accept-Encoding: br,deflate,gzip,x-gzip
Hello world
<> 2024-05-08T095641.500.txt
###

View File

@@ -5,7 +5,7 @@ mvn install -N -D skipTests
deploy service-common service-dependencies service-configuration service-forest service-cli service-cli/service-cli-core service-executor service-executor/service-executor-core utils/executor
package service-api service-check service-cli/service-cli-runner service-cloud-query service-executor/service-executor-manager service-executor/service-executor-task service-command service-command-pro service-exporter service-flink-query service-gateway service-hudi-query service-info-query service-monitor service-loki-query service-pulsar-query service-queue service-scheduler service-uploader service-web service-yarn-query service-zookeeper-query utils/patch utils/sync
configs=(b2a4 b2b1 b2b5 b2b12)
configs=(b2a4 b2b1 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config
@@ -34,5 +34,5 @@ upload $root_path/service-yarn-query/target/service-yarn-query-1.0.0-SNAPSHOT.ja
upload $root_path/service-zookeeper-query/target/service-zookeeper-query-1.0.0-SNAPSHOT.jar
upload $root_path/utils/sync/target/sync-1.0.0-SNAPSHOT.jar
upload_ytp $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar
upload_ytp $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar
upload $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar
upload $root_path/service-uploader/target/service-uploader-1.0.0-SNAPSHOT.jar

View File

@@ -3,4 +3,4 @@ root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-cli service-cli/service-cli-core
package service-cli/service-cli-runner
ytp-transfer2 $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar
upload $root_path/service-cli/service-cli-runner/target/service-cli-runner-1.0.0-SNAPSHOT.jar

View File

@@ -3,7 +3,7 @@ root_path=$(dirname $(cd $(dirname $0);pwd))
source $root_path/bin/library.sh
deploy service-common service-dependencies service-configuration service-forest
configs=(b2a4 b2b1 b2b5 b2b12)
configs=(b2a4 b2b1 b2b12)
for config in ${configs[*]};
do
mvn -pl service-launcher clean package -D skipTests -P $config

View File

@@ -3,5 +3,5 @@
root_path=/apps/zone_scfp/hudi/cloud
jdk_path=/opt/jdk8u252-b09/bin/java
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar
curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-check-1.0.0-SNAPSHOT.jar -o ${root_path}/service-check.jar
${jdk_path} -jar ${root_path}/service-check.jar

View File

@@ -5,7 +5,7 @@ jdk_path=/opt/jdk1.8.0_162/bin/java
arguments=$@
# 手动上传jar包则注释掉这行更显神通吧反正是
curl ftp://yyy:QeY\!68\)4nH1@132.121.122.15:2222/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar
curl http://AxhEbscwsJDbYMH2:cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4@132.126.207.124:36800/file/download/service-cli-runner-1.0.0-SNAPSHOT.jar -o ${jars_path}/service-cli-runner.jar
${jdk_path} -jar ${jars_path}/service-cli-runner.jar \
--spring.profiles.active=b12 \
--deploy.generate.command=true \

View File

@@ -2,14 +2,6 @@
build_profile=b2b12
iap_username=iap
iap_password=IAPAb123456!
iap_url=$iap_username@132.122.1.162
ytp_username=yyy
ytp_password='QeY\!68\)4nH1'
ytp_url=ftp://$ytp_username:$ytp_password@132.121.122.15:2222
upload_username=AxhEbscwsJDbYMH2
upload_password=cYxg3b4PtWoVD5SjFayWxtnSVsjzRsg4
upload_url=http://$upload_username:$upload_password@132.126.207.124:36800
@@ -28,23 +20,6 @@ function upload() {
rm $source_file_path
}
function upload_ytp() {
source_file_path=$(realpath $1)
file_name=$(basename $source_file_path)
echo "↪ Source md5: $(md5sum $source_file_path | awk '{print $1}')"
echo "↪ Uploading $source_file_path ↪ /tmp/$file_name"
sshpass -p $iap_password scp $source_file_path $iap_url:/tmp
echo "↪ Upload 162 success"
target_md5=$(sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "md5sum /tmp/$file_name | awk '{print \$1}'")
echo "↪ Target md5: $target_md5"
echo "↪ Command: sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url \"curl --retry 5 $ytp_url -T /tmp/$file_name\""
sshpass -p $iap_password ssh -o 'StrictHostKeyChecking no' $iap_url "curl --retry 5 $ytp_url -T /tmp/$file_name"
echo "↪ Upload ytp success"
echo "↪ Download: curl $ytp_url/$file_name -o $file_name"
echo "↪ Delete source"
rm $source_file_path
}
function joining {
local d=${1-} f=${2-}
if shift 2; then

View File

@@ -1,265 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>fs.azure.user.agent.prefix</name>
<value>User-Agent: APN/1.0 Hortonworks/1.0 HDP/</value>
</property>
<property>
<name>fs.defaultFS</name>
<value>hdfs://b2</value>
<final>true</final>
</property>
<property>
<name>fs.s3a.fast.upload</name>
<value>true</value>
</property>
<property>
<name>fs.s3a.fast.upload.buffer</name>
<value>disk</value>
</property>
<property>
<name>fs.s3a.multipart.size</name>
<value>67108864</value>
</property>
<property>
<name>fs.trash.interval</name>
<value>4320</value>
</property>
<property>
<name>fs.trash.checkpoint.interval</name>
<value>360</value>
</property>
<property>
<name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
<value>120</value>
</property>
<property>
<name>ha.zookeeper.acl</name>
<value>sasl:nn:rwcda</value>
</property>
<property>
<name>ha.zookeeper.quorum</name>
<value>b5m1.hdp.dc:2181,b5m2.hdp.dc:2181,b5m3.hdp.dc:2181</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>hadoop.http.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>hadoop.http.authentication.signature.secret.file</name>
<value>/etc/security/http_secret</value>
</property>
<property>
<name>hadoop.http.authentication.simple.anonymous.allowed</name>
<value>true</value>
</property>
<property>
<name>hadoop.http.authentication.type</name>
<value>simple</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-headers</name>
<value>X-Requested-With,Content-Type,Accept,Origin,WWW-Authenticate,Accept-Encoding,Transfer-Encoding</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-methods</name>
<value>GET,PUT,POST,OPTIONS,HEAD,DELETE</value>
</property>
<property>
<name>hadoop.http.cross-origin.allowed-origins</name>
<value>*</value>
</property>
<property>
<name>hadoop.http.cross-origin.max-age</name>
<value>1800</value>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.security.AuthenticationFilterInitializer,org.apache.hadoop.security.HttpCrossOriginFilterInitializer</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.HTTP.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.iap.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.livy.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.rpc.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>hadoop.security.auth_to_local</name>
<value>RULE:[1:$1@$0](hbase-b5@ECLD.COM)s/.*/hbase/
RULE:[1:$1@$0](hdfs-b5@ECLD.COM)s/.*/hdfs/
RULE:[1:$1@$0](spark-b5@ECLD.COM)s/.*/spark/
RULE:[1:$1@$0](yarn-ats-b5@ECLD.COM)s/.*/yarn-ats/
RULE:[1:$1@$0](.*@ECLD.COM)s/@.*//
RULE:[2:$1@$0](dn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](hbase@ECLD.COM)s/.*/hbase/
RULE:[2:$1@$0](hive@ECLD.COM)s/.*/hive/
RULE:[2:$1@$0](jhs@ECLD.COM)s/.*/mapred/
RULE:[2:$1@$0](jn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](livy@ECLD.COM)s/.*/livy/
RULE:[2:$1@$0](nm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](nn@ECLD.COM)s/.*/hdfs/
RULE:[2:$1@$0](rangeradmin@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangerlookup@ECLD.COM)s/.*/ranger/
RULE:[2:$1@$0](rangertagsync@ECLD.COM)s/.*/rangertagsync/
RULE:[2:$1@$0](rangerusersync@ECLD.COM)s/.*/rangerusersync/
RULE:[2:$1@$0](rm@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](spark@ECLD.COM)s/.*/spark/
RULE:[2:$1@$0](yarn@ECLD.COM)s/.*/yarn/
RULE:[2:$1@$0](yarn-ats-hbase@ECLD.COM)s/.*/yarn-ats/
DEFAULT</value>
</property>
<property>
<name>hadoop.security.authentication</name>
<value>kerberos</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>true</value>
</property>
<property>
<name>hadoop.security.instrumentation.requires.admin</name>
<value>false</value>
</property>
<property>
<name>io.compression.codec.lzo.class</name>
<value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
</property>
<property>
<name>ipc.client.connect.max.retries</name>
<value>50</value>
</property>
<property>
<name>ipc.client.connection.maxidletime</name>
<value>30000</value>
</property>
<property>
<name>ipc.client.idlethreshold</name>
<value>8000</value>
</property>
<property>
<name>ipc.server.tcpnodelay</name>
<value>true</value>
</property>
<property>
<name>mapreduce.jobtracker.webinterface.trusted</name>
<value>false</value>
</property>
<property>
<name>ipc.client.fallback-to-simple-auth-allowed</name>
<value>true</value>
</property>
<property>
<name>fs.hdfs.impl.disable.cache</name>
<value>true</value>
</property>
</configuration>

View File

@@ -1,713 +0,0 @@
<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
<property>
<name>dfs.block.access.token.enable</name>
<value>true</value>
</property>
<property>
<name>dfs.blockreport.initialDelay</name>
<value>120</value>
</property>
<property>
<name>dfs.blocksize</name>
<value>134217728</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b5</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.read.shortcircuit</name>
<value>true</value>
</property>
<property>
<name>dfs.client.read.shortcircuit.streams.cache.size</name>
<value>4096</value>
</property>
<property>
<name>dfs.client.retry.policy.enabled</name>
<value>false</value>
</property>
<property>
<name>dfs.cluster.administrators</name>
<value> hdfs</value>
</property>
<property>
<name>dfs.content-summary.limit</name>
<value>5000</value>
</property>
<property>
<name>dfs.data.transfer.protection</name>
<value>authentication,privacy</value>
</property>
<property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:1019</value>
</property>
<property>
<name>dfs.datanode.balance.bandwidthPerSec</name>
<value>6250000</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>[DISK]file:///data1/hadoop/hdfs/data,[DISK]file:///data2/hadoop/hdfs/data,[DISK]file:///data3/hadoop/hdfs/data,[DISK]file:///data4/hadoop/hdfs/data,[DISK]file:///data5/hadoop/hdfs/data,[DISK]file:///data6/hadoop/hdfs/data,[DISK]file:///data7/hadoop/hdfs/data,[DISK]file:///data8/hadoop/hdfs/data,[DISK]file:///data9/hadoop/hdfs/data,[DISK]file:///data10/hadoop/hdfs/data,[DISK]file:///data11/hadoop/hdfs/data,[DISK]file:///data12/hadoop/hdfs/data,[DISK]file:///data13/hadoop/hdfs/data,[DISK]file:///data14/hadoop/hdfs/data,[DISK]file:///data15/hadoop/hdfs/data,[DISK]file:///data16/hadoop/hdfs/data,[DISK]file:///data17/hadoop/hdfs/data,[DISK]file:///data18/hadoop/hdfs/data,[DISK]file:///data19/hadoop/hdfs/data,[DISK]file:///data20/hadoop/hdfs/data,[DISK]file:///data21/hadoop/hdfs/data,[DISK]file:///data22/hadoop/hdfs/data,[DISK]file:///data23/hadoop/hdfs/data,[DISK]file:///data24/hadoop/hdfs/data</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.data.dir.perm</name>
<value>750</value>
</property>
<property>
<name>dfs.datanode.du.reserved</name>
<value>26405499904</value>
</property>
<property>
<name>dfs.datanode.failed.volumes.tolerated</name>
<value>2</value>
<final>true</final>
</property>
<property>
<name>dfs.datanode.http.address</name>
<value>0.0.0.0:1022</value>
</property>
<property>
<name>dfs.datanode.https.address</name>
<value>0.0.0.0:50475</value>
</property>
<property>
<name>dfs.datanode.ipc.address</name>
<value>0.0.0.0:8010</value>
</property>
<property>
<name>dfs.datanode.kerberos.principal</name>
<value>dn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.datanode.keytab.file</name>
<value>/etc/security/keytabs/dn.service.keytab</value>
</property>
<property>
<name>dfs.datanode.max.transfer.threads</name>
<value>16384</value>
</property>
<property>
<name>dfs.domain.socket.path</name>
<value>/var/lib/hadoop-hdfs/dn_socket</value>
</property>
<property>
<name>dfs.encrypt.data.transfer.cipher.suites</name>
<value>AES/CTR/NoPadding</value>
</property>
<property>
<name>dfs.ha.automatic-failover.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.ha.fencing.methods</name>
<value>shell(/bin/true)</value>
</property>
<property>
<name>dfs.ha.namenodes.b5</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.heartbeat.interval</name>
<value>3</value>
</property>
<property>
<name>dfs.hosts.exclude</name>
<value>/etc/hadoop/conf/dfs.exclude</value>
</property>
<property>
<name>dfs.http.policy</name>
<value>HTTP_ONLY</value>
</property>
<property>
<name>dfs.https.port</name>
<value>50470</value>
</property>
<property>
<name>dfs.internal.nameservices</name>
<value>b5</value>
</property>
<property>
<name>dfs.journalnode.edits.dir.b5</name>
<value>/data2/hadoop/hdfs/journal</value>
</property>
<property>
<name>dfs.journalnode.http-address</name>
<value>0.0.0.0:8480</value>
</property>
<property>
<name>dfs.journalnode.https-address</name>
<value>0.0.0.0:8481</value>
</property>
<property>
<name>dfs.journalnode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.kerberos.principal</name>
<value>jn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.journalnode.keytab.file</name>
<value>/etc/security/keytabs/jn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.accesstime.precision</name>
<value>0</value>
</property>
<property>
<name>dfs.namenode.acls.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.audit.log.async</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.read.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.avoid.write.stale.datanode</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.checkpoint.dir</name>
<value>/data/hadoop/hdfs/namesecondary</value>
</property>
<property>
<name>dfs.namenode.checkpoint.edits.dir</name>
<value>${dfs.namenode.checkpoint.dir}</value>
</property>
<property>
<name>dfs.namenode.checkpoint.period</name>
<value>21600</value>
</property>
<property>
<name>dfs.namenode.checkpoint.txns</name>
<value>1000000</value>
</property>
<property>
<name>dfs.namenode.fslock.fair</name>
<value>false</value>
</property>
<property>
<name>dfs.namenode.handler.count</name>
<value>100</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn1</name>
<value>b5m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b5.nn2</name>
<value>b5m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn1</name>
<value>b5m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b5.nn2</name>
<value>b5m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.kerberos.internal.spnego.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.kerberos.principal</name>
<value>nn/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.namenode.keytab.file</name>
<value>/etc/security/keytabs/nn.service.keytab</value>
</property>
<property>
<name>dfs.namenode.max.extra.edits.segments.retained</name>
<value>180</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/data1/hadoop/hdfs/namenode,/data2/hadoop/hdfs/namenode</value>
<final>true</final>
</property>
<property>
<name>dfs.namenode.name.dir.restore</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.num.extra.edits.retained</name>
<value>18000</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn1</name>
<value>b5m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b5.nn2</name>
<value>b5m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.safemode.threshold-pct</name>
<value>0.99</value>
</property>
<property>
<name>dfs.namenode.shared.edits.dir.b5</name>
<value>qjournal://b5m1.hdp.dc:8485;b5m2.hdp.dc:8485;b5m3.hdp.dc:8485/b5</value>
</property>
<property>
<name>dfs.namenode.stale.datanode.interval</name>
<value>30000</value>
</property>
<property>
<name>dfs.namenode.startup.delay.block.deletion.sec</name>
<value>3600</value>
</property>
<property>
<name>dfs.namenode.write.stale.datanode.ratio</name>
<value>1.0f</value>
</property>
<property>
<name>dfs.nameservices</name>
<value>b5,b1,b2,b3,b4,a3,a4,f1,e1,d2</value>
</property>
<property>
<name>dfs.permissions.ContentSummary.subAccess</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.permissions.superusergroup</name>
<value>hdfs</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
<property>
<name>dfs.replication.max</name>
<value>50</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ECLD.COM</value>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
<final>true</final>
</property>
<property>
<name>fs.permissions.umask-mode</name>
<value>022</value>
</property>
<property>
<name>hadoop.caller.context.enabled</name>
<value>true</value>
</property>
<property>
<name>manage.include.files</name>
<value>false</value>
</property>
<property>
<name>nfs.exports.allowed.hosts</name>
<value>* rw</value>
</property>
<property>
<name>nfs.file.dump.dir</name>
<value>/tmp/.hdfs-nfs</value>
</property>
<property>
<name>dfs.client.datanode-restart.timeout</name>
<value>30</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn1</name>
<value>a4m1.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a4.nn2</name>
<value>a4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn1</name>
<value>a4m1.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a4.nn2</name>
<value>a4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn1</name>
<value>a4m1.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a4.nn2</name>
<value>a4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.a3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.a3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn1</name>
<value>a3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.a3.nn2</name>
<value>a3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn1</name>
<value>a3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.a3.nn2</name>
<value>a3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn1</name>
<value>a3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.a3.nn2</name>
<value>a3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b3</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b3</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn1</name>
<value>b3m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b3.nn2</name>
<value>b3m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn1</name>
<value>b3m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b3.nn2</name>
<value>b3m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn1</name>
<value>b3m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b3.nn2</name>
<value>b3m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.ha.namenodes.b2</name>
<value>nn3,nn4</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn1</name>
<value>b1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b1.nn2</name>
<value>b1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn1</name>
<value>b1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b1.nn2</name>
<value>b1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn1</name>
<value>b1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b1.nn2</name>
<value>b1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn3</name>
<value>b1m5.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b2.nn4</name>
<value>b1m6.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn3</name>
<value>b1m5.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b2.nn4</name>
<value>b1m6.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn3</name>
<value>b1m5.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b2.nn4</name>
<value>b1m6.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.f1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.f1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn1</name>
<value>f1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.f1.nn2</name>
<value>f1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn1</name>
<value>f1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.f1.nn2</name>
<value>f1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn1</name>
<value>f1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.f1.nn2</name>
<value>f1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.d2</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.d2</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn1</name>
<value>d2m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.d2.nn2</name>
<value>d2m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn1</name>
<value>d2m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.d2.nn2</name>
<value>d2m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn1</name>
<value>d2m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.d2.nn2</name>
<value>d2m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.e1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.e1</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn1</name>
<value>e1m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.e1.nn2</name>
<value>e1m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn1</name>
<value>e1m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.e1.nn2</name>
<value>e1m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn1</name>
<value>e1m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.e1.nn2</name>
<value>e1m3.hdp.dc:8020</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.b4</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.namenodes.b4</name>
<value>nn1,nn2</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn1</name>
<value>b4m2.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.http-address.b4.nn2</name>
<value>b4m3.hdp.dc:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn1</name>
<value>b4m2.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.https-address.b4.nn2</name>
<value>b4m3.hdp.dc:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn1</name>
<value>b4m2.hdp.dc:8020</value>
</property>
<property>
<name>dfs.namenode.rpc-address.b4.nn2</name>
<value>b4m3.hdp.dc:8020</value>
</property>
</configuration>

File diff suppressed because it is too large Load Diff

View File

@@ -74,12 +74,6 @@
<build-tag>b2b1</build-tag>
</properties>
</profile>
<profile>
<id>b2b5</id>
<properties>
<build-tag>b2b5</build-tag>
</properties>
</profile>
<profile>
<id>b2b12</id>
<properties>

84
service-ai/.gitignore vendored Normal file
View File

@@ -0,0 +1,84 @@
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.idea/**/aws.xml
.idea/**/contentModel.xml
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
.idea/**/gradle.xml
.idea/**/libraries
cmake-build-*/
.idea/**/mongoSettings.xml
*.iws
out/
.idea_modules/
atlassian-ide-plugin.xml
.idea/replstate.xml
.idea/sonarlint/
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
.idea/httpRequests
.idea/caches/build_file_checksums.ser
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
release.properties
dependency-reduced-pom.xml
buildNumber.properties
.mvn/timing.properties
.mvn/wrapper/maven-wrapper.jar
.project
.classpath
*~
.fuse_hidden*
.directory
.Trash-*
.nfs*
.gradle
**/build/
!src/**/build/
gradle-app.setting
!gradle-wrapper.jar
!gradle-wrapper.properties
.gradletasknamecache
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
*.stackdump
[Dd]esktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msix
*.msm
*.msp
*.lnk
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

10
service-ai/.idea/.gitignore generated vendored Normal file
View File

@@ -0,0 +1,10 @@
# 默认忽略的文件
/shelf/
/workspace.xml
# 基于编辑器的 HTTP 客户端请求
/httpRequests/
# 依赖于环境的 Maven 主目录路径
/mavenHomeManager.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ApifoxUploaderProjectSetting">
<!-- SECURITY NOTE(review): this API access token is committed to version control — rotate the credential and load it from untracked local configuration instead -->
<option name="apiAccessToken" value="APS-0ZZaS4q0gUiFOlbBJMN8hAmS7viQNi4D" />
</component>
</project>

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GitCommitMessageStorage">
<option name="messageStorage">
<MessageStorage />
</option>
</component>
</project>

29
service-ai/.idea/codeStyles/Project.xml generated Normal file
View File

@@ -0,0 +1,29 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<JavaCodeStyleSettings>
<option name="PACKAGES_TO_USE_IMPORT_ON_DEMAND">
<value />
</option>
<option name="IMPORT_LAYOUT_TABLE">
<value>
<package name="" withSubpackages="true" static="false" module="true" />
<package name="" withSubpackages="true" static="false" />
<emptyLine />
<package name="" withSubpackages="true" static="true" />
</value>
</option>
</JavaCodeStyleSettings>
<JetCodeStyleSettings>
<option name="CODE_STYLE_DEFAULTS" value="KOTLIN_OFFICIAL" />
</JetCodeStyleSettings>
<ScalaCodeStyleSettings>
<option name="MULTILINE_STRING_CLOSING_QUOTES_ON_NEW_LINE" value="true" />
</ScalaCodeStyleSettings>
<codeStyleSettings language="JAVA">
<option name="KEEP_FIRST_COLUMN_COMMENT" value="false" />
</codeStyleSettings>
<codeStyleSettings language="kotlin">
<option name="CODE_STYLE_DEFAULTS" value="KOTLIN_OFFICIAL" />
</codeStyleSettings>
</code_scheme>
</component>

View File

@@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
</state>
</component>

15
service-ai/.idea/compiler.xml generated Normal file
View File

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<annotationProcessing>
<profile name="Maven default annotation processors profile" enabled="true">
<sourceOutputDir name="target/generated-sources/annotations" />
<sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
<outputRelativeToContentRoot value="true" />
<module name="service-ai-knowledge" />
<module name="service-ai-chat" />
<module name="service-ai-core" />
</profile>
</annotationProcessing>
</component>
</project>

13
service-ai/.idea/encodings.xml generated Normal file
View File

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="file://$PROJECT_DIR$/service-ai-chat/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/service-ai-chat/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/service-ai-core/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/service-ai-core/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/service-ai-knowledge/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/service-ai-knowledge/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/src/main/resources" charset="UTF-8" />
</component>
</project>

30
service-ai/.idea/jarRepositories.xml generated Normal file
View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Central Repository" />
<option name="url" value="https://repo.maven.apache.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="lanyuanxiaoyao-maven-central" />
<option name="name" value="lanyuanxiaoyao-maven-central" />
<option name="url" value="https://maven.lanyuanxiaoyao.com/central" />
</remote-repository>
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Central Repository" />
<option name="url" value="https://maven.lanyuanxiaoyao.com/central" />
</remote-repository>
</component>
</project>

12
service-ai/.idea/misc.xml generated Normal file
View File

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="MavenProjectsManager">
<option name="originalFiles">
<list>
<option value="$PROJECT_DIR$/pom.xml" />
</list>
</option>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="true" project-jdk-name="temurin-17" project-jdk-type="JavaSDK" />
</project>

6
service-ai/.idea/vcs.xml generated Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@@ -0,0 +1,6 @@
#!/bin/bash
# Build and deploy the service-ai-chat module:
#   1. resolve the repository root relative to this script,
#   2. load the shared build helpers (deploy/package/upload),
#   3. install the core library, package the chat service, and upload the fat jar.
#
# Fixes over the previous version: all expansions are quoted so paths with
# spaces work, and `cd ... && pwd` replaces `cd ...;pwd` so a failed `cd`
# cannot silently yield the wrong directory.
root_path=$(dirname "$(cd "$(dirname "$0")" && pwd)")
source "$(realpath "$root_path/..")/bin/library.sh"
deploy service-ai-core
package service-ai-chat
upload "$root_path/service-ai-chat/target/service-ai-chat-1.0.0-SNAPSHOT.jar"

172
service-ai/pom.xml Normal file
View File

@@ -0,0 +1,172 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>pom</packaging>
<description>Hudi AI服务集合</description>
<modules>
<module>service-ai-core</module>
<module>service-ai-chat</module>
<module>service-ai-knowledge</module>
</modules>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<build-tag>b2b12</build-tag>
<spring-boot.version>3.4.3</spring-boot.version>
<spring-cloud.version>2024.0.1</spring-cloud.version>
<spring-ai.version>1.0.0-M6</spring-ai.version>
<eclipse-collections.version>11.1.0</eclipse-collections.version>
<curator.version>5.1.0</curator.version>
<hutool.version>5.8.27</hutool.version>
</properties>
<dependencyManagement>
<dependencies>
<!-- 当前项目依赖 -->
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-configuration</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-dependencies</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<version>${project.version}</version>
</dependency>
<!-- spring boot 相关依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>${spring-cloud.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-bom</artifactId>
<version>${spring-ai.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<version>1.5.36</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-eclipse-collections</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.github.ulisesbocchio</groupId>
<artifactId>jasypt-spring-boot-starter</artifactId>
<version>3.0.5</version>
</dependency>
<!-- 日志相关 -->
<dependency>
<groupId>pl.tkowalcz.tjahzi</groupId>
<artifactId>logback-appender</artifactId>
<version>0.9.23</version>
</dependency>
<dependency>
<groupId>com.github.loki4j</groupId>
<artifactId>loki-logback-appender-jdk8</artifactId>
<version>1.4.2</version>
</dependency>
<!-- 其他 -->
<dependency>
<groupId>dev.failsafe</groupId>
<artifactId>failsafe</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections</artifactId>
<version>${eclipse-collections.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.eclipse.collections</groupId>
<artifactId>eclipse-collections-api</artifactId>
<version>${eclipse-collections.version}</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${spring-boot.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<distributionManagement>
<repository>
<id>${releases.id}</id>
<name>${releases.name}</name>
<url>${releases.url}</url>
</repository>
<snapshotRepository>
<id>${snapshots.id}</id>
<name>${snapshots.name}</name>
<url>${snapshots.url}</url>
</snapshotRepository>
</distributionManagement>
</project>

View File

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-chat</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-openai-spring-boot-starter</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,23 @@
package com.lanyuanxiaoyao.service.ai.chat;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.retry.annotation.EnableRetry;
/**
 * Bootstrap entry point for the AI chat service.
 *
 * <p>Scans the whole {@code com.lanyuanxiaoyao.service} package tree, registers
 * with service discovery, and enables encrypted properties plus Spring Retry.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@SpringBootApplication(scanBasePackages = "com.lanyuanxiaoyao.service")
@EnableDiscoveryClient
@EnableConfigurationProperties
@EnableEncryptableProperties
@EnableRetry
public class AiChatApplication {
    /**
     * Launches the Spring application context with the given CLI arguments.
     */
    public static void main(String[] args) {
        // Instance form of SpringApplication.run(AiChatApplication.class, args);
        // behaviorally identical to the static shorthand.
        new SpringApplication(AiChatApplication.class).run(args);
    }
}

View File

@@ -0,0 +1,80 @@
package com.lanyuanxiaoyao.service.ai.chat.controller;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.ai.chat.entity.MessageVO;
import com.lanyuanxiaoyao.service.ai.chat.tools.DatetimeTools;
import java.io.IOException;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
/**
 * Chat endpoints backed by Spring AI's {@link ChatClient}.
 *
 * <p>Accepts a conversation history as a list of {@link MessageVO} and exposes
 * both a blocking endpoint ({@code POST /chat/sync}) and a streaming SSE
 * endpoint ({@code POST /chat/async}).
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@Controller
@RequestMapping("chat")
public class ChatController {
    private static final Logger logger = LoggerFactory.getLogger(ChatController.class);

    // Configured once at construction; ChatClient instances are safe to share.
    private final ChatClient chatClient;

    public ChatController(ChatClient.Builder builder) {
        this.chatClient = builder
                // Keep the system prompt byte-identical: it pins conversations to Chinese.
                .defaultSystem("始终在中文语境下进行对话")
                .build();
    }

    /**
     * Converts the incoming role/content pairs into Spring AI messages and
     * prepares a request. A role of {@code "assistant"} maps to
     * {@link AssistantMessage}; any other role is treated as a user turn.
     */
    private ChatClient.ChatClientRequestSpec buildRequest(ImmutableList<MessageVO> messages) {
        return chatClient.prompt()
                .messages(
                        // Single pass: map and upcast to Message at once instead of
                        // the former two-collect chain (map, then upcast-only map).
                        messages
                                .collect(message -> (Message) (StrUtil.equals(message.getRole(), "assistant")
                                        ? new AssistantMessage(message.getContent())
                                        : new UserMessage(message.getContent())))
                                .toList()
                );
    }

    /**
     * Blocking chat call: returns the model's full reply as plain text.
     */
    @PostMapping("sync")
    @ResponseBody
    public String chatSync(@RequestBody ImmutableList<MessageVO> messages) {
        return buildRequest(messages)
                .call()
                .content();
    }

    /**
     * Streaming chat call: forwards each content chunk to the client over SSE.
     *
     * <p>Bug fix: the previous version called {@code completeWithError} inside
     * the catch block AND rethrew — Reactor routes the thrown exception to the
     * error consumer, so the emitter was error-completed twice. Now the catch
     * only rethrows (which also cancels the upstream subscription) and the
     * error consumer performs the single {@code completeWithError}.
     */
    @PostMapping("async")
    public SseEmitter chatAsync(@RequestBody ImmutableList<MessageVO> messages) {
        SseEmitter emitter = new SseEmitter();
        buildRequest(messages)
                .stream()
                .content()
                .subscribe(
                        content -> {
                            try {
                                emitter.send(content);
                            } catch (IOException e) {
                                // Client likely disconnected. Propagate so Reactor
                                // cancels the stream; the error consumer below then
                                // completes the emitter exactly once.
                                logger.warn("SSE send failed, aborting stream", e);
                                throw new RuntimeException(e);
                            }
                        },
                        emitter::completeWithError,
                        emitter::complete
                );
        return emitter;
    }
}

View File

@@ -0,0 +1,34 @@
package com.lanyuanxiaoyao.service.ai.chat.entity;
/**
 * Transport object for a single chat turn: who spoke ({@code role}) and what
 * was said ({@code content}).
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class MessageVO {
    // "assistant" marks a model turn; any other role is treated as a user turn.
    private String role;
    private String content;

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("MessageVO{");
        text.append("role='").append(role).append('\'');
        text.append(", content='").append(content).append('\'');
        return text.append('}').toString();
    }
}

View File

@@ -0,0 +1,18 @@
package com.lanyuanxiaoyao.service.ai.chat.tools;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.springframework.ai.tool.annotation.Tool;
/**
 * Clock-related tools exposed to the chat model for tool/function calling.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class DatetimeTools {
    // Fixed output pattern, e.g. 2025-05-16 19:00:55.
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** Returns the current local date-time formatted as {@code yyyy-MM-dd HH:mm:ss}. */
    @Tool(description = "获取当前时间")
    public String getCurrentTime() {
        return FORMATTER.format(LocalDateTime.now());
    }
}

View File

@@ -0,0 +1,15 @@
spring:
application:
name: service-ai-chat
profiles:
include: random-port,common,metrics,forest
ai:
openai:
base-url: http://132.121.206.65:10086
api-key: ENC(K+Hff9QGC+fcyi510VIDd9CaeK/IN5WBJ9rlkUsHEdDgIidW+stHHJlsK0lLPUXXREha+ToQZqqDXJrqSE+GUKCXklFhelD8bRHFXBIeP/ZzT2cxhzgKUXgjw3S0Qw2R)
chat:
options:
model: 'Qwen3-1.7'
mvc:
async:
request-timeout: 300000

View File

@@ -0,0 +1,34 @@
<configuration>
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<springProperty scope="context" name="LOKI_PUSH_URL" source="loki.url"/>
<springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
<springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
<appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern>
</encoder>
</appender>
<appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOGGING_PARENT:-.}/${APP_NAME:-run}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGGING_PARENT:-.}/archive/${APP_NAME:-run}-%d{yyyy-MM-dd}.gz</fileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %p [${HOSTNAME}] [%t] %logger #@# %m%n%wEx</pattern>
</encoder>
</appender>
<logger name="com.zaxxer.hikari" level="ERROR"/>
<logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
<root level="INFO">
<appender-ref ref="Console"/>
<!-- <appender-ref ref="RollingFile"/>-->
</root>
</configuration>

View File

@@ -0,0 +1,37 @@
package com.lanyuanxiaoyao.service.ai.chat;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import reactor.core.Disposable;
/**
 * Stand-alone smoke test for the OpenAI-compatible chat endpoint; run manually,
 * not part of the deployed service.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
public class TestChat {
    // NOTE(security): the hard-coded endpoint/key below are kept only as a
    // local-test fallback; prefer supplying OPENAI_BASE_URL / OPENAI_API_KEY via
    // environment variables so credentials never have to live in source control.
    private static final String BASE_URL =
            System.getenv().getOrDefault("OPENAI_BASE_URL", "http://132.121.206.65:10086");
    private static final String API_KEY =
            System.getenv().getOrDefault("OPENAI_API_KEY", "*XMySqV%>hR&v>>g*NwCs3tpQ5FVMFEF2VHVTj<MYQd$&@$sY7CgqNyea4giJi4");
    private static final String MODEL = "Qwen3-1.7";

    /** Sends a single "你好" prompt and prints the completion to stdout. */
    public static void main(String[] args) {
        ChatClient client = ChatClient.builder(
                        OpenAiChatModel.builder()
                                .openAiApi(
                                        OpenAiApi.builder()
                                                .baseUrl(BASE_URL)
                                                .apiKey(API_KEY)
                                                .build()
                                )
                                .defaultOptions(
                                        OpenAiChatOptions.builder()
                                                .model(MODEL)
                                                .build()
                                )
                                .build()
                )
                .build();
        String content = client.prompt()
                .user("你好")
                .call()
                .content();
        System.out.println(content);
    }
}

View File

@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-core</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-forest</artifactId>
<exclusions>
<exclusion>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot-starter</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-sleuth</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>com.dtflys.forest</groupId>
<artifactId>forest-spring-boot3-starter</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,58 @@
package com.lanyuanxiaoyao.service.configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.CorsConfigurationSource;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
/**
 * HTTP security configuration shared by the AI services: every endpoint is
 * protected by HTTP Basic auth, CSRF and form login are disabled, and a
 * permissive CORS policy is applied for browser clients.
 *
 * @author lanyuanxiaoyao
 * @version 20250514
 */
@Configuration
@EnableWebSecurity
public class SecurityConfig {
    private static final Logger logger = LoggerFactory.getLogger(SecurityConfig.class);

    /**
     * Single filter chain: all requests must authenticate via HTTP Basic;
     * CSRF is switched off (stateless API clients), and the CORS source
     * defined below is installed.
     */
    @Bean
    public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {
        return http.authorizeHttpRequests(registry -> registry.anyRequest().authenticated())
                .httpBasic(Customizer.withDefaults())
                .csrf(AbstractHttpConfigurer::disable)
                .cors(configurer -> configurer.configurationSource(corsConfigurationSource()))
                .formLogin(AbstractHttpConfigurer::disable)
                .build();
    }

    /**
     * CORS policy applied to every path.
     * NOTE(review): allowCredentials(true) combined with origin pattern "*"
     * effectively accepts credentialed requests from any origin — confirm this
     * is intended for the deployment environment.
     */
    private CorsConfigurationSource corsConfigurationSource() {
        CorsConfiguration configuration = new CorsConfiguration();
        configuration.setAllowCredentials(true);
        configuration.addAllowedHeader("*");
        configuration.addAllowedMethod("*");
        configuration.addAllowedOriginPattern("*");
        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
        source.registerCorsConfiguration("/**", configuration);
        return source;
    }

    /**
     * Single in-memory user built from externalized (jasypt-encrypted) properties.
     * NOTE(review): "{noop}" stores the password unhashed in memory — acceptable
     * only because the value itself comes from encrypted configuration; confirm.
     */
    @Bean
    public InMemoryUserDetailsManager userDetailsService(SecurityProperties securityProperties) {
        UserDetails user = User.builder()
                .username(securityProperties.getUsername())
                .password("{noop}" + securityProperties.getDarkcode())
                .authorities(securityProperties.getAuthority())
                .build();
        return new InMemoryUserDetailsManager(user);
    }
}

View File

@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>service-ai-knowledge</artifactId>
<dependencies>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-ai-core</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-openai-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-qdrant-store-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-markdown-document-reader</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,29 @@
package com.lanyuanxiaoyao.service.ai.knowledge;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.retry.annotation.EnableRetry;
/**
 * Boot entry point for the knowledge-base service. Scans the shared
 * {@code com.lanyuanxiaoyao.service} package so common beans are picked up
 * alongside this module's own components.
 *
 * @author lanyuanxiaoyao
 * @version 20250515
 */
@SpringBootApplication(scanBasePackages = "com.lanyuanxiaoyao.service")
@EnableDiscoveryClient
@EnableConfigurationProperties
@EnableEncryptableProperties
@EnableRetry
public class KnowledgeApplication implements ApplicationRunner {
    public static void main(String[] args) {
        SpringApplication.run(KnowledgeApplication.class, args);
    }

    /** Intentionally empty: placeholder hook invoked once after startup. */
    @Override
    public void run(ApplicationArguments args) {
    }
}

View File

@@ -0,0 +1,141 @@
package com.lanyuanxiaoyao.service.ai.knowledge.controller;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.CollectionVO;
import com.lanyuanxiaoyao.service.ai.knowledge.entity.PointVO;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.grpc.Collections;
import io.qdrant.client.grpc.Points;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.reader.markdown.MarkdownDocumentReader;
import org.springframework.ai.reader.markdown.config.MarkdownDocumentReaderConfig;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.ai.vectorstore.qdrant.QdrantVectorStore;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for managing Qdrant-backed knowledge collections: create,
 * list, inspect and delete collections, and ingest markdown text as embedded
 * documents.
 *
 * @author lanyuanxiaoyao
 * @version 20250515
 */
@RestController
@RequestMapping("knowledge")
public class KnowledgeController {
    private static final Logger logger = LoggerFactory.getLogger(KnowledgeController.class);
    // Native Qdrant client unwrapped from the auto-configured vector store.
    private final QdrantClient client;
    private final EmbeddingModel embeddingModel;

    public KnowledgeController(VectorStore vectorStore, EmbeddingModel embeddingModel) {
        // Fails fast at startup if the configured store exposes no native client.
        client = (QdrantClient) vectorStore.getNativeClient().orElseThrow();
        this.embeddingModel = embeddingModel;
    }

    /**
     * Creates a collection whose vector size matches the embedding model's
     * dimensions.
     *
     * @param strategy a {@code Collections.Distance} enum name (e.g. "Cosine")
     */
    @PostMapping("add")
    public void add(
            @RequestParam("name") String name,
            @RequestParam("strategy") String strategy
    ) throws ExecutionException, InterruptedException {
        logger.info("Enter method: add[name, strategy]. name:{},strategy:{}", name, strategy);
        // Block until creation completes so any server error surfaces to the caller.
        client.createCollectionAsync(
                name,
                Collections.VectorParams.newBuilder()
                        .setDistance(Collections.Distance.valueOf(strategy))
                        .setSize(embeddingModel.dimensions())
                        .build()
        ).get();
    }

    /** Lists every collection with its vector config, counts and status. */
    @GetMapping("list")
    public ImmutableList<CollectionVO> list() throws ExecutionException, InterruptedException {
        return client.listCollectionsAsync()
                .get()
                .stream()
                .collect(Collectors.toCollection(Lists.mutable::empty))
                .collect(name -> {
                    try {
                        // One extra round-trip per collection, executed sequentially.
                        Collections.CollectionInfo info = client.getCollectionInfoAsync(name).get();
                        CollectionVO vo = new CollectionVO();
                        vo.setName(name);
                        vo.setPoints(info.getPointsCount());
                        vo.setSegments(info.getSegmentsCount());
                        vo.setStatus(info.getStatus().name());
                        Collections.VectorParams vectorParams = info.getConfig().getParams().getVectorsConfig().getParams();
                        vo.setStrategy(vectorParams.getDistance().name());
                        vo.setSize(vectorParams.getSize());
                        return vo;
                    } catch (InterruptedException | ExecutionException e) {
                        // Checked exceptions cannot escape the lambda; rewrap.
                        throw new RuntimeException(e);
                    }
                })
                .toImmutable();
    }

    /**
     * Scrolls a collection's points, returning id and stored text only
     * (vectors are explicitly excluded to keep the payload small).
     * NOTE(review): no scroll limit is set (see the commented setLimit), so
     * only the server's default page is returned — confirm whether full
     * pagination is required here.
     */
    @GetMapping("list_points")
    public ImmutableList<PointVO> listPoints(@RequestParam("name") String name) throws ExecutionException, InterruptedException {
        Points.ScrollResponse response = client.scrollAsync(
                Points.ScrollPoints.newBuilder()
                        .setCollectionName(name)
                        // .setLimit(2)
                        .setWithPayload(Points.WithPayloadSelector.newBuilder().setEnable(true).build())
                        .setWithVectors(Points.WithVectorsSelector.newBuilder().setEnable(false).build())
                        .build()
        )
                .get();
        return response.getResultList()
                .stream()
                .collect(Collectors.toCollection(Lists.mutable::empty))
                .collect(point -> {
                    PointVO vo = new PointVO();
                    vo.setId(point.getId().getUuid());
                    // "doc_content" is presumably the payload key the Spring AI Qdrant
                    // store writes document text under — TODO confirm against ingestion.
                    vo.setText(point.getPayloadMap().get("doc_content").getStringValue());
                    return vo;
                })
                .toImmutable();
    }

    /**
     * Drops a collection.
     * NOTE(review): a destructive operation mapped to GET — existing callers
     * depend on this, but consider @DeleteMapping in a future API revision.
     */
    @GetMapping("delete")
    public void delete(@RequestParam("name") String name) throws ExecutionException, InterruptedException {
        client.deleteCollectionAsync(name).get();
    }

    /**
     * Previews how raw text would be split before ingestion.
     * TODO: not implemented — currently ignores its input and returns an
     * empty list.
     */
    @PostMapping(value = "preview_text", consumes = "text/plain;charset=utf-8")
    public ImmutableList<String> previewText(
            @RequestParam("name") String name,
            @RequestParam(value = "mode", defaultValue = "normal") String mode,
            @RequestBody String text
    ) {
        return Lists.immutable.empty();
    }

    /**
     * Embeds markdown text and stores the resulting documents in the named
     * collection. Horizontal rules start new documents; code blocks and
     * blockquotes are excluded from the parsed content.
     */
    @PostMapping(value = "process_text", consumes = "text/plain;charset=utf-8")
    public void processText(
            @RequestParam("name") String name,
            @RequestBody String text
    ) {
        VectorStore source = QdrantVectorStore.builder(client, embeddingModel)
                .collectionName(name)
                .initializeSchema(true)
                .build();
        MarkdownDocumentReader reader = new MarkdownDocumentReader(
                new ByteArrayResource(text.getBytes(StandardCharsets.UTF_8)),
                MarkdownDocumentReaderConfig.builder()
                        .withHorizontalRuleCreateDocument(true)
                        .withIncludeCodeBlock(false)
                        .withIncludeBlockquote(false)
                        .build()
        );
        source.add(reader.get());
    }
}

View File

@@ -0,0 +1,74 @@
package com.lanyuanxiaoyao.service.ai.knowledge.entity;
/**
 * View object describing one Qdrant collection: its distance strategy and
 * vector size plus point/segment counts and status.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class CollectionVO {
    private String name;
    private String strategy;
    private Long size;
    private Long points;
    private Long segments;
    private String status;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getStrategy() {
        return strategy;
    }

    public void setStrategy(String strategy) {
        this.strategy = strategy;
    }

    public Long getSize() {
        return size;
    }

    public void setSize(Long size) {
        this.size = size;
    }

    public Long getPoints() {
        return points;
    }

    public void setPoints(Long points) {
        this.points = points;
    }

    public Long getSegments() {
        return segments;
    }

    public void setSegments(Long segments) {
        this.segments = segments;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("CollectionVO{");
        text.append("name='").append(name).append('\'');
        text.append(", strategy='").append(strategy).append('\'');
        text.append(", size=").append(size);
        text.append(", points=").append(points);
        text.append(", segments=").append(segments);
        text.append(", status='").append(status).append('\'');
        return text.append('}').toString();
    }
}

View File

@@ -0,0 +1,34 @@
package com.lanyuanxiaoyao.service.ai.knowledge.entity;
/**
 * View object for a single stored vector point: its id and the text payload.
 *
 * @author lanyuanxiaoyao
 * @version 20250516
 */
public class PointVO {
    private String id;
    private String text;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("PointVO{");
        out.append("id='").append(id).append('\'');
        out.append(", text='").append(text).append('\'');
        return out.append('}').toString();
    }
}

View File

@@ -0,0 +1,42 @@
spring:
application:
name: service-ai-knowledge
profiles:
include: common,metrics,forest
cloud:
zookeeper:
enabled: true
connect-string: b1m2.hdp.dc:2181,b1m3.hdp.dc:2181,b1m4.hdp.dc:2181,b1m5.hdp.dc:2181,b1m6.hdp.dc:2181
discovery:
enabled: ${spring.cloud.zookeeper.enabled}
root: /hudi-services
instance-id: ${spring.application.name}-127.0.0.1-${random.uuid}-20250514
metadata:
discovery: zookeeper
ip: 127.0.0.1
hostname: localhost
hostname_full: localhost
start_time: 20250514112750
security:
meta:
authority: ENC(GXKnbq1LS11U2HaONspvH+D/TkIx13aWTaokdkzaF7HSvq6Z0Rv1+JUWFnYopVXu)
username: ENC(moIO5mO39V1Z+RDwROK9JXY4GfM8ZjDgM6Si7wRZ1MPVjbhTpmLz3lz28rAiw7c2LeCmizfJzHkEXIwGlB280g==)
darkcode: ENC(0jzpQ7T6S+P7bZrENgYsUoLhlqGvw7DA2MN3BRqEOwq7plhtg72vuuiPQNnr3DaYz0CpyTvxInhpx11W3VZ1trD6NINh7O3LN70ZqO5pWXk=)
ai:
openai:
base-url: http://132.121.206.65:10086
api-key: ENC(K+Hff9QGC+fcyi510VIDd9CaeK/IN5WBJ9rlkUsHEdDgIidW+stHHJlsK0lLPUXXREha+ToQZqqDXJrqSE+GUKCXklFhelD8bRHFXBIeP/ZzT2cxhzgKUXgjw3S0Qw2R)
chat:
options:
model: 'Qwen3-1.7'
embedding:
options:
model: 'Bge-m3'
vectorstore:
qdrant:
api-key: lanyuanxiaoyao
jasypt:
encryptor:
# SECURITY(review): the jasypt master password is committed in plaintext right
# next to the ENC(...) values it protects, which defeats the encryption —
# supply it via an environment variable or JVM argument instead of this file.
password: 'r#(R,P"Dp^A47>WSn:Wn].gs/+"v:q_Q*An~zF*g-@j@jtSTv5H/,S-3:R?r9R}.'
server:
port: 8080

View File

@@ -0,0 +1,34 @@
<configuration>
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<springProperty scope="context" name="LOKI_PUSH_URL" source="loki.url"/>
<springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
<springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
<appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %clr(%5p) %clr([${HOSTNAME}]){yellow} %clr([%t]){magenta} %clr(%logger{40}){cyan} #@# %m%n%wEx</pattern>
</encoder>
</appender>
<appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOGGING_PARENT:-.}/${APP_NAME:-run}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGGING_PARENT:-.}/archive/${APP_NAME:-run}-%d{yyyy-MM-dd}.gz</fileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %p [${HOSTNAME}] [%t] %logger #@# %m%n%wEx</pattern>
</encoder>
</appender>
<logger name="com.zaxxer.hikari" level="ERROR"/>
<logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
<root level="INFO">
<appender-ref ref="Console"/>
<!-- <appender-ref ref="RollingFile"/>-->
</root>
</configuration>

View File

@@ -60,10 +60,6 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.juicefs</groupId>
<artifactId>juicefs-hadoop</artifactId>
</dependency>
<dependency>
<groupId>com.lanyuanxiaoyao</groupId>
<artifactId>service-configuration</artifactId>

View File

@@ -8,6 +8,7 @@ package com.lanyuanxiaoyao.service.cli.core;
*/
public class HostInfo {
private String ip;
private Boolean enabled = true;
private Boolean useAuthority = false;
private String username;
private String password;
@@ -20,6 +21,14 @@ public class HostInfo {
this.ip = ip;
}
public Boolean getEnabled() {
return enabled;
}
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
public Boolean getUseAuthority() {
return useAuthority;
}
@@ -47,7 +56,8 @@ public class HostInfo {
@Override
public String toString() {
return "HostInfo{" +
"ip='" + ip + '\'' +
"enabled=" + enabled +
", ip='" + ip + '\'' +
", useAuthority=" + useAuthority +
", username='" + username + '\'' +
", password='" + password + '\'' +

View File

@@ -26,6 +26,10 @@ public class HostInfoWrapper {
return hostInfo.getIp();
}
public Boolean getEnabled() {
return hostInfo.getEnabled();
}
public Boolean getUseAuthority() {
return hostInfo.getUseAuthority();
}

View File

@@ -82,6 +82,7 @@ public class RunnerApplication implements ApplicationRunner {
return serviceInfo.getReplicas() == 0
? hostInfoList
.stream()
.filter(HostInfoWrapper::getEnabled)
.map(HostInfoWrapper::getIp)
.sorted(Comparator.naturalOrder())
.collect(Collectors.toList())
@@ -89,6 +90,7 @@ public class RunnerApplication implements ApplicationRunner {
RandomUtil.randomEleList(
hostInfoList
.stream()
.filter(HostInfoWrapper::getEnabled)
.map(HostInfoWrapper::getIp)
.collect(Collectors.toList()
), serviceInfo.getReplicas()
@@ -148,6 +150,15 @@ public class RunnerApplication implements ApplicationRunner {
selectedHosts = selectHosts(serviceInfo);
deployPlans.put(serviceInfo.getName(), selectedHosts);
}
// 排除不可用的主机
List<String> validIps = hostInfoList.stream()
.filter(HostInfoWrapper::getEnabled)
.map(HostInfoWrapper::getIp)
.collect(Collectors.toList());
selectedHosts = selectedHosts
.stream()
.filter(validIps::contains)
.collect(Collectors.toList());
} else {
selectedHosts = selectHosts(serviceInfo);
deployPlans.put(serviceInfo.getName(), selectedHosts);
@@ -157,8 +168,8 @@ public class RunnerApplication implements ApplicationRunner {
MapUtil.builder()
.put("currentPath", absolutRootPath)
.put("hosts", hostInfoList
.collect(HostInfoWrapper::getHostnameIp)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.collect(HostInfoWrapper::getHostnameIp)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.put("selectedHosts", selectedHosts)
.put("runtime", runtimeInfo)
.put("info", serviceInfo)
@@ -176,8 +187,8 @@ public class RunnerApplication implements ApplicationRunner {
MapUtil.builder()
.put("currentPath", absolutRootPath)
.put("hosts", hostInfoList
.collect(HostInfoWrapper::getIp)
.toSortedList(Comparator.naturalOrder()))
.collect(HostInfoWrapper::getHostnameIp)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.put("runtime", runtimeInfo)
.put("info", serviceInfo)
.put("arguments", serviceInfo.getArguments())
@@ -188,13 +199,13 @@ public class RunnerApplication implements ApplicationRunner {
StrUtil.format("stop-{}.sh", serviceInfo.getName())
)
);
generateTemplate(
generateTemplate(
"cloud/log.ftl",
MapUtil.builder()
.put("currentPath", absolutRootPath)
.put("hosts", hostInfoList
.collect(HostInfoWrapper::getIp)
.toSortedList(Comparator.naturalOrder()))
.collect(HostInfoWrapper::getHostnameIp)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.put("selectedHosts", selectedHosts)
.put("runtime", runtimeInfo)
.put("info", serviceInfo)
@@ -217,6 +228,18 @@ public class RunnerApplication implements ApplicationRunner {
Paths.get(root.toString(), "stop.sh")
);
generateTemplate(
"cloud/ssh-script.ftl",
MapUtil.builder()
.put("currentPath", absolutRootPath)
.put("hosts", hostInfoList
.collect(HostInfoWrapper::getHostnameIp)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.put("runtime", runtimeInfo)
.build(),
Paths.get(root.toString(), "ssh.sh")
);
MutableMap<String, MutableList<ServiceInfoWrapper>> groups = Maps.mutable.empty();
for (ServiceInfoWrapper service : serviceInfoList) {
service.getGroups().add(0, "all");

View File

@@ -59,19 +59,17 @@ deploy:
# hudi同步运行集群
sync-clusters: b12
# hudi压缩运行集群
compaction-clusters: b12,b1,b5,a4
compaction-clusters: b12,b1,a4
# 覆盖service的公共配置主要需要修改的就是部署副本数
services:
service-api:
replicas: 10
service-launcher-b1:
replicas: 8
service-launcher-b5:
replicas: 6
service-launcher-a4:
replicas: 6
service-launcher-b12:
replicas: 10
replicas: 15
service-info-query:
replicas: 10
service-yarn-query:

View File

@@ -46,27 +46,6 @@ deploy:
"[connector.cluster.sync-queue-name]": sync-queue-b1
"[connector.cluster.compaction-queue-name]": compaction-queue-b1
"[connector.zookeeper.connect-url]": ${deploy.runtime.connector-zk-url}
service-launcher-b5:
order: 4
groups:
- "service"
- "service-hudi"
- "service-hudi-launcher"
source-jar: service-launcher-b2b5-1.0.0-SNAPSHOT.jar
replicas: 6
environments:
"[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
"[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path}
"[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path}
"[connector.hudi.app-test-hdfs-path]": ${deploy.runtime.hudi.app-test-hdfs-path}
"[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url}
"[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url}
arguments:
"[spring.application.name]": service-launcher-b5
"[connector.cluster.cluster]": b5
"[connector.cluster.sync-queue-name]": sync-queue-b5
"[connector.cluster.compaction-queue-name]": compaction-queue-b5
"[connector.zookeeper.connect-url]": ${deploy.runtime.connector-zk-url}
service-launcher-a4:
order: 4
groups:

View File

@@ -13,5 +13,5 @@ for host in <#noparse>${hosts[@]}</#noparse>
do
hostname=`ssh $host 'echo $HOSTNAME'`
echo "$host $hostname"
ssh $host "cat ${runtime.logPath}/${info.name}.log" > ${currentPath}/logs/${info.name}/$hostname.log
scp $host:${runtime.logPath}/${info.name}.log ${currentPath}/logs/${info.name}/$hostname.log
done

View File

@@ -0,0 +1,16 @@
#!/bin/bash
# Broadcast one shell command to every configured host over ssh.
# "$*" joins the caller's arguments into a single string; keeping both the
# assignment and the ssh argument quoted prevents the local shell from
# re-splitting the command (the unquoted original mangled quoted arguments).
command="$*"
hosts=(
<#list hosts as host>
${host.ip}
</#list>
)
for host in <#noparse>"${hosts[@]}"</#noparse>
do
hostname=`ssh $host 'echo $HOSTNAME'`
echo "$host $hostname"
ssh "$host" "$command"
done

View File

@@ -1,14 +1,7 @@
#!/bin/bash
hosts=(
<#list hosts as host>
${host}
host=${host.ip}
echo "${info.name} ${host.ip} ${host.hostname} $datetime"
ssh $host 'bash -s' < ${currentPath}/stop.sh ${runtime.jarPath}/${info.name}.jar
</#list>
)
for host in <#noparse>${hosts[@]}</#noparse>
do
hostname=`ssh $host 'echo $HOSTNAME'`
echo "$host $hostname"
ssh $host 'bash -s' < ${currentPath}/stop.sh ${runtime.jarPath}/${info.name}.jar
done

View File

@@ -80,10 +80,6 @@
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-flink${flink.major.version}-bundle</artifactId>
</dependency>
<dependency>
<groupId>io.juicefs</groupId>
<artifactId>juicefs-hadoop</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>

View File

@@ -205,6 +205,41 @@ public class HudiCommand {
fileSystem.close();
}
@ShellMethod("Max meta files")
    public void maxMetaFiles() throws IOException {
        // Synchronized list: appended concurrently from the parallel scan below.
        MutableList<P> list = Lists.mutable.<P>empty().asSynchronized();
        FileSystem fileSystem = FileSystem.get(new Configuration());
        infoService
                .tableMetaList()
                .collect(TableMeta::getHudi)
                .collect(TableMeta.HudiMeta::getTargetHdfsPath)
                // Scan tables in parallel on the shared 20-thread executor.
                .asParallel(ExecutorProvider.EXECUTORS_20, 1)
                .forEach(hdfs -> {
                    // Count plain files directly under each table's .hoodie directory.
                    Path root = new Path(hdfs, ".hoodie");
                    try {
                        FileStatus[] statuses = fileSystem.listStatus(root);
                        long num = 0;
                        for (FileStatus status : statuses) {
                            if (status.isFile()) {
                                num++;
                            }
                            // Surface suspicious entries (paths containing "INVALID") in the log.
                            if (StrUtil.containsIgnoreCase(status.getPath().toString(), "INVALID")) {
                                logger.info("{}", status.getPath().toString());
                            }
                        }
                        list.add(new P(num, hdfs));
                        logger.info("Count: {} Hdfs: {}", num, hdfs);
                    } catch (IOException e) {
                        // Best effort: one unlistable table must not abort the whole scan.
                        logger.warn("List file error", e);
                    }
                });
        // Report only tables with an unusually large meta-file count (> 1000).
        MutableList<P> listP = list.select(p -> p.count > 1000);
        for (P maxP : listP) {
            logger.info("Max: {} Hdfs: {}", maxP.count, maxP.hdfs);
        }
        // NOTE(review): not in a finally block — the FileSystem leaks if the scan throws.
        fileSystem.close();
    }
@ShellMethod("Get timeline instants")
public void timelineInstant(@ShellOption(help = "root hdfs path") String hdfs) {
hudiService.timelineHdfsAllActive(hdfs).forEach(instant -> logger.info(instant.toString()));
@@ -229,4 +264,14 @@ public class HudiCommand {
public interface Runnable {
void run(LongAdder counter);
}
/** Simple (count, hdfs-path) pair used to rank tables by meta-file count. */
    private static final class P {
        // HDFS root path of the scanned table.
        String hdfs;
        // Number of plain files directly under the table's .hoodie directory.
        long count;

        public P(long count, String hdfs) {
            this.count = count;
            this.hdfs = hdfs;
        }
    }
}

View File

@@ -128,6 +128,12 @@ public interface Constants {
String METRICS_PULSAR_PREFIX = METRICS_PREFIX + "_pulsar";
String METRICS_PULSAR_BACKLOG = METRICS_PULSAR_PREFIX + "_backlog";
String METRICS_HUDI_TABLE = METRICS_PREFIX + "_hudi_table";
String METRICS_HUDI_TABLE_FILE_COUNT = METRICS_HUDI_TABLE + "_file_count";
String METRICS_HUDI_TABLE_FILE_COUNT_AVERAGE_PER_TABLE = METRICS_HUDI_TABLE_FILE_COUNT + "_average_per_table";
String METRICS_HUDI_TABLE_TIMELINE_FILE_COUNT = METRICS_HUDI_TABLE + "_timeline_file_count";
String METRICS_HUDI_TABLE_TIMELINE_FILE_COUNT_AVERAGE_PER_TABLE = METRICS_HUDI_TABLE_TIMELINE_FILE_COUNT + "_average_per_table";
String METRICS_LABEL_FLINK_JOB_ID = "flink_job_id";
String METRICS_LABEL_FLINK_JOB_NAME = "flink_job_name";
String METRICS_LABEL_FLINK_NATIVE_JOB_ID = "flink_native_job_id";
@@ -221,12 +227,10 @@ public interface Constants {
String COMPACTION_QUEUE_PRE = "compaction-queue-pre";
String COMPACTION_QUEUE_B1 = "compaction-queue-b1";
String COMPACTION_QUEUE_B5 = "compaction-queue-b5";
String COMPACTION_QUEUE_A4 = "compaction-queue-a4";
String COMPACTION_QUEUE_B12 = "compaction-queue-b12";
String CLUSTER_B1 = "b1";
String CLUSTER_B5 = "b5";
String CLUSTER_A4 = "a4";
String CLUSTER_B12 = "b12";

View File

@@ -0,0 +1,55 @@
package com.lanyuanxiaoyao.service.configuration.entity.monitor;
/**
 * Progress of one named metrics job: whether it is currently running and how
 * far along it is.
 *
 * @author lanyuanxiaoyao
 * @date 2024-10-14
 */
public class MetricsProgress {
    private String name;
    private Boolean running;
    private Double progress;

    /** No-arg constructor kept for bean-style instantiation. */
    public MetricsProgress() {
    }

    public MetricsProgress(String name, Boolean running, Double progress) {
        this.name = name;
        this.running = running;
        this.progress = progress;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Boolean getRunning() {
        return running;
    }

    public void setRunning(Boolean running) {
        this.running = running;
    }

    public Double getProgress() {
        return progress;
    }

    public void setProgress(Double progress) {
        this.progress = progress;
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("MetricsProgress{");
        out.append("name='").append(name).append('\'');
        out.append(", running=").append(running);
        out.append(", progress=").append(progress);
        return out.append('}').toString();
    }
}

View File

@@ -22,7 +22,6 @@ public class YarnClusters {
MapUtil.<String, Cluster>builder()
.put("a4", new Cluster("http://132.121.107.91:8088"))
.put("b1", new Cluster("http://132.122.98.13:8088"))
.put("b5", new Cluster("http://132.122.116.12:8088"))
.put("b12", new Cluster("http://132.126.207.125:8088"))
.build()
);

View File

@@ -106,5 +106,14 @@ public interface HudiService {
InputStream download(@Query("root") String root);
// Total byte size under the given HDFS root.
// Fix: the merged text declared size twice (String and Long return types),
// which does not compile; the Long form matches count/fileCount below.
@Get("/hdfs/size")
Long size(@Query("root") String root);
@Get("/hdfs/count")
Long count(@Query("root") String root);
@Get("/hdfs/file_count")
Long fileCount(@Query("root") String root);
@Get("/hdfs/directory_count")
Long directoryCount(@Query("root") String root);
}

View File

@@ -0,0 +1,18 @@
package com.lanyuanxiaoyao.service.forest.service;
import com.dtflys.forest.annotation.BaseRequest;
import com.dtflys.forest.annotation.Get;
import com.lanyuanxiaoyao.service.configuration.entity.monitor.MetricsProgress;
import org.eclipse.collections.api.list.ImmutableList;
/**
* 监控指标查询
*
* @author lanyuanxiaoyao
* @date 2024-10-14
*/
@BaseRequest(baseURL = "http://service-monitor")
public interface MonitorService {
    // Fetches the run progress of every metrics collector exposed by the
    // monitor service's /metrics_control/progress endpoint.
    @Get("/metrics_control/progress")
    ImmutableList<MetricsProgress> progress();
}

View File

@@ -1,12 +0,0 @@
package com.lanyuanxiaoyao.service.forest.service.launcher.impl;
import com.dtflys.forest.annotation.BaseRequest;
import com.lanyuanxiaoyao.service.forest.service.launcher.LauncherService;
/**
* @author lanyuanxiaoyao
* @date 2023-06-06
*/
@BaseRequest(baseURL = "http://service-launcher-b5")
public interface B5LauncherService extends LauncherService {
}

View File

@@ -72,10 +72,6 @@
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-flink${flink.major.version}-bundle</artifactId>
</dependency>
<dependency>
<groupId>io.juicefs</groupId>
<artifactId>juicefs-hadoop</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>

View File

@@ -82,4 +82,19 @@ public class HdfsController {
public Long size(@RequestParam("root") String root) throws IOException {
return hdfsService.size(root);
}
// Total number of entries (files + directories) under the given HDFS root.
@GetMapping("count")
public Long count(@RequestParam("root") String root) throws IOException {
    return hdfsService.count(root);
}

// Number of files (directories excluded) under the given HDFS root.
@GetMapping("file_count")
public Long fileCount(@RequestParam("root") String root) throws IOException {
    return hdfsService.countFiles(root);
}
// Number of directories under the given HDFS root.
// Renamed from DirectoryCount to follow Java lowerCamelCase; the HTTP route
// ("directory_count") is unchanged, so external callers are unaffected.
@GetMapping("directory_count")
public Long directoryCount(@RequestParam("root") String root) throws IOException {
    return hdfsService.countDirectories(root);
}
}

View File

@@ -8,6 +8,7 @@ import com.lanyuanxiaoyao.service.forest.service.InfoService;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -160,4 +161,26 @@ public class HdfsService {
return fileSystem.getContentSummary(new Path(root)).getLength();
}
}
// Cached total entry count (directories plus files) under an HDFS path.
// NOTE(review): try-with-resources closes the FileSystem returned by
// FileSystem.get(), which is a shared cached instance unless
// fs.hdfs.impl.disable.cache is set — confirm the cache is disabled here.
@Cacheable(value = "count-hpath", sync = true)
public Long count(String root) throws IOException {
    try (FileSystem fs = FileSystem.get(new Configuration())) {
        ContentSummary summary = fs.getContentSummary(new Path(root));
        return summary.getDirectoryCount() + summary.getFileCount();
    }
}
// Cached file-only count under an HDFS path.
@Cacheable(value = "file-count-hpath", sync = true)
public Long countFiles(String root) throws IOException {
    try (FileSystem fs = FileSystem.get(new Configuration())) {
        Path path = new Path(root);
        return fs.getContentSummary(path).getFileCount();
    }
}
// Cached directory-only count under an HDFS path.
@Cacheable(value = "directory-count-hpath", sync = true)
public Long countDirectories(String root) throws IOException {
    try (FileSystem fs = FileSystem.get(new Configuration())) {
        Path path = new Path(root);
        return fs.getContentSummary(path).getDirectoryCount();
    }
}
}

View File

@@ -174,6 +174,11 @@ public class ExecutorService {
}
/**
 * Propagates one environment variable to both the Flink job master and task
 * manager containers. Null values are skipped (with a warning) instead of
 * being written into the configuration.
 */
private void setEnvironment(Configuration configuration, String key, String value) {
    // Check for null before the info log so we do not announce
    // "Setting environment variable X = null" for a variable that is never set.
    if (ObjectUtil.isNull(value)) {
        logger.warn("Environment variable {} is null", key);
        return;
    }
    logger.info("Setting environment variable {} = {}", key, value);
    configuration.setString(ResourceManagerOptions.CONTAINERIZED_MASTER_ENV_PREFIX + key, value);
    configuration.setString(ResourceManagerOptions.CONTAINERIZED_TASK_MANAGER_ENV_PREFIX + key, value);
}

View File

@@ -0,0 +1,40 @@
package com.lanyuanxiaoyao.service.monitor.controller;
import com.lanyuanxiaoyao.service.configuration.entity.monitor.MetricsProgress;
import com.lanyuanxiaoyao.service.monitor.metric.Metrics;
import java.util.Map;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* 操作进度
*
* @author lanyuanxiaoyao
* @date 2024-10-14
*/
@RestController
@RequestMapping("metrics_control")
public class MetricsController {
    // NOTE(review): declared but currently unused in this class.
    private static final Logger logger = LoggerFactory.getLogger(MetricsController.class);
    // Spring context, used to discover every registered Metrics bean.
    private final ApplicationContext context;

    public MetricsController(ApplicationContext context) {
        this.context = context;
    }

    // Returns the run progress of every Metrics bean, sorted by bean name so
    // the output order is stable across calls.
    @GetMapping("progress")
    public ImmutableList<MetricsProgress> progress() {
        Map<String, Metrics> metricsMap = context.getBeansOfType(Metrics.class);
        return Lists.immutable.ofAll(metricsMap.entrySet())
            .toImmutableSortedList(Map.Entry.comparingByKey())
            .collect(Map.Entry::getValue)
            .collect(metrics -> new MetricsProgress(metrics.name(), metrics.running(), metrics.progress()));
    }
}

View File

@@ -0,0 +1,111 @@
package com.lanyuanxiaoyao.service.monitor.metric;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.configuration.ExecutorProvider;
import com.lanyuanxiaoyao.service.forest.service.HudiService;
import com.lanyuanxiaoyao.service.forest.service.InfoService;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.factory.Maps;
import org.eclipse.collections.api.list.ImmutableList;
import org.eclipse.collections.api.map.MutableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import static com.lanyuanxiaoyao.service.common.Constants.MINUTE;
/**
* Hudi表相关指标
*
* @author lanyuanxiaoyao
* @date 2024-03-05
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Service
public class HudiTableFilesCountMetrics extends Metrics {
    private static final Logger logger = LoggerFactory.getLogger(HudiTableFilesCountMetrics.class);
    // Micrometer registry the per-table gauges are registered into.
    private final MeterRegistry registry;
    // Source of the table metadata list.
    private final InfoService infoService;
    // Used to test table existence and count files over HDFS.
    private final HudiService hudiService;
    // Gauge state per table alias for data-file counts; the AtomicLong returned
    // by registry.gauge is cached so later runs update the same gauge.
    private final MutableMap<String, AtomicLong> fileCountCacheMap;
    // Gauge state per table alias for ".hoodie" timeline file counts.
    private final MutableMap<String, AtomicLong> timelineFileCountCacheMap;

    public HudiTableFilesCountMetrics(MeterRegistry registry, InfoService infoService, HudiService hudiService) {
        this.registry = registry;
        this.infoService = infoService;
        this.hudiService = hudiService;
        fileCountCacheMap = Maps.mutable.empty();
        timelineFileCountCacheMap = Maps.mutable.empty();
    }

    @Override
    public String name() {
        return "Hudi表文件数量监控";
    }

    // Refreshes both gauges for every table; runs every 30 minutes, first run
    // one minute after startup.
    @Scheduled(fixedDelay = 30 * MINUTE, initialDelay = MINUTE)
    @Override
    public void update() {
        try {
            start();
            ImmutableList<TableMeta> metas = infoService.tableMetaList();
            setTotal(metas.size());
            metas
                .asParallel(ExecutorProvider.EXECUTORS_2, 1)
                // NOTE(review): skips tables without a pulsar address — this
                // mirrors PulsarBacklogMetrics; confirm such tables should also
                // be excluded from file-count monitoring.
                .reject(meta -> StrUtil.isBlank(meta.getPulsarAddress()))
                .forEach(meta -> {
                    try {
                        AtomicLong filecountCache = fileCountCacheMap.getIfAbsentPut(
                            meta.getAlias(),
                            registry.gauge(
                                Constants.METRICS_HUDI_TABLE_FILE_COUNT,
                                Lists.immutable.of(
                                    Tag.of(Constants.METRICS_LABEL_FLINK_JOB_ID, meta.getJob().getId().toString()),
                                    Tag.of(Constants.METRICS_LABEL_ALIAS, meta.getAlias()),
                                    Tag.of(Constants.METRICS_LABEL_SCHEMA, meta.getSchema()),
                                    Tag.of(Constants.METRICS_LABEL_TABLE, meta.getTable())
                                ),
                                new AtomicLong(0)
                            )
                        );
                        AtomicLong timelineFileCountCache = timelineFileCountCacheMap.getIfAbsentPut(
                            meta.getAlias(),
                            registry.gauge(
                                Constants.METRICS_HUDI_TABLE_TIMELINE_FILE_COUNT,
                                Lists.immutable.of(
                                    Tag.of(Constants.METRICS_LABEL_FLINK_JOB_ID, meta.getJob().getId().toString()),
                                    Tag.of(Constants.METRICS_LABEL_ALIAS, meta.getAlias()),
                                    Tag.of(Constants.METRICS_LABEL_SCHEMA, meta.getSchema()),
                                    Tag.of(Constants.METRICS_LABEL_TABLE, meta.getTable())
                                ),
                                new AtomicLong(0)
                            )
                        );
                        String hdfs = meta.getHudi().getTargetHdfsPath();
                        if (hudiService.existsHudiTable(hdfs)) {
                            Long count = hudiService.fileCount(hdfs);
                            filecountCache.set(count);
                            // Timeline files live under the table's ".hoodie" directory.
                            String timelineHdfs = hdfs + "/.hoodie";
                            timelineFileCountCache.set(hudiService.fileCount(timelineHdfs));
                        }
                    } catch (Exception exception) {
                        logger.warn("Get file count fail for {}", meta.getAlias(), exception);
                    }
                    // Progress advances even when a table failed, so progress() can reach 1.0.
                    finished();
                });
        } finally {
            // Clears running/total/finished, so progress() reads 0 between runs.
            reset();
        }
    }
}

View File

@@ -1,9 +1,56 @@
package com.lanyuanxiaoyao.service.monitor.metric;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
/**
* @author lanyuanxiaoyao
* @date 2024-03-05
*/
public abstract class Metrics {
    // Fix: the merged text kept a stale package-private "abstract void update();"
    // alongside the public one below — conflicting duplicate declarations.
    // Only the public declaration is retained.

    // True while update() is executing.
    private final AtomicBoolean running = new AtomicBoolean(false);
    // Number of items processed so far in the current run.
    private final AtomicLong finished = new AtomicLong(0);
    // Total number of items in the current run (0 when idle).
    private final AtomicLong total = new AtomicLong(0);

    /** Human-readable name of this metrics collector. */
    public abstract String name();

    /** Collects/refreshes the metric values; subclasses drive the scheduling. */
    public abstract void update();

    /** Whether an update run is currently in progress. */
    public boolean running() {
        return running.get();
    }

    /**
     * Completion ratio of the current run, 0.0 - 1.0.
     * Returns 0 when total is 0 (idle or just reset) to avoid division by zero.
     */
    public double progress() {
        if (total.get() == 0) {
            return 0;
        } else {
            return finished.get() * 1.0 / total.get();
        }
    }

    /** Marks the start of an update run. */
    protected void start() {
        running.set(true);
    }

    /** Marks the end of an update run. */
    protected void stop() {
        running.set(false);
    }

    protected void setTotal(Long total) {
        this.total.set(total);
    }

    protected void setTotal(Integer total) {
        this.total.set(total);
    }

    /** Counts one item as processed. */
    protected void finished() {
        finished.incrementAndGet();
    }

    /**
     * Stops the run and zeroes total/finished; note progress() reads 0 after
     * this, so a completed run does not linger at 1.0.
     */
    protected void reset() {
        stop();
        setTotal(0);
        finished.set(0);
    }
}

View File

@@ -0,0 +1,97 @@
package com.lanyuanxiaoyao.service.monitor.metric;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.utils.NameHelper;
import com.lanyuanxiaoyao.service.configuration.ExecutorProvider;
import com.lanyuanxiaoyao.service.configuration.HudiServiceProperties;
import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.forest.service.PulsarService;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.factory.Maps;
import org.eclipse.collections.api.list.ImmutableList;
import org.eclipse.collections.api.map.MutableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import static com.lanyuanxiaoyao.service.common.Constants.MINUTE;
/**
* Pulsar
*
* @author lanyuanxiaoyao
* @date 2024-03-05
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Service
public class PulsarBacklogMetrics extends Metrics {
    private static final Logger logger = LoggerFactory.getLogger(PulsarBacklogMetrics.class);
    // Micrometer registry the per-table backlog gauges are registered into.
    private final MeterRegistry registry;
    // Source of the table metadata list; also persists backlog snapshots.
    private final InfoService infoService;
    // Resolves pulsar cluster names and reads subscription backlogs.
    private final PulsarService pulsarService;
    // Supplies the signature used to derive the pulsar subscription name.
    private final HudiServiceProperties hudiServiceProperties;
    // Gauge state per table alias; the AtomicLong returned by registry.gauge
    // is cached so later runs update the same gauge.
    private final MutableMap<String, AtomicLong> backlogMap;

    public PulsarBacklogMetrics(MeterRegistry registry, InfoService infoService, PulsarService pulsarService, HudiServiceProperties hudiServiceProperties) {
        this.registry = registry;
        this.infoService = infoService;
        this.pulsarService = pulsarService;
        this.hudiServiceProperties = hudiServiceProperties;
        backlogMap = Maps.mutable.empty();
    }

    @Override
    public String name() {
        return "Pulsar backlog监控";
    }

    // Refreshes the backlog gauge for every table; runs every 30 minutes,
    // first run one minute after startup.
    @Scheduled(fixedDelay = 30 * MINUTE, initialDelay = MINUTE)
    @Override
    public void update() {
        try {
            start();
            ImmutableList<TableMeta> metas = infoService.tableMetaList();
            setTotal(metas.size());
            metas
                .asParallel(ExecutorProvider.EXECUTORS_2, 1)
                // Tables without a pulsar address have no backlog to read.
                .reject(meta -> StrUtil.isBlank(meta.getPulsarAddress()))
                .forEach(meta -> {
                    try {
                        AtomicLong backlogCache = backlogMap.getIfAbsentPut(
                            meta.getAlias(),
                            registry.gauge(
                                Constants.METRICS_PULSAR_BACKLOG,
                                Lists.immutable.of(
                                    Tag.of(Constants.METRICS_LABEL_FLINK_JOB_ID, meta.getJob().getId().toString()),
                                    Tag.of(Constants.METRICS_LABEL_ALIAS, meta.getAlias()),
                                    Tag.of(Constants.METRICS_LABEL_SCHEMA, meta.getSchema()),
                                    Tag.of(Constants.METRICS_LABEL_TABLE, meta.getTable())
                                ),
                                new AtomicLong(0)
                            )
                        );
                        String name = pulsarService.name(meta.getPulsarAddress());
                        if (StrUtil.isNotBlank(name)) {
                            Long backlog = pulsarService.backlog(name, meta.getTopic(), NameHelper.pulsarSubscriptionName(meta.getJob().getId(), meta.getAlias(), hudiServiceProperties.getSignature()));
                            backlogCache.set(backlog);
                            // Snapshot is also persisted for later querying.
                            infoService.savePulsarBacklog(meta.getJob().getId(), meta.getAlias(), backlog);
                        }
                    } catch (Exception exception) {
                        logger.warn("Update pulsar backlog fail for {}", meta.getAlias(), exception);
                    }
                    // Progress advances even when a table failed, so progress() can reach 1.0.
                    finished();
                });
        } finally {
            // Clears running/total/finished, so progress() reads 0 between runs.
            reset();
        }
    }
}

View File

@@ -1,82 +0,0 @@
package com.lanyuanxiaoyao.service.monitor.metric;
import cn.hutool.core.util.StrUtil;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.utils.NameHelper;
import com.lanyuanxiaoyao.service.configuration.ExecutorProvider;
import com.lanyuanxiaoyao.service.configuration.HudiServiceProperties;
import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.forest.service.PulsarService;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.factory.Maps;
import org.eclipse.collections.api.map.MutableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import static com.lanyuanxiaoyao.service.common.Constants.MINUTE;
/**
* Pulsar
*
* @author lanyuanxiaoyao
* @date 2024-03-05
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Service
public class PulsarMetrics extends Metrics {
private static final Logger logger = LoggerFactory.getLogger(PulsarMetrics.class);
private final MeterRegistry registry;
private final InfoService infoService;
private final PulsarService pulsarService;
private final HudiServiceProperties hudiServiceProperties;
private final MutableMap<String, AtomicLong> backlogMap;
public PulsarMetrics(MeterRegistry registry, InfoService infoService, PulsarService pulsarService, HudiServiceProperties hudiServiceProperties) {
this.registry = registry;
this.infoService = infoService;
this.pulsarService = pulsarService;
this.hudiServiceProperties = hudiServiceProperties;
backlogMap = Maps.mutable.empty();
}
@Scheduled(fixedDelay = 30 * MINUTE, initialDelay = MINUTE)
@Override
void update() {
infoService.tableMetaList()
.asParallel(ExecutorProvider.EXECUTORS_2, 1)
.reject(meta -> StrUtil.isBlank(meta.getPulsarAddress()))
.forEach(meta -> {
try {
AtomicLong backlogCache = backlogMap.getIfAbsentPut(
meta.getAlias(),
registry.gauge(
Constants.METRICS_PULSAR_BACKLOG,
Lists.immutable.of(
Tag.of(Constants.METRICS_LABEL_FLINK_JOB_ID, meta.getJob().getId().toString()),
Tag.of(Constants.METRICS_LABEL_ALIAS, meta.getAlias()),
Tag.of(Constants.METRICS_LABEL_SCHEMA, meta.getSchema()),
Tag.of(Constants.METRICS_LABEL_TABLE, meta.getTable())
),
new AtomicLong(0)
)
);
String name = pulsarService.name(meta.getPulsarAddress());
if (StrUtil.isNotBlank(name)) {
Long backlog = pulsarService.backlog(name, meta.getTopic(), NameHelper.pulsarSubscriptionName(meta.getJob().getId(), meta.getAlias(), hudiServiceProperties.getSignature()));
backlogCache.set(backlog);
infoService.savePulsarBacklog(meta.getJob().getId(), meta.getAlias(), backlog);
}
} catch (Exception exception) {
logger.warn("Update pulsar backlog fail for " + meta.getAlias(), exception);
}
});
}
}

View File

@@ -27,6 +27,10 @@ public class ScheduleStrategyProvider {
ScheduleStrategyImpl.simple(false, "distribute_schedule", "定时分布式调度", DistributeScheduleJob.class, "0/2 * * * * ?"),
// 普通调度
ScheduleStrategyImpl.simple("daily_schedule", "普通全表调度", DailyScheduleJob.class, "0 50 1,4,7,10,13,16,19 * * ?"),
// 普通调度20240925不调度11点、14点
// ScheduleStrategyImpl.simple("daily_schedule", "普通全表调度", DailyScheduleJob.class, "0 50 1,4,7,16,19 * * ?"),
// 普通调度20240925不调度8点、11点、14点
// ScheduleStrategyImpl.simple("daily_schedule", "普通全表调度", DailyScheduleJob.class, "0 50 1,4,16,19 * * ?"),
// 重点表调度
ScheduleStrategyImpl.simple("focus_evening_schedule", "晚间重点表调度", FocusScheduleJob.class, "0 50 20,21,22 * * ?"),
// ODS重点表调度

View File

@@ -70,7 +70,7 @@ public class DistributeScheduleJob extends BaseScheduleJob {
if (cluster.isPresent() && cluster.get().available(metadata)) {
return cluster.get().queue();
} else {
logger.warn(StrUtil.format("{} cluster not found or busy"));
logger.warn(StrUtil.format("{} cluster not found or busy", recommendCluster));
}
}
for (Cluster cluster : clusters) {

View File

@@ -3,6 +3,7 @@ package com.lanyuanxiaoyao.service.scheduler.quartz.distribute.cluster;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.forest.service.YarnService;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.AvailableStrategy;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.DatetimeLimit;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.QueueSizeLimit;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.YarnQueueUsedLimit;
import org.springframework.cloud.client.discovery.DiscoveryClient;
@@ -23,7 +24,8 @@ public class A4Cluster extends Cluster {
Constants.COMPACTION_QUEUE_A4,
AvailableStrategy.and(
new QueueSizeLimit(client, Constants.COMPACTION_QUEUE_A4, 10),
// Fix: the merged text kept the old comma-less YarnQueueUsedLimit line and
// the new one side by side (duplicate constructor arguments); keep the new pair.
new YarnQueueUsedLimit(yarnService, Constants.CLUSTER_A4, "ten_iap.datalake", 0.8),
// NOTE(review): DatetimeLimit(false, ...) presumably restricts A4 to the
// 07:00-22:59 window — confirm the boolean flag's semantics.
new DatetimeLimit(false, "* * 7-22 * * ?")
)
);
}

View File

@@ -22,8 +22,8 @@ public class B12Cluster extends Cluster {
Constants.CLUSTER_B12,
Constants.COMPACTION_QUEUE_B12,
AvailableStrategy.and(
// Fix: the merged text kept both the old limits (20 / 0.9) and the new ones
// (50 / 1.0) as duplicate constructor arguments; keep the new values.
new QueueSizeLimit(client, Constants.COMPACTION_QUEUE_B12, 50),
new YarnQueueUsedLimit(yarnService, Constants.CLUSTER_B12, "default", 1.0)
)
);
}

View File

@@ -3,6 +3,7 @@ package com.lanyuanxiaoyao.service.scheduler.quartz.distribute.cluster;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.forest.service.YarnService;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.AvailableStrategy;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.DatetimeLimit;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.QueueSizeLimit;
import com.lanyuanxiaoyao.service.scheduler.quartz.distribute.strategy.YarnQueueUsedLimit;
import org.springframework.cloud.client.discovery.DiscoveryClient;
@@ -23,7 +24,8 @@ public class B1Cluster extends Cluster {
Constants.COMPACTION_QUEUE_B1,
AvailableStrategy.and(
new QueueSizeLimit(client, Constants.COMPACTION_QUEUE_B1, 20),
// Fix: the merged text kept the old comma-less YarnQueueUsedLimit line and
// the new one side by side (duplicate constructor arguments); keep the new pair.
new YarnQueueUsedLimit(yarnService, Constants.CLUSTER_B1, "datalake", 1.0),
// NOTE(review): DatetimeLimit(false, ...) presumably restricts B1 to the
// 07:00-22:59 window — confirm the boolean flag's semantics.
new DatetimeLimit(false, "* * 7-22 * * ?")
)
);
}

View File

@@ -1,28 +0,0 @@
package com.lanyuanxiaoyao.service.scheduler.quartz.distribute.cluster;
import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.forest.service.YarnService;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.stereotype.Component;
/**
* B5
*
* @author lanyuanxiaoyao
* @date 2023-06-08
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Component
public class B5Cluster extends Cluster {
public B5Cluster(DiscoveryClient client, YarnService yarnService) {
super(
Constants.CLUSTER_B5,
Constants.COMPACTION_QUEUE_B5,
/* AvailableStrategy.and(
new QueueSizeLimit(client, Constants.COMPACTION_QUEUE_B5, 10),
new YarnQueueUsedLimit(yarnService, Constants.CLUSTER_B5, "ten_iap.datalake", 0.9)
) */
metadata -> false
);
}
}

View File

@@ -117,7 +117,7 @@ public class ScheduleHelper {
// 统一在这里覆盖特定请求
// CRM重点表独占A4集群
if (TagsHelper.existsTag(meta.getTags(), Constants.TAGS_CRM_FOCUS)) {
finalMetadata.put(Constants.SCHEDULE_FORCE, Constants.CLUSTER_A4);
finalMetadata.put(Constants.SCHEDULE_RECOMMEND, Constants.CLUSTER_A4);
} else {
finalMetadata.put(Constants.SCHEDULE_ESCAPE, Constants.CLUSTER_A4);
}

90
service-web/client/.gitignore vendored Normal file
View File

@@ -0,0 +1,90 @@
.idea/**
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.idea/**/aws.xml
.idea/**/contentModel.xml
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
.idea/**/gradle.xml
.idea/**/libraries
cmake-build-*/
.idea/**/mongoSettings.xml
*.iws
out/
.idea_modules/
atlassian-ide-plugin.xml
.idea/replstate.xml
.idea/sonarlint/
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
.idea/httpRequests
.idea/caches/build_file_checksums.ser
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
pids
*.pid
*.seed
*.pid.lock
lib-cov
coverage
*.lcov
.nyc_output
.grunt
bower_components
.lock-wscript
build/Release
node_modules/
jspm_packages/
web_modules/
*.tsbuildinfo
.npm
.eslintcache
.stylelintcache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
.node_repl_history
*.tgz
.yarn-integrity
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.cache
.parcel-cache
.next
out
.nuxt
dist
.cache/
.vuepress/dist
.temp
.docusaurus
.serverless/
.fusebox/
.dynamodb/
.tern-port
.vscode-test
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

View File

@@ -0,0 +1,22 @@
<!doctype html>
<html lang="zh">
<head>
<meta charset="UTF-8"/>
<link rel="icon" href="icon.png"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<title>Hudi 服务总台</title>
<style>
html, body, #root {
position: absolute;
width: 100%;
height: 100%;
margin: 0;
padding: 0;
}
</style>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/index.tsx"></script>
</body>
</html>

View File

@@ -0,0 +1,39 @@
{
"name": "hudi-service-web-client",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"preview": "vite preview"
},
"dependencies": {
"@ant-design/icons": "^6.0.0",
"@ant-design/pro-components": "^2.8.7",
"@ant-design/x": "^1.2.0",
"@echofly/fetch-event-source": "^3.0.2",
"@fortawesome/fontawesome-free": "^6.7.2",
"@tinyflow-ai/react": "^0.1.6",
"amis": "^6.12.0",
"antd": "^5.25.0",
"axios": "^1.9.0",
"licia": "^1.48.0",
"markdown-it": "^14.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-router": "^7.5.3",
"styled-components": "^6.1.18"
},
"devDependencies": {
"@types/markdown-it": "^14.1.2",
"@types/react": "^18.2.0",
"@types/react-dom": "^18.2.0",
"@vitejs/plugin-react-swc": "^3.9.0",
"globals": "^16.0.0",
"sass": "^1.87.0",
"typescript": "~5.8.3",
"typescript-eslint": "^8.30.1",
"vite": "^6.3.5"
}
}

6730
service-web/client/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

View File

@@ -0,0 +1,10 @@
import {createRoot} from 'react-dom/client'
import {createHashRouter, RouterProvider} from 'react-router'
// Side-effect import — presumably registers components; see ./components/Registry.ts.
import './components/Registry.ts'
import {routes} from './route.tsx'

// Mount the app under a hash router (no server-side route support required).
createRoot(document.getElementById('root')!).render(
<RouterProvider router={createHashRouter(routes)}/>,
)

View File

@@ -0,0 +1,42 @@
import {ProLayout} from '@ant-design/pro-components'
import React, {useEffect} from 'react'
import {Outlet, useLocation, useNavigate} from 'react-router'
import {menus} from '../route.tsx'
// Top-level layout shell: dark ProLayout header with the route tree as its
// menu; matched child routes render into <Outlet/>.
const App: React.FC = () => {
const navigate = useNavigate()
const location = useLocation()
return (
<ProLayout
token={{
header: {
colorBgHeader: '#292f33',
colorHeaderTitle: '#ffffff',
colorTextMenu: '#dfdfdf',
colorTextMenuSecondary: '#dfdfdf',
colorTextMenuSelected: '#ffffff',
colorTextMenuActive: '#ffffff',
colorBgMenuItemSelected: '#22272b',
colorTextRightActionsItem: '#dfdfdf',
},
}}
logo={<img src="icon.png" alt="logo"/>}
title="Hudi 服务总台"
route={menus}
location={{pathname: location.pathname}}
menu={{type: 'sub'}}
{/* Menu items navigate client-side; fall back to '/' when a node has no path. */}
menuItemRender={(item, dom) => {
return <div onClick={() => navigate(item.path || '/')}>{dom}</div>
}}
fixSiderbar={true}
layout="mix"
splitMenus={true}
style={{minHeight: '100vh'}}
contentStyle={{backgroundColor: 'white', padding: '10px 10px 10px 20px'}}
>
<Outlet/>
</ProLayout>
)
}
export default App

View File

@@ -0,0 +1,193 @@
import {ClearOutlined, UserOutlined} from '@ant-design/icons'
import {Bubble, Sender, useXAgent, useXChat, Welcome} from '@ant-design/x'
import {fetchEventSource} from '@echofly/fetch-event-source'
import {Button, Divider, Flex, Switch, Tooltip, Typography} from 'antd'
import markdownIt from 'markdown-it'
import {useRef, useState} from 'react'
import styled from 'styled-components'
// Markdown renderer for assistant replies. NOTE(review): html:true passes raw
// HTML through and the output is injected via dangerouslySetInnerHTML below —
// model output can inject markup; consider sanitizing.
const md = markdownIt({html: true, breaks: true})

// Full-height flex column: welcome/message list on top, sender pinned below.
// The <think> block styling renders the model's reasoning in muted gray.
const ConversationDiv = styled.div`
height: calc(100vh - 76px);
display: flex;
flex-direction: column;
padding: 10px;
.conversation-welcome {
flex: 1;
width: 70%;
margin: 30px auto 30px;
}
.conversation-list {
flex: 1;
margin-bottom: 30px;
padding-left: 30px;
padding-right: 30px;
think {
color: gray;
display: block;
border-left: 3px solid;
padding-left: 5px;
margin-bottom: 10px;
white-space: pre-line;
}
}
.conversation-sender {
height: 100px;
padding-left: 30px;
padding-right: 30px;
}
`
// AI chat view: streams assistant replies over SSE, renders markdown (with an
// optional <think> reasoning block), and offers a think-mode toggle plus a
// clear-conversation action.
function Conversation() {
const abortController = useRef<AbortController | null>(null)
const [input, setInput] = useState<string>('')
// Think mode can only be chosen before the first message is sent.
const [think, setThink] = useState<boolean>(true)
const [agent] = useXAgent<{ role: string, content: string }>({
request: async (info, callbacks) => {
// NOTE(review): hard-coded localhost URL and Basic credentials embedded in
// client code — anyone with the bundle can read them; move to config/auth flow.
await fetchEventSource('http://127.0.0.1:8080/chat/async', {
method: 'POST',
headers: {
'Authorization': 'Basic QXhoRWJzY3dzSkRiWU1IMjpjWXhnM2I0UHRXb1ZENVNqRmF5V3h0blNWc2p6UnNnNA==',
'Content-Type': 'application/json',
},
body: JSON.stringify(info.messages),
signal: abortController.current?.signal,
onmessage: ev => {
// NOTE(review): debug logging of every SSE event left in place.
console.log(ev)
callbacks.onUpdate({
id: ev.id,
event: 'delta',
data: ev.data,
})
},
onclose: () => callbacks.onSuccess([]),
})
},
})
const {onRequest, messages, setMessages} = useXChat({
agent,
// Appends each streamed chunk to the accumulated assistant message.
transformMessage: ({originMessage, chunk}) => {
let text = ''
try {
if (chunk?.data) {
text = chunk.data
}
} catch (error) {
console.error(error)
}
return {
content: (originMessage?.content || '') + text,
role: 'assistant',
}
},
resolveAbortController: controller => {
abortController.current = controller
},
})
return (
<ConversationDiv>
{messages.length > 0
? (<Bubble.List
className="conversation-list"
roles={{
assistant: {
placement: 'start',
avatar: {
icon: <img src="icon.png" alt=""/>,
style: {
background: 'transparent',
},
},
{/* Everything after </think> is markdown; the reasoning part before it
is kept verbatim and styled by the CSS <think> rules.
NOTE(review): dangerouslySetInnerHTML renders unsanitized model
output — XSS risk, see md setup above. */}
messageRender: content => {
let split = content.split('</think>')
if (split.length > 1) {
content = `${split[0]}</think>${md.render(split[1])}`
}
return (
<Typography>
<div dangerouslySetInnerHTML={{__html: content}}/>
</Typography>
)
},
},
user: {
placement: 'end',
avatar: {
icon: <UserOutlined/>,
},
},
}}
items={messages.map(({id, message}) => ({
key: id,
...message,
}))}
/>)
: (<div className="conversation-welcome">
<Welcome
variant="borderless"
icon={<img src="icon.png" alt="icon"/>}
title="你好,我是基于大模型深度思考技术构建的 AI 运营助手"
description="我可以帮你查询、检索Hudi 服务的运行情况,分析、处理 Hudi 服务的运营故障,输出、解读 Hudi 系统整体运营报告"
/>
</div>)}
<div className="conversation-sender">
<Sender
value={input}
onChange={setInput}
onSubmit={message => {
onRequest({
message: {
role: 'user',
{/* "/no_think" prefix disables model reasoning; only applied on the
first message of a conversation. */}
content: (!think && messages.length === 0) ? `/no_think ${message}` : message,
},
stream: true,
})
setInput('')
}}
onCancel={() => abortController.current?.abort()}
footer={({components}) => {
const {SendButton, LoadingButton} = components
return (
<Flex justify="space-between" align="center">
<Flex gap="small" align="center">
<Switch
size="small"
value={think}
onChange={setThink}
disabled={messages.length > 0}
/>
<Divider type="vertical"/>
<Tooltip title="清空对话">
<Button
icon={<ClearOutlined/>}
type="text"
size="small"
onClick={() => setMessages([])}
/>
</Tooltip>
</Flex>
<Flex align="center">
{agent.isRequesting() ? (
<LoadingButton type="default"/>
) : (
<SendButton type="primary" disabled={false}/>
)}
</Flex>
</Flex>
)
}}
actions={false}
/>
</div>
</ConversationDiv>
)
}
export default Conversation

View File

@@ -0,0 +1,9 @@
function Inspection() {
return (
<div className="inspection">
</div>
)
}
export default Inspection

View File

@@ -0,0 +1,105 @@
import React from 'react'
import {useParams} from 'react-router'
import {amisRender, crudCommonOptions} from '../../../util/amis.tsx'
// Knowledge-base data detail: CRUD list of stored text points with
// edit/delete actions, rendered via amis.
const DataDetail: React.FC = () => {
const {name} = useParams()
return (
<div className="import-detail h-full">
{amisRender(
{
className: 'h-full',
type: 'page',
// Fix: the template was missing its closing parenthesis.
title: `数据详情 (知识库:${name})`,
size: 'lg',
actions: [],
body: [
{
type: 'crud',
api: {
// NOTE(review): hard-coded localhost URL and Basic credentials in
// client code — move to config/auth flow.
url: 'http://127.0.0.1:8080/knowledge/list_points?name=${name}',
headers: {
'Authorization': 'Basic QXhoRWJzY3dzSkRiWU1IMjpjWXhnM2I0UHRXb1ZENVNqRmF5V3h0blNWc2p6UnNnNA==',
},
},
...crudCommonOptions(),
headerToolbar: [
'reload',
],
columns: [
{
name: 'id',
hidden: true,
},
{
name: 'text',
label: '内容',
},
{
type: 'operation',
label: '操作',
width: 100,
buttons: [
{
type: 'action',
label: '编辑',
level: 'link',
size: 'lg',
actionType: 'dialog',
dialog: {
title: '编辑文段',
size: 'md',
body: {
type: 'form',
body: [
{
type: 'input-text',
name: 'id',
disabled: true,
label: '文段ID',
},
{
type: 'editor',
label: '内容',
name: 'text',
language: 'plaintext',
options: {
lineNumbers: 'off',
wordWrap: 'bounded',
},
},
],
},
},
},
{
type: 'action',
label: '删除',
className: 'text-danger hover:text-red-600',
level: 'link',
size: 'xs',
actionType: 'ajax',
// TODO(review): this ajax action has no `url` — the delete button
// cannot call any endpoint as written; supply the delete API.
api: {
method: 'get',
headers: {
'Authorization': 'Basic QXhoRWJzY3dzSkRiWU1IMjpjWXhnM2I0UHRXb1ZENVNqRmF5V3h0blNWc2p6UnNnNA==',
},
},
confirmText: '确认删除',
confirmTitle: '删除',
},
],
},
],
},
],
},
{
name: name,
},
)}
</div>
)
}
export default DataDetail

View File

@@ -0,0 +1,144 @@
import React from 'react'
import {useParams} from 'react-router'
import styled from 'styled-components'
import {amisRender} from '../../../util/amis.tsx'
// Forces a taller editor area inside the amis form on this page.
const ImportDataDiv = styled.div`
.antd-EditorControl {
min-height: 500px !important;
}
`
// Knowledge-base data import: left column is the import form (parse mode,
// text/file input), right column previews how the text would be parsed.
const DataImport: React.FC = () => {
const {name} = useParams()
return (
<ImportDataDiv className="import-data h-full">
{amisRender({
type: 'page',
// Fix: the template was missing its closing parenthesis.
title: `数据导入 (知识库:${name})`,
body: [
[
{
className: 'h-full',
type: 'grid',
columns: [
{
body: [
{
type: 'form',
wrapWithPanel: false,
mode: 'horizontal',
actions: [],
body: [
{
name: 'mode',
type: 'radios',
label: '解析模式',
value: 'normal',
options: [
{
value: 'normal',
label: '常规模式',
},
{
value: 'llm',
label: '智能模式',
},
{
value: 'qa',
label: 'Q/A模式',
},
],
},
{
name: 'type',
type: 'radios',
label: '数据形式',
value: 'text',
options: [
{
value: 'text',
label: '文本',
},
{
value: 'file',
label: '文件',
},
],
},
{
visibleOn: 'type === \'text\'',
type: 'editor',
label: '数据内容',
name: 'content',
language: 'plaintext',
options: {
lineNumbers: 'off',
wordWrap: 'bounded',
},
},
{
visibleOn: 'type === \'file\'',
type: 'input-file',
name: 'files',
label: '数据文件',
accept: '.txt,.csv',
autoUpload: false,
drag: true,
multiple: true,
},
{
className: 'text-right',
type: 'button-toolbar',
buttons: [
{
type: 'action',
label: '预览',
},
{
type: 'submit',
label: '提交',
level: 'primary',
},
],
},
],
},
],
},
{
body: [
{
type: 'card',
className: 'h-full',
header: {
title: '解析预览',
subTitle: '截取部份文本进行解析预览',
},
body: [
{
type: 'list',
source: '${rows}',
listItem: [
{
body: {
type: 'tpl',
tpl: '${content}',
},
},
],
},
],
},
],
},
],
},
],
],
})}
</ImportDataDiv>
)
}
export default DataImport

View File

@@ -0,0 +1,176 @@
import React from 'react'
import {useNavigate} from 'react-router'
import {amisRender, crudCommonOptions, mappingField, mappingItem} from '../../../util/amis.tsx'
// Base address of the knowledge service.
// NOTE(review): other pages use commonInfo.baseUrl; consider moving this host
// into shared config instead of hard-coding it per page.
const KNOWLEDGE_BASE_URL = 'http://127.0.0.1:8080'
// Shared auth header for the knowledge-service endpoints; previously this
// Basic credential was repeated inline three times.
// NOTE(review): credentials are hard-coded in the client bundle — move them to
// server-side config or an injected environment value.
const KNOWLEDGE_AUTH_HEADERS = {
  'Authorization': 'Basic QXhoRWJzY3dzSkRiWU1IMjpjWXhnM2I0UHRXb1ZENVNqRmF5V3h0blNWc2p6UnNnNA==',
}
// Vector distance strategy -> display label for the "类型" column.
const strategyMapping = [
  mappingItem('文本', 'Cosine'),
  mappingItem('图片', 'Euclid'),
]
// Backend status value -> display label and badge style for the "状态" column.
const statusMapping = [
  mappingItem('正常', 'Green', 'label-success'),
  mappingItem('优化中', 'Yellow', 'label-warning'),
  mappingItem('错误', 'Red', 'label-danger'),
  mappingItem('等待中', 'Grey', 'label-primary'),
]
/**
 * Knowledge-base list page: CRUD over the knowledge service with add, detail,
 * import and delete actions. 详情/导入 navigate to sub-routes via react-router.
 */
const Knowledge: React.FC = () => {
  const navigate = useNavigate()
  return (
    <div className="knowledge">
      {amisRender(
        {
          type: 'page',
          title: '知识库',
          body: [
            {
              type: 'crud',
              api: {
                url: KNOWLEDGE_BASE_URL + '/knowledge/list',
                headers: KNOWLEDGE_AUTH_HEADERS,
              },
              ...crudCommonOptions(),
              headerToolbar: [
                'reload',
                {
                  type: 'action',
                  label: '',
                  icon: 'fa fa-plus',
                  actionType: 'dialog',
                  dialog: {
                    title: '新增知识库',
                    size: 'md',
                    body: {
                      type: 'form',
                      api: {
                        url: KNOWLEDGE_BASE_URL + '/knowledge/add',
                        dataType: 'form',
                        headers: KNOWLEDGE_AUTH_HEADERS,
                      },
                      body: [
                        {
                          type: 'input-text',
                          name: 'name',
                          label: '名称',
                        },
                        {
                          type: 'select',
                          name: 'strategy',
                          label: '类型',
                          value: 'Cosine',
                          options: [
                            {
                              label: '文本',
                              value: 'Cosine',
                            },
                            {
                              // Image knowledge bases are not supported yet.
                              label: '图片',
                              value: 'Euclid',
                              disabled: true,
                            },
                          ],
                        },
                      ],
                    },
                  },
                },
              ],
              columns: [
                {
                  name: 'name',
                  label: '名称',
                },
                {
                  label: '类型',
                  width: 80,
                  align: 'center',
                  ...mappingField('strategy', strategyMapping),
                },
                {
                  name: 'points',
                  label: '文本数',
                  width: 80,
                  align: 'center',
                },
                {
                  label: '状态',
                  width: 80,
                  align: 'center',
                  ...mappingField('status', statusMapping),
                },
                {
                  type: 'operation',
                  label: '操作',
                  width: 150,
                  buttons: [
                    {
                      type: 'action',
                      label: '详情',
                      level: 'link',
                      size: 'xs',
                      onEvent: {
                        click: {
                          actions: [
                            {
                              actionType: 'custom',
                              // @ts-ignore
                              script: (context, action, event) => {
                                navigate(`/ai/knowledge/detail/${context.props.data['name']}`)
                              },
                            },
                          ],
                        },
                      },
                    },
                    {
                      type: 'action',
                      label: '导入',
                      level: 'link',
                      size: 'xs',
                      onEvent: {
                        click: {
                          actions: [
                            {
                              actionType: 'custom',
                              // @ts-ignore
                              script: (context, action, event) => {
                                navigate(`/ai/knowledge/import/${context.props.data['name']}`)
                              },
                            },
                          ],
                        },
                      },
                    },
                    {
                      type: 'action',
                      label: '删除',
                      className: 'text-danger hover:text-red-600',
                      level: 'link',
                      size: 'xs',
                      actionType: 'ajax',
                      api: {
                        method: 'get',
                        // `${name}` below is an amis template resolved from the
                        // row data, so plain string concatenation is used here.
                        url: KNOWLEDGE_BASE_URL + '/knowledge/delete?name=${name}',
                        headers: KNOWLEDGE_AUTH_HEADERS,
                      },
                      confirmText: '确认删除',
                      confirmTitle: '删除',
                    },
                  ],
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Knowledge

View File

@@ -0,0 +1,96 @@
import React from 'react'
import {
amisRender,
commonInfo,
crudCommonOptions,
serviceLogByAppName,
serviceLogByAppNameAndHost,
time,
} from '../../util/amis.tsx'
// Build a CRUD table config for one cloud-service listing endpoint.
// `title` is the panel title, `path` the API path appended to commonInfo.baseUrl.
const cloudCrud = (title: string, path: string) => {
  return {
    type: 'crud',
    title: title,
    api: `${commonInfo.baseUrl}${path}`,
    ...crudCommonOptions(),
    interval: 2000,
    headerToolbar: ['reload'],
    loadDataOnce: true,
    perPage: 100,
    columns: [
      {
        label: '名称',
        type: 'tpl',
        // Appends an instance-count badge only when the row has a `size` field.
        tpl: `\${name} \${IF(size === undefined, '', '<span class="font-bold label label-primary">' + size + '</span>')}`,
      },
      {
        name: 'status',
        label: '状态',
        align: 'center',
        width: 60,
      },
      {
        name: 'serviceUpTime',
        label: '启动时间',
        ...time('serviceUpTime'),
        align: 'center',
        width: 160,
      },
      {name: 'url', label: '地址'},
      {
        type: 'operation',
        label: '操作',
        width: 100,
        fixed: 'right',
        className: 'nowrap',
        buttons: [
          {
            label: '日志',
            type: 'action',
            level: 'link',
            tooltip: '打开Grafana日志',
            onEvent: {
              click: {
                actions: [
                  {
                    actionType: 'custom',
                    // @ts-ignore
                    script: (context, doAction, event) => {
                      let data = context.props.data
                      let url = ''
                      // Rows carrying `metadata` are host-scoped instances;
                      // otherwise fall back to the app-name-only log URL.
                      if (data['metadata']) {
                        url = serviceLogByAppNameAndHost(data.serviceId, data.metadata.hostname)
                      } else if (data['name']) {
                        url = serviceLogByAppName(data.name)
                      }
                      window.open(url, '_blank')
                    },
                  },
                ],
              },
            },
          },
        ],
      },
    ],
  }
}
/** Page that renders the two cloud-service CRUD tables (by name and by IP). */
const Cloud: React.FC = () => {
  const sections: Array<[string, string]> = [
    ['服务列表', '/cloud/list'],
    ['服务列表 (IP)', '/cloud/list_ip'],
  ]
  return (
    <div className="hudi-cloud">
      {amisRender(
        {
          type: 'wrapper',
          body: sections.map(([title, path]) => cloudCrud(title, path)),
        },
      )}
    </div>
  )
}
export default Cloud

View File

@@ -0,0 +1,635 @@
import React from 'react'
import {amisRender, commonInfo, crudCommonOptions, readOnlyDialogOptions} from '../../util/amis.tsx'
/**
 * Map a usage percentage to a text color class:
 * <= 30 green (success), 31-90 blue (primary), > 90 red (danger).
 */
const color = (percent: number) => {
  if (percent > 90) {
    return 'text-danger'
  }
  return percent > 30 ? 'text-primary' : 'text-success'
}
// Build a "详情" dialog button that fetches version-detail rows for `target`
// from the overview service. The button is disabled while the counter field
// named by `variable` is 0 (nothing to show).
// NOTE(review): the two columns here duplicate tableDetailDialog below —
// consider extracting a shared column factory.
const versionDetailDialog = (variable: string, target: string) => {
  return {
    disabledOn: `${variable} === 0`,
    type: 'action',
    label: '详情',
    level: 'link',
    size: 'sm',
    actionType: 'dialog',
    dialog: {
      title: '详情',
      actions: [],
      size: 'md',
      closeOnEsc: false,
      closeOnOutside: false,
      body: [
        {
          type: 'service',
          api: {
            method: 'get',
            url: `${commonInfo.baseUrl}/overview/version_detail`,
            data: {
              target: target,
              // `${version}` is an amis template resolved from the row data.
              version: '${version}',
            },
          },
          body: [
            {
              type: 'table',
              source: '${items}',
              affixHeader: false,
              columns: [
                {
                  label: 'Flink job id',
                  fixed: 'left',
                  type: 'wrapper',
                  size: 'none',
                  body: [
                    {
                      type: 'tpl',
                      tpl: '${id}',
                    },
                    {
                      type: 'action',
                      level: 'link',
                      label: '',
                      icon: 'fa fa-copy',
                      size: 'xs',
                      actionType: 'copy',
                      content: '${id}',
                      tooltip: '复制 ID',
                    },
                  ],
                },
                {
                  label: '别名',
                  type: 'wrapper',
                  fixed: 'left',
                  size: 'none',
                  className: 'nowrap',
                  body: [
                    {
                      type: 'tpl',
                      tpl: '${alias}',
                    },
                    {
                      type: 'action',
                      level: 'link',
                      label: '',
                      icon: 'fa fa-copy',
                      size: 'xs',
                      actionType: 'copy',
                      content: '${alias}',
                      tooltip: '复制别名',
                    },
                  ],
                },
              ],
            },
          ],
        },
      ],
    },
  }
}
// Build a "详情" dialog button that renders rows already present in the page
// data under the list named by `targetList` (no extra request). Disabled while
// the counter field named by `variable` is 0.
const tableDetailDialog = (variable: string, targetList: any) => {
  return {
    disabledOn: `${variable} === 0`,
    type: 'action',
    label: '详情',
    level: 'link',
    size: 'sm',
    actionType: 'dialog',
    dialog: {
      title: '详情',
      size: 'md',
      ...readOnlyDialogOptions(),
      body: [
        {
          type: 'table',
          // Resolves to an amis template like `${list}` at render time.
          source: `\${${targetList}}`,
          affixHeader: false,
          columns: [
            {
              label: 'Flink job id',
              fixed: 'left',
              type: 'wrapper',
              size: 'none',
              body: [
                {
                  type: 'tpl',
                  tpl: '${id}',
                },
                {
                  type: 'action',
                  level: 'link',
                  label: '',
                  icon: 'fa fa-copy',
                  size: 'xs',
                  actionType: 'copy',
                  content: '${id}',
                  tooltip: '复制 ID',
                },
              ],
            },
            {
              label: '别名',
              type: 'wrapper',
              fixed: 'left',
              size: 'none',
              className: 'nowrap',
              body: [
                {
                  type: 'tpl',
                  tpl: '${alias}',
                },
                {
                  type: 'action',
                  level: 'link',
                  label: '',
                  icon: 'fa fa-copy',
                  size: 'xs',
                  actionType: 'copy',
                  content: '${alias}',
                  tooltip: '复制别名',
                },
              ],
            },
          ],
        },
      ],
    },
  }
}
// One overview table row for a YARN cluster: optional scheduling-queue size,
// cluster/queue resource-usage percentages, and scheduling/running job counts.
// `queue` (may be undefined) is the internal scheduling queue to poll;
// `yarnQueue` is the YARN queue whose capacity is measured; `search` filters
// the job list by name.
const overviewYarnJob = (cluster: string, search: string, queue: string | undefined, yarnQueue: string) => {
  return {
    className: 'text-base leading-none',
    type: 'table-view',
    border: false,
    padding: '0 10px 0 15px',
    trs: [
      {
        tds: [
          {
            body: `${cluster}`,
          },
          {
            padding: '0px',
            // Cell stays empty when no scheduling queue applies to this row.
            body: queue === undefined ? {} : {
              type: 'service',
              api: `${commonInfo.baseUrl}/overview/queue?queue=${queue}`,
              interval: 10000,
              silentPolling: true,
              body: [
                {
                  type: 'tpl',
                  tpl: '${size}',
                },
              ],
            },
            width: 100,
            align: 'center',
          },
          {
            padding: '0px',
            width: 200,
            body: {
              type: 'service',
              api: {
                method: 'get',
                url: `${commonInfo.baseUrl}/overview/yarn-cluster`,
                data: {
                  cluster: cluster,
                  queue: yarnQueue,
                },
                // Derives usage percentages plus matching color classes from
                // the raw capacity payload before it reaches the templates.
                // @ts-ignore
                adaptor: function (payload, response) {
                  let rootUsed = (payload['data']['root']['usedCapacity'] * 100 / payload['data']['root']['capacity'])
                  let targetUsed = (payload['data']['target']['absoluteUsedCapacity'] * 100 / payload['data']['target']['absoluteMaxCapacity'])
                  return {
                    ...payload,
                    data: {
                      ...payload.data,
                      rootUsed: rootUsed,
                      rootUsedColor: color(rootUsed),
                      targetUsed: targetUsed,
                      targetUsedColor: color(targetUsed),
                    },
                  }
                },
              },
              interval: 10000,
              silentPolling: true,
              body: {
                type: 'table-view',
                border: false,
                trs: [
                  {
                    tds: [
                      {
                        body: {
                          type: 'tpl',
                          tpl: '<span class="font-bold ${rootUsedColor}">${ROUND(rootUsed, 0)}%</span>',
                        },
                        width: 100,
                        align: 'center',
                      },
                      {
                        body: {
                          type: 'tpl',
                          tpl: '<span class="font-bold ${targetUsedColor}">${ROUND(targetUsed, 0)}%</span>',
                        },
                        width: 100,
                        align: 'center',
                      },
                    ],
                  },
                ],
              },
            },
          },
          {
            padding: '0px',
            width: 200,
            body: {
              type: 'service',
              api: {
                url: `${commonInfo.baseUrl}/overview/yarn-job`,
                data: {
                  cluster: cluster,
                  search: search,
                },
              },
              interval: 10000,
              silentPolling: true,
              body: {
                type: 'table-view',
                border: false,
                trs: [
                  {
                    tds: [
                      {
                        body: {
                          type: 'tpl',
                          tpl: '<span class=\'font-bold text-cyan-300\'>${scheduling}</span>',
                        },
                        width: 100,
                        align: 'center',
                      },
                      {
                        body: {
                          type: 'tpl',
                          tpl: '<span class="font-bold text-success">${running}</span>',
                        },
                        width: 100,
                        align: 'center',
                      },
                    ],
                  },
                ],
              },
            },
          },
        ],
      },
    ],
  }
}
/**
 * Hudi overview dashboard: live clock, sync-table counts, sync running status,
 * per-cluster resource rows for sync and compaction, cross-day (version)
 * progress, schedule jobs, and monitor progress.
 */
const Overview: React.FC = () => {
  return (
    <div className="hudi-overview">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            {
              type: 'service',
              // Client-side 1s clock; the returned closure clears the timer on teardown.
              // language=JavaScript
              dataProvider: 'const timer = setInterval(() => {\n  setData({date: new Date().toLocaleString()})\n}, 1000)\nreturn () => {\n  clearInterval(timer)\n}',
              body: [
                '当前时间:',
                {
                  type: 'tpl',
                  className: 'font-bold',
                  tpl: '${date}',
                },
              ],
            },
            {type: 'divider'},
            {
              type: 'crud',
              title: '同步表数量',
              api: `${commonInfo.baseUrl}/overview`,
              ...crudCommonOptions(),
              interval: 60000,
              columns: [
                {
                  name: 'type',
                  label: '表类型',
                },
                {
                  name: 'total',
                  label: '总表数',
                  width: 100,
                  align: 'center',
                },
                {
                  name: 'focus',
                  label: '重点表',
                  className: 'text-danger font-bold',
                  width: 100,
                  align: 'center',
                },
                {
                  label: '普通表',
                  type: 'tpl',
                  tpl: '${total - focus}',
                  width: 100,
                  align: 'center',
                },
              ],
            },
            {
              type: 'crud',
              // NOTE(review): this title duplicates the crud above but the data
              // is the sync *running status* — likely a copy-paste slip.
              title: '同步表数量',
              api: `${commonInfo.baseUrl}/overview/sync_running_status`,
              ...crudCommonOptions(),
              interval: 10000,
              columns: [
                {
                  name: 'type',
                  label: '类型',
                },
                {
                  name: 'total',
                  label: '任务数',
                  width: 100,
                  align: 'center',
                },
                {
                  name: 'running',
                  label: '运行中',
                  width: 100,
                  align: 'center',
                },
                {
                  name: 'stopped',
                  label: '已停止',
                  className: 'text-danger font-bold',
                  width: 100,
                  align: 'center',
                },
                {
                  type: 'operation',
                  label: '操作',
                  width: 100,
                  align: 'center',
                  buttons: [
                    tableDetailDialog('stopped', 'list'),
                  ],
                },
              ],
            },
            {
              className: 'pl-2 my-5',
              type: 'wrapper',
              size: 'none',
              body: {
                type: 'tpl',
                tpl: '同步集群资源用量情况',
              },
            },
            {
              // Header row for the sync-cluster resource table below.
              type: 'table-view',
              border: false,
              trs: [
                {
                  background: '#F9F9F9',
                  tds: [
                    {
                      bold: true,
                      body: '集群',
                    },
                    {
                      bold: true,
                      body: '集群资源',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '队列资源',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '调度中',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '运行中',
                      width: 100,
                      align: 'center',
                    },
                  ],
                },
              ],
            },
            overviewYarnJob(commonInfo.clusters.sync_names(), 'Sync', undefined, 'default'),
            {type: 'divider'},
            {
              className: 'pl-2 my-5',
              type: 'wrapper',
              size: 'none',
              body: [
                {
                  type: 'tpl',
                  tpl: '压缩集群资源用量情况',
                },
                {
                  className: 'mt-2',
                  type: 'service',
                  api: `${commonInfo.baseUrl}/overview/queue?queue=compaction-queue-pre`,
                  interval: 10000,
                  silentPolling: true,
                  body: [
                    {
                      type: 'tpl',
                      tpl: '预调度队列:<span class="font-bold">${size}</span>',
                    },
                  ],
                },
              ],
            },
            {
              // Header row for the compaction-cluster resource rows below.
              type: 'table-view',
              border: false,
              bold: true,
              trs: [
                {
                  background: '#F9F9F9',
                  tds: [
                    {
                      bold: true,
                      body: '集群',
                    },
                    {
                      bold: true,
                      body: '队列',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '集群资源',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '队列资源',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '调度中',
                      width: 100,
                      align: 'center',
                    },
                    {
                      bold: true,
                      body: '运行中',
                      width: 100,
                      align: 'center',
                    },
                  ],
                },
              ],
            },
            // One resource row per configured compaction cluster.
            // @ts-ignore
            ...Object.keys(commonInfo.clusters.compaction).map(name => overviewYarnJob(name, 'Compaction', `compaction-queue-${name}`, commonInfo.clusters.compaction[name])),
            {type: 'divider'},
            {
              type: 'service',
              api: `${commonInfo.baseUrl}/overview/version`,
              interval: 10000,
              silentPolling: true,
              body: [
                {
                  type: 'table',
                  title: '跨天情况 (${version})',
                  source: '${items}',
                  ...crudCommonOptions(),
                  headerToolbar: [
                    '${version}',
                  ],
                  columns: [
                    {
                      name: 'type',
                      label: '类型',
                    },
                    {
                      name: 'unReceive',
                      label: '未接收',
                      width: 100,
                      align: 'center',
                    },
                    {
                      type: 'operation',
                      label: '操作',
                      width: 100,
                      align: 'center',
                      buttons: [
                        versionDetailDialog('unReceive', 'unReceive_${key}'),
                      ],
                    },
                    {
                      name: 'unScheduled',
                      label: '未跨天',
                      className: 'text-danger font-bold',
                      width: 100,
                      align: 'center',
                    },
                    {
                      type: 'operation',
                      label: '操作',
                      width: 100,
                      align: 'center',
                      buttons: [
                        versionDetailDialog('unScheduled', 'unScheduled_${key}'),
                      ],
                    },
                  ],
                },
              ],
            },
            {
              type: 'crud',
              title: '调度策略',
              api: `${commonInfo.baseUrl}/overview/schedule_jobs`,
              ...crudCommonOptions(),
              interval: 60000,
              loadDataOnce: true,
              columns: [
                {
                  name: 'job',
                  label: '策略描述',
                },
                {
                  name: 'trigger',
                  label: 'Cron表达式',
                  className: 'font-mono',
                  width: 250,
                },
              ],
            },
            {
              type: 'crud',
              title: '监控指标运行进度',
              api: `${commonInfo.baseUrl}/overview/monitor_progress`,
              ...crudCommonOptions(),
              interval: 2000,
              loadDataOnce: true,
              columns: [
                {
                  name: 'name',
                  label: '名称',
                  width: 150,
                },
                {
                  name: 'running',
                  label: '状态',
                  type: 'mapping',
                  width: 70,
                  map: {
                    'true': '运行中',
                    'false': '未运行',
                  },
                },
                {
                  label: '进度',
                  type: 'progress',
                  value: '${ROUND(progress * 100)}',
                  map: 'bg-primary',
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Overview

View File

@@ -0,0 +1,96 @@
import {
amisRender,
commonInfo,
copyField,
crudCommonOptions,
paginationCommonOptions,
time,
yarnQueueCrud,
} from '../../util/amis.tsx'
// Build a CRUD table config for one internal scheduling queue: lists its
// pending compaction tasks with job id, alias, batch, priority and submit time.
const queueCrud = (name: string) => {
  return {
    type: 'crud',
    title: name,
    api: `${commonInfo.baseUrl}/queue/all?name=${name}`,
    ...crudCommonOptions(),
    interval: 10000,
    loadDataOnce: true,
    perPage: 5,
    headerToolbar: [
      'reload',
      'filter-toggler',
      {
        type: 'tpl',
        tpl: '共 <span class=\'text-primary font-bold\'>${total|default:0}</span> 个任务',
      },
      paginationCommonOptions(false),
    ],
    footerToolbar: [],
    columns: [
      {
        name: 'data.flinkJobId',
        label: '任务 ID',
        width: 190,
        ...copyField('data.flinkJobId'),
      },
      {
        name: 'data.alias',
        label: '别名',
        className: 'nowrap',
        ...copyField('data.alias'),
      },
      {
        name: 'data.batch',
        label: '批次',
        width: 100,
        type: 'tpl',
        tpl: '<span class="label label-warning">${data.batch}</span>',
      },
      {
        name: 'priority',
        label: '优先级',
        width: 60,
        align: 'center',
        type: 'tpl',
        tpl: '<span class="label bg-info">${priority}</span>',
      },
      {
        name: 'data.comment',
        label: '备注',
        className: 'nowrap',
      },
      {
        name: 'createTime',
        label: '任务提交时间',
        ...time('createTime'),
        width: 160,
        fixed: 'right',
      },
    ],
  }
}
/**
 * Compaction queue page: the pre-schedule queue plus, per compaction cluster,
 * its YARN queue table followed by its pending-task table.
 */
const Queue = () => {
  const clusterSections = Object.keys(commonInfo.clusters.compaction).flatMap(cluster => [
    // @ts-ignore
    yarnQueueCrud(cluster, commonInfo.clusters.compaction[cluster]),
    queueCrud(`compaction-queue-${cluster}`),
  ])
  return (
    <div className="hudi-queue">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            queueCrud('compaction-queue-pre'),
            ...clusterSections,
          ],
        },
      )}
    </div>
  )
}
export default Queue

View File

@@ -0,0 +1,256 @@
import {
aliasTextInput,
amisRender,
commonInfo,
compactionStatusMapping,
crudCommonOptions,
filterableField,
flinkJobDialog,
flinkJobIdTextInput,
hudiTableTypeMapping,
mappingField,
paginationCommonOptions,
runModeMapping,
tableMetaDialog,
tableRunningStateMapping,
timeAndFrom,
} from '../../util/amis.tsx'
/**
 * Hudi table list page: searchable/filterable CRUD over /table/list showing,
 * per table, its Flink job, alias, type, and the sync (green) and compaction
 * (cyan) state/timestamp column groups.
 */
function Table() {
  return (
    <div className="hudi-table">
      {amisRender(
        {
          type: 'wrapper',
          size: 'none',
          body: [
            {
              id: 'table-service',
              type: 'service',
              data: {},
              body: [
                {
                  type: 'crud',
                  api: {
                    method: 'get',
                    url: `${commonInfo.baseUrl}/table/list`,
                    // Maps amis paging/sort/filter state onto the backend's
                    // query parameters; `\\.` escapes dots in amis data paths.
                    data: {
                      page: '${page|default:undefined}',
                      count: '${perPage|default:undefined}',
                      order: '${orderBy|default:undefined}',
                      direction: '${orderDir|default:undefined}',
                      search_flink_job_id: '${flinkJobId|default:undefined}',
                      search_alias: '${alias|default:undefined}',
                      filter_hudi_table_type: '${tableMeta\\.hudi\\.targetTableType|default:undefined}',
                      filter_run_mode: '${flinkJob\\.runMode|default:undefined}',
                      filter_compaction_status: '${syncState\\.compactionStatus|default:undefined}',
                    },
                  },
                  ...crudCommonOptions(),
                  // interval: 10000,
                  filter: {
                    title: '表筛选',
                    body: [
                      {
                        type: 'group',
                        body: [
                          {
                            ...flinkJobIdTextInput('58d0da94-1b3c-4234-948d-482ae3425e70'),
                            size: 'lg',
                          },
                          {
                            ...aliasTextInput('58d0da94-1b3c-4234-948d-482ae3425e70'),
                            size: 'lg',
                          },
                        ],
                      },
                    ],
                    actions: [
                      {
                        type: 'submit',
                        level: 'primary',
                        label: '查询',
                      },
                      {
                        type: 'reset',
                        label: '重置',
                      },
                    ],
                  },
                  filterTogglable: true,
                  filterDefaultVisible: true,
                  perPage: 20,
                  headerToolbar: [
                    'reload',
                    'filter-toggler',
                    paginationCommonOptions(),
                  ],
                  footerToolbar: [
                    paginationCommonOptions(),
                  ],
                  columns: [
                    {
                      label: 'Flink job id',
                      width: 195,
                      fixed: 'left',
                      type: 'wrapper',
                      size: 'none',
                      body: [
                        {
                          type: 'action',
                          level: 'link',
                          label: '${flinkJobId}',
                          size: 'xs',
                          actionType: 'dialog',
                          tooltip: '查看详情',
                          dialog: flinkJobDialog(),
                        },
                        {
                          type: 'action',
                          level: 'link',
                          label: '',
                          icon: 'fa fa-copy',
                          size: 'xs',
                          actionType: 'copy',
                          content: '${flinkJobId}',
                          tooltip: '复制 ID',
                        },
                      ],
                    },
                    {
                      label: '别名',
                      type: 'wrapper',
                      fixed: 'left',
                      size: 'none',
                      className: 'nowrap',
                      body: [
                        {
                          type: 'action',
                          level: 'link',
                          label: '${tableMeta.alias}',
                          size: 'xs',
                          actionType: 'dialog',
                          tooltip: '查看详情',
                          dialog: tableMetaDialog(),
                        },
                        {
                          type: 'action',
                          level: 'link',
                          label: '',
                          icon: 'fa fa-copy',
                          size: 'xs',
                          actionType: 'copy',
                          content: '${tableMeta.alias}',
                          tooltip: '复制别名',
                        },
                      ],
                    },
                    {
                      name: 'tableMeta.hudi.targetTableType',
                      label: '表类型',
                      width: 60,
                      align: 'center',
                      ...mappingField('tableMeta.hudi.targetTableType', hudiTableTypeMapping),
                      filterable: filterableField(hudiTableTypeMapping, true),
                    },
                    {
                      name: 'flinkJob.runMode',
                      label: '任务类型',
                      width: 60,
                      align: 'center',
                      ...mappingField('flinkJob.runMode', runModeMapping),
                      filterable: filterableField(runModeMapping, true),
                    },
                    {
                      name: 'syncRunning',
                      label: '同步运行状态',
                      align: 'center',
                      ...mappingField('syncRunning', tableRunningStateMapping),
                      className: 'bg-green-50',
                      width: 75,
                    },
                    {
                      name: 'source_start_time',
                      label: '同步启动时间',
                      ...timeAndFrom('syncState.sourceStartTime', 'syncState.sourceStartTimeFromNow', '未启动'),
                      sortable: true,
                      className: 'bg-green-50',
                    },
                    {
                      name: 'source_receive_time',
                      label: '同步接收时间',
                      ...timeAndFrom('syncState.sourceReceiveTime', 'syncState.sourceReceiveTimeFromNow', '无数据'),
                      sortable: true,
                      className: 'bg-green-50',
                    },
                    {
                      name: 'source_checkpoint_time',
                      label: '同步心跳时间',
                      ...timeAndFrom('syncState.sourceCheckpointTime', 'syncState.sourceCheckpointTimeFromNow', '未启动'),
                      sortable: true,
                      className: 'bg-green-50',
                    },
                    {
                      name: 'source_publish_time',
                      label: '源端发布时间',
                      ...timeAndFrom('syncState.sourcePublishTime', 'syncState.sourcePublishTimeFromNow', '无增量'),
                      sortable: true,
                      className: 'bg-green-50',
                    },
                    {
                      name: 'source_operation_time',
                      label: '源端业务时间',
                      ...timeAndFrom('syncState.sourceOperationTime', 'syncState.sourceOperationTimeFromNow', '无增量'),
                      sortable: true,
                      className: 'bg-green-50',
                    },
                    {
                      name: 'compactionRunning',
                      label: '压缩运行状态',
                      align: 'center',
                      ...mappingField('compactionRunning', tableRunningStateMapping),
                      className: 'bg-cyan-50',
                      width: 75,
                    },
                    {
                      name: 'syncState.compactionStatus',
                      label: '压缩状态',
                      width: 60,
                      align: 'center',
                      ...mappingField('syncState.compactionStatus', compactionStatusMapping),
                      filterable: filterableField(compactionStatusMapping, true),
                      className: 'bg-cyan-50',
                    },
                    {
                      name: 'compaction_start_time',
                      label: '压缩启动时间',
                      ...timeAndFrom('syncState.compactionStartTime', 'syncState.compactionStartTimeFromNow'),
                      sortable: true,
                      className: 'bg-cyan-50',
                    },
                    {
                      name: 'compaction_latest_operation_time',
                      label: '压缩业务时间',
                      ...timeAndFrom('syncState.compactionLatestOperationTime', 'syncState.compactionLatestOperationTimeFromNow', '无'),
                      sortable: true,
                      className: 'bg-cyan-50',
                    },
                    {
                      name: 'compaction_finish_time',
                      label: '压缩完成时间',
                      ...timeAndFrom('syncState.compactionFinishTime', 'syncState.compactionFinishTimeFromNow'),
                      sortable: true,
                      className: 'bg-cyan-50',
                    },
                  ],
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Table

View File

@@ -0,0 +1,195 @@
import React from 'react'
import {amisRender, commonInfo, paginationCommonOptions, serviceLogByAppName, yarnCrudColumns} from '../../util/amis.tsx'
/**
 * Ad-hoc task page: submit file-scan jobs (queue/log/base file search),
 * run summary/sampling queries against a table, and monitor the resulting
 * Service_Task YARN jobs.
 */
const Task: React.FC = () => {
  return (
    <div className="hudi-task">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            {
              type: 'form',
              title: '检索文件',
              actions: [
                {
                  type: 'submit',
                  label: '提交任务',
                  actionType: 'ajax',
                  api: {
                    method: 'get',
                    url: `${commonInfo.baseUrl}/task/scan`,
                    data: {
                      key: '${key|default:undefined}',
                      hdfs: '${hdfs|default:undefined}',
                      pulsar: '${pulsar|default:undefined}',
                      topic: '${topic|default:undefined}',
                      mode: '${scan_mode|default:undefined}',
                      fields: '${fields|default:undefined}',
                    },
                  },
                },
              ],
              body: [
                {
                  name: 'scan_mode',
                  type: 'checkboxes',
                  label: '检索范围',
                  checkAll: true,
                  required: true,
                  value: 'log',
                  options: [
                    {label: '消息队列', value: 'queue'},
                    {label: '日志文件', value: 'log'},
                    {label: '数据文件', value: 'base'},
                  ],
                },
                {
                  type: 'input-text',
                  name: 'key',
                  label: '检索字段',
                  required: true,
                  clearable: true,
                  description: '检索带有该字符的记录',
                },
                {
                  type: 'input-text',
                  name: 'hdfs',
                  // Fixed typo: 路经 -> 路径.
                  label: 'HDFS路径',
                  // HDFS path is needed only for log/base-file scans.
                  requiredOn: '${CONTAINS(scan_mode, \'log\') || CONTAINS(scan_mode, \'base\')}',
                  visibleOn: '${CONTAINS(scan_mode, \'log\') || CONTAINS(scan_mode, \'base\')}',
                  clearable: true,
                  description: '输入表HDFS路径',
                  autoComplete: `${commonInfo.baseUrl}/table/all_hdfs?key=$term`,
                },
                {
                  type: 'input-text',
                  name: 'fields',
                  label: '指定字段',
                  visibleOn: '${CONTAINS(scan_mode, \'base\')}',
                  clearable: true,
                  description: '逗号分隔可以大幅提高parquet文件检索速度但无法获取指定字段外的字段内容',
                },
                {
                  type: 'group',
                  body: [
                    {
                      type: 'input-text',
                      name: 'topic',
                      label: 'Pulsar主题',
                      requiredOn: '${CONTAINS(scan_mode, \'queue\')}',
                      visibleOn: '${CONTAINS(scan_mode, \'queue\')}',
                      clearable: true,
                      description: '输入Pulsar主题',
                      autoComplete: `${commonInfo.baseUrl}/table/all_pulsar_topic?key=$term`,
                      columnRatio: 4,
                    },
                    {
                      type: 'input-text',
                      name: 'pulsar',
                      label: 'Pulsar地址',
                      requiredOn: '${CONTAINS(scan_mode, \'queue\')}',
                      visibleOn: '${CONTAINS(scan_mode, \'queue\')}',
                      clearable: true,
                      description: '输入Pulsar地址',
                      autoComplete: `${commonInfo.baseUrl}/table/all_pulsar?key=$term`,
                    },
                  ],
                },
              ],
            },
            {
              type: 'form',
              title: '综合查询',
              actions: [
                {
                  type: 'action',
                  label: '总数&最后操作时间',
                  actionType: 'ajax',
                  api: {
                    method: 'get',
                    url: `${commonInfo.baseUrl}/task/table_summary`,
                    data: {
                      hdfs: '${hdfs|default:undefined}',
                    },
                  },
                },
                {
                  type: 'action',
                  label: '最后10条记录',
                  actionType: 'ajax',
                  api: {
                    method: 'get',
                    url: `${commonInfo.baseUrl}/task/table_sampling`,
                    data: {
                      hdfs: '${hdfs|default:undefined}',
                    },
                  },
                },
              ],
              body: [
                {
                  type: 'input-text',
                  name: 'hdfs',
                  // Fixed typo: 路经 -> 路径.
                  label: 'HDFS路径',
                  required: true,
                  clearable: true,
                  description: '输入表HDFS路径',
                  autoComplete: `${commonInfo.baseUrl}/table/all_hdfs?key=$term`,
                },
              ],
            },
            {
              // Monitor the Service_Task jobs across all sync clusters.
              type: 'crud',
              api: {
                method: 'get',
                url: `${commonInfo.baseUrl}/yarn/job_list`,
                data: {
                  clusters: commonInfo.clusters.sync_names(),
                  page: '${page|default:undefined}',
                  count: '${perPage|default:undefined}',
                  order: '${orderBy|default:undefined}',
                  direction: '${orderDir|default:undefined}',
                  filter_state: '${state|default:undefined}',
                  filter_final_status: '${finalStatus|default:undefined}',
                  search_id: '${id|default:undefined}',
                  search_name: 'Service_Task',
                  precise: false,
                },
              },
              affixHeader: false,
              interval: 10000,
              syncLocation: false,
              silentPolling: true,
              resizable: false,
              perPage: 10,
              headerToolbar: [
                'reload',
                {
                  label: '任务管理器日志',
                  type: 'action',
                  tooltip: '打开Grafana日志',
                  onEvent: {
                    click: {
                      actions: [
                        {
                          actionType: 'custom',
                          script: () => window.open(serviceLogByAppName('service-executor-manager'), '_blank'),
                        },
                      ],
                    },
                  },
                },
                paginationCommonOptions(),
              ],
              footerToolbar: [],
              columns: yarnCrudColumns(),
            },
          ],
        },
      )}
    </div>
  )
}
export default Task

View File

@@ -0,0 +1,316 @@
import React from 'react'
import {
aliasTextInput,
amisRender,
commonInfo,
crudCommonOptions,
flinkJobIdTextInput,
formReloadFlinkJobIdTextInputAndAliasTextInput,
hdfsDialog,
paginationCommonOptions,
readOnlyDialogOptions,
timelineColumns,
} from '../../util/amis.tsx'
/**
 * Misc. operations toolbox: SQL log / ZK node viewers, an HDFS file browser
 * with direct download, timeline & schema lookups, and single/batch compaction
 * submission plus a stop-all form.
 */
const Tool: React.FC = () => {
  return (
    <div className="hudi-tool">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            {
              type: 'panel',
              title: '乱七八糟小工具',
              body: [
                {
                  type: 'action',
                  label: 'SQL日志',
                  actionType: 'dialog',
                  dialog: {
                    title: '日志',
                    ...readOnlyDialogOptions(),
                    size: 'lg',
                    body: {
                      type: 'crud',
                      api: `${commonInfo.baseUrl}/log/query_sql_log`,
                      ...crudCommonOptions(),
                      loadDataOnce: true,
                      perPage: 50,
                      headerToolbar: [
                        'reload',
                        paginationCommonOptions(undefined, 10),
                      ],
                      footerToolbar: [
                        paginationCommonOptions(undefined, 10),
                      ],
                      columns: [
                        {
                          name: 'sql',
                          label: 'SQL',
                        },
                        {
                          name: 'createTime',
                          label: '执行时间',
                        },
                      ],
                    },
                  },
                },
                {
                  type: 'action',
                  label: 'ZK节点',
                  className: 'ml-2',
                  actionType: 'dialog',
                  dialog: {
                    // Fixed copy-pasted dialog title ('日志' belonged to the
                    // SQL-log dialog above).
                    title: 'ZK节点',
                    ...readOnlyDialogOptions(),
                    size: 'lg',
                    // NOTE(review): dialog body is still empty — feature is
                    // unfinished.
                    body: {},
                  },
                },
              ],
            },
            {
              type: 'form',
              title: 'HDFS文件管理器',
              actions: [
                {
                  label: '直接下载',
                  type: 'action',
                  onEvent: {
                    click: {
                      actions: [
                        {
                          actionType: 'custom',
                          // @ts-ignore
                          script: (context, action, event) => {
                            let downloadUrl = `${event.data.base}/hudi/hdfs_download?root=${encodeURI(event.data.hdfs)}`
                            window.open(downloadUrl, '_blank')
                          },
                        },
                      ],
                    },
                  },
                },
                {
                  type: 'submit',
                  label: '查看',
                  actionType: 'dialog',
                  dialog: hdfsDialog('hdfs'),
                },
              ],
              body: [
                {
                  type: 'input-text',
                  name: 'hdfs',
                  // Fixed typo: 路经 -> 路径.
                  label: 'HDFS根路径',
                  required: true,
                  clearable: true,
                  description: '输入表HDFS路径',
                  autoComplete: `${commonInfo.baseUrl}/table/all_hdfs?key=$term`,
                },
              ],
            },
            {
              type: 'form',
              title: '查询时间线',
              actions: [
                {
                  type: 'submit',
                  label: '查询时间线',
                  actionType: 'dialog',
                  dialog: {
                    title: 'Hudi 表时间线',
                    actions: [],
                    size: 'lg',
                    body: {
                      type: 'crud',
                      api: {
                        method: 'get',
                        url: `${commonInfo.baseUrl}/hudi/timeline/list_hdfs`,
                        data: {
                          page: '${page|default:undefined}',
                          count: '${perPage|default:undefined}',
                          order: '${orderBy|default:undefined}',
                          direction: '${orderDir|default:undefined}',
                          hdfs: '${hdfs|default:undefined}',
                          filter_type: '${type|default:active}',
                          filter_action: '${action|default:undefined}',
                          filter_state: '${state|default:undefined}',
                        },
                      },
                      ...crudCommonOptions(),
                      perPage: 50,
                      headerToolbar: [
                        'reload',
                        paginationCommonOptions(undefined, 10),
                      ],
                      footerToolbar: [
                        paginationCommonOptions(undefined, 10),
                      ],
                      columns: timelineColumns(),
                    },
                  },
                },
                {
                  type: 'submit',
                  label: '查询表结构',
                  actionType: 'dialog',
                  dialog: {
                    title: 'Hudi 表结构',
                    actions: [],
                    size: 'lg',
                    body: {
                      type: 'service',
                      api: {
                        method: 'get',
                        url: `${commonInfo.baseUrl}/hudi/schema`,
                        data: {
                          hdfs: '${hdfs|default:undefined}',
                        },
                      },
                      body: {
                        type: 'page',
                        body: {
                          type: 'json',
                          source: '${detail}',
                          levelExpand: 3,
                        },
                      },
                    },
                  },
                },
              ],
              body: [
                {
                  type: 'input-text',
                  name: 'hdfs',
                  // Fixed typo: 路经 -> 路径.
                  label: 'HDFS路径',
                  required: true,
                  clearable: true,
                  description: '输入表HDFS路径',
                  autoComplete: `${commonInfo.baseUrl}/table/all_hdfs?key=$term`,
                },
              ],
            },
            {
              type: 'form',
              title: '提交压缩任务',
              api: {
                method: 'get',
                url: `${commonInfo.baseUrl}/schedule/table`,
                data: {
                  flink_job_id: '${flinkJobId|default:undefined}',
                  alias: '${alias|default:undefined}',
                  // The string 'undefined' marks the "无" radio choice and is
                  // translated to a real undefined before submission.
                  recommend: '${recommend === \'undefined\' ? undefined : recommend|default:undefined}',
                  force: '${force === \'undefined\' ? undefined : force|default:undefined}',
                },
              },
              ...formReloadFlinkJobIdTextInputAndAliasTextInput('0fe6a96c-6b6e-4346-b18e-c631c2389f48'),
              body: [
                {
                  type: 'group',
                  body: [
                    flinkJobIdTextInput('0fe6a96c-6b6e-4346-b18e-c631c2389f48', true),
                    aliasTextInput('0fe6a96c-6b6e-4346-b18e-c631c2389f48', true),
                  ],
                },
                {
                  type: 'group',
                  body: [
                    {
                      name: 'recommend',
                      type: 'radios',
                      label: '优先指定集群',
                      selectFirst: true,
                      options: [
                        {label: '无', value: 'undefined'},
                        ...Object.keys(commonInfo.clusters.compaction)
                          .map(name => {
                            return {label: name, value: name}
                          }),
                      ],
                    },
                    {
                      name: 'force',
                      type: 'radios',
                      label: '强制指定集群',
                      selectFirst: true,
                      options: [
                        {label: '无', value: 'undefined'},
                        ...Object.keys(commonInfo.clusters.compaction)
                          .map(name => {
                            return {label: name, value: name}
                          }),
                      ],
                    },
                  ],
                },
              ],
            },
            {
              type: 'form',
              title: '批量提交压缩任务',
              api: {
                method: 'post',
                url: `${commonInfo.baseUrl}/schedule/table_batch`,
                dataType: 'form',
              },
              body: [
                {
                  name: 'lines',
                  type: 'textarea',
                  label: '表信息 (flink_job_id alias\\n)',
                  clearable: true,
                  minRows: 5,
                  maxRows: 5,
                  className: 'no-resize',
                  required: true,
                },
              ],
            },
            {
              type: 'form',
              title: '停止所有压缩任务',
              api: {
                method: 'get',
                url: `${commonInfo.baseUrl}/schedule/stop_all`,
                data: {
                  flink_job_id: '${flinkJobId|default:undefined}',
                  alias: '${alias|default:undefined}',
                  disable_meta: '${disableMeta|default:undefined}',
                },
              },
              ...formReloadFlinkJobIdTextInputAndAliasTextInput('163e043e-8cee-41fd-b5a4-0442ac682aec'),
              body: [
                {
                  type: 'group',
                  body: [
                    {
                      ...flinkJobIdTextInput('163e043e-8cee-41fd-b5a4-0442ac682aec', true),
                      columnRatio: 5,
                    },
                    {
                      ...aliasTextInput('163e043e-8cee-41fd-b5a4-0442ac682aec', true),
                      columnRatio: 5,
                    },
                    {
                      name: 'disableMeta',
                      type: 'checkbox',
                      label: '是否禁用表',
                      option: '表status设为n',
                      columnRatio: 2,
                    },
                  ],
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Tool

View File

@@ -0,0 +1,185 @@
import {
aliasTextInput,
amisRender,
commonInfo,
crudCommonOptions,
filterableField,
flinkJobDialog,
flinkJobIdTextInput,
mappingField,
paginationCommonOptions,
tableMetaDialog,
versionUpdateStateMapping,
} from '../../util/amis.tsx'
/**
 * Version (cross-day) status page: CRUD over /version_update/list with job-id,
 * alias and version-date filters; the version filter defaults to yesterday.
 */
function Version() {
  return (
    <div className="hudi-version">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            {
              type: 'crud',
              api: {
                method: 'get',
                url: `${commonInfo.baseUrl}/version_update/list`,
                data: {
                  page: '${page|default:undefined}',
                  count: '${perPage|default:undefined}',
                  order: '${orderBy|default:undefined}',
                  direction: '${orderDir|default:undefined}',
                  search_flink_job_id: '${flinkJobId|default:undefined}',
                  search_alias: '${alias|default:undefined}',
                  search_version: '${version|default:undefined}',
                  filter_schedules: '${updated|default:undefined}',
                },
              },
              data: {
                // Yesterday as YYYYMMDD — used as the default version filter.
                now: '${DATETOSTR(DATEMODIFY(NOW(), -1, \'days\'), \'YYYYMMDD\')}',
              },
              ...crudCommonOptions(),
              interval: 10000,
              filter: {
                mode: 'inline',
                title: '表筛选',
                body: [
                  {
                    type: 'group',
                    body: [
                      {
                        ...flinkJobIdTextInput('c5cac9d3-844a-4d86-b2c5-0c10f2283667'),
                        size: 'md',
                      },
                      {
                        ...aliasTextInput('c5cac9d3-844a-4d86-b2c5-0c10f2283667'),
                        size: 'md',
                      },
                      {
                        type: 'input-date',
                        name: 'version',
                        label: '版本',
                        clearable: true,
                        placeholder: '通过版本搜索',
                        size: 'md',
                        format: 'YYYYMMDD',
                        inputFormat: 'YYYYMMDD',
                        value: '${now}',
                      },
                    ],
                  },
                ],
                actions: [
                  {
                    type: 'submit',
                    level: 'primary',
                    label: '查询',
                  },
                  {
                    type: 'reset',
                    label: '重置',
                  },
                ],
              },
              filterTogglable: true,
              filterDefaultVisible: true,
              perPage: 20,
              headerToolbar: [
                'reload',
                'filter-toggler',
                {
                  type: 'tpl',
                  tpl: '共 <span class=\'text-primary font-bold\'>${total|default:0}</span> 个表,其中 <span class=\'text-success font-bold\'>${scheduled|default:0}</span> 个表已跨天,<span class=\'text-danger font-bold\'>${unScheduled|default:0}</span> 个表未跨天',
                },
                paginationCommonOptions(),
              ],
              footerToolbar: [
                paginationCommonOptions(),
              ],
              columns: [
                {
                  label: 'Flink job id',
                  width: 195,
                  fixed: 'left',
                  type: 'wrapper',
                  size: 'none',
                  body: [
                    {
                      type: 'action',
                      level: 'link',
                      label: '${flinkJobId}',
                      size: 'xs',
                      actionType: 'dialog',
                      tooltip: '查看详情',
                      dialog: flinkJobDialog(),
                    },
                    {
                      type: 'action',
                      level: 'link',
                      label: '',
                      icon: 'fa fa-copy',
                      size: 'xs',
                      actionType: 'copy',
                      content: '${flinkJobId}',
                      tooltip: '复制 ID',
                    },
                  ],
                },
                {
                  label: '别名',
                  type: 'wrapper',
                  fixed: 'left',
                  size: 'none',
                  className: 'nowrap',
                  body: [
                    {
                      type: 'action',
                      level: 'link',
                      label: '${tableMeta.alias}',
                      size: 'xs',
                      actionType: 'dialog',
                      tooltip: '查看详情',
                      dialog: tableMetaDialog(),
                    },
                    {
                      type: 'action',
                      level: 'link',
                      label: '',
                      icon: 'fa fa-copy',
                      size: 'xs',
                      actionType: 'copy',
                      content: '${tableMeta.alias}',
                      tooltip: '复制别名',
                    },
                  ],
                },
                {
                  name: 'priority',
                  label: '表优先级',
                  align: 'center',
                  width: 75,
                  sortable: true,
                },
                {
                  name: 'version',
                  label: '版本',
                  align: 'center',
                  width: 75,
                },
                {
                  name: 'updated',
                  label: '状态',
                  align: 'center',
                  ...mappingField('updated', versionUpdateStateMapping),
                  filterable: filterableField(versionUpdateStateMapping, true),
                  width: 70,
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Version

View File

@@ -0,0 +1,123 @@
import React from 'react'
import {useLocation, useParams} from 'react-router'
import {
amisRender,
commonInfo,
crudCommonOptions,
paginationCommonOptions,
yarnCrudColumns,
yarnQueueCrud,
} from '../../util/amis.tsx'
// Yarn: per-cluster YARN overview page. Renders the cluster's queue/resource
// table (via yarnQueueCrud) followed by an auto-refreshing, filterable CRUD
// list of YARN jobs for the cluster(s) selected by the route.
const Yarn: React.FC = () => {
  // Route params: `clusters` names the YARN cluster(s) to query, `queue` is
  // forwarded to yarnQueueCrud, and the optional `search` segment pre-fills
  // the job-name filter below.
  const {clusters, queue, search} = useParams()
  const location = useLocation()
  return (
    // Keyed by location.key so re-navigating to this route with different
    // params remounts the page and re-fetches all data.
    <div key={location.key} className="hudi-yarn">
      {amisRender(
        {
          type: 'wrapper',
          body: [
            {
              // Service id/name are scoped by cluster name so multiple
              // instances of this page don't collide on amis component ids.
              id: `${clusters}-yarn-service`,
              name: `${clusters}-yarn-service`,
              type: 'service',
              body: [
                {
                  type: 'tpl',
                  tpl: '<span class="font-bold text-xl">集群资源</span>',
                },
                // Queue/resource CRUD for the selected cluster and queue.
                yarnQueueCrud(clusters, queue),
                {
                  type: 'tpl',
                  tpl: '<span class="font-bold text-xl">集群任务</span>',
                  // className: 'mb-2 block',
                },
                {
                  // Job-list CRUD. The '${...}' strings are amis data-mapping
                  // templates resolved at request time from the filter form
                  // and pagination state, not JS template literals.
                  type: 'crud',
                  api: {
                    method: 'get',
                    url: `${commonInfo.baseUrl}/yarn/job_list`,
                    data: {
                      clusters: `${clusters}`,
                      page: '${page|default:undefined}',
                      count: '${perPage|default:undefined}',
                      order: '${orderBy|default:undefined}',
                      direction: '${orderDir|default:undefined}',
                      filter_state: '${state|default:undefined}',
                      filter_final_status: '${finalStatus|default:undefined}',
                      search_id: '${id|default:undefined}',
                      search_name: '${name|default:undefined}',
                      completion: 'true',
                    },
                  },
                  // Seed the name filter from the route's optional `search`
                  // segment (undefined when the segment is absent).
                  defaultParams: {
                    name: search,
                  },
                  ...crudCommonOptions(),
                  // Auto-refresh the job list every 10 seconds.
                  interval: 10000,
                  filter: {
                    mode: 'inline',
                    title: '任务筛选',
                    body: [
                      {
                        type: 'group',
                        body: [
                          {
                            type: 'input-text',
                            name: 'id',
                            label: 'ID',
                            clearable: true,
                            placeholder: '通过 ID 搜索',
                            size: 'md',
                          },
                          {
                            type: 'input-text',
                            name: 'name',
                            label: '名称',
                            clearable: true,
                            placeholder: '通过名称搜索',
                            size: 'md',
                          },
                        ],
                      },
                    ],
                    actions: [
                      {
                        type: 'submit',
                        level: 'primary',
                        label: '查询',
                      },
                      {
                        type: 'reset',
                        label: '重置',
                      },
                    ],
                  },
                  filterTogglable: true,
                  filterDefaultVisible: false,
                  perPage: 20,
                  headerToolbar: [
                    'reload',
                    'filter-toggler',
                    {
                      // Summary line: total / running / not-running counts —
                      // assumes the API response supplies these fields; TODO
                      // confirm against the /yarn/job_list handler.
                      type: 'tpl',
                      tpl: '共 <span class=\'text-primary font-bold\'>${total|default:0}</span> 个任务,其中 <span class=\'text-success font-bold\'>${running|default:0}</span> 个任务运行中,<span class=\'text-danger font-bold\'>${unRunning|default:0}</span> 个任务处于非运行状态',
                    },
                    paginationCommonOptions(),
                  ],
                  footerToolbar: [
                    paginationCommonOptions(),
                  ],
                  columns: yarnCrudColumns(),
                },
              ],
            },
          ],
        },
      )}
    </div>
  )
}
export default Yarn

View File

@@ -0,0 +1,19 @@
import React from 'react'
import {amisRender, commonInfo, yarnQueueCrud} from '../../util/amis.tsx'
const YarnCluster: React.FC = () => {
return (
<div className="hudi-yarn-cluster">
{amisRender(
{
type: 'wrapper',
body: [
...Object.keys(commonInfo.clusters.compaction).map(name => yarnQueueCrud(name)),
],
},
)}
</div>
)
}
export default YarnCluster

View File

@@ -0,0 +1,206 @@
import {
CheckSquareOutlined,
CloudOutlined,
ClusterOutlined,
CompressOutlined,
DatabaseOutlined,
InfoCircleOutlined,
OpenAIOutlined,
QuestionOutlined,
SunOutlined,
SyncOutlined,
TableOutlined,
ToolOutlined,
} from '@ant-design/icons'
import {Navigate, type RouteObject} from 'react-router'
import Conversation from './pages/ai/Conversation.tsx'
import Inspection from './pages/ai/Inspection.tsx'
import DataDetail from './pages/ai/knowledge/DataDetail.tsx'
import DataImport from './pages/ai/knowledge/DataImport.tsx'
import Knowledge from './pages/ai/knowledge/Knowledge.tsx'
import App from './pages/App.tsx'
import Cloud from './pages/overview/Cloud.tsx'
import Overview from './pages/overview/Overview.tsx'
import Queue from './pages/overview/Queue.tsx'
import Table from './pages/overview/Table.tsx'
import Task from './pages/overview/Task.tsx'
import Tool from './pages/overview/Tool.tsx'
import Version from './pages/overview/Version.tsx'
import Yarn from './pages/overview/Yarn.tsx'
import YarnCluster from './pages/overview/YarnCluster.tsx'
import {commonInfo} from './util/amis.tsx'
// Application route table (react-router data router). The root path hosts
// the App layout; its index redirects to the overview page, and '/ai'
// groups the AI-related sub-pages under its own index redirect.
export const routes: RouteObject[] = [
  {
    path: '/',
    Component: App,
    children: [
      {
        // Default landing page.
        index: true,
        element: <Navigate to="/overview" replace/>,
      },
      {
        path: 'overview',
        Component: Overview,
      },
      {
        path: 'table',
        Component: Table,
      },
      {
        path: 'queue',
        Component: Queue,
      },
      {
        path: 'version',
        Component: Version,
      },
      {
        // YARN job page: cluster(s) and queue are required, the trailing
        // search term (pre-fills the name filter) is optional.
        path: 'yarn/:clusters/:queue/:search?',
        Component: Yarn,
      },
      {
        path: 'cloud',
        Component: Cloud,
      },
      {
        path: 'yarn_cluster',
        Component: YarnCluster,
      },
      {
        path: 'tool',
        Component: Tool,
      },
      {
        path: 'task',
        Component: Task,
      },
      {
        // AI section: conversation, inspection, and knowledge-base pages.
        path: 'ai',
        children: [
          {
            index: true,
            element: <Navigate to="/ai/conversation" replace/>,
          },
          {
            path: 'inspection',
            Component: Inspection,
          },
          {
            path: 'conversation',
            Component: Conversation,
          },
          {
            path: 'knowledge',
            Component: Knowledge,
          },
          {
            // Knowledge-base import/detail pages, keyed by dataset name.
            path: 'knowledge/import/:name',
            Component: DataImport,
          },
          {
            path: 'knowledge/detail/:name',
            Component: DataDetail,
          },
        ],
      },
    ],
  },
]
// Sidebar menu definition (pro-layout `route` prop shape). Paths mirror the
// entries in `routes` above; cluster entries are generated from the runtime
// cluster configuration in `commonInfo`.
export const menus = {
  routes: [
    {
      path: '/',
      name: '概览',
      icon: <InfoCircleOutlined/>,
      routes: [
        {
          path: '/overview',
          name: '概览',
          icon: <InfoCircleOutlined/>,
        },
        {
          path: '/table',
          name: '表任务',
          icon: <TableOutlined/>,
        },
        {
          path: '/queue',
          name: '压缩队列',
          icon: <CompressOutlined/>,
        },
        {
          path: '/version',
          name: '跨天',
          icon: <SunOutlined/>,
        },
        {
          // Sync clusters: all configured sync cluster names, root queue,
          // with "Sync" pre-filling the job-name search.
          path: `/yarn/${commonInfo.clusters.sync_names()}/root/Sync`,
          name: '同步集群',
          icon: <SyncOutlined/>,
        },
        {
          // Compaction clusters aggregate view. Icon changed from
          // SyncOutlined to CompressOutlined for consistency with the
          // "压缩队列" entry above (both are compaction features).
          path: `/yarn/${commonInfo.clusters.compaction_names()}/default/Compaction`,
          name: '压缩集群',
          icon: <CompressOutlined/>,
        },
        {
          path: '/cloud',
          name: '服务',
          icon: <CloudOutlined/>,
        },
        {
          // Non-navigable grouping node; the UUID path just has to be
          // unique and match no real route.
          path: 'a86f7c51-ae60-4ca4-8c4d-40b86b445a04',
          name: '集群',
          icon: <ClusterOutlined/>,
          routes: [
            {
              path: '/yarn_cluster',
              name: '概览',
              icon: <InfoCircleOutlined/>,
            },
            // One menu entry per compaction cluster; the mapped value is
            // presumably the cluster's default queue — TODO confirm the
            // shape of commonInfo.clusters.compaction.
            ...Object.keys(commonInfo.clusters.compaction).map(name => ({
              // @ts-ignore
              path: `/yarn/${name}/${commonInfo.clusters.compaction[name]}`,
              name: `${name} 集群`,
              icon: <ClusterOutlined/>,
            })),
          ],
        },
        {
          path: '/tool',
          name: '工具',
          icon: <ToolOutlined/>,
        },
        {
          path: '/task',
          name: '任务',
          icon: <CheckSquareOutlined/>,
        },
      ],
    },
    {
      path: '/ai',
      name: 'AI',
      icon: <OpenAIOutlined/>,
      routes: [
        {
          path: '/ai/conversation',
          name: '智慧问答',
          icon: <QuestionOutlined/>,
        },
        {
          path: '/ai/inspection',
          name: '智能巡检',
          icon: <CheckSquareOutlined/>,
        },
        {
          path: '/ai/knowledge',
          name: '知识库',
          icon: <DatabaseOutlined/>,
        },
      ],
    },
  ],
}

File diff suppressed because it is too large Load Diff

1
service-web/client/src/vite-env.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@@ -0,0 +1,31 @@
{
  "compilerOptions": {
    // Incremental build info, kept out of the source tree.
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
    "target": "ESNext",
    "useDefineForClassFields": true,
    "lib": [
      "ESNext",
      "DOM",
      "DOM.Iterable"
    ],
    "module": "ESNext",
    // Skip type-checking of dependency .d.ts files for faster builds.
    "skipLibCheck": true,
    /* Bundler mode */
    "moduleResolution": "bundler",
    // Vite rewrites .tsx extensions; tsc only type-checks (noEmit).
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,
    "jsx": "react-jsx",
    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": [
    "src"
  ]
}

Some files were not shown because too many files have changed in this diff Show More