feat(all): flink job增加tags属性

flink job级别增加标签属性,用于区分调用测试包和非测试包
This commit is contained in:
v-zhangjc9
2024-07-30 16:42:02 +08:00
parent b0c5d04476
commit a3472340b5
23 changed files with 332 additions and 219 deletions

View File

@@ -7,5 +7,6 @@ CREATE TABLE `tb_app_flink_job_config`
`application_id` varchar(50) DEFAULT NULL COMMENT '同步 yarn id', `application_id` varchar(50) DEFAULT NULL COMMENT '同步 yarn id',
`run_mode` varchar(20) NOT NULL DEFAULT 'ALL_IN_ONE' COMMENT '运行模式', `run_mode` varchar(20) NOT NULL DEFAULT 'ALL_IN_ONE' COMMENT '运行模式',
`one_in_one_yarn_job_id` bigint(20) DEFAULT NULL COMMENT 'ONE_IN_ONE yarn 配置', `one_in_one_yarn_job_id` bigint(20) DEFAULT NULL COMMENT 'ONE_IN_ONE yarn 配置',
`tags` varchar(500) NOT NULL DEFAULT '' COMMENT '标签',
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
) DEFAULT CHARSET = utf8mb4 COMMENT ='Flink 任务信息表'; ) DEFAULT CHARSET = utf8mb4 COMMENT ='Flink 任务信息表';

View File

@@ -264,6 +264,7 @@ public class RuntimeInfo {
public static final class HudiInfo { public static final class HudiInfo {
private String appHdfsPath; private String appHdfsPath;
private String appTestHdfsPath;
private String archiveHdfsPath; private String archiveHdfsPath;
private String victoriaPushUrl; private String victoriaPushUrl;
@@ -275,6 +276,14 @@ public class RuntimeInfo {
this.appHdfsPath = appHdfsPath; this.appHdfsPath = appHdfsPath;
} }
// HDFS directory holding the test build of the application jar
// (counterpart of appHdfsPath; presumably selected when a job carries the
// "use test jar" tag — TODO confirm against ExecutorService).
public String getAppTestHdfsPath() {
return appTestHdfsPath;
}
// Sets the HDFS directory for the test build of the application jar.
public void setAppTestHdfsPath(String appTestHdfsPath) {
this.appTestHdfsPath = appTestHdfsPath;
}
public String getArchiveHdfsPath() { public String getArchiveHdfsPath() {
return archiveHdfsPath; return archiveHdfsPath;
} }
@@ -295,6 +304,7 @@ public class RuntimeInfo {
public String toString() { public String toString() {
return "HudiInfo{" + return "HudiInfo{" +
"appHdfsPath='" + appHdfsPath + '\'' + "appHdfsPath='" + appHdfsPath + '\'' +
", appTestHdfsPath='" + appTestHdfsPath + '\'' +
", archiveHdfsPath='" + archiveHdfsPath + '\'' + ", archiveHdfsPath='" + archiveHdfsPath + '\'' +
", victoriaPushUrl='" + victoriaPushUrl + '\'' + ", victoriaPushUrl='" + victoriaPushUrl + '\'' +
'}'; '}';

View File

@@ -43,7 +43,7 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication @SpringBootApplication
public class RunnerApplication implements ApplicationRunner { public class RunnerApplication implements ApplicationRunner {
private static final Logger logger = LoggerFactory.getLogger(RunnerApplication.class); private static final Logger logger = LoggerFactory.getLogger(RunnerApplication.class);
private static final TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH));
private final DeployInformationProperties deployInformationProperties; private final DeployInformationProperties deployInformationProperties;
private final RuntimeInfo runtimeInfo; private final RuntimeInfo runtimeInfo;
private final ImmutableList<ServiceInfoWrapper> serviceInfoList; private final ImmutableList<ServiceInfoWrapper> serviceInfoList;
@@ -71,6 +71,13 @@ public class RunnerApplication implements ApplicationRunner {
SpringApplication.run(RunnerApplication.class, args); SpringApplication.run(RunnerApplication.class, args);
} }
/**
 * Renders a classpath template with the given data model and (re)writes the
 * result to the target script file, replacing any existing file.
 *
 * @param templatePath     template location relative to the engine's "template" classpath root
 * @param data             data model passed to the template renderer
 * @param targetScriptPath file to (over)write with the rendered script
 * @throws IOException if the existing file cannot be deleted or the new one written
 */
private static void generateTemplate(String templatePath, Map<?, ?> data, Path targetScriptPath) throws IOException {
    Template template = engine.getTemplate(templatePath);
    String script = template.render(data);
    Files.deleteIfExists(targetScriptPath);
    // Encode explicitly: the no-arg String.getBytes() uses the platform-default
    // charset and can corrupt non-ASCII template output on differently-configured hosts.
    Files.write(targetScriptPath, script.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
private List<String> selectHosts(ServiceInfoWrapper serviceInfo) { private List<String> selectHosts(ServiceInfoWrapper serviceInfo) {
return serviceInfo.getReplicas() == 0 return serviceInfo.getReplicas() == 0
? hostInfoList ? hostInfoList
@@ -111,15 +118,14 @@ public class RunnerApplication implements ApplicationRunner {
String absolutRootPath = root.toAbsolutePath().toString(); String absolutRootPath = root.toAbsolutePath().toString();
logger.info("Current path: {}", absolutRootPath); logger.info("Current path: {}", absolutRootPath);
TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH)); generateTemplate(
Template syncTemplate = engine.getTemplate("check/check.ftl"); "check/check.ftl",
String syncScript = syncTemplate.render(MapUtil.builder() MapUtil.builder()
.put("currentPath", absolutRootPath) .put("currentPath", absolutRootPath)
.put("runtime", runtimeInfo) .put("runtime", runtimeInfo)
.build()); .build(),
Path checkScriptFile = Paths.get(root.toString(), "check.sh"); Paths.get(root.toString(), "check.sh")
Files.deleteIfExists(checkScriptFile); );
Files.write(checkScriptFile, syncScript.getBytes());
} }
private void generateCloud(Path root) throws IOException { private void generateCloud(Path root) throws IOException {
@@ -133,8 +139,6 @@ public class RunnerApplication implements ApplicationRunner {
deployPlans = mapper.readValue(new String(Files.readAllBytes(planPath)), new TypeReference<Map<String, List<String>>>() {}); deployPlans = mapper.readValue(new String(Files.readAllBytes(planPath)), new TypeReference<Map<String, List<String>>>() {});
} }
TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH));
Template deployTemplate = engine.getTemplate("cloud/deploy.ftl");
for (ServiceInfoWrapper serviceInfo : serviceInfoList) { for (ServiceInfoWrapper serviceInfo : serviceInfoList) {
logger.info("Generate script for {}", serviceInfo.getName()); logger.info("Generate script for {}", serviceInfo.getName());
List<String> selectedHosts; List<String> selectedHosts;
@@ -148,71 +152,70 @@ public class RunnerApplication implements ApplicationRunner {
selectedHosts = selectHosts(serviceInfo); selectedHosts = selectHosts(serviceInfo);
deployPlans.put(serviceInfo.getName(), selectedHosts); deployPlans.put(serviceInfo.getName(), selectedHosts);
} }
String deployScript = deployTemplate.render(MapUtil.builder() generateTemplate(
.put("currentPath", absolutRootPath) "cloud/deploy.ftl",
.put("hosts", hostInfoList MapUtil.builder()
.collect(HostInfoWrapper::getHostnameIp) .put("currentPath", absolutRootPath)
.toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp()))) .put("hosts", hostInfoList
.put("selectedHosts", selectedHosts) .collect(HostInfoWrapper::getHostnameIp)
.put("runtime", runtimeInfo) .toSortedList((o1, o2) -> Comparator.<String>naturalOrder().compare(o1.getIp(), o2.getIp())))
.put("info", serviceInfo) .put("selectedHosts", selectedHosts)
.put("classpath", String.join(":", serviceInfo.getClasspath())) .put("runtime", runtimeInfo)
.put("arguments", serviceInfo.getArguments()) .put("info", serviceInfo)
.put("environments", serviceInfo.getEnvironments()) .put("classpath", String.join(":", serviceInfo.getClasspath()))
.build()); .put("arguments", serviceInfo.getArguments())
Path deployScriptFile = Paths.get( .put("environments", serviceInfo.getEnvironments())
root.toString(), .build(),
StrUtil.format("deploy-{}.sh", serviceInfo.getName()) Paths.get(
root.toString(),
StrUtil.format("deploy-{}.sh", serviceInfo.getName())
)
); );
Files.deleteIfExists(deployScriptFile); generateTemplate(
Files.write(deployScriptFile, deployScript.getBytes()); "cloud/stop.ftl",
MapUtil.builder()
Template stopTemplate = engine.getTemplate("cloud/stop.ftl"); .put("currentPath", absolutRootPath)
String stopScript = stopTemplate.render(MapUtil.builder() .put("hosts", hostInfoList
.put("currentPath", absolutRootPath) .collect(HostInfoWrapper::getIp)
.put("hosts", hostInfoList .toSortedList(Comparator.naturalOrder()))
.collect(HostInfoWrapper::getIp) .put("runtime", runtimeInfo)
.toSortedList(Comparator.naturalOrder())) .put("info", serviceInfo)
.put("runtime", runtimeInfo) .put("arguments", serviceInfo.getArguments())
.put("info", serviceInfo) .put("environments", serviceInfo.getEnvironments())
.put("arguments", serviceInfo.getArguments()) .build(),
.put("environments", serviceInfo.getEnvironments()) Paths.get(
.build()); root.toString(),
Path stopScriptFile = Paths.get( StrUtil.format("stop-{}.sh", serviceInfo.getName())
root.toString(), )
StrUtil.format("stop-{}.sh", serviceInfo.getName())
); );
Files.deleteIfExists(stopScriptFile); generateTemplate(
Files.write(stopScriptFile, stopScript.getBytes()); "cloud/log.ftl",
MapUtil.builder()
Template logTemplate = engine.getTemplate("cloud/log.ftl"); .put("currentPath", absolutRootPath)
String logScript = logTemplate.render(MapUtil.builder() .put("hosts", hostInfoList
.put("currentPath", absolutRootPath) .collect(HostInfoWrapper::getIp)
.put("hosts", hostInfoList .toSortedList(Comparator.naturalOrder()))
.collect(HostInfoWrapper::getIp) .put("selectedHosts", selectedHosts)
.toSortedList(Comparator.naturalOrder())) .put("runtime", runtimeInfo)
.put("selectedHosts", selectedHosts) .put("info", serviceInfo)
.put("runtime", runtimeInfo) .put("arguments", serviceInfo.getArguments())
.put("info", serviceInfo) .put("environments", serviceInfo.getEnvironments())
.put("arguments", serviceInfo.getArguments()) .build(),
.put("environments", serviceInfo.getEnvironments()) Paths.get(
.build()); root.toString(),
Path logScriptFile = Paths.get( StrUtil.format("log-{}.sh", serviceInfo.getName())
root.toString(), )
StrUtil.format("log-{}.sh", serviceInfo.getName())
); );
Files.deleteIfExists(logScriptFile);
Files.write(logScriptFile, logScript.getBytes());
} }
Template stopTemplate = engine.getTemplate("cloud/stop-script.ftl"); generateTemplate(
String stopScript = stopTemplate.render(MapUtil.builder() "cloud/stop-script.ftl",
.put("currentPath", absolutRootPath) MapUtil.builder()
.put("runtime", runtimeInfo) .put("currentPath", absolutRootPath)
.build()); .put("runtime", runtimeInfo)
Path stopScriptFile = Paths.get(root.toString(), "stop.sh"); .build(),
Files.deleteIfExists(stopScriptFile); Paths.get(root.toString(), "stop.sh")
Files.write(stopScriptFile, stopScript.getBytes()); );
MutableMap<String, MutableList<ServiceInfoWrapper>> groups = Maps.mutable.empty(); MutableMap<String, MutableList<ServiceInfoWrapper>> groups = Maps.mutable.empty();
for (ServiceInfoWrapper service : serviceInfoList) { for (ServiceInfoWrapper service : serviceInfoList) {
@@ -229,29 +232,28 @@ public class RunnerApplication implements ApplicationRunner {
String group = entry.getKey(); String group = entry.getKey();
MutableList<ServiceInfoWrapper> infos = entry.getValue(); MutableList<ServiceInfoWrapper> infos = entry.getValue();
Template batchDeployTemplate = engine.getTemplate("cloud/batch-deploy.ftl"); generateTemplate(
String batchDeployScript = batchDeployTemplate.render(MapUtil.builder() "cloud/batch-deploy.ftl",
.put("currentPath", absolutRootPath) MapUtil.builder()
.put("services", infos.collect(ServiceInfoWrapper::getName)) .put("currentPath", absolutRootPath)
.build()); .put("services", infos.collect(ServiceInfoWrapper::getName))
Path batchDeployScriptFile = Paths.get( .build(),
root.toString(), Paths.get(
StrUtil.format("batch-deploy-{}.sh", group) root.toString(),
StrUtil.format("batch-deploy-{}.sh", group)
)
); );
Files.deleteIfExists(batchDeployScriptFile); generateTemplate(
Files.write(batchDeployScriptFile, batchDeployScript.getBytes()); "cloud/batch-stop.ftl",
MapUtil.builder()
Template batchStopTemplate = engine.getTemplate("cloud/batch-stop.ftl"); .put("currentPath", absolutRootPath)
String batchStopScript = batchStopTemplate.render(MapUtil.builder() .put("services", infos.collect(ServiceInfoWrapper::getName))
.put("currentPath", absolutRootPath) .build(),
.put("services", infos.collect(ServiceInfoWrapper::getName)) Paths.get(
.build()); root.toString(),
Path batchStopScriptFile = Paths.get( StrUtil.format("batch-stop-{}.sh", group)
root.toString(), )
StrUtil.format("batch-stop-{}.sh", group)
); );
Files.deleteIfExists(batchStopScriptFile);
Files.write(batchStopScriptFile, batchStopScript.getBytes());
} }
Files.deleteIfExists(planPath); Files.deleteIfExists(planPath);
@@ -263,35 +265,32 @@ public class RunnerApplication implements ApplicationRunner {
String absolutRootPath = root.toAbsolutePath().toString(); String absolutRootPath = root.toAbsolutePath().toString();
logger.info("Current path: {}", absolutRootPath); logger.info("Current path: {}", absolutRootPath);
TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH)); generateTemplate(
Template commandTemplate = engine.getTemplate(root.toFile().getName() + "/cli.ftl"); root.toFile().getName() + "/cli.ftl",
String commandScript = commandTemplate.render(MapUtil.builder() MapUtil.builder()
.put("currentPath", absolutRootPath) .put("currentPath", absolutRootPath)
.put("runtime", runtimeInfo) .put("runtime", runtimeInfo)
.put("directly", false) .put("directly", false)
.build()); .build(),
Path commandScriptFile = Paths.get(root.toString(), "cli"); Paths.get(root.toString(), "cli")
Files.deleteIfExists(commandScriptFile); );
Files.write(commandScriptFile, commandScript.getBytes()); generateTemplate(
root.toFile().getName() + "/cli.ftl",
Template commandDirectlyTemplate = engine.getTemplate(root.toFile().getName() + "/cli.ftl"); MapUtil.builder()
String commandDirectlyScript = commandDirectlyTemplate.render(MapUtil.builder() .put("currentPath", absolutRootPath)
.put("currentPath", absolutRootPath) .put("runtime", runtimeInfo)
.put("runtime", runtimeInfo) .put("directly", true)
.put("directly", true) .build(),
.build()); Paths.get(root.toString(), "cli_d")
Path commandDirectlyScriptFile = Paths.get(root.toString(), "cli_d"); );
Files.deleteIfExists(commandDirectlyScriptFile); generateTemplate(
Files.write(commandDirectlyScriptFile, commandDirectlyScript.getBytes()); root.toFile().getName() + "/update.ftl",
MapUtil.builder()
Template updateTemplate = engine.getTemplate(root.toFile().getName() + "/update.ftl"); .put("currentPath", absolutRootPath)
String updateScript = updateTemplate.render(MapUtil.builder() .put("runtime", runtimeInfo)
.put("currentPath", absolutRootPath) .build(),
.put("runtime", runtimeInfo) Paths.get(root.toString(), "update.sh")
.build()); );
Path updateScriptFile = Paths.get(root.toString(), "update.sh");
Files.deleteIfExists(updateScriptFile);
Files.write(updateScriptFile, updateScript.getBytes());
} }
private void generateUploader(Path root) throws IOException { private void generateUploader(Path root) throws IOException {
@@ -299,24 +298,22 @@ public class RunnerApplication implements ApplicationRunner {
String absolutRootPath = root.toAbsolutePath().toString(); String absolutRootPath = root.toAbsolutePath().toString();
logger.info("Current path: {}", absolutRootPath); logger.info("Current path: {}", absolutRootPath);
TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH)); generateTemplate(
Template startTemplate = engine.getTemplate("uploader/start.ftl"); "uploader/start.ftl",
String startScript = startTemplate.render(MapUtil.builder() MapUtil.builder()
.put("currentPath", absolutRootPath) .put("currentPath", absolutRootPath)
.put("runtime", runtimeInfo) .put("runtime", runtimeInfo)
.build()); .build(),
Path startScriptFile = Paths.get(root.toString(), "start.sh"); Paths.get(root.toString(), "start.sh")
Files.deleteIfExists(startScriptFile); );
Files.write(startScriptFile, startScript.getBytes()); generateTemplate(
"uploader/update.ftl",
Template updateTemplate = engine.getTemplate("uploader/update.ftl"); MapUtil.builder()
String updateScript = updateTemplate.render(MapUtil.builder() .put("currentPath", absolutRootPath)
.put("currentPath", absolutRootPath) .put("runtime", runtimeInfo)
.put("runtime", runtimeInfo) .build(),
.build()); Paths.get(root.toString(), "update.sh")
Path updateScriptFile = Paths.get(root.toString(), "update.sh"); );
Files.deleteIfExists(updateScriptFile);
Files.write(updateScriptFile, updateScript.getBytes());
Template stopTemplate = engine.getTemplate("cloud/stop-script.ftl"); Template stopTemplate = engine.getTemplate("cloud/stop-script.ftl");
String stopScript = stopTemplate.render(MapUtil.builder() String stopScript = stopTemplate.render(MapUtil.builder()
@@ -332,29 +329,38 @@ public class RunnerApplication implements ApplicationRunner {
String absolutRootPath = root.toAbsolutePath().toString(); String absolutRootPath = root.toAbsolutePath().toString();
logger.info("Current path: {}", absolutRootPath); logger.info("Current path: {}", absolutRootPath);
TemplateEngine engine = TemplateUtil.createEngine(new TemplateConfig("template", TemplateConfig.ResourceMode.CLASSPATH)); generateTemplate(
Template syncTemplate = engine.getTemplate("update-jar.ftl"); "update-jar.ftl",
String syncScript = syncTemplate.render(MapUtil.builder() MapUtil.builder()
.put("currentPath", absolutRootPath) .put("currentPath", absolutRootPath)
.put("runtime", runtimeInfo) .put("runtime", runtimeInfo)
.put("jarPrefix", "sync") .put("jarPrefix", "sync")
.put("jarName", "sync-1.0.0-SNAPSHOT.jar") .put("jarName", "sync-1.0.0-SNAPSHOT.jar")
.put("uploadPath", runtimeInfo.getHudi().getAppHdfsPath()) .put("uploadPath", runtimeInfo.getHudi().getAppHdfsPath())
.build()); .build(),
Path syncScriptFile = Paths.get(root.toString(), "update-sync.sh"); Paths.get(root.toString(), "update-sync.sh")
Files.deleteIfExists(syncScriptFile); );
Files.write(syncScriptFile, syncScript.getBytes()); generateTemplate(
"update-jar.ftl",
Template taskTemplate = engine.getTemplate("update-jar.ftl"); MapUtil.builder()
String taskScript = taskTemplate.render(MapUtil.builder() .put("currentPath", absolutRootPath)
.put("currentPath", absolutRootPath) .put("runtime", runtimeInfo)
.put("runtime", runtimeInfo) .put("jarPrefix", "sync")
.put("jarPrefix", "task") .put("jarName", "sync-1.0.0-SNAPSHOT.jar")
.put("jarName", "service-executor-task-1.0.0-SNAPSHOT.jar") .put("uploadPath", runtimeInfo.getHudi().getAppTestHdfsPath())
.put("uploadPath", taskJarPath) .build(),
.build()); Paths.get(root.toString(), "update-test.sh")
Path taskScriptFile = Paths.get(root.toString(), "update-task.sh"); );
Files.deleteIfExists(taskScriptFile); generateTemplate(
Files.write(taskScriptFile, taskScript.getBytes()); "update-jar.ftl",
MapUtil.builder()
.put("currentPath", absolutRootPath)
.put("runtime", runtimeInfo)
.put("jarPrefix", "task")
.put("jarName", "service-executor-task-1.0.0-SNAPSHOT.jar")
.put("uploadPath", taskJarPath)
.build(),
Paths.get(root.toString(), "update-task.sh")
);
} }
} }

View File

@@ -31,6 +31,7 @@ deploy:
hudi: hudi:
# hudi业务jar包所在目录 # hudi业务jar包所在目录
app-hdfs-path: hdfs://b2/apps/datalake/jars/app-b12 app-hdfs-path: hdfs://b2/apps/datalake/jars/app-b12
app-test-hdfs-path: hdfs://b2/apps/datalake/jars/app-test-b12
# hudi指标推送 # hudi指标推送
victoria-push-url: http://132.126.207.125:35710/api/v1/import/prometheus victoria-push-url: http://132.126.207.125:35710/api/v1/import/prometheus
loki-push-url: ${deploy.runtime.loki.hudi-push-url} loki-push-url: ${deploy.runtime.loki.hudi-push-url}

View File

@@ -37,6 +37,7 @@ deploy:
"[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM "[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
"[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path} "[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path}
"[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path} "[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path}
"[connector.hudi.app-test-hdfs-path]": ${deploy.runtime.hudi.app-test-hdfs-path}
"[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url} "[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url}
"[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url} "[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url}
arguments: arguments:
@@ -57,6 +58,7 @@ deploy:
"[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM "[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
"[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path} "[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path}
"[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path} "[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path}
"[connector.hudi.app-test-hdfs-path]": ${deploy.runtime.hudi.app-test-hdfs-path}
"[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url} "[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url}
"[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url} "[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url}
arguments: arguments:
@@ -77,6 +79,7 @@ deploy:
"[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM "[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
"[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path} "[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path}
"[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path} "[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path}
"[connector.hudi.app-test-hdfs-path]": ${deploy.runtime.hudi.app-test-hdfs-path}
"[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url} "[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url}
"[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url} "[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url}
arguments: arguments:
@@ -97,6 +100,7 @@ deploy:
"[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM "[connector.hadoop.kerberos-principal]": ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
"[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path} "[connector.hadoop.kerberos-keytab-path]": ${deploy.runtime.kerberos-keytab-path}
"[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path} "[connector.hudi.app-hdfs-path]": ${deploy.runtime.hudi.app-hdfs-path}
"[connector.hudi.app-test-hdfs-path]": ${deploy.runtime.hudi.app-test-hdfs-path}
"[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url} "[connector.hudi.victoria-push-url]": ${deploy.runtime.hudi.victoria-push-url}
"[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url} "[connector.hudi.loki-push-url]": ${deploy.runtime.hudi.loki-push-url}
arguments: arguments:

View File

@@ -408,6 +408,14 @@ public interface SQLConstants {
* 字段 one_in_one_yarn_job_id 别名值 tafjc.one_in_one_yarn_job_id ONE_IN_ONE yarn 配置 * 字段 one_in_one_yarn_job_id 别名值 tafjc.one_in_one_yarn_job_id ONE_IN_ONE yarn 配置
*/ */
String ONE_IN_ONE_YARN_JOB_ID_A = _alias_.getAlias() + "." + ONE_IN_ONE_YARN_JOB_ID_O; String ONE_IN_ONE_YARN_JOB_ID_A = _alias_.getAlias() + "." + ONE_IN_ONE_YARN_JOB_ID_O;
/**
* 字段 tags 原始值 tags 标签
*/
String TAGS_O = "tags";
/**
* 字段 tags 别名值 tafjc.tags 标签
*/
String TAGS_A = _alias_.getAlias() + "." + TAGS_O;
} }
/** /**

View File

@@ -1,6 +1,7 @@
package com.lanyuanxiaoyao.service.common.entity; package com.lanyuanxiaoyao.service.common.entity;
import java.io.Serializable; import java.io.Serializable;
import java.util.List;
/** /**
* Flink Job * Flink Job
@@ -14,6 +15,7 @@ public class FlinkJob implements Serializable {
private String name; private String name;
private RunMode runMode; private RunMode runMode;
private TableMeta.YarnMeta oneInOneSyncYarn; private TableMeta.YarnMeta oneInOneSyncYarn;
private List<String> tags;
public FlinkJob() { public FlinkJob() {
} }
@@ -23,6 +25,7 @@ public class FlinkJob implements Serializable {
this.name = builder.name; this.name = builder.name;
this.runMode = builder.runMode; this.runMode = builder.runMode;
this.oneInOneSyncYarn = builder.oneInOneSyncYarn; this.oneInOneSyncYarn = builder.oneInOneSyncYarn;
this.tags = builder.tags;
} }
public static Builder builder() { public static Builder builder() {
@@ -61,14 +64,23 @@ public class FlinkJob implements Serializable {
this.oneInOneSyncYarn = oneInOneSyncYarn; this.oneInOneSyncYarn = oneInOneSyncYarn;
} }
// Job-level tags (parsed from the comma-separated `tags` DB column).
// May be null when built without tags — callers should null-check.
public List<String> getTags() {
return tags;
}
// Replaces the job-level tag list.
public void setTags(List<String> tags) {
this.tags = tags;
}
@Override @Override
public String toString() { public String toString() {
return "FlinkJob{" + return "FlinkJob{" +
"id=" + id + "id=" + id +
", name='" + name + '\'' + ", name='" + name + '\'' +
", runMode=" + runMode + ", runMode=" + runMode +
", oneInOneSyncYarn=" + oneInOneSyncYarn + ", oneInOneSyncYarn=" + oneInOneSyncYarn +
'}'; ", tags='" + tags + '\'' +
'}';
} }
public enum RunMode { public enum RunMode {
@@ -92,6 +104,7 @@ public class FlinkJob implements Serializable {
private String name; private String name;
private RunMode runMode; private RunMode runMode;
private TableMeta.YarnMeta oneInOneSyncYarn; private TableMeta.YarnMeta oneInOneSyncYarn;
private List<String> tags;
private Builder() {} private Builder() {}
@@ -115,6 +128,11 @@ public class FlinkJob implements Serializable {
return this; return this;
} }
// Sets the job-level tags (per the commit intent, used to distinguish jobs
// that should run against the test jar from regular ones).
public Builder tags(List<String> tags) {
this.tags = tags;
return this;
}
public FlinkJob build() { public FlinkJob build() {
return new FlinkJob(this); return new FlinkJob(this);
} }

View File

@@ -575,16 +575,4 @@ public class TableMetaHelper {
.findFirst(); .findFirst();
} }
} }
public static boolean existsTag(TableMeta meta, String tag) {
return existsTag(meta.getTags(), tag);
}
public static boolean existsTag(String sourceTags, String tag) {
return existsTag(Arrays.asList(sourceTags.split(",")), tag);
}
public static boolean existsTag(List<String> sourceTags, String tag) {
return sourceTags != null && sourceTags.contains(tag);
}
} }

View File

@@ -0,0 +1,30 @@
package com.lanyuanxiaoyao.service.common.utils;
import com.lanyuanxiaoyao.service.common.entity.FlinkJob;
import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import java.util.Arrays;
import java.util.List;
/**
 * Tag-matching helpers: check whether a given tag is present on a Flink job,
 * a table meta, a comma-separated tag string, or a tag list.
 *
 * <p>All overloads are null-safe: a missing/null tag source never matches.
 * Matching is exact (case-sensitive, untrimmed), consistent with how the
 * comma-separated {@code tags} column is stored.
 *
 * @author lanyuanxiaoyao
 * @date 2024-07-29
 */
public class TagsHelper {
    /** Utility class; not instantiable. */
    private TagsHelper() {
    }

    /** Checks whether the Flink job carries the tag; a job with a null tag list never matches. */
    public static boolean existsTag(FlinkJob job, String tag) {
        return existsTag(job.getTags(), tag);
    }

    /** Checks whether the table meta carries the tag; a null tag list never matches. */
    public static boolean existsTag(TableMeta meta, String tag) {
        return existsTag(meta.getTags(), tag);
    }

    /**
     * Checks a comma-separated tag string (the on-disk form of the {@code tags} column).
     * Null-safe: previously a null {@code sourceTags} threw a NullPointerException here
     * while the List overload handled null — the guard makes the overloads consistent.
     */
    public static boolean existsTag(String sourceTags, String tag) {
        return sourceTags != null && existsTag(Arrays.asList(sourceTags.split(",")), tag);
    }

    /** Checks a tag list for an exact match; null-safe. */
    public static boolean existsTag(List<String> sourceTags, String tag) {
        return sourceTags != null && sourceTags.contains(tag);
    }
}

View File

@@ -6,6 +6,7 @@ import cn.hutool.core.util.ObjectUtil;
import com.lanyuanxiaoyao.service.common.entity.FlinkJob; import com.lanyuanxiaoyao.service.common.entity.FlinkJob;
import com.lanyuanxiaoyao.service.common.entity.TableMeta; import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.exception.FlinkJobNotFoundException; import com.lanyuanxiaoyao.service.common.exception.FlinkJobNotFoundException;
import java.util.List;
import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList; import org.eclipse.collections.api.list.ImmutableList;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -43,7 +44,8 @@ public class FlinkJobService extends BaseService {
TbAppFlinkJobConfig.NAME_A, TbAppFlinkJobConfig.NAME_A,
TbAppFlinkJobConfig.RUN_MODE_A, TbAppFlinkJobConfig.RUN_MODE_A,
TbAppYarnJobConfig.JOB_MANAGER_MEMORY_A, TbAppYarnJobConfig.JOB_MANAGER_MEMORY_A,
TbAppYarnJobConfig.TASK_MANAGER_MEMORY_A TbAppYarnJobConfig.TASK_MANAGER_MEMORY_A,
TbAppFlinkJobConfig.TAGS_A
) )
.from(TbAppFlinkJobConfig._alias_) .from(TbAppFlinkJobConfig._alias_)
.leftJoin(TbAppYarnJobConfig._alias_) .leftJoin(TbAppYarnJobConfig._alias_)
@@ -63,11 +65,14 @@ public class FlinkJobService extends BaseService {
.jobManagerMemory(rs.getInt(4)) .jobManagerMemory(rs.getInt(4))
.taskManagerMemory(rs.getInt(5)) .taskManagerMemory(rs.getInt(5))
.build(); .build();
String tagText = rs.getString(6);
List<String> tags = Lists.mutable.of(tagText.split(","));
return FlinkJob.builder() return FlinkJob.builder()
.id(rs.getLong(1)) .id(rs.getLong(1))
.name(rs.getString(2)) .name(rs.getString(2))
.runMode(mode) .runMode(mode)
.oneInOneSyncYarn(yarnMeta) .oneInOneSyncYarn(yarnMeta)
.tags(tags)
.build(); .build();
}) })
); );

View File

@@ -9,6 +9,7 @@ import com.lanyuanxiaoyao.service.common.entity.FlinkJob;
import com.lanyuanxiaoyao.service.common.entity.TableMeta; import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.exception.CheckpointRootPathNotFoundException; import com.lanyuanxiaoyao.service.common.exception.CheckpointRootPathNotFoundException;
import com.lanyuanxiaoyao.service.common.utils.NameHelper; import com.lanyuanxiaoyao.service.common.utils.NameHelper;
import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.configuration.HudiServiceProperties; import com.lanyuanxiaoyao.service.configuration.HudiServiceProperties;
import com.lanyuanxiaoyao.service.executor.Runner; import com.lanyuanxiaoyao.service.executor.Runner;
import com.lanyuanxiaoyao.service.forest.service.InfoService; import com.lanyuanxiaoyao.service.forest.service.InfoService;
@@ -23,7 +24,18 @@ import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.flink.client.cli.ClientOptions; import org.apache.flink.client.cli.ClientOptions;
import org.apache.flink.configuration.*; import org.apache.flink.configuration.AkkaOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.configuration.DeploymentOptions;
import org.apache.flink.configuration.HeartbeatManagerOptions;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.configuration.ResourceManagerOptions;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.flink.yarn.configuration.YarnConfigOptions;
@@ -137,9 +149,13 @@ public class ExecutorService {
); );
} }
private String getLatestExecutorJarPath() throws IOException { private String getLatestExecutorJarPath(FlinkJob flinkJob) throws IOException {
try (FileSystem fileSystem = HadoopUtil.createFileSystem(HadoopUtil.createConfiguration(hadoopConfiguration))) { try (FileSystem fileSystem = HadoopUtil.createFileSystem(HadoopUtil.createConfiguration(hadoopConfiguration))) {
Path root = new Path(hudiConfiguration.getAppHdfsPath()); Path root = new Path(hudiConfiguration.getAppHdfsPath());
if (TagsHelper.existsTag(flinkJob, Constants.TAGS_USE_TEST_JAR)) {
logger.warn("Use test jar for {}", flinkJob.getId());
root = new Path(hudiConfiguration.getAppTestHdfsPath());
}
return Lists.immutable.of(fileSystem.listStatus(root)) return Lists.immutable.of(fileSystem.listStatus(root))
.select(FileStatus::isFile) .select(FileStatus::isFile)
.collect(FileStatus::getPath) .collect(FileStatus::getPath)
@@ -263,7 +279,7 @@ public class ExecutorService {
// configuration.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 10000); // configuration.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 10000);
// 业务jar包 // 业务jar包
String executorJarPath = getLatestExecutorJarPath(); String executorJarPath = getLatestExecutorJarPath(flinkJob);
logger.info("Executor jar path: {}", executorJarPath); logger.info("Executor jar path: {}", executorJarPath);
Long executorJarVersion = getLatestExecutorJarVersion(executorJarPath); Long executorJarVersion = getLatestExecutorJarVersion(executorJarPath);
configuration.set(PipelineOptions.JARS, new ArrayList<String>() {{ configuration.set(PipelineOptions.JARS, new ArrayList<String>() {{
@@ -314,7 +330,7 @@ public class ExecutorService {
configuration.setString(YarnConfigOptions.APPLICATION_NAME, NameHelper.compactionJobName(flinkJob.getId(), tableMeta.getAlias())); configuration.setString(YarnConfigOptions.APPLICATION_NAME, NameHelper.compactionJobName(flinkJob.getId(), tableMeta.getAlias()));
String executorJarPath = getLatestExecutorJarPath(); String executorJarPath = getLatestExecutorJarPath(flinkJob);
logger.info("Executor jar path: {}", executorJarPath); logger.info("Executor jar path: {}", executorJarPath);
Long executorJarVersion = getLatestExecutorJarVersion(executorJarPath); Long executorJarVersion = getLatestExecutorJarVersion(executorJarPath);
configuration.set(PipelineOptions.JARS, new ArrayList<String>() {{ configuration.set(PipelineOptions.JARS, new ArrayList<String>() {{

View File

@@ -10,7 +10,7 @@ import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.entity.compaction.ScheduleJob; import com.lanyuanxiaoyao.service.common.entity.compaction.ScheduleJob;
import com.lanyuanxiaoyao.service.common.utils.LogHelper; import com.lanyuanxiaoyao.service.common.utils.LogHelper;
import com.lanyuanxiaoyao.service.common.utils.NameHelper; import com.lanyuanxiaoyao.service.common.utils.NameHelper;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.configuration.entity.hudi.HudiCompactionPlan; import com.lanyuanxiaoyao.service.configuration.entity.hudi.HudiCompactionPlan;
import com.lanyuanxiaoyao.service.configuration.entity.hudi.HudiInstant; import com.lanyuanxiaoyao.service.configuration.entity.hudi.HudiInstant;
import com.lanyuanxiaoyao.service.configuration.entity.queue.QueueItem; import com.lanyuanxiaoyao.service.configuration.entity.queue.QueueItem;
@@ -30,7 +30,6 @@ import java.time.Duration;
import java.time.Instant; import java.time.Instant;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.PreDestroy; import javax.annotation.PreDestroy;
import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.CuratorFrameworkFactory;
@@ -218,7 +217,7 @@ public class CompactionService {
TableMeta meta = infoService.tableMetaDetail(flinkJobId, alias); TableMeta meta = infoService.tableMetaDetail(flinkJobId, alias);
compactionJobGetTableInfoCost.record(Duration.between(getTableInfoStartTime, Instant.now())); compactionJobGetTableInfoCost.record(Duration.between(getTableInfoStartTime, Instant.now()));
if (TableMetaHelper.existsTag(meta, Constants.TAGS_NO_COMPACT)) { if (TagsHelper.existsTag(meta, Constants.TAGS_NO_COMPACT)) {
logger.warn("[{}] [{}] Table tags no compact", flinkJob.getId(), meta.getAlias()); logger.warn("[{}] [{}] Table tags no compact", flinkJob.getId(), meta.getAlias());
return; return;
} }

View File

@@ -18,6 +18,7 @@ public class HudiConfiguration {
private static final Logger logger = LoggerFactory.getLogger(HudiConfiguration.class); private static final Logger logger = LoggerFactory.getLogger(HudiConfiguration.class);
private String appHdfsPath; private String appHdfsPath;
private String appTestHdfsPath;
private String victoriaPushUrl; private String victoriaPushUrl;
private String lokiPushUrl; private String lokiPushUrl;
@@ -34,6 +35,14 @@ public class HudiConfiguration {
this.appHdfsPath = appHdfsPath; this.appHdfsPath = appHdfsPath;
} }
public String getAppTestHdfsPath() {
return appTestHdfsPath;
}
public void setAppTestHdfsPath(String appTestHdfsPath) {
this.appTestHdfsPath = appTestHdfsPath;
}
public String getVictoriaPushUrl() { public String getVictoriaPushUrl() {
return victoriaPushUrl; return victoriaPushUrl;
} }
@@ -54,6 +63,7 @@ public class HudiConfiguration {
public String toString() { public String toString() {
return "HudiConfiguration{" + return "HudiConfiguration{" +
"appHdfsPath='" + appHdfsPath + '\'' + "appHdfsPath='" + appHdfsPath + '\'' +
", appTestHdfsPath='" + appTestHdfsPath + '\'' +
", victoriaPushUrl='" + victoriaPushUrl + '\'' + ", victoriaPushUrl='" + victoriaPushUrl + '\'' +
", lokiPushUrl='" + lokiPushUrl + '\'' + ", lokiPushUrl='" + lokiPushUrl + '\'' +
'}'; '}';

View File

@@ -1,16 +1,13 @@
package com.lanyuanxiaoyao.service.scheduler.quartz.compaction; package com.lanyuanxiaoyao.service.scheduler.quartz.compaction;
import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.lanyuanxiaoyao.service.common.Constants; import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.forest.service.HudiService; import com.lanyuanxiaoyao.service.forest.service.HudiService;
import com.lanyuanxiaoyao.service.forest.service.InfoService; import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper; import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatter;
import org.eclipse.collections.api.factory.Maps;
import org.eclipse.collections.api.list.ImmutableList;
import org.quartz.DisallowConcurrentExecution; import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext; import org.quartz.JobExecutionContext;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -48,7 +45,7 @@ public class CrmFocusScheduleJob extends BaseScheduleJob {
infoService, infoService,
hudiService, hudiService,
mapper, mapper,
meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_CRM_FOCUS), meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_CRM_FOCUS),
comment comment
); );
} }

View File

@@ -2,13 +2,12 @@ package com.lanyuanxiaoyao.service.scheduler.quartz.compaction;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.lanyuanxiaoyao.service.common.Constants; import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.forest.service.HudiService; import com.lanyuanxiaoyao.service.forest.service.HudiService;
import com.lanyuanxiaoyao.service.forest.service.InfoService; import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper; import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatter;
import org.eclipse.collections.api.factory.Maps;
import org.quartz.DisallowConcurrentExecution; import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext; import org.quartz.JobExecutionContext;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -46,7 +45,7 @@ public class FocusScheduleJob extends BaseScheduleJob {
infoService, infoService,
hudiService, hudiService,
mapper, mapper,
meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_FOCUS), meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_FOCUS),
comment comment
); );
} }

View File

@@ -3,13 +3,12 @@ package com.lanyuanxiaoyao.service.scheduler.quartz.compaction;
import cn.hutool.core.util.StrUtil; import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.lanyuanxiaoyao.service.common.Constants; import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.forest.service.HudiService; import com.lanyuanxiaoyao.service.forest.service.HudiService;
import com.lanyuanxiaoyao.service.forest.service.InfoService; import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper; import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatter;
import org.eclipse.collections.api.factory.Maps;
import org.eclipse.collections.api.list.ImmutableList; import org.eclipse.collections.api.list.ImmutableList;
import org.quartz.DisallowConcurrentExecution; import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext; import org.quartz.JobExecutionContext;
@@ -49,7 +48,7 @@ public class FocusUnVersionUpdateScheduleJob extends BaseScheduleJob {
infoService, infoService,
hudiService, hudiService,
mapper, mapper,
meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_FOCUS) meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_FOCUS)
&& unUpdateVersionTableIds.contains(StrUtil.format("{}-{}", meta.getFlinkJobId(), meta.getAlias())), && unUpdateVersionTableIds.contains(StrUtil.format("{}-{}", meta.getFlinkJobId(), meta.getAlias())),
comment comment
); );

View File

@@ -2,13 +2,12 @@ package com.lanyuanxiaoyao.service.scheduler.quartz.compaction;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.lanyuanxiaoyao.service.common.Constants; import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.forest.service.HudiService; import com.lanyuanxiaoyao.service.forest.service.HudiService;
import com.lanyuanxiaoyao.service.forest.service.InfoService; import com.lanyuanxiaoyao.service.forest.service.InfoService;
import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper; import com.lanyuanxiaoyao.service.scheduler.utils.ScheduleHelper;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatter;
import org.eclipse.collections.api.factory.Maps;
import org.quartz.DisallowConcurrentExecution; import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext; import org.quartz.JobExecutionContext;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -46,7 +45,7 @@ public class OdsFocusScheduleJob extends BaseScheduleJob {
infoService, infoService,
hudiService, hudiService,
mapper, mapper,
meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_ODS_FOCUS), meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_ODS_FOCUS),
comment comment
); );
} }

View File

@@ -8,7 +8,7 @@ import com.github.loki4j.slf4j.marker.LabelMarker;
import com.lanyuanxiaoyao.service.common.Constants; import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.entity.SyncState; import com.lanyuanxiaoyao.service.common.entity.SyncState;
import com.lanyuanxiaoyao.service.common.entity.compaction.ScheduleJob; import com.lanyuanxiaoyao.service.common.entity.compaction.ScheduleJob;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.configuration.ExecutorProvider; import com.lanyuanxiaoyao.service.configuration.ExecutorProvider;
import com.lanyuanxiaoyao.service.configuration.entity.info.SimpleTableMeta; import com.lanyuanxiaoyao.service.configuration.entity.info.SimpleTableMeta;
import com.lanyuanxiaoyao.service.configuration.entity.queue.QueueItem; import com.lanyuanxiaoyao.service.configuration.entity.queue.QueueItem;
@@ -83,9 +83,9 @@ public class ScheduleHelper {
return false; return false;
}) })
// 拒绝不压缩标志的任务 // 拒绝不压缩标志的任务
.reject(meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_NO_COMPACT)) .reject(meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_NO_COMPACT))
// 拒绝不调度压缩标志的任务 // 拒绝不调度压缩标志的任务
.reject(meta -> TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_NO_SCHEDULE_COMPACT)) .reject(meta -> TagsHelper.existsTag(meta.getTags(), Constants.TAGS_NO_SCHEDULE_COMPACT))
.collect(meta -> { .collect(meta -> {
long compactionDuration = 0L; long compactionDuration = 0L;
try { try {
@@ -116,7 +116,7 @@ public class ScheduleHelper {
// 统一在这里覆盖特定请求 // 统一在这里覆盖特定请求
// CRM重点表独占A4集群 // CRM重点表独占A4集群
if (TableMetaHelper.existsTag(meta.getTags(), Constants.TAGS_CRM_FOCUS)) { if (TagsHelper.existsTag(meta.getTags(), Constants.TAGS_CRM_FOCUS)) {
finalMetadata.put(Constants.SCHEDULE_FORCE, Constants.CLUSTER_A4); finalMetadata.put(Constants.SCHEDULE_FORCE, Constants.CLUSTER_A4);
} else { } else {
finalMetadata.put(Constants.SCHEDULE_ESCAPE, Constants.CLUSTER_A4); finalMetadata.put(Constants.SCHEDULE_ESCAPE, Constants.CLUSTER_A4);

View File

@@ -3,6 +3,7 @@ package com.lanyuanxiaoyao.service.web.entity;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.lanyuanxiaoyao.service.common.entity.FlinkJob; import com.lanyuanxiaoyao.service.common.entity.FlinkJob;
import com.lanyuanxiaoyao.service.common.entity.TableMeta; import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import java.util.List;
/** /**
* 避免前段不支持java long类型的精度 * 避免前段不支持java long类型的精度
@@ -34,6 +35,10 @@ public class FlinkJobVO {
return flinkJob.getOneInOneSyncYarn(); return flinkJob.getOneInOneSyncYarn();
} }
public List<String> getTags() {
return flinkJob.getTags();
}
@Override @Override
public String toString() { public String toString() {
return "FlinkJobVO{" + return "FlinkJobVO{" +

View File

@@ -816,7 +816,7 @@ function copyField(field, tips = '复制', ignoreLength = 0) {
} }
} }
function flinkJobProperty(id, name, runMode) { function flinkJobProperty(id, name, runMode, tags) {
return { return {
type: 'property', type: 'property',
title: 'Flink Job 配置', title: 'Flink Job 配置',
@@ -829,6 +829,15 @@ function flinkJobProperty(id, name, runMode) {
...mappingField(`${runMode}`, runModeMapping), ...mappingField(`${runMode}`, runModeMapping),
} }
}, },
{
label: '标签',
content: {
type: 'each',
source: `\${SPLIT(${tags}, ",")}`,
items: mappingField('item', tagsMapping),
},
span: 3,
},
], ],
} }
} }
@@ -875,7 +884,7 @@ function flinkJobDialog() {
showCloseButton: false, showCloseButton: false,
size: 'md', size: 'md',
body: [ body: [
flinkJobProperty('flinkJobId', 'flinkJob.name', 'flinkJob.runMode'), flinkJobProperty('flinkJobId', 'flinkJob.name', 'flinkJob.runMode', 'flinkJob.tags'),
{type: 'divider'}, {type: 'divider'},
{ {
type: 'action', type: 'action',
@@ -1797,7 +1806,7 @@ function tableMetaDialog() {
...runMetaProperty('compaction'), ...runMetaProperty('compaction'),
}, },
{type: 'divider'}, {type: 'divider'},
flinkJobProperty('flinkJobId', 'flinkJob.name', 'flinkJob.runMode'), flinkJobProperty('flinkJobId', 'flinkJob.name', 'flinkJob.runMode', 'flinkJob.tags'),
{type: 'divider'}, {type: 'divider'},
{ {
type: 'property', type: 'property',
@@ -2038,6 +2047,7 @@ let tagsMapping = [
mappingItem('取消算子合并', 'DISABLE_CHAINING'), mappingItem('取消算子合并', 'DISABLE_CHAINING'),
mappingItem('跟踪压缩op_ts', 'TRACE_LATEST_OP_TS'), mappingItem('跟踪压缩op_ts', 'TRACE_LATEST_OP_TS'),
mappingItem('不使用HSync', 'DISABLE_HSYNC'), mappingItem('不使用HSync', 'DISABLE_HSYNC'),
mappingItem('测试包', 'USE_TEST_JAR'),
] ]
let hudiTableTypeMapping = [ let hudiTableTypeMapping = [

View File

@@ -9,12 +9,16 @@ import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.exception.CheckpointRootPathNotFoundException; import com.lanyuanxiaoyao.service.common.exception.CheckpointRootPathNotFoundException;
import com.lanyuanxiaoyao.service.common.exception.ZookeeperUrlNotFoundException; import com.lanyuanxiaoyao.service.common.exception.ZookeeperUrlNotFoundException;
import com.lanyuanxiaoyao.service.common.utils.NameHelper; import com.lanyuanxiaoyao.service.common.utils.NameHelper;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.sync.configuration.GlobalConfiguration; import com.lanyuanxiaoyao.service.sync.configuration.GlobalConfiguration;
import com.lanyuanxiaoyao.service.sync.functions.PulsarMessage2RecordFunction; import com.lanyuanxiaoyao.service.sync.functions.PulsarMessage2RecordFunction;
import com.lanyuanxiaoyao.service.sync.functions.PulsarMessageSourceReader; import com.lanyuanxiaoyao.service.sync.functions.PulsarMessageSourceReader;
import com.lanyuanxiaoyao.service.sync.functions.ValidateRecordFilter; import com.lanyuanxiaoyao.service.sync.functions.ValidateRecordFilter;
import com.lanyuanxiaoyao.service.sync.utils.*; import com.lanyuanxiaoyao.service.sync.utils.ArgumentsUtils;
import com.lanyuanxiaoyao.service.sync.utils.JacksonUtils;
import com.lanyuanxiaoyao.service.sync.utils.StatusUtils;
import com.lanyuanxiaoyao.service.sync.utils.SyncUtils;
import com.lanyuanxiaoyao.service.sync.utils.ZkUtils;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -34,7 +38,10 @@ import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import static com.lanyuanxiaoyao.service.common.Constants.*; import static com.lanyuanxiaoyao.service.common.Constants.GB;
import static com.lanyuanxiaoyao.service.common.Constants.HOUR;
import static com.lanyuanxiaoyao.service.common.Constants.MINUTE;
import static com.lanyuanxiaoyao.service.common.Constants.TAGS_DISABLE_CHAINING;
/** /**
* 同步应用 * 同步应用
@@ -92,7 +99,7 @@ public class Synchronizer {
environment.getCheckpointConfig().enableUnalignedCheckpoints(); environment.getCheckpointConfig().enableUnalignedCheckpoints();
environment.getCheckpointConfig().setTolerableCheckpointFailureNumber(5); environment.getCheckpointConfig().setTolerableCheckpointFailureNumber(5);
if (tableMetaList.stream().anyMatch(meta -> TableMetaHelper.existsTag(meta, TAGS_DISABLE_CHAINING))) { if (tableMetaList.stream().anyMatch(meta -> TagsHelper.existsTag(meta, TAGS_DISABLE_CHAINING))) {
logger.warn("Disable operator chaining"); logger.warn("Disable operator chaining");
environment.disableOperatorChaining(); environment.disableOperatorChaining();
} }
@@ -148,7 +155,7 @@ public class Synchronizer {
SingleOutputStreamOperator<String> source = environment SingleOutputStreamOperator<String> source = environment
.addSource(new PulsarMessageSourceReader(configuration, flinkJob, tableMeta)) .addSource(new PulsarMessageSourceReader(configuration, flinkJob, tableMeta))
.setParallelism(tableMeta.getHudi().getSourceTasks()); .setParallelism(tableMeta.getHudi().getSourceTasks());
if (TableMetaHelper.existsTag(tableMeta, Constants.TAGS_PULSAR_BACKUP)) { if (TagsHelper.existsTag(tableMeta, Constants.TAGS_PULSAR_BACKUP)) {
Path path = new Path(StrUtil.format("hdfs://b2/apps/datalake/hive_test/source/{}/{}", String.join("_", flinkJob.getName().split("\\s")), tableMeta.getAlias())); Path path = new Path(StrUtil.format("hdfs://b2/apps/datalake/hive_test/source/{}/{}", String.join("_", flinkJob.getName().split("\\s")), tableMeta.getAlias()));
StreamingFileSink<String> fileSink = StreamingFileSink.<String>forRowFormat(path, new SimpleStringEncoder<>("UTF-8")) StreamingFileSink<String> fileSink = StreamingFileSink.<String>forRowFormat(path, new SimpleStringEncoder<>("UTF-8"))
.withRollingPolicy(DefaultRollingPolicy.builder() .withRollingPolicy(DefaultRollingPolicy.builder()

View File

@@ -72,7 +72,7 @@ public class ArgumentsUtils {
return JacksonUtils.getMapper().readValue(argsTool.get(Constants.FLINK_JOB), FlinkJob.class); return JacksonUtils.getMapper().readValue(argsTool.get(Constants.FLINK_JOB), FlinkJob.class);
} }
public static String getInstants(String[] args) throws JsonProcessingException { public static String getInstants(String[] args) {
ParameterTool argsTool = ParameterTool.fromArgs(args); ParameterTool argsTool = ParameterTool.fromArgs(args);
if (!argsTool.has(Constants.INSTANTS)) { if (!argsTool.has(Constants.INSTANTS)) {
return ""; return "";

View File

@@ -7,6 +7,7 @@ import com.lanyuanxiaoyao.service.common.Constants;
import com.lanyuanxiaoyao.service.common.entity.FlinkJob; import com.lanyuanxiaoyao.service.common.entity.FlinkJob;
import com.lanyuanxiaoyao.service.common.entity.TableMeta; import com.lanyuanxiaoyao.service.common.entity.TableMeta;
import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper; import com.lanyuanxiaoyao.service.common.utils.TableMetaHelper;
import com.lanyuanxiaoyao.service.common.utils.TagsHelper;
import com.lanyuanxiaoyao.service.sync.configuration.DefaultPartitionNameKeyGenerator; import com.lanyuanxiaoyao.service.sync.configuration.DefaultPartitionNameKeyGenerator;
import com.lanyuanxiaoyao.service.sync.configuration.GlobalConfiguration; import com.lanyuanxiaoyao.service.sync.configuration.GlobalConfiguration;
import java.util.Optional; import java.util.Optional;
@@ -104,17 +105,17 @@ public class ConfigurationUtils {
configuration.setString(FlinkOptions.PATH, tableMeta.getHudi().getTargetHdfsPath()); configuration.setString(FlinkOptions.PATH, tableMeta.getHudi().getTargetHdfsPath());
configuration.setString(FlinkOptions.RECORD_KEY_FIELD, Constants.UNION_KEY_NAME); configuration.setString(FlinkOptions.RECORD_KEY_FIELD, Constants.UNION_KEY_NAME);
configuration.setBoolean(FlinkOptions.PRE_COMBINE, false); configuration.setBoolean(FlinkOptions.PRE_COMBINE, false);
if (TableMetaHelper.existsTag(tableMeta, Constants.TAGS_PRE_COMBINE)) { if (TagsHelper.existsTag(tableMeta, Constants.TAGS_PRE_COMBINE)) {
configuration.setBoolean(FlinkOptions.PRE_COMBINE, true); configuration.setBoolean(FlinkOptions.PRE_COMBINE, true);
} }
configuration.setString(FlinkOptions.PRECOMBINE_FIELD, Constants.UPDATE_TIMESTAMP_KEY_NAME); configuration.setString(FlinkOptions.PRECOMBINE_FIELD, Constants.UPDATE_TIMESTAMP_KEY_NAME);
configuration.setString(FlinkOptions.SOURCE_AVRO_SCHEMA, schema.toString()); configuration.setString(FlinkOptions.SOURCE_AVRO_SCHEMA, schema.toString());
if (TableMetaHelper.existsTag(tableMeta, Constants.TAGS_NO_IGNORE_FAILED)) { if (TagsHelper.existsTag(tableMeta, Constants.TAGS_NO_IGNORE_FAILED)) {
configuration.setBoolean(FlinkOptions.IGNORE_FAILED, false); configuration.setBoolean(FlinkOptions.IGNORE_FAILED, false);
} }
if (TableMetaHelper.existsTag(tableMeta, Constants.TAGS_DISABLE_HSYNC)) { if (TagsHelper.existsTag(tableMeta, Constants.TAGS_DISABLE_HSYNC)) {
logger.info("Disable hsync"); logger.info("Disable hsync");
configuration.setBoolean(HoodieWriteConfig.USE_HSYNC.key(), false); configuration.setBoolean(HoodieWriteConfig.USE_HSYNC.key(), false);
} }