refactor(cli): 优化配置

移除flink多余配置
This commit is contained in:
2024-02-28 10:36:11 +08:00
parent 2c36a826a5
commit de78898a60
5 changed files with 1 addition and 39 deletions

View File

@@ -16,12 +16,9 @@ deploy:
connector-zk-url: b1m2.hdp.dc:2181,b1m3.hdp.dc:2181,b1m4.hdp.dc:2181,b1m5.hdp.dc:2181,b1m6.hdp.dc:2181
hudi:
app-hdfs-path: hdfs://b2/apps/datalake/jars/app-b12
archive-hdfs-path: hdfs://b2/apps/datalake/flink/completed-jobs-hudi
victoria-push-url: http://132.126.207.125:35710/api/v1/import/prometheus
loki-push-url: ${deploy.runtime.loki.hudi-push-url}
executor:
staging-path: hdfs://b2/apps/datalake/yarn
archive-hdfs-path: hdfs://b2/apps/flink/completed-jobs/
task-jar-path: hdfs://b2/apps/datalake/jars/service/service-executor-task-1.0.0-SNAPSHOT.jar
task-result-path: hdfs://b2/apps/datalake/task-results
security:

View File

@@ -16,12 +16,9 @@ deploy:
connector-zk-url: b5m1.hdp.dc:2181,b5m2.hdp.dc:2181,b5m3.hdp.dc:2181
hudi:
app-hdfs-path: hdfs://b2/apps/datalake/jars/app
archive-hdfs-path: hdfs://b2/apps/datalake/flink/completed-jobs-hudi
victoria-push-url: http://132.122.116.142:35710/api/v1/import/prometheus
loki-push-url: ${deploy.runtime.loki.hudi-push-url}
executor:
staging-path: hdfs://b2/apps/datalake/yarn
archive-hdfs-path: hdfs://b2/apps/flink/completed-jobs/
task-jar-path: hdfs://b2/apps/datalake/jars/service/service-executor-task-1.0.0-SNAPSHOT.jar
task-result-path: hdfs://b2/apps/datalake/task-results
security:

View File

@@ -32,7 +32,6 @@ deploy:
connector_hadoop_kerberos-principal: ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
connector_hadoop_kerberos-keytab-path: ${deploy.runtime.kerberos-keytab-path}
connector_hudi_app-hdfs-path: ${deploy.runtime.hudi.app-hdfs-path}
connector_hudi_archive-hdfs-path: ${deploy.runtime.hudi.archive-hdfs-path}
connector_hudi_victoria-push-url: ${deploy.runtime.hudi.victoria-push-url}
connector_hudi_loki-push-url: ${deploy.runtime.hudi.loki-push-url}
arguments:
@@ -53,7 +52,6 @@ deploy:
connector_hadoop_kerberos-principal: ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
connector_hadoop_kerberos-keytab-path: ${deploy.runtime.kerberos-keytab-path}
connector_hudi_app-hdfs-path: ${deploy.runtime.hudi.app-hdfs-path}
connector_hudi_archive-hdfs-path: ${deploy.runtime.hudi.archive-hdfs-path}
connector_hudi_victoria-push-url: ${deploy.runtime.hudi.victoria-push-url}
connector_hudi_loki-push-url: ${deploy.runtime.hudi.loki-push-url}
arguments:
@@ -74,7 +72,6 @@ deploy:
connector_hadoop_kerberos-principal: ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
connector_hadoop_kerberos-keytab-path: ${deploy.runtime.kerberos-keytab-path}
connector_hudi_app-hdfs-path: ${deploy.runtime.hudi.app-hdfs-path}
connector_hudi_archive-hdfs-path: ${deploy.runtime.hudi.archive-hdfs-path}
connector_hudi_victoria-push-url: ${deploy.runtime.hudi.victoria-push-url}
connector_hudi_loki-push-url: ${deploy.runtime.hudi.loki-push-url}
arguments:
@@ -95,7 +92,6 @@ deploy:
connector_hadoop_kerberos-principal: ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
connector_hadoop_kerberos-keytab-path: ${deploy.runtime.kerberos-keytab-path}
connector_hudi_app-hdfs-path: ${deploy.runtime.hudi.app-hdfs-path}
connector_hudi_archive-hdfs-path: ${deploy.runtime.hudi.archive-hdfs-path}
connector_hudi_victoria-push-url: ${deploy.runtime.hudi.victoria-push-url}
connector_hudi_loki-push-url: ${deploy.runtime.hudi.loki-push-url}
arguments:
@@ -158,8 +154,6 @@ deploy:
connector_hadoop_kerberos-principal: ${deploy.runtime.user}/$\{hostname}.hdp.dc@ECLD.COM
connector_hadoop_kerberos-keytab-path: ${deploy.runtime.kerberos-keytab-path}
arguments:
executor_staging-directory: ${deploy.runtime.executor.staging-path}
executor_history-server-archive-dir: ${deploy.runtime.executor.archive-hdfs-path}
executor_task-jar-path: ${deploy.runtime.executor.task-jar-path}
executor_task-result-path: ${deploy.runtime.executor.task-result-path}
service-web:

View File

@@ -17,27 +17,9 @@ import org.springframework.stereotype.Component;
public class ExecutorConfiguration {
private static final Logger logger = LoggerFactory.getLogger(ExecutorConfiguration.class);
private String stagingDirectory;
private String historyServerArchiveDir;
private String taskJarPath;
private String taskResultPath;
public String getStagingDirectory() {
return stagingDirectory;
}
public void setStagingDirectory(String stagingDirectory) {
this.stagingDirectory = stagingDirectory;
}
public String getHistoryServerArchiveDir() {
return historyServerArchiveDir;
}
public void setHistoryServerArchiveDir(String historyServerArchiveDir) {
this.historyServerArchiveDir = historyServerArchiveDir;
}
public String getTaskJarPath() {
return taskJarPath;
}
@@ -57,9 +39,7 @@ public class ExecutorConfiguration {
@Override
public String toString() {
return "ExecutorConfiguration{" +
"stagingDirectory='" + stagingDirectory + '\'' +
", historyServerArchiveDir='" + historyServerArchiveDir + '\'' +
", taskJarPath='" + taskJarPath + '\'' +
"taskJarPath='" + taskJarPath + '\'' +
", taskResultPath='" + taskResultPath + '\'' +
'}';
}

View File

@@ -85,7 +85,6 @@ public class ExecutorTaskService {
configuration.setString(AkkaOptions.TCP_TIMEOUT, "2 min");
configuration.setBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER, false);
configuration.setString(YarnConfigOptions.APPLICATION_ATTEMPTS, "4");
configuration.setString(YarnConfigOptions.STAGING_DIRECTORY, executorConfiguration.getStagingDirectory());
configuration.setString(ResourceManagerOptions.CONTAINERIZED_MASTER_ENV_PREFIX + "MALLOC_ARENA_MAX", "1");
configuration.setString(ResourceManagerOptions.CONTAINERIZED_TASK_MANAGER_ENV_PREFIX + "MALLOC_ARENA_MAX", "1");
configuration.setInteger(RestOptions.PORT, 8081);
@@ -98,14 +97,9 @@ public class ExecutorTaskService {
configuration.set(TaskManagerOptions.JVM_METASPACE, MemorySize.parse("128m"));
configuration.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, 10);
configuration.setString(YarnConfigOptions.APPLICATION_NAME, StrUtil.format("Service_Task {} #{}", name, taskId));
configuration.setString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS, executorConfiguration.getHistoryServerArchiveDir());
configuration.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 10000);
setEnvironment(configuration, "task_id", taskId);
configuration.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
configuration.setString(YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT, "/app-logs");
configuration.set(PipelineOptions.JARS, new ArrayList<String>() {{
add(executorConfiguration.getTaskJarPath());
}});