feature(executor): 增加Flink集群任务模块

新增 executor-manager 和 executor-task
manager管理任务信息启停等,task执行业务方法
This commit is contained in:
2023-12-05 14:53:15 +08:00
parent 5450559470
commit 62bfc08fc3
18 changed files with 715 additions and 0 deletions

View File

@@ -0,0 +1,97 @@
package com.lanyuanxiaoyao.service.executor.manager;
import com.eshore.odcp.hudi.connector.utils.executor.Runner;
import com.lanyuanxiaoyao.service.executor.core.TaskConstants;
import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties;
import java.time.Duration;
import java.util.ArrayList;
import org.apache.flink.client.cli.ClientOptions;
import org.apache.flink.configuration.*;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.configuration.YarnDeploymentTarget;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.gson.GsonAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.retry.annotation.EnableRetry;
import static com.eshore.odcp.hudi.connector.Constants.HALF_HOUR;
import static com.eshore.odcp.hudi.connector.Constants.MINUTE;
/**
 * Spring Boot entry point for the executor manager service.
 *
 * <p>After the Spring context starts, {@link #run(ApplicationArguments)} builds a
 * Flink {@link Configuration} targeting YARN application mode and submits the
 * executor-task jar via {@code Runner.run}, printing the resulting YARN
 * {@link ApplicationId}.
 *
 * @author lanyuanxiaoyao
 * @date 2023-12-04
 */
@EnableDiscoveryClient
@SpringBootApplication(
    scanBasePackages = {"com.lanyuanxiaoyao.service"},
    exclude = {
        GsonAutoConfiguration.class,
        DataSourceAutoConfiguration.class
    }
)
@EnableConfigurationProperties
@EnableEncryptableProperties
@EnableRetry
public class ExecutorManagerApplication implements ApplicationRunner {

    public static void main(String[] args) {
        SpringApplication.run(ExecutorManagerApplication.class, args);
    }

    /**
     * Builds the Flink/YARN deployment configuration and submits the executor
     * task as a Flink application.
     *
     * <p>Fix: the original code assigned {@code AkkaOptions.ASK_TIMEOUT} and
     * {@code AkkaOptions.TCP_TIMEOUT} twice with conflicting values ("10 min"
     * then "1 min"; "15 min" then "2 min"). Only the later assignment took
     * effect, so the dead earlier assignments are removed and the effective
     * values kept — runtime behavior is unchanged.
     *
     * @param args Spring application arguments (not used)
     * @throws Exception if the Flink job submission fails
     */
    @Override
    public void run(ApplicationArguments args) throws Exception {
        Configuration configuration = new Configuration();
        configuration.setBoolean(JobManagerOptions.JVM_DIRECT_MEMORY_LIMIT_ENABLED, true);
        // RPC timeouts — each option is set exactly once (last write wins in Configuration,
        // so these are the values that were effective before the cleanup).
        configuration.setString(AkkaOptions.ASK_TIMEOUT, "1 min");
        configuration.setString(AkkaOptions.TCP_TIMEOUT, "2 min");
        configuration.setString(AkkaOptions.LOOKUP_TIMEOUT, "10 min");
        configuration.set(ClientOptions.CLIENT_TIMEOUT, Duration.ofMinutes(30));
        // Kerberos authentication for the secured cluster.
        configuration.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, true);
        configuration.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, "/etc/security/keytabs/datalake.app.keytab");
        configuration.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, "datalake/b5s119.hdp.dc@ECLD.COM");
        // Heartbeats use the shared MINUTE / HALF_HOUR millisecond constants.
        configuration.setLong(HeartbeatManagerOptions.HEARTBEAT_INTERVAL, MINUTE);
        configuration.setLong(HeartbeatManagerOptions.HEARTBEAT_TIMEOUT, HALF_HOUR);
        configuration.setBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER, false);
        // YARN deployment settings.
        configuration.setString(YarnConfigOptions.APPLICATION_ATTEMPTS, "4");
        configuration.setString(YarnConfigOptions.STAGING_DIRECTORY, "hdfs://b2/apps/datalake/yarn");
        // MALLOC_ARENA_MAX=1 on both JM and TM containers to limit glibc arena memory growth.
        configuration.setString(ResourceManagerOptions.CONTAINERIZED_MASTER_ENV_PREFIX + "MALLOC_ARENA_MAX", "1");
        configuration.setString(ResourceManagerOptions.CONTAINERIZED_TASK_MANAGER_ENV_PREFIX + "MALLOC_ARENA_MAX", "1");
        configuration.setInteger(RestOptions.PORT, 8081);
        configuration.setString(RestOptions.BIND_PORT, "8084-9400");
        configuration.setString(DeploymentOptions.TARGET, YarnDeploymentTarget.APPLICATION.getName());
        // Memory sizing for JobManager and TaskManagers.
        configuration.set(JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.parse("5120m"));
        configuration.set(JobManagerOptions.JVM_METASPACE, MemorySize.parse("128m"));
        configuration.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, MemorySize.parse("1024m"));
        configuration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("0m"));
        configuration.set(TaskManagerOptions.JVM_METASPACE, MemorySize.parse("128m"));
        configuration.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, 10);
        configuration.setString(YarnConfigOptions.APPLICATION_NAME, "HudiService_faee2e95-660d-4b1c-9cec-13473b3cd5b7");
        // History server archive + YARN log aggregation.
        configuration.setString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS, "hdfs://b2/apps/flink/completed-jobs/");
        configuration.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 10000);
        configuration.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
        configuration.setString(YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT, "/app-logs");
        // Plain list instead of the double-brace ArrayList anti-pattern (which creates an
        // anonymous subclass holding a reference to this application instance).
        ArrayList<String> pipelineJars = new ArrayList<>();
        pipelineJars.add("hdfs://b2/apps/datalake/jars/service/service-executor-task.jar");
        configuration.set(PipelineOptions.JARS, pipelineJars);
        ApplicationId applicationId = Runner.run(
            configuration,
            "com.lanyuanxiaoyao.service.executor.task.Hello",
            new String[]{
                TaskConstants.TASK_CONTEXT_OPTION,
                "{\"metadata\": {\"id\":\"faee2e95-660d-4b1c-9cec-13473b3cd5b7\"}}"
            }
        );
        System.out.println(applicationId);
    }
}

View File

@@ -0,0 +1,5 @@
# Spring configuration for the executor-manager service.
spring:
  application:
    # Service name used for discovery registration (and referenced by logback's APP_NAME).
    name: service-executor-manager
  profiles:
    # Shared profile fragments: random port assignment, common settings,
    # service discovery, and metrics export.
    include: random-port,common,discovery,metrics

View File

@@ -0,0 +1,51 @@
<configuration>
    <!-- Spring Boot's colorized / whitespace-throwable converters for console patterns. -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
    <!-- Values pulled from the Spring environment; `:-` gives a default when unset. -->
    <springProperty scope="context" name="LOKI_PUSH_URL" source="loki.url"/>
    <springProperty scope="context" name="LOGGING_PARENT" source="logging.parent"/>
    <springProperty scope="context" name="APP_NAME" source="spring.application.name"/>
    <!-- Ships logs to a Loki endpoint, labeled by app name, host, and level. -->
    <appender name="Loki" class="com.github.loki4j.logback.Loki4jAppender">
        <metricsEnabled>true</metricsEnabled>
        <http class="com.github.loki4j.logback.ApacheHttpSender">
            <url>${LOKI_PUSH_URL:-http://localhost/loki/api/v1/push}</url>
        </http>
        <format>
            <label>
                <pattern>app=${APP_NAME:- },host=${HOSTNAME},level=%level</pattern>
            </label>
            <message>
                <!-- `&#45;&#45;&#45;` is an escaped `---` separator; `#@#` is a field delimiter
                     (presumably for downstream log parsing — confirm with the consumer). -->
                <pattern>${FILE_LOG_PATTERN:-%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} [${HOSTNAME}] ${LOG_LEVEL_PATTERN:-%5p} ${PID:- } -&#45;&#45; [%t] %-40.40logger{39} #@# : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}</pattern>
            </message>
            <sortByTime>true</sortByTime>
        </format>
    </appender>
    <!-- Colorized console output, mirroring Spring Boot's default console pattern. -->
    <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN:-%clr(%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}</pattern>
        </encoder>
    </appender>
    <!-- Daily-rolling file log; the `.gz` suffix makes logback gzip rolled archives.
         Keeps 7 days of history. -->
    <appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOGGING_PARENT:-.}/${APP_NAME:-run}.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOGGING_PARENT:-.}/archive/${APP_NAME:-run}-%d{yyyy-MM-dd}.gz</fileNamePattern>
            <MaxHistory>7</MaxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${FILE_LOG_PATTERN:-%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} [${HOSTNAME}] ${LOG_LEVEL_PATTERN:-%5p} ${PID:- } -&#45;&#45; [%t] %-40.40logger{39} #@# : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}</pattern>
        </encoder>
    </appender>
    <!-- Quiet noisy third-party loggers. -->
    <logger name="com.zaxxer.hikari" level="ERROR"/>
    <logger name="com.netflix.discovery.shared.resolver.aws.ConfigClusterResolver" level="WARN"/>
    <root level="INFO">
        <appender-ref ref="Loki"/>
        <appender-ref ref="Console"/>
        <appender-ref ref="RollingFile"/>
    </root>
</configuration>