feature(info-query): 优化SQL logger的埋点,增加说明

This commit is contained in:
2023-07-14 10:27:06 +08:00
parent ab50b3254b
commit 01500d2b5d
8 changed files with 293 additions and 231 deletions

View File

@@ -11,12 +11,18 @@ import java.time.Instant;
public class SQLLine {
private String sql;
private Long createTime;
private String comment;
public SQLLine() {
}
public SQLLine(String sql) {
this(sql, "Unknown");
}
public SQLLine(String sql, String comment) {
this.sql = sql;
this.comment = comment;
this.createTime = Instant.now().toEpochMilli();
}
@@ -28,6 +34,14 @@ public class SQLLine {
this.sql = sql;
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
public Long getCreateTime() {
return createTime;
}
@@ -41,6 +55,7 @@ public class SQLLine {
return "SQLLine{" +
"sql='" + sql + '\'' +
", createTime=" + createTime +
", comment='" + comment + '\'' +
'}';
}
}

View File

@@ -1,36 +0,0 @@
package com.lanyuanxiaoyao.service.info.configuration;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.stereotype.Component;
/**
 * SQL recording aspect (original comment: "SQL记录" — "SQL record").
 *
 * Around-advice that intercepts JdbcTemplate.query(String, ResultSetExtractor)
 * calls and stores the raw SQL string in the shared SQLLogger buffer before
 * the query proceeds.
 *
 * NOTE(review): the pointcut matches only that single query overload; other
 * JdbcTemplate entry points (queryForList, update, ...) are not recorded.
 * NOTE(review): this file is deleted by the commit shown here — callers now
 * log SQL explicitly via SQLLogger.log(sql, comment).
 *
 * @author lanyuanxiaoyao
 * @date 2023-07-11
 */
@Aspect
@Component
@EnableAspectJAutoProxy
public class SQLLoggerAdvice {
    private static final Logger logger = LoggerFactory.getLogger(SQLLoggerAdvice.class);
    // Shared bounded buffer of recent SQL statements (bean from SQLLoggerProvider).
    private final SQLLoggerProvider.SQLLogger sqlLogger;
    public SQLLoggerAdvice(SQLLoggerProvider.SQLLogger sqlLogger) {
        this.sqlLogger = sqlLogger;
    }
    /**
     * Records the SQL text (first argument, when it is a String) and then
     * executes the intercepted query unchanged.
     *
     * @param joinPoint the intercepted JdbcTemplate.query invocation
     * @return whatever the underlying query returns
     * @throws Throwable propagated unchanged from the intercepted call
     */
    @Around("execution(* org.springframework.jdbc.core.JdbcTemplate.query(String, org.springframework.jdbc.core.ResultSetExtractor))")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        Object[] args = joinPoint.getArgs();
        // Defensive: only log when the first argument really is the SQL string.
        if (args != null && args.length > 0 && args[0] instanceof String) {
            sqlLogger.log((String) args[0]);
        }
        return joinPoint.proceed();
    }
}

View File

@@ -1,7 +1,6 @@
package com.lanyuanxiaoyao.service.info.configuration;
import com.lanyuanxiaoyao.service.configuration.entity.info.SQLLine;
import java.time.Instant;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.eclipse.collections.api.factory.Lists;
@@ -17,6 +16,11 @@ import org.springframework.context.annotation.Configuration;
*/
@Configuration
public class SQLLoggerProvider {
@Bean
public SQLLogger sqlLogger() {
return new SQLLogger(200);
}
public static final class SQLLogger {
private final int size;
private final Queue<SQLLine> container = new ConcurrentLinkedQueue<>();
@@ -26,19 +30,22 @@ public class SQLLoggerProvider {
}
public void log(String sql) {
log(new SQLLine(sql));
}
public void log(String sql, String comment) {
log(new SQLLine(sql, comment));
}
public void log(SQLLine line) {
if (container.size() >= size) {
container.poll();
}
container.add(new SQLLine(sql));
container.add(line);
}
public ImmutableList<SQLLine> getLogs() {
return Lists.immutable.ofAll(container);
}
}
@Bean
public SQLLogger sqlLogger() {
return new SQLLogger(200);
}
}

View File

@@ -17,6 +17,7 @@ import com.lanyuanxiaoyao.service.configuration.entity.info.CompactionMetrics;
import com.lanyuanxiaoyao.service.configuration.entity.info.JobAndMetas;
import com.lanyuanxiaoyao.service.configuration.entity.info.JobIdAndAlias;
import com.lanyuanxiaoyao.service.configuration.entity.info.VersionUpdated;
import com.lanyuanxiaoyao.service.info.configuration.SQLLoggerProvider;
import java.sql.Timestamp;
import java.util.List;
import org.eclipse.collections.api.factory.Lists;
@@ -42,15 +43,52 @@ import static com.eshore.odcp.hudi.connector.Constants.DATABASE_NAME;
@Service
public class InfoService {
private static final Logger logger = LoggerFactory.getLogger(InfoService.class);
private static final String COUNT = "count(*)";
private static final Alias TABLE_VERSION = Alias.of(StrUtil.format("{}.tb_app_collect_table_version", DATABASE_NAME), "tactv");
private static final String TABLE_VERSION_FLINK_JOB_ID = column(TABLE_VERSION, "flink_job_id");
private static final String TABLE_VERSION_ALIAS = column(TABLE_VERSION, "alias");
private static final String TABLE_VERSION_VERSION = column(TABLE_VERSION, "version");
private static final String TABLE_VERSION_SCHEDULED = column(TABLE_VERSION, "scheduled");
private static final Alias TABLE_INFO = Alias.of(StrUtil.format("{}.tb_app_collect_table_info", DATABASE_NAME), "tacti");
private static final String TABLE_INFO_FLINK_JOB_ID = column(TABLE_INFO, "flink_job_id");
private static final String TABLE_INFO_ALIAS = column(TABLE_INFO, "alias");
private static final String TABLE_INFO_PRIORITY = column(TABLE_INFO, "priority");
private static final String TABLE_INFO_STATUS = column(TABLE_INFO, "status");
private static final String TABLE_INFO_TARGET_HDFS = column(TABLE_INFO, "tgt_hdfs_path");
private static final String TABLE_INFO_TARGET_TABLE_TYPE = column(TABLE_INFO, "tgt_table_type");
private static final Alias TABLE_SYNC_STATE = Alias.of(StrUtil.format("{}.tb_app_hudi_sync_state", DATABASE_NAME), "tahss");
private static final String TABLE_SYNC_STATE_ID = column(TABLE_SYNC_STATE, "id");
private static final String TABLE_SYNC_STATE_COMPACTION_STATE = column(TABLE_SYNC_STATE, "compaction_status");
private static final Alias TABLE_COMPACTION_METRICS = Alias.of(StrUtil.format("{}.tb_app_hudi_compaction_metrics", DATABASE_NAME), "tahcm");
private static final String TABLE_COMPACTION_METRICS_TYPE = column(TABLE_COMPACTION_METRICS, "type");
private static final String TABLE_COMPACTION_METRICS_FLINK_JOB_ID = column(TABLE_COMPACTION_METRICS, "flink_job_id");
private static final String TABLE_COMPACTION_METRICS_ALIAS = column(TABLE_COMPACTION_METRICS, "alias");
private static final Alias TABLE_FLINK_JOB = Alias.of(StrUtil.format("{}.tb_app_flink_job_config", DATABASE_NAME), "tafjc");
private static final String TABLE_FLINK_JOB_ID = column(TABLE_FLINK_JOB, "id");
private static final String TABLE_FLINK_JOB_STATUS = column(TABLE_FLINK_JOB, "status");
private static final String TABLE_FLINK_JOB_RUN_MODE = column(TABLE_FLINK_JOB, "run_mode");
private final DatabaseService databaseService;
private final JdbcTemplate mysqlJdbcTemplate;
private final TransactionTemplate mysqlTransactionTemplate;
private final SQLLoggerProvider.SQLLogger sqlLogger;
public InfoService(DatabaseService databaseService, JdbcTemplate mysqlJdbcTemplate, TransactionTemplate mysqlTransactionTemplate) {
public InfoService(DatabaseService databaseService, JdbcTemplate mysqlJdbcTemplate, TransactionTemplate mysqlTransactionTemplate, SQLLoggerProvider.SQLLogger sqlLogger) {
this.databaseService = databaseService;
this.mysqlJdbcTemplate = mysqlJdbcTemplate;
this.mysqlTransactionTemplate = mysqlTransactionTemplate;
this.sqlLogger = sqlLogger;
}
private static String generateVersionTableIdCriteria(Boolean scheduled) {
return SqlBuilder.select(StrUtil.format("concat({}, '-', {})", TABLE_VERSION_FLINK_JOB_ID, TABLE_VERSION_ALIAS))
.from(TABLE_VERSION)
.whereEq(TABLE_VERSION_SCHEDULED, scheduled)
.andEq(TABLE_VERSION_VERSION, Column.as("date_format(subdate(current_date(), 1), '%Y%m%d')"))
.build();
}
private static String column(Alias table, String column) {
return StrUtil.format("{}.{}", table.getAlias(), column);
}
@Cacheable(value = "sync-state", sync = true, key = "#flinkJobId.toString()+#alias")
@@ -160,22 +198,21 @@ public class InfoService {
).build(),
Long.class
);
List<JobIdAndAlias> list = mysqlJdbcTemplate.query(
generateJobIdAndAliasCriteria(
SqlBuilder.select(TABLE_FLINK_JOB_ID, TABLE_INFO_ALIAS),
page,
count,
flinkJobId,
alias,
orderField,
orderDirection,
selectHudiTableType,
selectedRunMode,
selectedCompactionStatus,
true
).build(),
(rs, row) -> new JobIdAndAlias(rs.getLong(1), rs.getString(2))
);
String listSQL = generateJobIdAndAliasCriteria(
SqlBuilder.select(TABLE_FLINK_JOB_ID, TABLE_INFO_ALIAS),
page,
count,
flinkJobId,
alias,
orderField,
orderDirection,
selectHudiTableType,
selectedRunMode,
selectedCompactionStatus,
true
).build();
sqlLogger.log(listSQL, "findAllJobIdAndAlias");
List<JobIdAndAlias> list = mysqlJdbcTemplate.query(listSQL, (rs, row) -> new JobIdAndAlias(rs.getLong(1), rs.getString(2)));
return new PageResponse<>(list, total);
});
}
@@ -216,19 +253,13 @@ public class InfoService {
return databaseService.getTableMeta(flinkJobId, alias);
}
private static String generateVersionTableIdCriteria(Boolean scheduled) {
return SqlBuilder.select(StrUtil.format("concat({}, '-', {})", TABLE_VERSION_FLINK_JOB_ID, TABLE_VERSION_ALIAS))
.from(TABLE_VERSION)
.whereEq(TABLE_VERSION_SCHEDULED, scheduled)
.andEq(TABLE_VERSION_VERSION, Column.as("date_format(subdate(current_date(), 1), '%Y%m%d')"))
.build();
}
@Cacheable("un-updated-version-table")
@Retryable(Throwable.class)
public ImmutableList<String> nonUpdatedVersionTables() {
return mysqlTransactionTemplate.execute(status -> {
List<String> ids = mysqlJdbcTemplate.queryForList(generateVersionTableIdCriteria(false), String.class);
String listSQL = generateVersionTableIdCriteria(false);
sqlLogger.log(listSQL, "nonUpdatedVersionTables");
List<String> ids = mysqlJdbcTemplate.queryForList(listSQL, String.class);
return Lists.immutable.ofAll(ids);
});
}
@@ -237,35 +268,13 @@ public class InfoService {
@Retryable(Throwable.class)
public ImmutableList<String> updatedVersionTables() {
return mysqlTransactionTemplate.execute(status -> {
List<String> ids = mysqlJdbcTemplate.queryForList(generateVersionTableIdCriteria(true), String.class);
String listSQL = generateVersionTableIdCriteria(true);
sqlLogger.log(listSQL, "updatedVersionTables");
List<String> ids = mysqlJdbcTemplate.queryForList(listSQL, String.class);
return Lists.immutable.ofAll(ids);
});
}
private static String column(Alias table, String column) {
return StrUtil.format("{}.{}", table.getAlias(), column);
}
private static final String COUNT = "count(*)";
private static final Alias TABLE_VERSION = Alias.of(StrUtil.format("{}.tb_app_collect_table_version", DATABASE_NAME), "tactv");
private static final String TABLE_VERSION_FLINK_JOB_ID = column(TABLE_VERSION, "flink_job_id");
private static final String TABLE_VERSION_ALIAS = column(TABLE_VERSION, "alias");
private static final String TABLE_VERSION_VERSION = column(TABLE_VERSION, "version");
private static final String TABLE_VERSION_SCHEDULED = column(TABLE_VERSION, "scheduled");
private static final Alias TABLE_INFO = Alias.of(StrUtil.format("{}.tb_app_collect_table_info", DATABASE_NAME), "tacti");
private static final String TABLE_INFO_FLINK_JOB_ID = column(TABLE_INFO, "flink_job_id");
private static final String TABLE_INFO_ALIAS = column(TABLE_INFO, "alias");
private static final String TABLE_INFO_PRIORITY = column(TABLE_INFO, "priority");
private static final String TABLE_INFO_STATUS = column(TABLE_INFO, "status");
private static final String TABLE_INFO_TARGET_HDFS = column(TABLE_INFO, "tgt_hdfs_path");
private static final String TABLE_INFO_TARGET_TABLE_TYPE = column(TABLE_INFO, "tgt_table_type");
private static final Alias TABLE_SYNC_STATE = Alias.of(StrUtil.format("{}.tb_app_hudi_sync_state", DATABASE_NAME), "tahss");
private static final String TABLE_SYNC_STATE_ID = column(TABLE_SYNC_STATE, "id");
private static final String TABLE_SYNC_STATE_COMPACTION_STATE = column(TABLE_SYNC_STATE, "compaction_status");
private SqlBuilder generateVersionTableCriteria(
SelectSqlBuilder builder,
Integer page,
@@ -351,26 +360,33 @@ public class InfoService {
.toMap()
.collectValues((key, list) -> list.getOnly().getValue())
.toImmutable();
String listSQL = generateVersionTableCriteria(
SqlBuilder.select(
TABLE_INFO_FLINK_JOB_ID,
TABLE_INFO_ALIAS,
TABLE_VERSION_VERSION,
TABLE_VERSION_SCHEDULED
),
page,
count,
version,
flinkJobId,
alias,
order,
direction,
filterSchedules,
true,
false
).build();
sqlLogger.log(listSQL, "findAllVersionTables");
List<VersionUpdated> list = mysqlJdbcTemplate.query(
generateVersionTableCriteria(
SqlBuilder.select(
TABLE_INFO_FLINK_JOB_ID,
TABLE_INFO_ALIAS,
TABLE_VERSION_VERSION,
TABLE_VERSION_SCHEDULED
),
page,
count,
version,
flinkJobId,
alias,
order,
direction,
filterSchedules,
true,
false
).build(),
(rs, row) -> new VersionUpdated(rs.getLong(1), rs.getString(2), rs.getString(3), rs.getBoolean(4))
listSQL,
(rs, row) -> new VersionUpdated(
rs.getLong(1),
rs.getString(2),
rs.getString(3),
rs.getBoolean(4)
)
);
return new PageResponse<>(list, total)
.withMetadata("scheduled", scheduleCount.getOrDefault(true, 0))
@@ -591,11 +607,6 @@ public class InfoService {
);
}
private static final Alias TABLE_COMPACTION_METRICS = Alias.of(StrUtil.format("{}.tb_app_hudi_compaction_metrics", DATABASE_NAME), "tahcm");
private static final String TABLE_COMPACTION_METRICS_TYPE = column(TABLE_COMPACTION_METRICS, "type");
private static final String TABLE_COMPACTION_METRICS_FLINK_JOB_ID = column(TABLE_COMPACTION_METRICS, "flink_job_id");
private static final String TABLE_COMPACTION_METRICS_ALIAS = column(TABLE_COMPACTION_METRICS, "alias");
private SqlBuilder generateCompactionMetricsCriteria(
SelectSqlBuilder builder,
Integer page,
@@ -746,11 +757,6 @@ public class InfoService {
});
}
private static final Alias TABLE_FLINK_JOB = Alias.of(StrUtil.format("{}.tb_app_flink_job_config", DATABASE_NAME), "tafjc");
private static final String TABLE_FLINK_JOB_ID = column(TABLE_FLINK_JOB, "id");
private static final String TABLE_FLINK_JOB_STATUS = column(TABLE_FLINK_JOB, "status");
private static final String TABLE_FLINK_JOB_RUN_MODE = column(TABLE_FLINK_JOB, "run_mode");
@Cacheable(value = "exists-table", sync = true)
@Retryable(Throwable.class)
public Boolean existsTable(Long flinkJobId, String alias) {

View File

@@ -1,107 +0,0 @@
import club.kingon.sql.builder.SelectSqlBuilder;
import club.kingon.sql.builder.SqlBuilder;
import club.kingon.sql.builder.entry.Alias;
import club.kingon.sql.builder.entry.Column;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.db.sql.SqlFormatter;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import static com.eshore.odcp.hudi.connector.Constants.DATABASE_NAME;
/**
 * Manual scratch harness for previewing SQL produced by SqlBuilder: builds
 * the job-id/alias listing criteria (mirroring InfoService) with sample
 * filter values and prints the formatted SQL to stdout.
 *
 * NOTE(review): this file is deleted by the commit shown here, replaced by
 * the rewritten com.test.SqlBuilderTests.
 *
 * @author lanyuanxiaoyao
 * @date 2023-06-07
 */
public class SqlBuilderTests {
    // Table aliases and alias-qualified column names, copied from InfoService
    // so generated SQL here matches what the service emits.
    private static final String COUNT = "count(*)";
    private static final Alias TABLE_VERSION = Alias.of(StrUtil.format("{}.tb_app_collect_table_version", DATABASE_NAME), "tactv");
    private static final String TABLE_VERSION_FLINK_JOB_ID = column(TABLE_VERSION, "flink_job_id");
    private static final String TABLE_VERSION_ALIAS = column(TABLE_VERSION, "alias");
    private static final String TABLE_VERSION_VERSION = column(TABLE_VERSION, "version");
    private static final String TABLE_VERSION_SCHEDULED = column(TABLE_VERSION, "scheduled");
    private static final Alias TABLE_INFO = Alias.of(StrUtil.format("{}.tb_app_collect_table_info", DATABASE_NAME), "tacti");
    private static final String TABLE_INFO_FLINK_JOB_ID = column(TABLE_INFO, "flink_job_id");
    private static final String TABLE_INFO_ALIAS = column(TABLE_INFO, "alias");
    private static final String TABLE_INFO_PRIORITY = column(TABLE_INFO, "priority");
    private static final String TABLE_INFO_STATUS = column(TABLE_INFO, "status");
    private static final String TABLE_INFO_TARGET_TABLE_TYPE = column(TABLE_INFO, "tgt_table_type");
    private static final Alias TABLE_SYNC_STATE = Alias.of(StrUtil.format("{}.tb_app_hudi_sync_state", DATABASE_NAME), "tahss");
    private static final String TABLE_SYNC_STATE_ID = column(TABLE_SYNC_STATE, "id");
    private static final String TABLE_SYNC_STATE_COMPACTION_STATE = column(TABLE_SYNC_STATE, "compaction_status");
    /**
     * Qualifies a column name with its table alias, e.g. "tactv.version".
     */
    private static String column(Alias table, String column) {
        return StrUtil.format("{}.{}", table.getAlias(), column);
    }
    private static final Alias TABLE_COMPACTION_METRICS = Alias.of(StrUtil.format("{}.tb_app_hudi_compaction_metrics", DATABASE_NAME), "tahcm");
    private static final String TABLE_COMPACTION_METRICS_TYPE = column(TABLE_COMPACTION_METRICS, "type");
    private static final String TABLE_COMPACTION_METRICS_FLINK_JOB_ID = column(TABLE_COMPACTION_METRICS, "flink_job_id");
    private static final String TABLE_COMPACTION_METRICS_ALIAS = column(TABLE_COMPACTION_METRICS, "alias");
    private static final Alias TABLE_FLINK_JOB = Alias.of(StrUtil.format("{}.tb_app_flink_job_config", DATABASE_NAME), "tafjc");
    private static final String TABLE_FLINK_JOB_ID = column(TABLE_FLINK_JOB, "id");
    private static final String TABLE_FLINK_JOB_STATUS = column(TABLE_FLINK_JOB, "status");
    private static final String TABLE_FLINK_JOB_RUN_MODE = column(TABLE_FLINK_JOB, "run_mode");
    /**
     * Builds the job-id/alias listing criteria: joins flink-job config,
     * collect-table info and sync state, applies the optional filters, and
     * (when {@code limited}) adds pagination.
     *
     * @param builder                  seed SELECT builder (columns already chosen by caller)
     * @param page                     1-based page number; values below 1 are clamped to 1
     * @param count                    page size; values below 1 are clamped to 1
     * @param flinkJobId               optional LIKE filter on the flink job id
     * @param alias                    optional LIKE filter on the table alias
     * @param order                    optional order-by column
     * @param direction                optional order direction; applied only with {@code order}
     * @param selectHudiTableType      optional IN filter on hudi table type
     * @param selectedRunMode          optional IN filter on run mode
     * @param selectedCompactionStatus optional IN filter on compaction status
     * @param limited                  whether to append LIMIT offset, count
     * @return the assembled criteria, ready for {@code build()}
     */
    private static SqlBuilder generateJobIdAndAliasCriteria(
        SelectSqlBuilder builder,
        Integer page,
        Integer count,
        Long flinkJobId,
        String alias,
        String order,
        String direction,
        ImmutableList<String> selectHudiTableType,
        ImmutableList<String> selectedRunMode,
        ImmutableList<String> selectedCompactionStatus,
        boolean limited
    ) {
        // Pagination: clamp to sane minimums so page<=0 or count<=0 cannot
        // produce a negative offset or zero-sized page.
        int limit = Math.max(count, 1);
        int offset = limit * Math.max(page - 1, 0);
        return builder.from(TABLE_FLINK_JOB, TABLE_INFO, TABLE_SYNC_STATE)
            .whereEq(TABLE_FLINK_JOB_ID, Column.as(TABLE_INFO_FLINK_JOB_ID))
            // Sync-state primary key is "<flink_job_id>-<alias>".
            .andEq(TABLE_SYNC_STATE_ID, Column.as(StrUtil.format("concat({}, '-', {})", TABLE_FLINK_JOB_ID, TABLE_INFO_ALIAS)))
            .andLike(ObjectUtil.isNotNull(flinkJobId), TABLE_FLINK_JOB_ID, flinkJobId)
            .andLike(ObjectUtil.isNotNull(alias), TABLE_INFO_ALIAS, alias)
            .andIn(ObjectUtil.isNotEmpty(selectHudiTableType), TABLE_INFO_TARGET_TABLE_TYPE, selectHudiTableType)
            .andIn(ObjectUtil.isNotEmpty(selectedRunMode), TABLE_FLINK_JOB_RUN_MODE, selectedRunMode)
            .andIn(ObjectUtil.isNotEmpty(selectedCompactionStatus), TABLE_SYNC_STATE_COMPACTION_STATE, selectedCompactionStatus)
            .orderBy(StrUtil.isNotBlank(order) && StrUtil.isNotBlank(direction), () -> StrUtil.format("{} {}", order, direction))
            .limit(limited, offset, limit);
    }
    /**
     * Prints one formatted sample query. The commented-out block below is an
     * earlier inline variant of the same criteria, kept for reference.
     */
    public static void main(String[] args) {
        System.out.println(SqlFormatter.format(
            /*SqlBuilder.select(StrUtil.format("distinct {}", TABLE_INFO_ALIAS))
                .from(TABLE_FLINK_JOB, TABLE_INFO, TABLE_SYNC_STATE)
                .whereEq(TABLE_FLINK_JOB_ID, Column.as(TABLE_INFO_FLINK_JOB_ID))
                .andEq(TABLE_SYNC_STATE_ID, StrUtil.format("concat({}, '-', {})", TABLE_FLINK_JOB_ID, TABLE_INFO_ALIAS))
                .andLike(ObjectUtil.isNotNull(flinkJobId), TABLE_FLINK_JOB_ID, flinkJobId)
                .andLike(ObjectUtil.isNotNull(alias), TABLE_INFO_ALIAS, alias)
                .andIn(ObjectUtil.isNotEmpty(selectHudiTableType), TABLE_INFO_TARGET_TABLE_TYPE, selectHudiTableType)
                .andIn(ObjectUtil.isNotEmpty(selectedRunMode), TABLE_FLINK_JOB_RUN_MODE, selectedRunMode)
                .andIn(ObjectUtil.isNotEmpty(selectedCompactionStatus), TABLE_SYNC_STATE_COMPACTION_STATE, selectedCompactionStatus)
                .orderBy(StrUtil.isNotBlank(order) && StrUtil.isNotBlank(direction), () -> StrUtil.format("{} {}", order, direction))
                .limit(limited, offset, limit)
                .build()*/
            generateJobIdAndAliasCriteria(
                SqlBuilder.selectAll(),
                1,
                10,
                1000L,
                "hello",
                "dog",
                "asc",
                Lists.immutable.of("MOR"),
                Lists.immutable.of("ONE_IN_ONE", "ALL_IN_ONE"),
                Lists.immutable.of("COMPLETE"),
                true
            ).build()
        ));
    }
}

View File

@@ -0,0 +1,48 @@
package com.test;
import cn.hutool.core.lang.Tuple;
import cn.hutool.core.util.StrUtil;
import java.sql.*;
import org.eclipse.collections.api.factory.Lists;
/**
 * One-off code generator: connects to a local MySQL instance, reads column
 * metadata ("show full columns") for a fixed list of tables, and prints
 * ready-to-paste Java constant declarations (an Alias per table plus one
 * alias-qualified column constant per field, each with a Javadoc comment
 * carrying the column type and database comment).
 *
 * NOTE(review): the JDBC URL and the test/test credentials are hard-coded
 * for a local development database; this tool is not meant for production.
 *
 * @author lanyuanxiaoyao
 * @date 2023-07-12
 */
public class GenerateFields {
    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        Class.forName("com.mysql.cj.jdbc.Driver");
        try (Connection connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3307/main?useSSL=false", "test", "test")) {
            // Tuple layout: (schema, table, desired Java alias). The same table
            // may appear twice with different aliases (e.g. sync vs compaction).
            for (Tuple tablePath : Lists.immutable.of(
                new Tuple("main", "tb_app_collect_table_info", "tacti"),
                new Tuple("main", "tb_app_hudi_sync_state", "tahss"),
                new Tuple("main", "tb_app_flink_job_config", "tafjc"),
                new Tuple("main", "tb_app_hudi_job_config", "tajhc"),
                new Tuple("main", "tb_app_yarn_job_config", "tayjc_sync"),
                new Tuple("main", "tb_app_yarn_job_config", "tayjc_compaction"),
                new Tuple("main", "tb_app_global_config", "tagc"),
                new Tuple("main", "tb_app_hudi_compaction_schedule", "tahcs"),
                new Tuple("main", "data_source_table_field", "dstf"),
                new Tuple("main", "data_source_table", "dst"),
                new Tuple("main", "data_source", "ds")
            )) {
                String schema = tablePath.get(0);
                String table = tablePath.get(1);
                String alias = tablePath.get(2);
                try (PreparedStatement statement = connection.prepareStatement(StrUtil.format("show full columns from {}.{}", schema, table))) {
                    // Table-level constant, e.g.:
                    // private static final Alias TB_X_Y = Alias.of("`main`.`tb_x`", "y");
                    System.out.println(StrUtil.format("private static final Alias {}_{} = Alias.of(\"`{}`.`{}`\", \"{}\");", table.toUpperCase(), alias.toUpperCase(), schema, table, alias));
                    // FIX: close the ResultSet deterministically via
                    // try-with-resources instead of relying on the statement's
                    // close to cascade.
                    try (ResultSet resultSet = statement.executeQuery()) {
                        while (resultSet.next()) {
                            String name = resultSet.getString(1);
                            String type = resultSet.getString(2);
                            String comment = resultSet.getString("Comment");
                            // Javadoc documenting schema.table.column, its SQL type
                            // and the column comment from the database.
                            System.out.println(StrUtil.format("/**\n * {}.{}.{} {} ({})\n */", schema, table, name, type, comment));
                            System.out.println(StrUtil.format("private static final String {}_{}_{} = {}_{}.getAlias() + \".`{}`\";", table.toUpperCase(), alias.toUpperCase(), name.toUpperCase(), table.toUpperCase(), alias.toUpperCase(), name));
                        }
                    }
                    // Blank separator between tables in the generated output.
                    System.out.println();
                    System.out.println();
                }
            }
        }
    }
}

View File

@@ -0,0 +1,121 @@
package com.test;
import club.kingon.sql.builder.SqlBuilder;
import club.kingon.sql.builder.config.GlobalConfig;
import club.kingon.sql.builder.entry.Alias;
import club.kingon.sql.builder.entry.Column;
import club.kingon.sql.builder.function.Functions;
import cn.hutool.core.util.StrUtil;
import cn.hutool.db.sql.SqlFormatter;
import static com.eshore.odcp.hudi.connector.SQLConstants.HudiCollectBuild.*;
import static com.eshore.odcp.hudi.connector.SQLConstants.IapDatahub.*;
/**
 * Manual scratch harness: assembles the full table-collection configuration
 * query — joining data source, table and field metadata, Flink/Hudi/Yarn job
 * configs, global config and (left-joined) Hudi sync state — via SqlBuilder
 * and prints the formatted SQL to stdout for visual inspection.
 *
 * @author lanyuanxiaoyao
 * @date 2023-06-07
 */
public class SqlBuilderTests {
    public static void main(String[] args) {
        System.out.println(SqlFormatter.format(
            SqlBuilder.select(
                Functions.count(Column.as(DataSource.DS_NAME_A)),
                // Source location: data source / schema / table / field metadata.
                DataSource.DS_NAME_A,
                DataSource.SCHEMA_NAME_A,
                DataSourceTable.TABLE_NAME_A,
                DataSourceTable.TABLE_TYPE_A,
                DataSourceTableField.FIELD_NAME_A,
                DataSourceTableField.FIELD_SEQ_A,
                DataSourceTableField.FIELD_TYPE_A,
                DataSourceTableField.PRIMARY_KEY_A,
                DataSourceTableField.PARTITION_KEY_A,
                DataSourceTableField.LENGTH_A,
                // Hudi target table settings.
                TbAppCollectTableInfo.TGT_DB_A,
                TbAppCollectTableInfo.TGT_TABLE_A,
                TbAppCollectTableInfo.TGT_TABLE_TYPE_A,
                TbAppCollectTableInfo.TGT_HDFS_PATH_A,
                // Hudi write/compaction job tuning.
                TbAppHudiJobConfig.WRITE_TASKS_A,
                TbAppHudiJobConfig.WRITE_OPERATION_A,
                TbAppHudiJobConfig.WRITE_TASK_MAX_MEMORY_A,
                TbAppHudiJobConfig.WRITE_BATCH_SIZE_A,
                TbAppHudiJobConfig.WRITE_RATE_LIMIT_A,
                TbAppCollectTableInfo.BUCKET_NUMBER_A,
                TbAppHudiJobConfig.COMPACTION_STRATEGY_A,
                TbAppHudiJobConfig.COMPACTION_TASKS_A,
                TbAppHudiJobConfig.COMPACTION_DELTA_COMMITS_A,
                TbAppHudiJobConfig.COMPACTION_DELTA_SECONDS_A,
                TbAppHudiJobConfig.COMPACTION_ASYNC_ENABLED_A,
                TbAppHudiJobConfig.COMPACTION_MAX_MEMORY_A,
                TbAppHudiJobConfig.CONFIGS_A,
                // Row filtering and source topic settings.
                TbAppCollectTableInfo.FILTER_FIELD_A,
                TbAppCollectTableInfo.FILTER_VALUES_A,
                TbAppCollectTableInfo.FILTER_TYPE_A,
                TbAppCollectTableInfo.SRC_TOPIC_A,
                TbAppCollectTableInfo.SRC_PULSAR_ADDR_A,
                // Yarn memory settings, disambiguated per job role via aliases.
                Alias.of(TbAppYarnJobConfigSync.JOB_MANAGER_MEMORY_A, "sync_job_manager_memory"),
                Alias.of(TbAppYarnJobConfigSync.TASK_MANAGER_MEMORY_A, "sync_task_manager_memory"),
                Alias.of(TbAppYarnJobConfigCompaction.JOB_MANAGER_MEMORY_A, "compaction_job_manager_memory"),
                // NOTE(review): alias text "compaction_task_manger_momory" looks
                // misspelled ("manager"/"memory") — confirm whether any consumer
                // depends on this exact alias before correcting it.
                Alias.of(TbAppYarnJobConfigCompaction.TASK_MANAGER_MEMORY_A, "compaction_task_manger_momory"),
                TbAppCollectTableInfo.PARTITION_FIELD_A,
                TbAppHudiSyncState.MESSAGE_ID_A,
                // Global metric-publishing configuration.
                TbAppGlobalConfig.METRIC_PUBLISH_URL_A,
                TbAppGlobalConfig.METRIC_PROMETHEUS_URL_A,
                TbAppGlobalConfig.METRIC_API_URL_A,
                TbAppGlobalConfig.METRIC_PUBLISH_DELAY_A,
                TbAppGlobalConfig.METRIC_PUBLISH_PERIOD_A,
                TbAppGlobalConfig.METRIC_PUBLISH_TIMEOUT_A,
                TbAppGlobalConfig.METRIC_PUBLISH_BATCH_A,
                Alias.of(TbAppFlinkJobConfig.ID_A, "job_id"),
                Alias.of(TbAppFlinkJobConfig.NAME_A, "job_name"),
                TbAppGlobalConfig.CHECKPOINT_ROOT_PATH_A,
                TbAppHudiJobConfig.SOURCE_TASKS_A,
                TbAppCollectTableInfo.ALIAS_A,
                DataSource.CONNECTION_A,
                TbAppCollectTableInfo.PRIORITY_A,
                DataSource.DS_TYPE_A,
                TbAppHudiJobConfig.KEEP_FILE_VERSION_A,
                TbAppHudiJobConfig.KEEP_COMMIT_VERSION_A,
                TbAppCollectTableInfo.TAGS_A,
                TbAppGlobalConfig.ZK_URL_A
            )
            // Cross join of all configuration tables; rows are correlated by
            // the equality predicates in the WHERE clause below.
            .from(
                DataSource._alias_,
                DataSourceTable._alias_,
                DataSourceTableField._alias_,
                TbAppFlinkJobConfig._alias_,
                TbAppHudiJobConfig._alias_,
                TbAppYarnJobConfigSync._alias_,
                TbAppYarnJobConfigCompaction._alias_,
                TbAppGlobalConfig._alias_,
                TbAppCollectTableInfo._alias_
            )
            // Sync state may be absent, hence LEFT JOIN; its key is
            // "<flink_job_id>-<alias>".
            .leftJoin(TbAppHudiSyncState._alias_)
            .onEq(TbAppHudiSyncState.ID_A, Column.as(StrUtil.format("concat({}, '-', {})", TbAppCollectTableInfo.FLINK_JOB_ID_A, TbAppCollectTableInfo.ALIAS_A)))
            // Only active ("y") source-role data sources of supported types.
            .whereEq(DataSource.DS_ROLE_A, "src")
            .andEq(DataSource.DS_STATE_A, "y")
            .andEq(DataSource.RECORD_STATE_A, "y")
            .andEq(DataSourceTable.DS_ID_A, Column.as(DataSource.DS_ID_A))
            .andEq(DataSourceTable.RECORD_STATE_A, "y")
            .andEq(DataSourceTableField.TABLE_ID_A, Column.as(DataSourceTable.TABLE_ID_A))
            .andEq(DataSourceTableField.RECORD_STATE_A, "y")
            .andIn(DataSource.DS_TYPE_A, "udal", "telepg")
            // Correlate collect-table info with the source metadata rows.
            .andEq(DataSource.DS_NAME_A, Column.as(TbAppCollectTableInfo.SRC_DB_A))
            .andEq(DataSource.SCHEMA_NAME_A, Column.as(TbAppCollectTableInfo.SRC_SCHEMA_A))
            .andEq(DataSourceTable.TABLE_NAME_A, Column.as(TbAppCollectTableInfo.SRC_TABLE_A))
            // Correlate with the various job/config tables by id.
            .andEq(TbAppCollectTableInfo.FLINK_JOB_ID_A, Column.as(TbAppFlinkJobConfig.ID_A))
            .andEq(TbAppCollectTableInfo.HUDI_JOB_ID_A, Column.as(TbAppHudiJobConfig.ID_A))
            .andEq(TbAppCollectTableInfo.SYNC_YARN_JOB_ID_A, Column.as(TbAppYarnJobConfigSync.ID_A))
            .andEq(TbAppCollectTableInfo.COMPACTION_YARN_JOB_ID_A, Column.as(TbAppYarnJobConfigCompaction.ID_A))
            .andEq(TbAppCollectTableInfo.CONFIG_ID_A, Column.as(TbAppGlobalConfig.ID_A))
            // Sample fixed job/table for this preview run.
            .andEq(TbAppFlinkJobConfig.ID_A, 1542097984132706304L)
            .andEq(TbAppCollectTableInfo.ALIAS_A, "crm_cfguse_channel")
            .andEq(TbAppCollectTableInfo.STATUS_A, "y")
            .andEq(TbAppFlinkJobConfig.STATUS_A, "y")
            .andEq(TbAppHudiJobConfig.STATUS_A, "y")
            .andEq(TbAppYarnJobConfigSync.STATUS_A, "y")
            .andEq(TbAppYarnJobConfigCompaction.STATUS_A, "y")
            .orderBy(DataSourceTableField.FIELD_SEQ_A)
            .build()
        ));
    }
}

View File

@@ -183,6 +183,14 @@ function toolTab() {
paginationCommonOptions(undefined, 10),
],
columns: [
{
name: 'sql',
label: 'SQL',
},
{
name: 'createTime',
label: '执行时间',
},
],
}
}