feature(info-query): 增加历史压缩情况查询

This commit is contained in:
2023-06-14 15:19:56 +08:00
parent 437b2188b7
commit edbf54a519
5 changed files with 301 additions and 7 deletions

View File

@@ -1,7 +1,11 @@
import club.kingon.sql.builder.SelectSqlBuilder;
import club.kingon.sql.builder.SqlBuilder;
import club.kingon.sql.builder.entry.Alias;
import club.kingon.sql.builder.entry.Column;
import cn.hutool.core.util.StrUtil;
import cn.hutool.db.sql.SqlFormatter;
import org.eclipse.collections.api.factory.Lists;
import org.eclipse.collections.api.list.ImmutableList;
import static com.eshore.odcp.hudi.connector.Constants.DATABASE_NAME;
@@ -31,13 +35,73 @@ public class SqlBuilderTests {
return StrUtil.format("{}.{}", table.getAlias(), column);
}
// Alias "tahcm" for the compaction metrics table, qualified with the configured
// database name (DATABASE_NAME from com.eshore.odcp.hudi.connector.Constants).
private static final Alias TABLE_COMPACTION_METRICS = Alias.of(StrUtil.format("{}.tb_app_hudi_compaction_metrics", DATABASE_NAME), "tahcm");
// Metric row kind; queried below with values "pre" and "complete" — presumably
// "pre" marks a started compaction plan and "complete" a finished one (verify
// against the writer of this table).
private static final String TABLE_COMPACTION_METRICS_TYPE = column(TABLE_COMPACTION_METRICS, "type");
// Flink job that produced the metric row.
private static final String TABLE_COMPACTION_METRICS_FLINK_JOB_ID = column(TABLE_COMPACTION_METRICS, "flink_job_id");
// Logical table alias the metric row belongs to.
private static final String TABLE_COMPACTION_METRICS_ALIAS = column(TABLE_COMPACTION_METRICS, "alias");
/**
 * Builds a paged history-compaction query: "pre" metric rows (m1) are
 * left-joined against their matching "complete" rows (m2) so each started
 * compaction plan can be reported together with its completion, if any.
 *
 * @param builder         caller-supplied select-list builder; this method only
 *                        appends FROM/JOIN/WHERE/ORDER/LIMIT clauses to it
 * @param page            1-based page number; values below 1 behave like page 1
 * @param count           requested page size; values below 1 are clamped to 1
 * @param flinkJobId      filter on tb_app_hudi_compaction_metrics.flink_job_id
 * @param alias           filter on tb_app_hudi_compaction_metrics.alias
 * @param order           m1 column to sort by; ordering is emitted only when
 *                        both order and direction are non-blank
 * @param direction       sort direction (e.g. "asc"/"desc")
 * @param filterCompletes contains true  => include finished plans (m2.type IS NOT NULL);
 *                        contains false => include unfinished plans (m2.type IS NULL);
 *                        contains both  => include all
 * @return the builder with the full criteria applied
 */
private static SqlBuilder generateCompactionMetricsCriteria(
    SelectSqlBuilder builder,
    Integer page,
    Integer count,
    Long flinkJobId,
    String alias,
    String order,
    String direction,
    ImmutableList<Boolean> filterCompletes
) {
    // Clamp paging inputs so page <= 0 or count <= 0 can never yield a
    // negative offset or a non-positive row limit.
    int limit = Math.max(count, 1);
    int offset = limit * Math.max(page - 1, 0);
    // m1: the "pre" (plan started) metric rows for this job/table.
    Alias m1 = Alias.of(
        SqlBuilder.selectAll()
            .from(TABLE_COMPACTION_METRICS)
            .whereEq(TABLE_COMPACTION_METRICS_TYPE, "pre")
            .andEq(TABLE_COMPACTION_METRICS_FLINK_JOB_ID, flinkJobId)
            .andEq(TABLE_COMPACTION_METRICS_ALIAS, alias),
        "m1"
    );
    // m2: the corresponding "complete" rows; absent when a plan has not finished.
    Alias m2 = Alias.of(
        SqlBuilder.selectAll()
            .from(TABLE_COMPACTION_METRICS)
            .whereEq(TABLE_COMPACTION_METRICS_TYPE, "complete")
            .andEq(TABLE_COMPACTION_METRICS_FLINK_JOB_ID, flinkJobId)
            .andEq(TABLE_COMPACTION_METRICS_ALIAS, alias),
        "m2"
    );
    return builder
        .from(m1)
        // LEFT JOIN so "pre" rows without a matching "complete" row survive;
        // m2.type IS NULL then marks an unfinished plan.
        .leftJoin(m2)
        .onEq(column(m1, "flink_job_id"), Column.as(column(m2, "flink_job_id")))
        .andEq(column(m1, "alias"), Column.as(column(m2, "alias")))
        .andEq(column(m1, "application_id"), Column.as(column(m2, "application_id")))
        .andEq(column(m1, "compaction_plan_instant"), Column.as(column(m2, "compaction_plan_instant")))
        // Conditional predicates: each clause is emitted only when its boolean
        // first argument is true.
        .whereNotNull(filterCompletes.anySatisfy(b -> b), column(m2, "type"))
        .orNull(filterCompletes.anySatisfy(b -> !b), column(m2, "type"))
        .orderBy(StrUtil.isNotBlank(order) && StrUtil.isNotBlank(direction), StrUtil.format("m1.{} {}", order, direction))
        // FIX: pass the clamped "limit" instead of the raw "count" — previously
        // the Math.max(count, 1) clamp only affected the offset, letting a
        // non-positive page size reach the generated SQL.
        .limit(offset, limit);
}
public static void main(String[] args) {
System.out.println(SqlFormatter.format(
SqlBuilder.select("count(distinct concat(src_schema, src_table))")
.from(TABLE_INFO)
.whereGe(TABLE_INFO_PRIORITY, 10000)
.andEq(TABLE_INFO_STATUS, "y")
.build()
generateCompactionMetricsCriteria(
SqlBuilder.select(
"m1.flink_job_id",
"m1.alias",
"m1.application_id",
"m1.cluster",
"m1.compaction_plan_instant",
"m2.type is not null as is_complete",
"m1.update_time as started_time",
"m2.update_time as finished_time"
),
1,
100,
1542097996099055616L,
"acct_acct_item_fs",
"update_time",
"asc",
Lists.immutable.of(false, true)
).build()
));
}
}