feat(api): 增加压缩参数记录

This commit is contained in:
v-zhangjc9
2024-04-29 15:31:44 +08:00
parent b30ce6d675
commit 6167a94fcf
3 changed files with 67 additions and 42 deletions

View File

@@ -104,7 +104,8 @@ public class ApiController {
"pre",
instant,
cluster,
applicationId
applicationId,
metadata
);
}
@@ -125,7 +126,8 @@ public class ApiController {
"complete",
instant,
cluster,
applicationId
applicationId,
metadata
);
}

View File

@@ -3,6 +3,7 @@ package com.lanyuanxiaoyao.service.api.service;
import club.kingon.sql.builder.SqlBuilder;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.lanyuanxiaoyao.service.api.entity.HoodieCommitMetadata;
import com.lanyuanxiaoyao.service.common.Constants;
import java.time.Instant;
import java.util.Date;
@@ -183,30 +184,68 @@ public class SyncStateService {
);
}
public void saveCompactionMetrics(Long flinkJobId, String alias, String type, String instant, String cluster, String applicationId) {
jdbcTemplate.update(
SqlBuilder
.insertInto(
TbAppHudiCompactionMetrics._origin_,
TbAppHudiCompactionMetrics.FLINK_JOB_ID_O,
TbAppHudiCompactionMetrics.ALIAS_O,
TbAppHudiCompactionMetrics.TYPE_O,
TbAppHudiCompactionMetrics.COMPACTION_PLAN_INSTANT_O,
TbAppHudiCompactionMetrics.CLUSTER_O,
TbAppHudiCompactionMetrics.APPLICATION_ID_O,
TbAppHudiCompactionMetrics.UPDATE_TIME_O
)
.values()
.addValue(null, null, null, null, null, null, null)
.precompileSql(),
flinkJobId,
alias,
type,
Date.from(Instant.ofEpochMilli(DateUtil.parse(instant).getTime())),
cluster,
applicationId,
now()
);
/**
 * Persists one compaction-metrics row for a Flink job.
 *
 * <p>When {@code metadata} is absent only the identifying columns and the update
 * timestamp are written; when present, the Hudi commit metadata counters
 * (scan time, compacted log files/size, record counts) are written as well.
 *
 * @param flinkJobId    id of the Flink job that ran the compaction
 * @param alias         logical table/job alias
 * @param type          phase marker (callers pass e.g. "pre" / "complete")
 * @param instant       Hudi instant timestamp string, parseable by hutool's DateUtil
 * @param cluster       cluster identifier
 * @param applicationId YARN/Flink application id
 * @param metadata      commit metadata with compaction counters; may be null
 */
public void saveCompactionMetrics(Long flinkJobId, String alias, String type, String instant, String cluster, String applicationId, HoodieCommitMetadata metadata) {
    // Parse the instant string once instead of duplicating the expression in
    // both branches. NOTE(review): DateUtil.parse already returns a Date
    // subclass, so the Date -> millis -> Instant -> Date round-trip only
    // re-wraps the same epoch millis; kept for byte-identical behavior.
    Date instantDate = Date.from(Instant.ofEpochMilli(DateUtil.parse(instant).getTime()));
    if (ObjectUtil.isNull(metadata)) {
        // No commit metadata available: insert only the identifying columns.
        jdbcTemplate.update(
            SqlBuilder
                .insertInto(
                    TbAppHudiCompactionMetrics._origin_,
                    TbAppHudiCompactionMetrics.FLINK_JOB_ID_O,
                    TbAppHudiCompactionMetrics.ALIAS_O,
                    TbAppHudiCompactionMetrics.TYPE_O,
                    TbAppHudiCompactionMetrics.COMPACTION_PLAN_INSTANT_O,
                    TbAppHudiCompactionMetrics.CLUSTER_O,
                    TbAppHudiCompactionMetrics.APPLICATION_ID_O,
                    TbAppHudiCompactionMetrics.UPDATE_TIME_O
                )
                .values()
                // One null placeholder per column; actual values are bound below.
                .addValue(null, null, null, null, null, null, null)
                .precompileSql(),
            flinkJobId,
            alias,
            type,
            instantDate,
            cluster,
            applicationId,
            now()
        );
    } else {
        // Metadata present: additionally persist the compaction counters.
        jdbcTemplate.update(
            SqlBuilder
                .insertInto(
                    TbAppHudiCompactionMetrics._origin_,
                    TbAppHudiCompactionMetrics.FLINK_JOB_ID_O,
                    TbAppHudiCompactionMetrics.ALIAS_O,
                    TbAppHudiCompactionMetrics.TYPE_O,
                    TbAppHudiCompactionMetrics.COMPACTION_PLAN_INSTANT_O,
                    TbAppHudiCompactionMetrics.CLUSTER_O,
                    TbAppHudiCompactionMetrics.APPLICATION_ID_O,
                    TbAppHudiCompactionMetrics.TOTAL_SCAN_TIME_O,
                    TbAppHudiCompactionMetrics.TOTAL_LOG_FILES_COMPACTED_O,
                    TbAppHudiCompactionMetrics.TOTAL_LOG_FILES_SIZE_O,
                    TbAppHudiCompactionMetrics.TOTAL_RECORDS_DELETED_O,
                    TbAppHudiCompactionMetrics.TOTAL_RECORDS_UPDATED_O,
                    TbAppHudiCompactionMetrics.TOTAL_RECORDS_COMPACTED_O,
                    TbAppHudiCompactionMetrics.UPDATE_TIME_O
                )
                .values()
                // One null placeholder per column; actual values are bound below.
                .addValue(null, null, null, null, null, null, null, null, null, null, null, null, null)
                .precompileSql(),
            flinkJobId,
            alias,
            type,
            instantDate,
            cluster,
            applicationId,
            metadata.getTotalScanTime(),
            metadata.getTotalLogFilesCompacted(),
            metadata.getTotalLogFilesSize(),
            metadata.getTotalRecordsDeleted(),
            // NOTE(review): TOTAL_RECORDS_UPDATED_O <- getTotalCompactedRecordsUpdated()
            // and TOTAL_RECORDS_COMPACTED_O <- getTotalLogRecordsCompacted() — the
            // getter/column names are deliberately crossed; confirm against the
            // HoodieCommitMetadata entity before changing.
            metadata.getTotalCompactedRecordsUpdated(),
            metadata.getTotalLogRecordsCompacted(),
            now()
        );
    }
}
public void saveCompactionLatestOperationTime(Long flinkJobId, String alias, Long latestOperationTime) {