feat(api): 增加原始压缩报告的保存

This commit is contained in:
v-zhangjc9
2024-05-23 10:58:19 +08:00
parent bff18280f3
commit 5c80589ff6
4 changed files with 42 additions and 18 deletions

View File

@@ -1,16 +1,18 @@
package com.lanyuanxiaoyao.service.api.controller;
import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.loki4j.slf4j.marker.LabelMarker;
import com.lanyuanxiaoyao.service.api.entity.HoodieCommitMetadata;
import com.lanyuanxiaoyao.service.api.service.SyncStateService;
import com.lanyuanxiaoyao.service.api.service.VersionUpdateService;
import com.lanyuanxiaoyao.service.common.Constants;
import javax.annotation.Resource;
import org.eclipse.collections.api.factory.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -30,10 +32,15 @@ import org.springframework.web.bind.annotation.RestController;
public class ApiController {
private static final Logger logger = LoggerFactory.getLogger(ApiController.class);
@Resource
private SyncStateService syncStateService;
@Resource
private VersionUpdateService versionUpdateService;
private final SyncStateService syncStateService;
private final VersionUpdateService versionUpdateService;
private final ObjectMapper mapper;
// Constructor injection (this commit replaces the previous @Resource field injection).
// A single ObjectMapper is built once from Spring's preconfigured builder and reused,
// so endpoints can take the request body as raw text (metadataText) and deserialize it
// manually — keeping the original compressed-report JSON available for persistence.
public ApiController(SyncStateService syncStateService, VersionUpdateService versionUpdateService, Jackson2ObjectMapperBuilder builder) {
this.syncStateService = syncStateService;
this.versionUpdateService = versionUpdateService;
// build() returns a new ObjectMapper carrying the application's Jackson configuration
this.mapper = builder.build();
}
private Marker makeMarker(Long flinkJobId, String alias) {
return LabelMarker.of(() -> Maps.mutable.of(Constants.LOG_FLINK_JOB_ID, flinkJobId.toString(), Constants.LOG_ALIAS, alias));
@@ -121,9 +128,10 @@ public class ApiController {
@RequestParam("instant") String instant,
@RequestParam("cluster") String cluster,
@RequestParam("application_id") String applicationId,
@RequestBody HoodieCommitMetadata metadata
) {
@RequestBody String metadataText
) throws JsonProcessingException {
logger.info(makeMarker(flinkJobId, alias), "Compaction pre commit: {} {} {} {} {}", flinkJobId, alias, instant, cluster, applicationId);
HoodieCommitMetadata metadata = mapper.readValue(metadataText, HoodieCommitMetadata.class);
syncStateService.compactionCommit(
flinkJobId,
alias,
@@ -131,7 +139,8 @@ public class ApiController {
instant,
cluster,
applicationId,
metadata
metadata,
metadataText
);
}
@@ -142,9 +151,10 @@ public class ApiController {
@RequestParam("instant") String instant,
@RequestParam("cluster") String cluster,
@RequestParam("application_id") String applicationId,
@RequestBody HoodieCommitMetadata metadata
) {
@RequestBody String metadataText
) throws JsonProcessingException {
logger.info(makeMarker(flinkJobId, alias), "Compaction commit: {} {} {} {} {}", flinkJobId, alias, instant, cluster, applicationId);
HoodieCommitMetadata metadata = mapper.readValue(metadataText, HoodieCommitMetadata.class);
syncStateService.compactionFinish(flinkJobId, alias);
syncStateService.compactionCommit(
flinkJobId,
@@ -153,7 +163,8 @@ public class ApiController {
instant,
cluster,
applicationId,
metadata
metadata,
metadataText
);
}

View File

@@ -252,7 +252,7 @@ public class SyncStateService {
);
}
public void compactionCommit(Long flinkJobId, String alias, String type, String instant, String cluster, String applicationId, HoodieCommitMetadata metadata) {
public void compactionCommit(Long flinkJobId, String alias, String type, String instant, String cluster, String applicationId, HoodieCommitMetadata metadata, String metadataText) {
if (ObjectUtil.isNull(metadata)) {
jdbcTemplate.update(
SqlBuilder
@@ -264,10 +264,11 @@ public class SyncStateService {
TbAppHudiCompactionMetrics.COMPACTION_PLAN_INSTANT_O,
TbAppHudiCompactionMetrics.CLUSTER_O,
TbAppHudiCompactionMetrics.APPLICATION_ID_O,
TbAppHudiCompactionMetrics.UPDATE_TIME_O
TbAppHudiCompactionMetrics.UPDATE_TIME_O,
TbAppHudiCompactionMetrics.METADATA_O
)
.values()
.addValue(null, null, null, null, null, null, null)
.addValue(null, null, null, null, null, null, null, null)
.precompileSql(),
flinkJobId,
alias,
@@ -275,7 +276,8 @@ public class SyncStateService {
Date.from(Instant.ofEpochMilli(DateUtil.parse(instant).getTime())),
cluster,
applicationId,
now()
now(),
metadataText
);
} else {
jdbcTemplate.update(
@@ -294,10 +296,11 @@ public class SyncStateService {
TbAppHudiCompactionMetrics.TOTAL_RECORDS_DELETED_O,
TbAppHudiCompactionMetrics.TOTAL_RECORDS_UPDATED_O,
TbAppHudiCompactionMetrics.TOTAL_RECORDS_COMPACTED_O,
TbAppHudiCompactionMetrics.UPDATE_TIME_O
TbAppHudiCompactionMetrics.UPDATE_TIME_O,
TbAppHudiCompactionMetrics.METADATA_O
)
.values()
.addValue(null, null, null, null, null, null, null, null, null, null, null, null, null)
.addValue(null, null, null, null, null, null, null, null, null, null, null, null, null, null)
.precompileSql(),
flinkJobId,
alias,
@@ -311,7 +314,8 @@ public class SyncStateService {
metadata.getTotalRecordsDeleted(),
metadata.getTotalCompactedRecordsUpdated(),
metadata.getTotalLogRecordsCompacted(),
now()
now(),
metadataText
);
}
}