feat(hudi-query): 增加关于hdfs文件数相关的接口
This commit is contained in:
@@ -107,4 +107,13 @@ public interface HudiService {
|
|||||||
|
|
||||||
/**
 * Space consumed, in bytes, by the content under {@code root} on HDFS.
 * Backed by {@code HdfsController#size}, which reads
 * {@code ContentSummary#getLength()}.
 *
 * @param root HDFS path to summarise
 * @return total content length in bytes under {@code root}
 */
@Get("/hdfs/size")
Long size(@Query("root") String root);
|
||||||
|
|
||||||
|
/**
 * Total number of entries — files plus directories — under {@code root}.
 * Mirrors {@code HdfsService#count}, which sums
 * {@code ContentSummary#getFileCount()} and
 * {@code ContentSummary#getDirectoryCount()}.
 *
 * @param root HDFS path to summarise
 * @return file count + directory count under {@code root}
 */
@Get("/hdfs/count")
Long count(@Query("root") String root);
||||||
|
|
||||||
|
/**
 * Number of files (directories excluded) under {@code root} on HDFS.
 *
 * @param root HDFS path to summarise
 * @return file count under {@code root}
 */
@Get("/hdfs/file_count")
Long fileCount(@Query("root") String root);
||||||
|
|
||||||
|
/**
 * Number of directories (files excluded) under {@code root} on HDFS.
 *
 * @param root HDFS path to summarise
 * @return directory count under {@code root}
 */
@Get("/hdfs/directory_count")
Long directoryCount(@Query("root") String root);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -82,4 +82,19 @@ public class HdfsController {
|
|||||||
public Long size(@RequestParam("root") String root) throws IOException {
|
public Long size(@RequestParam("root") String root) throws IOException {
|
||||||
return hdfsService.size(root);
|
return hdfsService.size(root);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@GetMapping("count")
|
||||||
|
public Long count(@RequestParam("root") String root) throws IOException {
|
||||||
|
return hdfsService.size(root);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("file_count")
|
||||||
|
public Long fileCount(@RequestParam("root") String root) throws IOException {
|
||||||
|
return hdfsService.size(root);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("directory_count")
|
||||||
|
public Long DirectoryCount(@RequestParam("root") String root) throws IOException {
|
||||||
|
return hdfsService.size(root);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import com.lanyuanxiaoyao.service.forest.service.InfoService;
|
|||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.OutputStream;
|
import java.io.OutputStream;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.ContentSummary;
|
||||||
import org.apache.hadoop.fs.FSDataInputStream;
|
import org.apache.hadoop.fs.FSDataInputStream;
|
||||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
import org.apache.hadoop.fs.FileStatus;
|
import org.apache.hadoop.fs.FileStatus;
|
||||||
@@ -160,4 +161,26 @@ public class HdfsService {
|
|||||||
return fileSystem.getContentSummary(new Path(root)).getLength();
|
return fileSystem.getContentSummary(new Path(root)).getLength();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Cacheable(value = "count-hpath", sync = true)
|
||||||
|
public Long count(String root) throws IOException {
|
||||||
|
try (FileSystem fileSystem = FileSystem.get(new Configuration())) {
|
||||||
|
ContentSummary summary = fileSystem.getContentSummary(new Path(root));
|
||||||
|
return summary.getFileCount() + summary.getDirectoryCount();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Cacheable(value = "file-count-hpath", sync = true)
|
||||||
|
public Long countFiles(String root) throws IOException {
|
||||||
|
try (FileSystem fileSystem = FileSystem.get(new Configuration())) {
|
||||||
|
return fileSystem.getContentSummary(new Path(root)).getFileCount();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Cacheable(value = "directory-count-hpath", sync = true)
|
||||||
|
public Long countDirectories(String root) throws IOException {
|
||||||
|
try (FileSystem fileSystem = FileSystem.get(new Configuration())) {
|
||||||
|
return fileSystem.getContentSummary(new Path(root)).getDirectoryCount();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user