[HUDI-2451] On windows client with hdfs server for wrong file separator (#3687)
Co-authored-by: yao.zhou <yao.zhou@linkflowtech.com>
This commit is contained in:
@@ -181,7 +181,7 @@ public class ExportCommand implements CommandMarker {
|
||||
final HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
|
||||
final HoodieActiveTimeline timeline = metaClient.getActiveTimeline();
|
||||
for (HoodieInstant instant : instants) {
|
||||
String localPath = localFolder + File.separator + instant.getFileName();
|
||||
String localPath = localFolder + Path.SEPARATOR + instant.getFileName();
|
||||
|
||||
byte[] data = null;
|
||||
switch (instant.getAction()) {
|
||||
|
||||
@@ -53,7 +53,6 @@ import org.junit.jupiter.api.Tag;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.shell.core.CommandResult;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.file.Files;
|
||||
@@ -174,7 +173,7 @@ public class TestHoodieLogFileCommand extends CLIFunctionalTestHarness {
|
||||
|
||||
// write to path '2015/03/16'.
|
||||
Schema schema = HoodieAvroUtils.addMetadataFields(getSimpleSchema());
|
||||
partitionPath = tablePath + File.separator + HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
|
||||
partitionPath = tablePath + Path.SEPARATOR + HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
|
||||
Files.createDirectories(Paths.get(partitionPath));
|
||||
|
||||
HoodieLogFormat.Writer writer = null;
|
||||
|
||||
@@ -142,7 +142,7 @@ public class TestTableCommand extends CLIFunctionalTestHarness {
|
||||
assertTrue(cr.isSuccess());
|
||||
assertEquals("Metadata for table " + tableName + " loaded", cr.getResult().toString());
|
||||
HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
|
||||
assertEquals(metaPath + File.separator + "archive", client.getArchivePath());
|
||||
assertEquals(metaPath + Path.SEPARATOR + "archive", client.getArchivePath());
|
||||
assertEquals(tablePath, client.getBasePath());
|
||||
assertEquals(metaPath, client.getMetaPath());
|
||||
assertEquals(HoodieTableType.MERGE_ON_READ, client.getTableType());
|
||||
@@ -181,7 +181,7 @@ public class TestTableCommand extends CLIFunctionalTestHarness {
|
||||
private void testRefreshCommand(String command) throws IOException {
|
||||
// clean table metadata
|
||||
FileSystem fs = FileSystem.get(hadoopConf());
|
||||
fs.delete(new Path(tablePath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME), true);
|
||||
fs.delete(new Path(tablePath + Path.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME), true);
|
||||
|
||||
// Create table
|
||||
assertTrue(prepareTable());
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
package org.apache.hudi.cli.integ;
|
||||
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hudi.cli.HoodieCLI;
|
||||
import org.apache.hudi.cli.HoodiePrintHelper;
|
||||
import org.apache.hudi.cli.commands.TableCommand;
|
||||
@@ -32,7 +33,6 @@ import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.shell.core.CommandResult;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
@@ -59,8 +59,8 @@ public class ITTestBootstrapCommand extends AbstractShellIntegrationTest {
|
||||
public void init() {
|
||||
String srcName = "source";
|
||||
tableName = "test-table";
|
||||
sourcePath = basePath + File.separator + srcName;
|
||||
tablePath = basePath + File.separator + tableName;
|
||||
sourcePath = basePath + Path.SEPARATOR + srcName;
|
||||
tablePath = basePath + Path.SEPARATOR + tableName;
|
||||
|
||||
// generate test data
|
||||
partitions = Arrays.asList("2018", "2019", "2020");
|
||||
@@ -68,7 +68,7 @@ public class ITTestBootstrapCommand extends AbstractShellIntegrationTest {
|
||||
for (int i = 0; i < partitions.size(); i++) {
|
||||
Dataset<Row> df = TestBootstrap.generateTestRawTripDataset(timestamp,
|
||||
i * NUM_OF_RECORDS, i * NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, sqlContext);
|
||||
df.write().parquet(sourcePath + File.separator + PARTITION_FIELD + "=" + partitions.get(i));
|
||||
df.write().parquet(sourcePath + Path.SEPARATOR + PARTITION_FIELD + "=" + partitions.get(i));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -40,7 +40,6 @@ import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.shell.core.CommandResult;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
@@ -70,7 +69,7 @@ public class ITTestHDFSParquetImportCommand extends AbstractShellIntegrationTest
|
||||
@BeforeEach
|
||||
public void init() throws IOException, ParseException {
|
||||
tableName = "test_table";
|
||||
tablePath = basePath + File.separator + tableName;
|
||||
tablePath = basePath + Path.SEPARATOR + tableName;
|
||||
sourcePath = new Path(basePath, "source");
|
||||
targetPath = new Path(tablePath);
|
||||
schemaFile = new Path(basePath, "file.schema").toString();
|
||||
@@ -101,7 +100,7 @@ public class ITTestHDFSParquetImportCommand extends AbstractShellIntegrationTest
|
||||
() -> assertEquals("Table imported to hoodie format", cr.getResult().toString()));
|
||||
|
||||
// Check hudi table exist
|
||||
String metaPath = targetPath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME;
|
||||
String metaPath = targetPath + Path.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
|
||||
assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table not exist.");
|
||||
|
||||
// Load meta data
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
package org.apache.hudi.cli.integ;
|
||||
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hudi.cli.HoodieCLI;
|
||||
import org.apache.hudi.cli.commands.TableCommand;
|
||||
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
|
||||
@@ -32,7 +33,6 @@ import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.shell.core.CommandResult;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertAll;
|
||||
@@ -53,7 +53,7 @@ public class ITTestSavepointsCommand extends AbstractShellIntegrationTest {
|
||||
@BeforeEach
|
||||
public void init() throws IOException {
|
||||
String tableName = "test_table";
|
||||
tablePath = basePath + File.separator + tableName;
|
||||
tablePath = basePath + Path.SEPARATOR + tableName;
|
||||
|
||||
// Create table and connect
|
||||
new TableCommand().createTable(
|
||||
|
||||
Reference in New Issue
Block a user