Diffstat (limited to 'exec/java-exec/src/test/java')
13 files changed, 152 insertions, 27 deletions
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
index 9e0d95c85..247d7842e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
@@ -143,8 +143,7 @@ public class PlanTestBase extends BaseTestQuery {
    * planning process throws an exception
    */
   public static void testPlanWithAttributesMatchingPatterns(String query, String[] expectedPatterns,
-      String[] excludedPatterns)
-      throws Exception {
+      String[] excludedPatterns) throws Exception {
     final String plan = getPlanInString("EXPLAIN PLAN INCLUDING ALL ATTRIBUTES for "
         + QueryTestUtil.normalizeQuery(query), OPTIQ_FORMAT);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestPathSerialization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestPathSerialization.java
new file mode 100644
index 000000000..cb9356568
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestPathSerialization.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec;
+
+
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import org.apache.drill.exec.serialization.PathSerDe;
+import org.apache.drill.exec.store.schedule.CompleteFileWork;
+import org.apache.drill.test.DrillTest;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertThat;
+
+public class TestPathSerialization extends DrillTest {
+
+  @Test
+  public void testDeSerializingWithJsonCreator() throws IOException {
+
+    String jsonString = "{\"start\": 1, \"length\": 2, \"path\": \"/tmp/drill/test\"}";
+
+    SimpleModule module = new SimpleModule();
+    module.addSerializer(Path.class, new PathSerDe.Se());
+    objectMapper.registerModule(module);
+
+    CompleteFileWork.FileWorkImpl bean = objectMapper.readValue(jsonString, CompleteFileWork.FileWorkImpl.class);
+
+    assertThat(bean.getStart() == 1, equalTo(true));
+    assertThat(bean.getLength() == 2, equalTo(true));
+    assertThat(bean.getPath().equals(new Path("/tmp/drill/test")), equalTo(true));
+  }
+
+  @Test
+  public void testHadoopPathSerDe() throws IOException {
+    CompleteFileWork.FileWorkImpl fileWork = new CompleteFileWork.FileWorkImpl(5, 6, new Path("/tmp"));
+    SimpleModule module = new SimpleModule();
+    module.addSerializer(Path.class, new PathSerDe.Se());
+    objectMapper.registerModule(module);
+
+    CompleteFileWork.FileWorkImpl bean =
+        objectMapper.readValue(objectMapper.writeValueAsString(fileWork), CompleteFileWork.FileWorkImpl.class);
+
+    assertThat(bean.getStart() == 5, equalTo(true));
+    assertThat(bean.getLength() == 6, equalTo(true));
+    assertThat(bean.getPath().equals(new Path("/tmp")), equalTo(true));
+  }
+}
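Note: the PathSerDe.Se serializer registered above lives in the main source tree and is not part of this diff. Assuming it is an ordinary Jackson JsonSerializer<Path>, a minimal sketch of such a serializer could look like the following (illustrative only; the real PathSerDe.Se may differ in detail):

    import java.io.IOException;

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.JsonSerializer;
    import com.fasterxml.jackson.databind.SerializerProvider;
    import org.apache.hadoop.fs.Path;

    // Hypothetical stand-in for PathSerDe.Se: writes a Path as a plain JSON
    // string so Jackson can rebuild it from the "path" property on read.
    public class PathSerializerSketch extends JsonSerializer<Path> {
      @Override
      public void serialize(Path path, JsonGenerator gen, SerializerProvider provider)
          throws IOException {
        gen.writeString(path.toString());
      }
    }

With such a serializer registered on a SimpleModule, writeValueAsString(fileWork) should emit JSON of the shape {"start": 5, "length": 6, "path": "/tmp"}, which is the same shape the first test feeds to readValue.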
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
index 2fdf3e637..b05bb28f3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
@@ -85,7 +85,7 @@ public class TestFileScanFramework extends SubOperatorTest {
     }
 
     @Override
-    public String getPath() { return path.toString(); }
+    public Path getPath() { return path; }
 
     @Override
     public long getStart() { return 0; }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
index 9a2bb1f85..911a09744 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
@@ -32,6 +32,7 @@ import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.parquet.ParquetDirectByteBufferAllocator;
 import org.apache.drill.exec.store.parquet.ParquetReaderUtility;
 import org.apache.drill.exec.store.parquet.columnreaders.ParquetRecordReader;
+import org.apache.drill.test.LegacyOperatorTestBuilder;
 import org.apache.drill.test.PhysicalOpUnitTestBase;
 import org.apache.hadoop.fs.Path;
 import org.apache.parquet.hadoop.CodecFactory;
@@ -358,7 +359,7 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
    */
   public class JsonScanBuilder extends ScanPopBuider<JsonScanBuilder> {
     List<String> jsonBatches = null;
-    List<String> inputPaths = Collections.emptyList();
+    List<Path> inputPaths = Collections.emptyList();
 
     public JsonScanBuilder(PopBuilder parent) {
       super(parent);
@@ -373,7 +374,7 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
       return this;
     }
 
-    public JsonScanBuilder inputPaths(List<String> inputPaths) {
+    public JsonScanBuilder inputPaths(List<Path> inputPaths) {
       this.inputPaths = inputPaths;
       return this;
     }
@@ -412,7 +413,7 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
    * Builder for parquet Scan RecordBatch.
    */
   public class ParquetScanBuilder extends ScanPopBuider<ParquetScanBuilder> {
-    List<String> inputPaths = Collections.emptyList();
+    List<Path> inputPaths = Collections.emptyList();
 
     public ParquetScanBuilder() {
       super();
@@ -422,7 +423,7 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
       super(parent);
     }
 
-    public ParquetScanBuilder inputPaths(List<String> inputPaths) {
+    public ParquetScanBuilder inputPaths(List<Path> inputPaths) {
       this.inputPaths = inputPaths;
       return this;
     }
@@ -443,8 +444,8 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
     private RecordBatch getScanBatch() throws Exception {
       List<RecordReader> readers = new LinkedList<>();
 
-      for (String path : inputPaths) {
-        ParquetMetadata footer = ParquetFileReader.readFooter(fs.getConf(), new Path(path));
+      for (Path path : inputPaths) {
+        ParquetMetadata footer = ParquetFileReader.readFooter(fs.getConf(), path);
 
         for (int i = 0; i < footer.getBlocks().size(); i++) {
           readers.add(new ParquetRecordReader(fragContext,
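Note: both scan builders now accept List<Path> rather than List<String>, so the String-to-Path conversion happens once at the call site instead of inside getScanBatch(). Callers that still hold raw strings can convert with a small helper along these lines (hypothetical, shown only to illustrate the migration):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.fs.Path;

    public final class PathConversion {
      private PathConversion() {
      }

      // Wraps each raw String location in a Hadoop Path for the Path-typed builders.
      public static List<Path> toHadoopPaths(List<String> rawPaths) {
        List<Path> paths = new ArrayList<>(rawPaths.size());
        for (String raw : rawPaths) {
          paths.add(new Path(raw));
        }
        return paths;
      }
    }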
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
index cc259cc1b..a1b700153 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
@@ -32,6 +32,7 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -59,11 +60,11 @@ public class TestMiniPlan extends MiniPlanUnitTestBase {
   @Test
   public void testSimpleParquetScan() throws Exception {
     String file = DrillFileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
-
+    List<Path> filePath = Collections.singletonList(new Path(file));
     RecordBatch scanBatch = new ParquetScanBuilder()
       .fileSystem(fs)
       .columnsToRead("R_REGIONKEY")
-      .inputPaths(Lists.newArrayList(file))
+      .inputPaths(filePath)
       .build();
 
     BatchSchema expectedSchema = new SchemaBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
index bdff80b14..69a421481 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
@@ -41,6 +41,7 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -272,11 +273,12 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
     RecordBatch left = createScanBatchFromJson(SINGLE_EMPTY_JSON);
 
     String file = DrillFileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
+    List<Path> filePath = Collections.singletonList(new Path(file));
 
     RecordBatch scanBatch = new ParquetScanBuilder()
       .fileSystem(fs)
       .columnsToRead("R_REGIONKEY")
-      .inputPaths(Lists.newArrayList(file))
+      .inputPaths(filePath)
       .build();
 
     RecordBatch projectBatch = new PopBuilder()
@@ -554,10 +556,10 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
   }
 
   private RecordBatch createScanBatchFromJson(String... resourcePaths) throws Exception {
-    List<String> inputPaths = new ArrayList<>();
+    List<Path> inputPaths = new ArrayList<>();
 
     for (String resource : resourcePaths) {
-      inputPaths.add(DrillFileUtils.getResourceAsFile(resource).toURI().toString());
+      inputPaths.add(new Path(DrillFileUtils.getResourceAsFile(resource).toURI()));
     }
 
     RecordBatch scanBatch = new JsonScanBuilder()
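Note: createScanBatchFromJson now builds each Path from a java.net.URI (new Path(...toURI())) rather than from a plain string. The practical difference is the scheme: a URI-built Path keeps file:, while a string-built one is scheme-less and is resolved against the default filesystem later. A small standalone illustration of standard Hadoop Path behavior (the file name below is made up):

    import java.io.File;

    import org.apache.hadoop.fs.Path;

    public class PathFromUriDemo {
      public static void main(String[] args) {
        File f = new File("/tmp/drill-test/01.parquet");
        Path fromUri = new Path(f.toURI());       // file:/tmp/drill-test/01.parquet  (scheme preserved)
        Path fromString = new Path(f.getPath());  // /tmp/drill-test/01.parquet      (no scheme)
        System.out.println(fromUri + " | " + fromString);
      }
    }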
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
index 6bd7e4de7..3db92564d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
@@ -41,16 +41,16 @@ import org.apache.hadoop.util.Progressable;
 public class CachedSingleFileSystem extends FileSystem {
 
   private ByteBuf file;
-  private String path;
+  private Path path;
 
-  public CachedSingleFileSystem(String path) throws IOException {
+  public CachedSingleFileSystem(Path path) throws IOException {
     this.path = path;
-    File f = new File(path);
+    File f = new File(path.toUri().getPath());
     long length = f.length();
     if (length > Integer.MAX_VALUE) {
       throw new UnsupportedOperationException("Cached file system only supports files of less than 2GB.");
     }
-    try (InputStream is = new BufferedInputStream(new FileInputStream(path))) {
+    try (InputStream is = new BufferedInputStream(new FileInputStream(path.toUri().getPath()))) {
       byte[] buffer = new byte[64*1024];
       this.file = UnpooledByteBufAllocator.DEFAULT.directBuffer((int) length);
       int read;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestDFSPartitionLocation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestDFSPartitionLocation.java
new file mode 100644
index 000000000..067abece3
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestDFSPartitionLocation.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.dfs;
+
+import org.apache.drill.exec.planner.DFSFilePartitionLocation;
+import org.apache.drill.test.DrillTest;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+public class TestDFSPartitionLocation extends DrillTest {
+
+  private static final Path SELECTION_ROOT = new Path("/tmp/drill");
+  private static final Path PARTITION = new Path("/tmp/drill/test_table/first_dir/second_dir/");
+
+  @Test
+  public void testDFSFilePartitionLocation() {
+    Path file = new Path(PARTITION, "0_0_0.parquet");
+    DFSFilePartitionLocation dfsPartition = new DFSFilePartitionLocation(4, SELECTION_ROOT, file, false);
+    checkSubdirectories(dfsPartition, file);
+  }
+
+  @Test
+  public void testDFSDirectoryPartitionLocation() {
+    DFSFilePartitionLocation dfsPartition = new DFSFilePartitionLocation(4, SELECTION_ROOT, PARTITION, true);
+    checkSubdirectories(dfsPartition, PARTITION);
+  }
+
+  private void checkSubdirectories(DFSFilePartitionLocation dfsPartition, Path partition) {
+    assertArrayEquals("Wrong partition dirs", new String[]{"test_table", "first_dir", "second_dir", null}, dfsPartition.getDirs());
+    assertEquals("Wrong partition value", "test_table", dfsPartition.getPartitionValue(0));
+    assertEquals("Wrong partition value", "first_dir", dfsPartition.getPartitionValue(1));
+    assertEquals("Wrong partition value", "second_dir", dfsPartition.getPartitionValue(2));
+    assertEquals("Wrong partition location", partition, dfsPartition.getEntirePartitionLocation());
+  }
+
+}
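Note: CachedSingleFileSystem bridges from a Hadoop Path back to the local java.io classes via path.toUri().getPath(), which strips any scheme before handing the string to File or FileInputStream. A standalone illustration of this standard behavior:

    import java.io.File;

    import org.apache.hadoop.fs.Path;

    public class LocalPathDemo {
      public static void main(String[] args) {
        Path p = new Path("file:///tmp/cache/data.bin");
        // p.toString()        -> "file:/tmp/cache/data.bin" (java.io.File would treat "file:" as part of the name)
        // p.toUri().getPath() -> "/tmp/cache/data.bin"      (plain local path)
        File local = new File(p.toUri().getPath());
        System.out.println(local.getAbsolutePath());
      }
    }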
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
index b1f233e94..b853b9b8a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertNull;
 
 import java.util.List;
 
+import org.apache.drill.exec.util.DrillFileSystemUtil;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.hadoop.fs.FileStatus;
@@ -30,7 +31,7 @@ import org.junit.Test;
 
 public class TestFileSelection extends BaseTestQuery {
   private static final List<FileStatus> EMPTY_STATUSES = ImmutableList.of();
-  private static final List<String> EMPTY_FILES = ImmutableList.of();
+  private static final List<Path> EMPTY_FILES = ImmutableList.of();
   private static final String EMPTY_ROOT = "";
 
   @Test
@@ -38,8 +39,8 @@ public class TestFileSelection extends BaseTestQuery {
     for (final Object statuses : new Object[] { null, EMPTY_STATUSES}) {
       for (final Object files : new Object[]{null, EMPTY_FILES}) {
         for (final Object root : new Object[]{null, EMPTY_ROOT}) {
-          final FileSelection selection = FileSelection.create((List<FileStatus>) statuses, (List<String>) files,
-              (String)root);
+          FileSelection selection = FileSelection.create((List<FileStatus>) statuses, (List<Path>) files,
+              DrillFileSystemUtil.createPathSafe((String) root));
           assertNull(selection);
         }
       }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
index 1bd90b37e..16b25ce13 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
@@ -610,13 +610,13 @@ public class ParquetRecordReaderTest extends BaseTestQuery {
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContextImpl context = new FragmentContextImpl(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
 
-    final String fileName = "/tmp/parquet_test_performance.parquet";
+    final Path fileName = new Path("/tmp/parquet_test_performance.parquet");
     final HashMap<String, FieldInfo> fields = new HashMap<>();
     final ParquetTestProperties props = new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
     populateFieldInfoMap(props);
 
     final Configuration dfsConfig = new Configuration();
-    final List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
+    final List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, fileName);
     final Footer f = footers.iterator().next();
 
     final List<SchemaPath> columns = Lists.newArrayList();
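Note: DrillFileSystemUtil.createPathSafe is defined outside this diff. Given that the test drives it with both null and "" roots and still expects FileSelection.create to return null, the helper presumably maps empty input to null rather than letting new Path("") throw IllegalArgumentException. A plausible sketch, offered as an assumption rather than the verified Drill source:

    import org.apache.hadoop.fs.Path;

    public class PathUtilSketch {
      // Assumed behavior: null or empty strings yield null instead of an invalid Path.
      public static Path createPathSafe(String path) {
        return path == null || path.isEmpty() ? null : new Path(path);
      }
    }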
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestAssignment.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestAssignment.java
index 51e8c1b0a..129796788 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestAssignment.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestAssignment.java
@@ -26,6 +26,7 @@ import org.apache.drill.exec.store.schedule.AssignmentCreator;
 import org.apache.drill.exec.store.schedule.CompleteFileWork;
 import org.apache.drill.exec.store.schedule.EndpointByteMap;
 import org.apache.drill.exec.store.schedule.EndpointByteMapImpl;
+import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -125,7 +126,7 @@ public class TestAssignment {
   private List<CompleteFileWork> generateChunks(int chunks) {
     List<CompleteFileWork> chunkList = Lists.newArrayList();
     for (int i = 0; i < chunks; i++) {
-      CompleteFileWork chunk = new CompleteFileWork(createByteMap(), 0, FILE_SIZE, "file" + i);
+      CompleteFileWork chunk = new CompleteFileWork(createByteMap(), 0, FILE_SIZE, new Path("file", Integer.toString(i)));
      chunkList.add(chunk);
     }
     return chunkList;
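Note: the replacement uses Hadoop's two-argument Path(parent, child) constructor, which resolves the child against the parent, so the generated chunk names change from "file0", "file1", ... to "file/0", "file/1", ...:

    import org.apache.hadoop.fs.Path;

    public class TwoArgPathDemo {
      public static void main(String[] args) {
        // new Path(parent, child) joins with a path separator, unlike string concatenation.
        System.out.println(new Path("file", Integer.toString(3))); // prints file/3
        System.out.println("file" + 3);                            // prints file3
      }
    }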
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/PhysicalOpUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/test/PhysicalOpUnitTestBase.java
index b0820e926..27a4ad668 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/PhysicalOpUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/PhysicalOpUnitTestBase.java
@@ -56,6 +56,7 @@ import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.rpc.NamedThreadFactory;
+import org.apache.hadoop.fs.Path;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -399,9 +400,9 @@ public class PhysicalOpUnitTestBase extends ExecTest {
    * @param columnsToRead
    * @return The {@link org.apache.drill.exec.store.easy.json.JSONRecordReader} corresponding to each given input path.
    */
-  public static Iterator<RecordReader> getJsonReadersFromInputFiles(DrillFileSystem fs, List<String> inputPaths, FragmentContext fragContext, List<SchemaPath> columnsToRead) {
+  public static Iterator<RecordReader> getJsonReadersFromInputFiles(DrillFileSystem fs, List<Path> inputPaths, FragmentContext fragContext, List<SchemaPath> columnsToRead) {
     List<RecordReader> readers = new ArrayList<>();
-    for (String inputPath : inputPaths) {
+    for (Path inputPath : inputPaths) {
       readers.add(new JSONRecordReader(fragContext, inputPath, fs, columnsToRead));
     }
     return readers.iterator();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
index b4fcedf0f..629714b36 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
@@ -643,7 +643,7 @@ public class QueryBuilder {
    */
   protected String queryPlan(String columnName) throws Exception {
-    Preconditions.checkArgument(queryType == QueryType.SQL, "Can only explan an SQL query.");
+    Preconditions.checkArgument(queryType == QueryType.SQL, "Can only explain an SQL query.");
     final List<QueryDataBatch> results = results();
     final RecordBatchLoader loader = new RecordBatchLoader(client.allocator());
     final StringBuilder builder = new StringBuilder();
 
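Note: a hypothetical call site for the updated getJsonReadersFromInputFiles signature; the filesystem, fragment context, input file, and column name below are placeholders for what the test harness would supply, not part of this change:

    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.drill.common.expression.SchemaPath;
    import org.apache.drill.exec.ops.FragmentContext;
    import org.apache.drill.exec.store.RecordReader;
    import org.apache.drill.exec.store.dfs.DrillFileSystem;
    import org.apache.drill.test.PhysicalOpUnitTestBase;
    import org.apache.hadoop.fs.Path;

    public class JsonReaderUsageSketch {
      // fs and fragContext would come from the enclosing test; the path is made up.
      static Iterator<RecordReader> openReaders(DrillFileSystem fs, FragmentContext fragContext) {
        List<Path> inputs = Collections.singletonList(new Path("/tmp/input.json"));
        List<SchemaPath> columns = Collections.singletonList(SchemaPath.getSimplePath("col"));
        return PhysicalOpUnitTestBase.getJsonReadersFromInputFiles(fs, inputs, fragContext, columns);
      }
    }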