aboutsummaryrefslogtreecommitdiff
path: root/exec/java-exec/src/test/java/org
diff options
context:
space:
mode:
authorBohdan Kazydub <bohdan.kazydub@gmail.com>2018-12-20 20:58:16 +0200
committerVolodymyr Vysotskyi <vvovyk@gmail.com>2019-01-25 17:31:42 +0200
commit780a3fbb0222ac037d3c559dc88c7f9dbd48b0cb (patch)
treea959fa3511f32cb757491ab88f1470ec1f9c894b /exec/java-exec/src/test/java/org
parent4e03d54cd854c08a5ed96a67e7c27f02fa5ff435 (diff)
DRILL-6962: Function coalesce returns an Error when none of the columns in coalesce exist in a parquet file
- Updated UntypedNullVector to hold value count when vector is allocated and transferred to another one; - Updated RecordBatchLoader and DrillCursor to handle case when only UntypedNull values are present in RecordBatch (special case when data buffer is null but actual values are present); - Added functions to cast UntypedNull value to other types for use in UDFs; - Moved UntypedReader, UntypedHolderReaderImpl and UntypedReaderImpl from org.apache.drill.exec.vector.complex.impl to org.apache.drill.exec.vector package. closes #1614
Diffstat (limited to 'exec/java-exec/src/test/java/org')
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java19
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java119
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java70
3 files changed, 208 insertions, 0 deletions
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
index 949acf3f7..13f5dd8e0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
@@ -556,6 +556,25 @@ public class TestJoinNullable extends BaseTestQuery {
}
}
+ // Full join with USING clause uses COALESCE internally
+ @Test // DRILL-6962
+ public void testFullJoinUsingUntypedNullColumn() throws Exception {
+ try {
+ enableJoin(true, true);
+ String query = "select * from " +
+ "(select n_nationkey, n_name, coalesce(unk1, unk2) as not_exists from cp.`tpch/nation.parquet`) t1 full join " +
+ "(select r_name, r_comment, coalesce(unk1, unk2) as not_exists from cp.`tpch/region.parquet`) t2 " +
+ "using (not_exists)";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .expectsNumRecords(30)
+ .go();
+ } finally {
+ resetJoinOptions();
+ }
+ }
+
public void nullMixedComparatorEqualJoinHelper(final String query) throws Exception {
testBuilder()
.sqlQuery(query)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
index 4976947ea..521531c5b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
@@ -18,7 +18,11 @@
package org.apache.drill;
import org.apache.drill.categories.SqlFunctionTest;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.ClusterTest;
@@ -36,6 +40,8 @@ import static org.junit.Assert.assertTrue;
@Category(SqlFunctionTest.class)
public class TestUntypedNull extends ClusterTest {
+ private static final TypeProtos.MajorType UNTYPED_NULL_TYPE = Types.optional(TypeProtos.MinorType.NULL);
+
@BeforeClass
public static void setup() throws Exception {
ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
@@ -106,5 +112,118 @@ public class TestUntypedNull extends ClusterTest {
assertEquals(0, summary.recordCount());
}
+ @Test
+ public void testCoalesceOnNotExistentColumns() throws Exception {
+ String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` limit 5";
+ BatchSchema expectedSchema = new SchemaBuilder()
+ .add("coal", UNTYPED_NULL_TYPE)
+ .build();
+
+ testBuilder()
+ .sqlQuery(query)
+ .schemaBaseLine(expectedSchema)
+ .go();
+
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValuesForSingleColumn(null, null, null, null, null)
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsWithGroupBy() throws Exception {
+ String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` group by 1";
+ BatchSchema expectedSchema = new SchemaBuilder()
+ .add("coal", UNTYPED_NULL_TYPE)
+ .build();
+
+ testBuilder()
+ .sqlQuery(query)
+ .schemaBaseLine(expectedSchema)
+ .go();
+
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValuesForSingleColumn(new Object[] {null})
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsWithOrderBy() throws Exception {
+ String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` order by 1 limit 5";
+ BatchSchema expectedSchema = new SchemaBuilder()
+ .add("coal", UNTYPED_NULL_TYPE)
+ .build();
+
+ testBuilder()
+ .sqlQuery(query)
+ .schemaBaseLine(expectedSchema)
+ .go();
+
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValuesForSingleColumn(null, null, null, null, null)
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsWithCoalesceInWhereClause() throws Exception {
+ String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` where coalesce(unk1, unk2) > 10";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .expectsNumRecords(0)
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsWithCoalesceInHavingClause() throws Exception {
+ String query = "select 1 from cp.`tpch/nation.parquet` group by n_name having count(coalesce(unk1, unk2)) > 10";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .expectsNumRecords(0)
+ .go();
+ }
+
+ @Test
+ public void testPartitionByCoalesceOnNotExistentColumns() throws Exception {
+ String query =
+ "select row_number() over (partition by coalesce(unk1, unk2)) as row_num from cp.`tpch/nation.parquet` limit 5";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("row_num")
+ .baselineValuesForSingleColumn(1L, 2L, 3L, 4L, 5L)
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsInUDF() throws Exception {
+ String query = "select substr(coalesce(unk1, unk2), 1, 2) as coal from cp.`tpch/nation.parquet` limit 5";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValuesForSingleColumn(null, null, null, null, null)
+ .go();
+ }
+
+ @Test
+ public void testCoalesceOnNotExistentColumnsInUDF2() throws Exception {
+ String query = "select abs(coalesce(unk1, unk2)) as coal from cp.`tpch/nation.parquet` limit 5";
+ testBuilder()
+ .sqlQuery(query)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValuesForSingleColumn(null, null, null, null, null)
+ .go();
+ }
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 0d884b931..73b4b941a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -21,6 +21,7 @@ import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -30,8 +31,13 @@ import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.vector.IntervalYearVector;
import org.apache.drill.test.ClusterFixture;
@@ -46,6 +52,17 @@ import org.junit.rules.ExpectedException;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
+import static org.apache.drill.common.types.TypeProtos.MinorType.BIGINT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.BIT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.DATE;
+import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT4;
+import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT8;
+import static org.apache.drill.common.types.TypeProtos.MinorType.INT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.INTERVALYEAR;
+import static org.apache.drill.common.types.TypeProtos.MinorType.TIME;
+import static org.apache.drill.common.types.TypeProtos.MinorType.TIMESTAMP;
+import static org.apache.drill.common.types.TypeProtos.MinorType.VARCHAR;
+import static org.apache.drill.common.types.TypeProtos.MinorType.VARDECIMAL;
import static org.apache.drill.exec.ExecTest.mockUtcDateTimeZone;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
@@ -749,4 +766,57 @@ public class TestCastFunctions extends ClusterTest {
run("drop table if exists dfs.tmp.test_time_filter");
}
}
+
+ @Test
+ public void testCastUntypedNull() throws Exception {
+ String query = "select cast(coalesce(unk1, unk2) as %s) as coal from cp.`tpch/nation.parquet` limit 1";
+
+ Map<String, TypeProtos.MajorType> typesMap = createCastTypeMap();
+ for (Map.Entry<String, TypeProtos.MajorType> entry : typesMap.entrySet()) {
+ String q = String.format(query, entry.getKey());
+
+ MaterializedField field = MaterializedField.create("coal", entry.getValue());
+ BatchSchema expectedSchema = new SchemaBuilder()
+ .add(field)
+ .build();
+
+ // Validate schema
+ testBuilder()
+ .sqlQuery(q)
+ .schemaBaseLine(expectedSchema)
+ .go();
+
+ // Validate result
+ testBuilder()
+ .sqlQuery(q)
+ .unOrdered()
+ .baselineColumns("coal")
+ .baselineValues(new Object[] {null})
+ .go();
+ }
+ }
+
+ private static Map<String, TypeProtos.MajorType> createCastTypeMap() {
+ TypeProtos.DataMode mode = TypeProtos.DataMode.OPTIONAL;
+ Map<String, TypeProtos.MajorType> typesMap = new HashMap<>();
+ typesMap.put("BOOLEAN", Types.withMode(BIT, mode));
+ typesMap.put("INT", Types.withMode(INT, mode));
+ typesMap.put("BIGINT", Types.withMode(BIGINT, mode));
+ typesMap.put("FLOAT", Types.withMode(FLOAT4, mode));
+ typesMap.put("DOUBLE", Types.withMode(FLOAT8, mode));
+ typesMap.put("DATE", Types.withMode(DATE, mode));
+ typesMap.put("TIME", Types.withMode(TIME, mode));
+ typesMap.put("TIMESTAMP", Types.withMode(TIMESTAMP, mode));
+ typesMap.put("INTERVAL MONTH", Types.withMode(INTERVALYEAR, mode));
+ typesMap.put("INTERVAL YEAR", Types.withMode(INTERVALYEAR, mode));
+ // todo: uncomment after DRILL-6993 is resolved
+ // typesMap.put("VARBINARY(31)", Types.withPrecision(VARBINARY, mode, 31));
+ typesMap.put("VARCHAR(26)", Types.withPrecision(VARCHAR, mode, 26));
+ typesMap.put("DECIMAL(9, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 9));
+ typesMap.put("DECIMAL(18, 5)", Types.withScaleAndPrecision(VARDECIMAL, mode, 5, 18));
+ typesMap.put("DECIMAL(28, 3)", Types.withScaleAndPrecision(VARDECIMAL, mode, 3, 28));
+ typesMap.put("DECIMAL(38, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 38));
+
+ return typesMap;
+ }
}