author    Arina Ielchiieva <arina.yelchiyeva@gmail.com>  2019-03-05 19:29:18 +0200
committer Arina Ielchiieva <arina.yelchiyeva@gmail.com>  2019-03-11 11:48:37 +0200
commit    78dc86843fb9ef2683156708bc545a6b1950cb87 (patch)
tree      5308d46a8eb6bc48a41d483197c61a16828a5904 /exec/java-exec/src/test/java/org/apache/drill
parent    d585452b52e94a91ae76a24550c5c476847a9cba (diff)
DRILL-7073: CREATE SCHEMA command / TupleSchema / ColumnMetadata improvements
1. Added format, default and column properties logic. 2. Changed the serialized schema JSON format. 3. Added appropriate unit tests. closes #1684
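
For orientation, this is the column-level syntax the new tests exercise, reassembled from the test cases in this diff (the path value is a placeholder):

    create schema (
        a int not null default '10',
        b date format 'yyyy-MM-dd' default '2017-01-31',
        c varchar properties {'k1' = 'v1', 'k2' = 'v2'}
    ) path '/tmp/example.schema'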
Diffstat (limited to 'exec/java-exec/src/test/java/org/apache/drill')
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java                                          77
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/TestSchemaProvider.java             133
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java   4
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java        119
4 files changed, 236 insertions(+), 97 deletions(-)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
index 4b277ae57..f4b1e69f9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
@@ -39,6 +39,7 @@ import org.junit.rules.ExpectedException;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
+import java.time.LocalDate;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -273,7 +274,7 @@ public class TestSchemaCommands extends ClusterTest {
}
@Test
- public void testCreateWithProperties() throws Exception {
+ public void testCreateWithSchemaProperties() throws Exception {
File tmpDir = dirTestWatcher.getTmpDir();
File schemaFile = new File(tmpDir, "schema_for_create_with_properties.schema");
assertFalse(schemaFile.exists());
@@ -292,16 +293,16 @@ public class TestSchemaCommands extends ClusterTest {
SchemaContainer schemaContainer = schemaProvider.read();
assertNull(schemaContainer.getTable());
- assertNotNull(schemaContainer.getSchema());
- assertNotNull(schemaContainer.getProperties());
+ TupleMetadata schema = schemaContainer.getSchema();
+ assertNotNull(schema);
Map<String, String> properties = new LinkedHashMap<>();
properties.put("k1", "v1");
properties.put("k2", "v2");
properties.put("k3", "v3");
- assertEquals(properties.size(), schemaContainer.getProperties().size());
- assertEquals(properties, schemaContainer.getProperties());
+ assertEquals(properties.size(), schema.properties().size());
+ assertEquals(properties, schema.properties());
} finally {
if (schemaFile.exists()) {
@@ -311,7 +312,7 @@ public class TestSchemaCommands extends ClusterTest {
}
@Test
- public void testCreateWithoutProperties() throws Exception {
+ public void testCreateWithoutSchemaProperties() throws Exception {
File tmpDir = dirTestWatcher.getTmpDir();
File schemaFile = new File(tmpDir, "schema_for_create_without_properties.schema");
assertFalse(schemaFile.exists());
@@ -329,9 +330,64 @@ public class TestSchemaCommands extends ClusterTest {
SchemaContainer schemaContainer = schemaProvider.read();
assertNull(schemaContainer.getTable());
- assertNotNull(schemaContainer.getSchema());
- assertNotNull(schemaContainer.getProperties());
- assertEquals(0, schemaContainer.getProperties().size());
+ TupleMetadata schema = schemaContainer.getSchema();
+ assertNotNull(schema);
+ assertNotNull(schema.properties());
+ assertEquals(0, schema.properties().size());
+ } finally {
+ if (schemaFile.exists()) {
+ assertTrue(schemaFile.delete());
+ }
+ }
+ }
+
+ @Test
+ public void testCreateWithVariousColumnProperties() throws Exception {
+ File tmpDir = dirTestWatcher.getTmpDir();
+ File schemaFile = new File(tmpDir, "schema_for_create_with_various_column_properties.schema");
+ assertFalse(schemaFile.exists());
+ try {
+ testBuilder()
+ .sqlQuery("create schema ( " +
+ "a int not null default '10', " +
+ "b date format 'yyyy-MM-dd' default '2017-01-31', " +
+ "c varchar properties {'k1' = 'v1', 'k2' = 'v2'}) " +
+ "path '%s'",
+ schemaFile.getPath())
+ .unOrdered()
+ .baselineColumns("ok", "summary")
+ .baselineValues(true, String.format("Created schema for [%s]", schemaFile.getPath()))
+ .go();
+
+ SchemaProvider schemaProvider = new PathSchemaProvider(new Path(schemaFile.getPath()));
+ assertTrue(schemaProvider.exists());
+
+ SchemaContainer schemaContainer = schemaProvider.read();
+
+ assertNull(schemaContainer.getTable());
+ TupleMetadata schema = schemaContainer.getSchema();
+ assertNotNull(schema);
+
+ assertEquals(3, schema.size());
+
+ ColumnMetadata a = schema.metadata("a");
+ assertTrue(a.defaultValue() instanceof Integer);
+ assertEquals(10, a.defaultValue());
+ assertEquals("10", a.defaultStringValue());
+
+ ColumnMetadata b = schema.metadata("b");
+ assertTrue(b.defaultValue() instanceof LocalDate);
+ assertEquals("yyyy-MM-dd", b.formatValue());
+ assertEquals(LocalDate.parse("2017-01-31"), b.defaultValue());
+ assertEquals("2017-01-31", b.defaultStringValue());
+
+ ColumnMetadata c = schema.metadata("c");
+ Map<String, String> properties = new LinkedHashMap<>();
+ properties.put("k1", "v1");
+ properties.put("k2", "v2");
+ assertEquals(properties, c.properties());
+
+ assertEquals(0, schema.properties().size());
} finally {
if (schemaFile.exists()) {
assertTrue(schemaFile.delete());
@@ -382,8 +438,7 @@ public class TestSchemaCommands extends ClusterTest {
assertEquals(TypeProtos.MinorType.INT, schema.metadata("i").type());
assertEquals(TypeProtos.MinorType.VARCHAR, schema.metadata("v").type());
- assertNotNull(schemaContainer.getProperties());
- assertEquals(2, schemaContainer.getProperties().size());
+ assertEquals(2, schema.properties().size());
} finally {
if (rawSchema.exists()) {
assertTrue(rawSchema.delete());
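
As the hunks above show, schema-level properties moved from SchemaContainer.getProperties() to TupleMetadata.properties(), and columns now expose format and default accessors. A minimal reading sketch assembled from the calls used in these tests (method names all appear above; the file path is hypothetical):

    // Sketch only: read a stored schema file and inspect the new accessors.
    SchemaProvider provider = new PathSchemaProvider(new Path("/tmp/example.schema"));
    SchemaContainer container = provider.read();
    TupleMetadata schema = container.getSchema();
    Map<String, String> schemaProperties = schema.properties(); // was container.getProperties()
    ColumnMetadata b = schema.metadata("b");
    String format = b.formatValue();               // e.g. "yyyy-MM-dd"
    Object defaultValue = b.defaultValue();        // typed value, e.g. java.time.LocalDate
    String defaultString = b.defaultStringValue(); // raw string form, e.g. "2017-01-31"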
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/TestSchemaProvider.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/TestSchemaProvider.java
index 427754f9a..435ec0d7a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/TestSchemaProvider.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/TestSchemaProvider.java
@@ -18,6 +18,7 @@
package org.apache.drill.exec.record.metadata.schema;
import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.store.StorageStrategy;
import org.junit.Rule;
@@ -86,7 +87,7 @@ public class TestSchemaProvider {
assertEquals(1, metadata.size());
assertEquals(TypeProtos.MinorType.INT, metadata.metadata("i").type());
- assertEquals(properties, schemaContainer.getProperties());
+ assertEquals(properties, metadata.properties());
SchemaContainer.Version version = schemaContainer.getVersion();
assertFalse(version.isUndefined());
@@ -134,15 +135,24 @@ public class TestSchemaProvider {
provider.store("i int, v varchar(10)", properties, StorageStrategy.DEFAULT);
assertTrue(provider.exists());
- String expectedContent =
- "{\n"
- + " \"schema\" : [\n"
- + " \"`i` INT\",\n"
- + " \"`v` VARCHAR(10)\"\n"
- + " ],\n"
- + " \"properties\" : {\n"
- + " \"k1\" : \"v1\",\n"
- + " \"k2\" : \"v2\"\n"
+ String expectedContent = "{\n"
+ + " \"schema\" : {\n"
+ + " \"columns\" : [\n"
+ + " {\n"
+ + " \"name\" : \"i\",\n"
+ + " \"type\" : \"INT\",\n"
+ + " \"mode\" : \"OPTIONAL\"\n"
+ + " },\n"
+ + " {\n"
+ + " \"name\" : \"v\",\n"
+ + " \"type\" : \"VARCHAR(10)\",\n"
+ + " \"mode\" : \"OPTIONAL\"\n"
+ + " }\n"
+ + " ],\n"
+ + " \"properties\" : {\n"
+ + " \"k1\" : \"v1\",\n"
+ + " \"k2\" : \"v2\"\n"
+ + " }\n"
+ " },\n"
+ " \"version\" : 1\n"
+ "}";
@@ -166,19 +176,39 @@ public class TestSchemaProvider {
@Test
public void testPathProviderRead() throws Exception {
Path schemaPath = folder.newFile("schema").toPath();
- Files.write(schemaPath, Collections.singletonList(
- "{ \n"
- + " \"table\":\"tbl\",\n"
- + " \"schema\":[ \n"
- + " \"`i` INT\",\n"
- + " \"`v` VARCHAR\"\n"
- + " ],\n"
- + " \"properties\" : {\n"
- + " \"k1\" : \"v1\",\n"
- + " \"k2\" : \"v2\"\n"
- + " }\n"
- + "}\n"
- ));
+ String schema = "{\n"
+ + " \"table\" : \"tbl\",\n"
+ + " \"schema\" : {\n"
+ + " \"columns\" : [\n"
+ + " {\n"
+ + " \"name\" : \"i\",\n"
+ + " \"type\" : \"INT\",\n"
+ + " \"mode\" : \"REQUIRED\",\n"
+ + " \"default\" : \"10\"\n"
+ + " },\n"
+ + " {\n"
+ + " \"name\" : \"a\",\n"
+ + " \"type\" : \"ARRAY<VARCHAR(10)>\",\n"
+ + " \"mode\" : \"REPEATED\",\n"
+ + " \"properties\" : {\n"
+ + " \"ck1\" : \"cv1\",\n"
+ + " \"ck2\" : \"cv2\"\n"
+ + " }\n"
+ + " },\n"
+ + " {\n"
+ + " \"name\" : \"t\",\n"
+ + " \"type\" : \"DATE\",\n"
+ + " \"mode\" : \"OPTIONAL\",\n"
+ + " \"format\" : \"yyyy-mm-dd\"\n"
+ + " }\n"
+ + " ],\n"
+ + " \"properties\" : {\n"
+ + " \"sk1\" : \"sv1\",\n"
+ + " \"sk2\" : \"sv2\"\n"
+ + " }\n"
+ + " }\n"
+ + "}";
+ Files.write(schemaPath, Collections.singletonList(schema));
SchemaProvider provider = new PathSchemaProvider(new org.apache.hadoop.fs.Path(schemaPath.toUri().getPath()));
assertTrue(provider.exists());
SchemaContainer schemaContainer = provider.read();
@@ -187,14 +217,30 @@ public class TestSchemaProvider {
TupleMetadata metadata = schemaContainer.getSchema();
assertNotNull(metadata);
- assertEquals(2, metadata.size());
- assertEquals(TypeProtos.MinorType.INT, metadata.metadata("i").type());
- assertEquals(TypeProtos.MinorType.VARCHAR, metadata.metadata("v").type());
-
- Map<String, String> properties = new LinkedHashMap<>();
- properties.put("k1", "v1");
- properties.put("k2", "v2");
- assertEquals(properties, schemaContainer.getProperties());
+ Map<String, String> schemaProperties = new LinkedHashMap<>();
+ schemaProperties.put("sk1", "sv1");
+ schemaProperties.put("sk2", "sv2");
+ assertEquals(schemaProperties, metadata.properties());
+
+ assertEquals(3, metadata.size());
+
+ ColumnMetadata i = metadata.metadata("i");
+ assertEquals(TypeProtos.MinorType.INT, i.type());
+ assertEquals(TypeProtos.DataMode.REQUIRED, i.mode());
+ assertEquals(10, i.defaultValue());
+
+ ColumnMetadata a = metadata.metadata("a");
+ assertEquals(TypeProtos.MinorType.VARCHAR, a.type());
+ assertEquals(TypeProtos.DataMode.REPEATED, a.mode());
+ Map<String, String> columnProperties = new LinkedHashMap<>();
+ columnProperties.put("ck1", "cv1");
+ columnProperties.put("ck2", "cv2");
+ assertEquals(columnProperties, a.properties());
+
+ ColumnMetadata t = metadata.metadata("t");
+ assertEquals(TypeProtos.MinorType.DATE, t.type());
+ assertEquals(TypeProtos.DataMode.OPTIONAL, t.mode());
+ assertEquals("yyyy-mm-dd", t.formatValue());
assertTrue(schemaContainer.getVersion().isUndefined());
}
@@ -213,16 +259,21 @@ public class TestSchemaProvider {
@Test
public void testPathProviderReadSchemaWithComments() throws Exception {
Path schemaPath = folder.newFile("schema").toPath();
- Files.write(schemaPath, Collections.singletonList(
- "// my schema file start\n"
- + "{ \n"
- + " \"schema\":[ // start columns list\n"
- + " \"`i` INT\"\n"
- + " ]\n"
- + "}\n"
- + "// schema file end\n"
- + "/* multiline comment */"
- ));
+ String schema = "// my schema file start\n" +
+ "{\n"
+ + " \"schema\" : {\n"
+ + " \"columns\" : [ // start columns list\n"
+ + " {\n"
+ + " \"name\" : \"i\",\n"
+ + " \"type\" : \"INT\",\n"
+ + " \"mode\" : \"OPTIONAL\"\n"
+ + " }\n"
+ + " ]\n"
+ + " }\n"
+ + "}"
+ + "// schema file end\n"
+ + "/* multiline comment */";
+ Files.write(schemaPath, Collections.singletonList(schema));
SchemaProvider provider = new PathSchemaProvider(new org.apache.hadoop.fs.Path(schemaPath.toUri().getPath()));
assertTrue(provider.exists());
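
For readability, the serialized layout the store test above expects, reassembled from its Java string literals into plain JSON:

    {
      "schema" : {
        "columns" : [
          { "name" : "i", "type" : "INT", "mode" : "OPTIONAL" },
          { "name" : "v", "type" : "VARCHAR(10)", "mode" : "OPTIONAL" }
        ],
        "properties" : { "k1" : "v1", "k2" : "v2" }
      },
      "version" : 1
    }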
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
index 58c979b34..110efeb7e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
@@ -30,7 +30,6 @@ public class TestParserErrorHandling {
public void testUnsupportedType() {
String schema = "col unk_type";
thrown.expect(SchemaParsingException.class);
- thrown.expectMessage("offending symbol [@1,4:11='unk_type',<38>,1:4]: no viable alternative at input");
SchemaExprParser.parseSchema(schema);
}
@@ -54,7 +53,6 @@ public class TestParserErrorHandling {
public void testUnquotedId() {
String schema = "id with space varchar";
thrown.expect(SchemaParsingException.class);
- thrown.expectMessage("offending symbol [@1,3:6='with',<38>,1:3]: no viable alternative at input");
SchemaExprParser.parseSchema(schema);
}
@@ -62,7 +60,6 @@ public class TestParserErrorHandling {
public void testUnescapedBackTick() {
String schema = "`c`o`l` varchar";
thrown.expect(SchemaParsingException.class);
- thrown.expectMessage("offending symbol [@1,3:3='o',<38>,1:3]: no viable alternative at input");
SchemaExprParser.parseSchema(schema);
}
@@ -78,7 +75,6 @@ public class TestParserErrorHandling {
public void testMissingType() {
String schema = "col not null";
thrown.expect(SchemaParsingException.class);
- thrown.expectMessage("offending symbol [@1,4:6='not',<34>,1:4]: no viable alternative at input");
SchemaExprParser.parseSchema(schema);
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
index 1b9c06f45..eaae0a50e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
@@ -23,8 +23,10 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.junit.Test;
+import java.time.LocalDate;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -35,18 +37,21 @@ import static org.junit.Assert.assertTrue;
public class TestSchemaParser {
@Test
- public void checkQuotedId() {
+ public void checkQuotedIdWithEscapes() {
String schemaWithEscapes = "`a\\\\b\\`c` INT";
- assertEquals(schemaWithEscapes, SchemaExprParser.parseSchema(schemaWithEscapes).schemaString());
+ assertEquals(schemaWithEscapes, SchemaExprParser.parseSchema(schemaWithEscapes).metadata(0).columnString());
String schemaWithKeywords = "`INTEGER` INT";
- assertEquals(schemaWithKeywords, SchemaExprParser.parseSchema(schemaWithKeywords).schemaString());
+ assertEquals(schemaWithKeywords, SchemaExprParser.parseSchema(schemaWithKeywords).metadata(0).columnString());
}
@Test
public void testSchemaWithParen() {
- String schema = "`a` INT NOT NULL, `b` VARCHAR(10)";
- assertEquals(schema, SchemaExprParser.parseSchema(String.format("(%s)", schema)).schemaString());
+ String schemaWithParen = "(`a` INT NOT NULL, `b` VARCHAR(10))";
+ TupleMetadata schema = SchemaExprParser.parseSchema(schemaWithParen);
+ assertEquals(2, schema.size());
+ assertEquals("`a` INT NOT NULL", schema.metadata("a").columnString());
+ assertEquals("`b` VARCHAR(10)", schema.metadata("b").columnString());
}
@Test
@@ -54,13 +59,14 @@ public class TestSchemaParser {
String schemaString = "id\n/*comment*/int\r,//comment\r\nname\nvarchar\t\t\t";
TupleMetadata schema = SchemaExprParser.parseSchema(schemaString);
assertEquals(2, schema.size());
- assertEquals("`id` INT, `name` VARCHAR", schema.schemaString());
+ assertEquals("`id` INT", schema.metadata("id").columnString());
+ assertEquals("`name` VARCHAR", schema.metadata("name").columnString());
}
@Test
public void testCaseInsensitivity() {
String schema = "`Id` InTeGeR NoT NuLl";
- assertEquals("`Id` INT NOT NULL", SchemaExprParser.parseSchema(schema).schemaString());
+ assertEquals("`Id` INT NOT NULL", SchemaExprParser.parseSchema(schema).metadata(0).columnString());
}
@Test
@@ -80,10 +86,7 @@ public class TestSchemaParser {
.buildSchema();
checkSchema("int_col int, integer_col integer not null, bigint_col bigint, " +
- "float_col float not null, double_col double",
- schema,
- "`int_col` INT, `integer_col` INT NOT NULL, `bigint_col` BIGINT, " +
- "`float_col` FLOAT NOT NULL, `double_col` DOUBLE");
+ "float_col float not null, double_col double", schema);
}
@Test
@@ -100,10 +103,8 @@ public class TestSchemaParser {
"col numeric, col_p numeric(5) not null, col_ps numeric(10, 2)"
);
- String expectedSchema = "`col` DECIMAL, `col_p` DECIMAL(5) NOT NULL, `col_ps` DECIMAL(10, 2)";
-
schemas.forEach(
- s -> checkSchema(s, schema, expectedSchema)
+ s -> checkSchema(s, schema)
);
}
@@ -113,13 +114,12 @@ public class TestSchemaParser {
.addNullable("col", TypeProtos.MinorType.BIT)
.buildSchema();
- checkSchema("col boolean", schema, "`col` BOOLEAN");
+ checkSchema("col boolean", schema);
}
@Test
public void testCharacterTypes() {
String schemaPattern = "col %1$s, col_p %1$s(50) not null";
- String expectedSchema = "`col` %1$s, `col_p` %1$s(50) NOT NULL";
Map<String, TypeProtos.MinorType> properties = new HashMap<>();
properties.put("char", TypeProtos.MinorType.VARCHAR);
@@ -136,7 +136,7 @@ public class TestSchemaParser {
.add("col_p", value, 50)
.buildSchema();
- checkSchema(String.format(schemaPattern, key), schema, String.format(expectedSchema, value.name()));
+ checkSchema(String.format(schemaPattern, key), schema);
});
}
@@ -151,10 +151,7 @@ public class TestSchemaParser {
.buildSchema();
checkSchema("time_col time, time_prec_col time(3), date_col date not null, " +
- "timestamp_col timestamp, timestamp_prec_col timestamp(3)",
- schema,
- "`time_col` TIME, `time_prec_col` TIME(3), `date_col` DATE NOT NULL, " +
- "`timestamp_col` TIMESTAMP, `timestamp_prec_col` TIMESTAMP(3)");
+ "timestamp_col timestamp, timestamp_prec_col timestamp(3)", schema);
}
@Test
@@ -171,11 +168,7 @@ public class TestSchemaParser {
checkSchema("interval_year_col interval year, interval_month_col interval month, " +
"interval_day_col interval day, interval_hour_col interval hour, interval_minute_col interval minute, " +
- "interval_second_col interval second, interval_col interval",
- schema,
- "`interval_year_col` INTERVAL YEAR, `interval_month_col` INTERVAL YEAR, " +
- "`interval_day_col` INTERVAL DAY, `interval_hour_col` INTERVAL DAY, `interval_minute_col` INTERVAL DAY, " +
- "`interval_second_col` INTERVAL DAY, `interval_col` INTERVAL");
+ "interval_second_col interval second, interval_col interval", schema);
}
@Test
@@ -201,12 +194,7 @@ public class TestSchemaParser {
+ ", nested_array array<array<int>>"
+ ", map_array array<map<m1 int, m2 varchar>>"
+ ", nested_array_map array<array<map<nm1 int, nm2 varchar>>>",
- schema,
- "`simple_array` ARRAY<INT>"
- + ", `nested_array` ARRAY<ARRAY<INT>>"
- + ", `map_array` ARRAY<MAP<`m1` INT, `m2` VARCHAR>>"
- + ", `nested_array_map` ARRAY<ARRAY<MAP<`nm1` INT, `nm2` VARCHAR>>>"
- );
+ schema);
}
@@ -223,9 +211,7 @@ public class TestSchemaParser {
.resumeSchema()
.buildSchema();
- checkSchema("map_col map<int_col int, array_col array<int>, nested_map map<m1 int, m2 varchar>>",
- schema,
- "`map_col` MAP<`int_col` INT, `array_col` ARRAY<INT>, `nested_map` MAP<`m1` INT, `m2` VARCHAR>>");
+ checkSchema("map_col map<int_col int, array_col array<int>, nested_map map<m1 int, m2 varchar>>", schema);
}
@Test
@@ -266,14 +252,65 @@ public class TestSchemaParser {
assertTrue(mapSchema.metadata("m2").isNullable());
}
- private void checkSchema(String schemaString, TupleMetadata expectedSchema, String expectedSchemaString) {
+ @Test
+ public void testFormat() {
+ String value = "`a` DATE NOT NULL FORMAT 'yyyy-MM-dd'";
+ TupleMetadata schema = SchemaExprParser.parseSchema(value);
+ ColumnMetadata columnMetadata = schema.metadata("a");
+ assertEquals("yyyy-MM-dd", columnMetadata.formatValue());
+ assertEquals(value, columnMetadata.columnString());
+ }
+
+ @Test
+ public void testDefault() {
+ String value = "`a` INT NOT NULL DEFAULT '12'";
+ TupleMetadata schema = SchemaExprParser.parseSchema(value);
+ ColumnMetadata columnMetadata = schema.metadata("a");
+ assertTrue(columnMetadata.defaultValue() instanceof Integer);
+ assertEquals(12, columnMetadata.defaultValue());
+ assertEquals("12", columnMetadata.defaultStringValue());
+ assertEquals(value, columnMetadata.columnString());
+ }
+
+ @Test
+ public void testFormatAndDefault() {
+ String value = "`a` DATE NOT NULL FORMAT 'yyyy-MM-dd' DEFAULT '2018-12-31'";
+ TupleMetadata schema = SchemaExprParser.parseSchema(value);
+ ColumnMetadata columnMetadata = schema.metadata("a");
+ assertTrue(columnMetadata.defaultValue() instanceof LocalDate);
+ assertEquals(LocalDate.of(2018, 12, 31), columnMetadata.defaultValue());
+ assertEquals("2018-12-31", columnMetadata.defaultStringValue());
+ assertEquals(value, columnMetadata.columnString());
+ }
+
+ @Test
+ public void testColumnProperties() {
+ String value = "`a` INT NOT NULL PROPERTIES { 'k1' = 'v1', 'k2' = 'v2' }";
+ TupleMetadata schema = SchemaExprParser.parseSchema(value);
+
+ ColumnMetadata columnMetadata = schema.metadata("a");
+
+ Map<String, String> properties = new LinkedHashMap<>();
+ properties.put("k1", "v1");
+ properties.put("k2", "v2");
+
+ assertEquals(properties, columnMetadata.properties());
+ assertEquals(value, columnMetadata.columnString());
+ }
+
+ private void checkSchema(String schemaString, TupleMetadata expectedSchema) {
TupleMetadata actualSchema = SchemaExprParser.parseSchema(schemaString);
- assertEquals(expectedSchema.schemaString(), actualSchema.schemaString());
- assertEquals(expectedSchemaString, actualSchema.schemaString());
- TupleMetadata unparsedSchema = SchemaExprParser.parseSchema(actualSchema.schemaString());
- assertEquals(unparsedSchema.schemaString(), expectedSchema.schemaString());
- assertEquals(expectedSchemaString, unparsedSchema.schemaString());
+ assertEquals(expectedSchema.size(), actualSchema.size());
+ assertEquals(expectedSchema.properties(), actualSchema.properties());
+
+ expectedSchema.toMetadataList().forEach(
+ expectedMetadata -> {
+ ColumnMetadata actualMetadata = actualSchema.metadata(expectedMetadata.name());
+ assertEquals(expectedMetadata.columnString(), actualMetadata.columnString());
+ }
+ );
+
}
}
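
A side note on the API shift running through this last file: assertions moved from TupleMetadata.schemaString() (whole-schema string) to ColumnMetadata.columnString() (per-column string). A minimal sketch using only calls that appear in the tests above:

    TupleMetadata schema = SchemaExprParser.parseSchema("`a` INT NOT NULL");
    // Per-column textual form, as asserted throughout the updated tests:
    assertEquals("`a` INT NOT NULL", schema.metadata("a").columnString());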