aboutsummaryrefslogtreecommitdiff
path: root/exec/java-exec/src/main/java/org/apache
diff options
context:
space:
mode:
Diffstat (limited to 'exec/java-exec/src/main/java/org/apache')
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java94
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java115
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java63
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/SchemaContainer.java50
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java66
7 files changed, 360 insertions, 52 deletions
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
index 1f833dec0..521a7874f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
@@ -17,13 +17,23 @@
*/
package org.apache.drill.exec.record.metadata;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.metadata.schema.parser.SchemaExprParser;
import org.apache.drill.exec.vector.accessor.ColumnConversionFactory;
import org.apache.drill.exec.vector.accessor.UnsupportedConversionError;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.stream.Collectors;
+
/**
* Abstract definition of column metadata. Allows applications to create
* specialized forms of a column metadata object by extending from this
@@ -36,6 +46,13 @@ import org.apache.drill.exec.vector.accessor.UnsupportedConversionError;
* since maps (and the row itself) will, by definition, differ between
* the two views.
*/
+@JsonAutoDetect(
+ fieldVisibility = JsonAutoDetect.Visibility.NONE,
+ getterVisibility = JsonAutoDetect.Visibility.NONE,
+ isGetterVisibility = JsonAutoDetect.Visibility.NONE,
+ setterVisibility = JsonAutoDetect.Visibility.NONE)
+@JsonInclude(JsonInclude.Include.NON_DEFAULT)
+@JsonPropertyOrder({"name", "type", "mode", "format", "default", "properties"})
public abstract class AbstractColumnMetadata implements ColumnMetadata {
// Capture the key schema information. We cannot use the MaterializedField
@@ -55,6 +72,21 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
*/
protected int expectedElementCount = 1;
+ protected final Map<String, String> properties = new LinkedHashMap<>();
+
+ @JsonCreator
+ public static AbstractColumnMetadata createColumnMetadata(@JsonProperty("name") String name,
+ @JsonProperty("type") String type,
+ @JsonProperty("mode") DataMode mode,
+ @JsonProperty("format") String formatValue,
+ @JsonProperty("default") String defaultValue,
+ @JsonProperty("properties") Map<String, String> properties) {
+ ColumnMetadata columnMetadata = SchemaExprParser.parseColumn(name, type, mode);
+ columnMetadata.setFormatValue(formatValue);
+ columnMetadata.setDefaultFromString(defaultValue);
+ columnMetadata.setProperties(properties);
+ return (AbstractColumnMetadata) columnMetadata;
+ }
public AbstractColumnMetadata(MaterializedField schema) {
name = schema.getName();
@@ -91,6 +123,7 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
@Override
public void bind(TupleMetadata parentTuple) { }
+ @JsonProperty("name")
@Override
public String name() { return name; }
@@ -105,6 +138,7 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
.build();
}
+ @JsonProperty("mode")
@Override
public DataMode mode() { return mode; }
@@ -186,12 +220,28 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
public boolean isProjected() { return projected; }
@Override
+ public void setFormatValue(String value) { }
+
+ @JsonProperty("format")
+ @Override
+ public String formatValue() { return null; }
+
+ @Override
public void setDefaultValue(Object value) { }
@Override
public Object defaultValue() { return null; }
@Override
+ public void setDefaultFromString(String value) { }
+
+ @JsonProperty("default")
+ @Override
+ public String defaultStringValue() {
+ return null;
+ }
+
+ @Override
public void setTypeConverter(ColumnConversionFactory factory) {
throw new UnsupportedConversionError("Type conversion not supported for non-scalar writers");
}
@@ -200,6 +250,20 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
public ColumnConversionFactory typeConverter() { return null; }
@Override
+ public void setProperties(Map<String, String> properties) {
+ if (properties == null) {
+ return;
+ }
+ this.properties.putAll(properties);
+ }
+
+ @JsonProperty("properties")
+ @Override
+ public Map<String, String> properties() {
+ return properties;
+ }
+
+ @Override
public String toString() {
final StringBuilder buf = new StringBuilder()
.append("[")
@@ -221,11 +285,24 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
buf.append(", schema: ")
.append(mapSchema().toString());
}
+ if (formatValue() != null) {
+ buf.append(", format: ")
+ .append(formatValue());
+ }
+ if (defaultValue() != null) {
+ buf.append(", default: ")
+ .append(defaultStringValue());
+ }
+ if (!properties().isEmpty()) {
+ buf.append(", properties: ")
+ .append(properties());
+ }
return buf
.append("]")
.toString();
}
+ @JsonProperty("type")
@Override
public String typeString() {
return majorType().toString();
@@ -243,6 +320,23 @@ public abstract class AbstractColumnMetadata implements ColumnMetadata {
builder.append(" NOT NULL");
}
+ if (formatValue() != null) {
+ builder.append(" FORMAT '").append(formatValue()).append("'");
+ }
+
+ if (defaultValue() != null) {
+ builder.append(" DEFAULT '").append(defaultStringValue()).append("'");
+ }
+
+ if (!properties().isEmpty()) {
+ builder.append(" PROPERTIES { ");
+ builder.append(properties().entrySet()
+ .stream()
+ .map(e -> String.format("'%s' = '%s'", e.getKey(), e.getValue()))
+ .collect(Collectors.joining(", ")));
+ builder.append(" }");
+ }
+
return builder.toString();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
index 8d295e69f..3afc4d2ee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
@@ -22,6 +22,8 @@ import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.MaterializedField;
+import java.util.stream.Collectors;
+
/**
* Describes a map and repeated map. Both are tuples that have a tuple
* schema as part of the column definition.
@@ -125,7 +127,11 @@ public class MapColumnMetadata extends AbstractColumnMetadata {
if (isArray()) {
builder.append("ARRAY<");
}
- builder.append("MAP<").append(mapSchema.schemaString()).append(">");
+ builder.append("MAP<");
+ builder.append(mapSchema().toMetadataList().stream()
+ .map(ColumnMetadata::columnString)
+ .collect(Collectors.joining(", ")));
+ builder.append(">");
if (isArray()) {
builder.append(">");
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
index 9781e1c99..21ac093f1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
@@ -24,6 +24,15 @@ import org.apache.drill.common.types.Types;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.accessor.ColumnConversionFactory;
+import org.joda.time.Period;
+
+import java.math.BigDecimal;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeParseException;
/**
* Primitive (non-map) column. Describes non-nullable, nullable and array types
@@ -42,12 +51,16 @@ import org.apache.drill.exec.vector.accessor.ColumnConversionFactory;
public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PrimitiveColumnMetadata.class);
+
/**
* Expected (average) width for variable-width columns.
*/
private int expectedWidth;
+ private String formatValue;
+
/**
* Default value to use for filling a vector when no real data is
* available, such as for columns added in new files but which does not
@@ -135,6 +148,16 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
}
@Override
+ public void setFormatValue(String value) {
+ formatValue = value;
+ }
+
+ @Override
+ public String formatValue() {
+ return formatValue;
+ }
+
+ @Override
public void setDefaultValue(Object value) {
defaultValue = value;
}
@@ -143,6 +166,16 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
public Object defaultValue() { return defaultValue; }
@Override
+ public void setDefaultFromString(String value) {
+ this.defaultValue = valueFromString(value);
+ }
+
+ @Override
+ public String defaultStringValue() {
+ return valueToString(defaultValue);
+ }
+
+ @Override
public void setTypeConverter(ColumnConversionFactory factory) {
shimFactory = factory;
}
@@ -226,4 +259,86 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
return builder.toString();
}
+ /**
+   * Converts a value given in string literal form into an Object instance based on the {@link MinorType} value.
+   * Returns null in case of an error during parsing or for an unsupported type.
+ *
+ * @param value value in string literal form
+ * @return Object instance
+ */
+ private Object valueFromString(String value) {
+ if (value == null) {
+ return null;
+ }
+ try {
+ switch (type) {
+ case INT:
+ return Integer.parseInt(value);
+ case BIGINT:
+ return Long.parseLong(value);
+ case FLOAT4:
+ return Float.parseFloat(value);
+ case FLOAT8:
+ return Double.parseDouble(value);
+ case VARDECIMAL:
+ return new BigDecimal(value);
+ case BIT:
+ return Boolean.parseBoolean(value);
+ case VARCHAR:
+ case VARBINARY:
+ return value;
+ case TIME:
+ DateTimeFormatter timeFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return LocalTime.parse(value, timeFormatter);
+ case DATE:
+ DateTimeFormatter dateFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_DATE.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return LocalDate.parse(value, dateFormatter);
+ case TIMESTAMP:
+ DateTimeFormatter dateTimeFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return ZonedDateTime.parse(value, dateTimeFormatter);
+ case INTERVAL:
+ case INTERVALDAY:
+ case INTERVALYEAR:
+ return Period.parse(value);
+ default:
+ logger.warn("Unsupported type {} for default value {}, ignore and return null", type, value);
+ return null;
+ }
+ } catch (IllegalArgumentException | DateTimeParseException e) {
+ logger.warn("Error while parsing type {} default value {}, ignore and return null", type, value, e);
+ return null;
+ }
+ }
+
+ /**
+   * Converts the given value instance into its String literal representation based on the column metadata type.
+ *
+ * @param value value instance
+ * @return value in string literal representation
+ */
+ private String valueToString(Object value) {
+ if (value == null) {
+ return null;
+ }
+ switch (type) {
+ case TIME:
+ DateTimeFormatter timeFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return timeFormatter.format((LocalTime) value);
+ case DATE:
+ DateTimeFormatter dateFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_DATE.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return dateFormatter.format((LocalDate) value);
+ case TIMESTAMP:
+ DateTimeFormatter dateTimeFormatter = formatValue == null
+ ? DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
+ return dateTimeFormatter.format((ZonedDateTime) value);
+ default:
+ return value.toString();
+ }
+ }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
index 83dc91ac8..283ee6451 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
@@ -17,15 +17,22 @@
*/
package org.apache.drill.exec.record.metadata;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
+import org.apache.drill.exec.record.MaterializedField;
+
import java.util.ArrayList;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.drill.exec.record.BatchSchema;
-import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
-
/**
* Defines the schema of a tuple: either the top-level row or a nested
* "map" (really structure). A schema is a collection of columns (backed
@@ -33,11 +40,28 @@ import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
* index. New columns may be added at any time; the new column takes the
* next available index.
*/
-
+@JsonAutoDetect(
+ fieldVisibility = JsonAutoDetect.Visibility.NONE,
+ getterVisibility = JsonAutoDetect.Visibility.NONE,
+ isGetterVisibility = JsonAutoDetect.Visibility.NONE,
+ setterVisibility = JsonAutoDetect.Visibility.NONE)
+@JsonInclude(JsonInclude.Include.NON_DEFAULT)
+@JsonPropertyOrder({"columns", "properties"})
public class TupleSchema implements TupleMetadata {
private MapColumnMetadata parentMap;
private final TupleNameSpace<ColumnMetadata> nameSpace = new TupleNameSpace<>();
+ private final Map<String, String> properties = new LinkedHashMap<>();
+
+ public TupleSchema() {
+ }
+
+ @JsonCreator
+ public TupleSchema(@JsonProperty("columns") List<AbstractColumnMetadata> columns,
+ @JsonProperty("properties") Map<String, String> properties) {
+ columns.forEach(this::addColumn);
+ setProperties(properties);
+ }
public void bind(MapColumnMetadata parentMap) {
this.parentMap = parentMap;
@@ -145,6 +169,7 @@ public class TupleSchema implements TupleMetadata {
return cols;
}
+ @JsonProperty("columns")
@Override
public List<ColumnMetadata> toMetadataList() {
return new ArrayList<>(nameSpace.entries());
@@ -183,13 +208,6 @@ public class TupleSchema implements TupleMetadata {
public boolean isRoot() { return parentMap == null; }
@Override
- public String schemaString() {
- return nameSpace.entries().stream()
- .map(ColumnMetadata::columnString)
- .collect(Collectors.joining(", "));
- }
-
- @Override
public String toString() {
StringBuilder builder = new StringBuilder()
.append("[")
@@ -200,7 +218,28 @@ public class TupleSchema implements TupleMetadata {
.map(ColumnMetadata::toString)
.collect(Collectors.joining(", ")));
+ if (!properties.isEmpty()) {
+ if (!nameSpace.entries().isEmpty()) {
+ builder.append(", ");
+ }
+ builder.append("properties: ").append(properties);
+ }
+
builder.append("]");
return builder.toString();
}
+
+ @Override
+ public void setProperties(Map<String, String> properties) {
+ if (properties == null) {
+ return;
+ }
+ this.properties.putAll(properties);
+ }
+
+ @JsonProperty("properties")
+ @Override
+ public Map<String, String> properties() {
+ return properties;
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/SchemaContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/SchemaContainer.java
index e705be2eb..8db8f8e20 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/SchemaContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/SchemaContainer.java
@@ -21,34 +21,29 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.record.metadata.TupleSchema;
import org.apache.drill.exec.record.metadata.schema.parser.SchemaExprParser;
-import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
-import java.util.stream.Collectors;
/**
- * Holder class that contains table name, schema definition
- * and properties passed in schema file or using table function.
+ * Holder class that contains table name, schema definition and current schema container version.
*/
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public class SchemaContainer {
private final String table;
private final TupleMetadata schema;
- // preserve properties order
- private final Map<String, String> properties = new LinkedHashMap<>();
private final Version version;
@JsonCreator
public SchemaContainer(@JsonProperty("table") String table,
- @JsonProperty("schema") List<String> schema,
- @JsonProperty("properties") LinkedHashMap<String, String> properties,
+ @JsonProperty("schema") TupleSchema schema,
@JsonProperty("version") Integer version) {
- this(table, schema == null ? null : String.join(", ", schema), properties, version);
+ this.table = table;
+ this.schema = schema;
+ this.version = new Version(version);
}
public SchemaContainer(String table, String schema, Map<String, String> properties) {
@@ -57,10 +52,7 @@ public class SchemaContainer {
public SchemaContainer(String table, String schema, Map<String, String> properties, Integer version) {
this.table = table;
- this.schema = schema == null ? null : convert(schema);
- if (properties != null) {
- this.properties.putAll(properties);
- }
+ this.schema = schema == null ? null : convert(schema, properties);
this.version = new Version(version);
}
@@ -70,15 +62,8 @@ public class SchemaContainer {
}
@JsonProperty("schema")
- public List<String> getSchemaList() {
- return schema == null ? null : schema.toMetadataList().stream()
- .map(ColumnMetadata::columnString)
- .collect(Collectors.toList());
- }
-
- @JsonProperty("properties")
- public Map<String, String> getProperties() {
- return properties;
+ public TupleMetadata getSchema() {
+ return schema;
}
@JsonProperty("version")
@@ -87,23 +72,21 @@ public class SchemaContainer {
}
@JsonIgnore
- public TupleMetadata getSchema() {
- return schema;
- }
-
- @JsonIgnore
public Version getVersion() {
return version;
}
- private TupleMetadata convert(String schema) {
- return SchemaExprParser.parseSchema(schema);
+ private TupleMetadata convert(String schemaString, Map<String, String> properties) {
+ TupleMetadata schema = SchemaExprParser.parseSchema(schemaString);
+ if (properties != null) {
+ schema.setProperties(properties);
+ }
+ return schema;
}
@Override
public String toString() {
- return "SchemaContainer{" + "table='" + table + '\'' + ", schema=" + schema +
- ", properties=" + properties + ", version=" + version + '}';
+ return "SchemaContainer{" + "table='" + table + '\'' + ", schema=" + schema + ", version=" + version + '}';
}
/**
@@ -114,6 +97,7 @@ public class SchemaContainer {
public static class Version {
public static final int UNDEFINED_VERSION = -1;
+
public static final int VERSION_1 = 1;
// is used for testing
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
index 3cf376215..ea5071e5b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
@@ -23,6 +23,7 @@ import org.antlr.v4.runtime.CodePointCharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
+import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -41,6 +42,21 @@ public class SchemaExprParser {
}
/**
+ * Parses given column name, type and mode into {@link ColumnMetadata} instance.
+ *
+ * @param name column name
+ * @param type column type
+ * @param mode column mode
+ * @return column metadata
+ */
+ public static ColumnMetadata parseColumn(String name, String type, TypeProtos.DataMode mode) {
+ return parseColumn(String.format("`%s` %s %s",
+ name.replaceAll("(\\\\)|(`)", "\\\\$0"),
+ type,
+ TypeProtos.DataMode.REQUIRED == mode ? "not null" : ""));
+ }
+
+ /**
* Parses string definition of the column and converts it
* into {@link ColumnMetadata} instance.
*
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
index 7c7663a71..c49007b65 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
@@ -27,8 +27,12 @@ import org.apache.drill.exec.record.metadata.MetadataUtils;
import org.apache.drill.exec.record.metadata.RepeatedListBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.record.metadata.TupleSchema;
+import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
/**
* Visits schema and stores metadata about its columns into {@link TupleMetadata} class.
@@ -43,14 +47,42 @@ public class SchemaVisitor extends SchemaParserBaseVisitor<TupleMetadata> {
@Override
public TupleMetadata visitColumns(SchemaParser.ColumnsContext ctx) {
TupleMetadata schema = new TupleSchema();
- ColumnVisitor columnVisitor = new ColumnVisitor();
- ctx.column().forEach(
- c -> schema.addColumn(c.accept(columnVisitor))
+ ColumnDefVisitor columnDefVisitor = new ColumnDefVisitor();
+ ctx.column_def().forEach(
+ columnDef -> schema.addColumn(columnDef.accept(columnDefVisitor))
);
return schema;
}
/**
+ * Visits column definition, adds column properties to {@link ColumnMetadata} if present.
+ */
+ public static class ColumnDefVisitor extends SchemaParserBaseVisitor<ColumnMetadata> {
+
+ @Override
+ public ColumnMetadata visitColumn_def(SchemaParser.Column_defContext ctx) {
+ ColumnVisitor columnVisitor = new ColumnVisitor();
+ ColumnMetadata columnMetadata = ctx.column().accept(columnVisitor);
+ if (ctx.property_values() != null) {
+ StringValueVisitor stringValueVisitor = new StringValueVisitor();
+ Map<String, String> columnProperties = new LinkedHashMap<>();
+ ctx.property_values().property_pair().forEach(
+ pair -> {
+ List<String> pairValues = pair.string_value().stream()
+ .map(stringValueVisitor::visit)
+ .collect(Collectors.toList());
+ Preconditions.checkState(pairValues.size() == 2);
+ columnProperties.put(pairValues.get(0), pairValues.get(1));
+ }
+ );
+ columnMetadata.setProperties(columnProperties);
+ }
+ return columnMetadata;
+ }
+
+ }
+
+ /**
* Visits various types of columns (primitive, map, array) and stores their metadata
* into {@link ColumnMetadata} class.
*/
@@ -60,7 +92,15 @@ public class SchemaVisitor extends SchemaParserBaseVisitor<TupleMetadata> {
public ColumnMetadata visitPrimitive_column(SchemaParser.Primitive_columnContext ctx) {
String name = ctx.column_id().accept(new IdVisitor());
TypeProtos.DataMode mode = ctx.nullability() == null ? TypeProtos.DataMode.OPTIONAL : TypeProtos.DataMode.REQUIRED;
- return ctx.simple_type().accept(new TypeVisitor(name, mode));
+ ColumnMetadata columnMetadata = ctx.simple_type().accept(new TypeVisitor(name, mode));
+ StringValueVisitor stringValueVisitor = new StringValueVisitor();
+ if (ctx.format_value() != null) {
+ columnMetadata.setFormatValue(stringValueVisitor.visit(ctx.format_value().string_value()));
+ }
+ if (ctx.default_value() != null) {
+ columnMetadata.setDefaultFromString(stringValueVisitor.visit(ctx.default_value().string_value()));
+ }
+ return columnMetadata;
}
@Override
@@ -88,6 +128,20 @@ public class SchemaVisitor extends SchemaParserBaseVisitor<TupleMetadata> {
}
/**
+ * Visits quoted string, strips backticks, single quotes or double quotes and returns bare string value.
+ */
+ private static class StringValueVisitor extends SchemaParserBaseVisitor<String> {
+
+ @Override
+ public String visitString_value(SchemaParser.String_valueContext ctx) {
+ String text = ctx.getText();
+      // first strip the first and last symbols (backticks, single quotes, double quotes),
+      // then find all chars that are preceded by a backslash and remove the backslash
+ return text.substring(1, text.length() -1).replaceAll("\\\\(.)", "$1");
+ }
+ }
+
+ /**
* Visits ID and QUOTED_ID, returning their string representation.
*/
private static class IdVisitor extends SchemaParserBaseVisitor<String> {
@@ -225,8 +279,8 @@ public class SchemaVisitor extends SchemaParserBaseVisitor<TupleMetadata> {
@Override
public ColumnMetadata visitMap_type(SchemaParser.Map_typeContext ctx) {
MapBuilder builder = new MapBuilder(null, name, mode);
- ColumnVisitor visitor = new ColumnVisitor();
- ctx.columns().column().forEach(
+ ColumnDefVisitor visitor = new ColumnDefVisitor();
+ ctx.columns().column_def().forEach(
c -> builder.addColumn(c.accept(visitor))
);
return builder.buildColumn();