aboutsummaryrefslogtreecommitdiff
path: root/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
blob: 21ac093f1bbd5decabcab83e6114d243a560e983 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.record.metadata;

import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.accessor.ColumnConversionFactory;
import org.joda.time.Period;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;

/**
 * Primitive (non-map) column. Describes non-nullable, nullable and array types
 * (which differ only in mode, but not in metadata structure).
 * <p>
 * Metadata is of two types:
 * <ul>
 * <li>Storage metadata that describes how the column is materialized in a
 * vector. Storage metadata is immutable because revising an existing vector is
 * a complex operation.</li>
 * <li>Supplemental metadata used when reading or writing the column.
 * Supplemental metadata can be changed after the column is created, though it
 * should generally be set before invoking code that uses the metadata.</li>
 * </ul>
 */

public class PrimitiveColumnMetadata extends AbstractColumnMetadata {

  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PrimitiveColumnMetadata.class);

  /**
   * Expected (average) width for variable-width columns.
   */

  private int expectedWidth;

  /**
   * Optional format pattern used when converting date/time default values
   * to and from their string literal form. When null, the ISO formats
   * (with a UTC zone) are used instead.
   */

  private String formatValue;

  /**
   * Default value to use for filling a vector when no real data is
   * available, such as for columns added in new files but which do not
   * exist in existing files. The ultimate default value is the SQL null
   * value, which works only for nullable columns.
   */

  private Object defaultValue;

  /**
   * Factory for an optional shim writer that translates from the type of
   * data available to the code that creates the vectors on the one hand,
   * and the actual type of the column on the other. For example, a shim
   * might parse a string form of a date into the form stored in vectors.
   * <p>
   * The default is to use the "natural" type: that is, to insert no
   * conversion shim.
   */

  private ColumnConversionFactory shimFactory;

  public PrimitiveColumnMetadata(MaterializedField schema) {
    super(schema);
    expectedWidth = estimateWidth(schema.getType());
  }

  public PrimitiveColumnMetadata(String name, MinorType type, DataMode mode) {
    super(name, type, mode);
    expectedWidth = estimateWidth(Types.withMode(type, mode));
  }

  /**
   * Estimates the per-value width for this column. NULL and LATE types
   * occupy no data space; variable-width types use the declared precision
   * when available, falling back to the {@link TypeHelper} estimate minus
   * the 4-byte offset-vector entry; fixed-width types use the exact
   * {@link TypeHelper} size.
   *
   * @param majorType the column's major type (minor type + mode)
   * @return estimated width in bytes of a single value
   */
  private int estimateWidth(MajorType majorType) {
    if (type() == MinorType.NULL || type() == MinorType.LATE) {
      return 0;
    } else if (isVariableWidth()) {

      // The above getSize() method uses the deprecated getWidth()
      // method to get the expected VarChar size. If zero (which
      // it will be), try the revised precision field.

      int precision = majorType.getPrecision();
      if (precision > 0) {
        return precision;
      } else {
        // TypeHelper includes the offset vector width

        return TypeHelper.getSize(majorType) - 4;
      }
    } else {
      return TypeHelper.getSize(majorType);
    }
  }

  /**
   * Copy constructor. Copies storage metadata (including the expected
   * width) but deliberately not the supplemental metadata (format,
   * default value, conversion shim); see {@link #copy()} for a copy
   * that preserves supplemental metadata.
   *
   * @param from the column metadata to copy
   */
  public PrimitiveColumnMetadata(PrimitiveColumnMetadata from) {
    super(from);
    expectedWidth = from.expectedWidth;
  }

  @Override
  public ColumnMetadata copy() {
    PrimitiveColumnMetadata copy = new PrimitiveColumnMetadata(this);

    // Fix: the copy constructor copies only storage metadata. A full
    // copy must also carry the supplemental metadata, else the copy
    // silently loses its format, default value and conversion shim.

    copy.formatValue = formatValue;
    copy.defaultValue = defaultValue;
    copy.shimFactory = shimFactory;
    return copy;
  }

  @Override
  public ColumnMetadata.StructureType structureType() { return ColumnMetadata.StructureType.PRIMITIVE; }

  @Override
  public int expectedWidth() { return expectedWidth; }

  @Override
  public int precision() { return precision; }

  @Override
  public int scale() { return scale; }

  @Override
  public void setExpectedWidth(int width) {
    // The allocation utilities don't like a width of zero, so set to
    // 1 as the minimum. Adjusted to avoid trivial errors if the caller
    // makes an error.
    // Silently ignored for fixed-width types, whose width is implied
    // by the type itself.

    if (isVariableWidth()) {
      expectedWidth = Math.max(1, width);
    }
  }

  @Override
  public void setFormatValue(String value) {
    formatValue = value;
  }

  @Override
  public String formatValue() {
    return formatValue;
  }

  @Override
  public void setDefaultValue(Object value) {
    defaultValue = value;
  }

  @Override
  public Object defaultValue() { return defaultValue; }

  @Override
  public void setDefaultFromString(String value) {
    this.defaultValue = valueFromString(value);
  }

  @Override
  public String defaultStringValue() {
    return valueToString(defaultValue);
  }

  @Override
  public void setTypeConverter(ColumnConversionFactory factory) {
    shimFactory = factory;
  }

  @Override
  public ColumnConversionFactory typeConverter() { return shimFactory; }

  @Override
  public ColumnMetadata cloneEmpty() {
    // Intentionally uses the copy constructor, which carries over
    // storage metadata only — supplemental metadata is not copied.
    return new PrimitiveColumnMetadata(this);
  }

  /**
   * Merges this column's hints with a concrete field's metadata: the
   * merged column takes its type from the field, the element count and
   * projection flag from this column, and the larger of this column's
   * expected width and the field's precision.
   *
   * @param field the materialized field to merge with
   * @return merged column metadata
   */
  public ColumnMetadata mergeWith(MaterializedField field) {
    PrimitiveColumnMetadata merged = new PrimitiveColumnMetadata(field);
    merged.setExpectedElementCount(expectedElementCount);
    merged.setExpectedWidth(Math.max(expectedWidth, field.getPrecision()));
    merged.setProjected(projected);
    return merged;
  }

  @Override
  public MajorType majorType() {
    return MajorType.newBuilder()
        .setMinorType(type)
        .setMode(mode)
        .setPrecision(precision)
        .setScale(scale)
        .build();
  }

  @Override
  public MaterializedField schema() {
    return MaterializedField.create(name, majorType());
  }

  @Override
  public MaterializedField emptySchema() { return schema(); }

  @Override
  public String typeString() {
    StringBuilder builder = new StringBuilder();
    if (isArray()) {
      builder.append("ARRAY<");
    }

    switch (type) {
      case VARDECIMAL:
        builder.append("DECIMAL");
        break;
      case FLOAT4:
        builder.append("FLOAT");
        break;
      case FLOAT8:
        builder.append("DOUBLE");
        break;
      case BIT:
        builder.append("BOOLEAN");
        break;
      case INTERVALYEAR:
        builder.append("INTERVAL YEAR");
        break;
      case INTERVALDAY:
        builder.append("INTERVAL DAY");
        break;
      default:
        // other minor types names correspond to SQL-like equivalents
        builder.append(type.name());
    }

    if (precision() > 0) {
      builder.append("(").append(precision());
      if (scale() > 0) {
        builder.append(", ").append(scale());
      }
      builder.append(")");
    }

    if (isArray()) {
      builder.append(">");
    }
    return builder.toString();
  }

  /**
   * Converts value in string literal form into Object instance based on {@link MinorType} value.
   * Returns null in case of error during parsing or unsupported type.
   *
   * @param value value in string literal form
   * @return Object instance
   */
  private Object valueFromString(String value) {
    if (value == null) {
      return null;
    }
    try {
      switch (type) {
        case INT:
          return Integer.parseInt(value);
        case BIGINT:
          return Long.parseLong(value);
        case FLOAT4:
          return Float.parseFloat(value);
        case FLOAT8:
          return Double.parseDouble(value);
        case VARDECIMAL:
          return new BigDecimal(value);
        case BIT:
          return Boolean.parseBoolean(value);
        case VARCHAR:
        case VARBINARY:
          return value;
        case TIME:
          DateTimeFormatter timeFormatter = formatValue == null
            ? DateTimeFormatter.ISO_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
          return LocalTime.parse(value, timeFormatter);
        case DATE:
          DateTimeFormatter dateFormatter = formatValue == null
            ? DateTimeFormatter.ISO_DATE.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
          return LocalDate.parse(value, dateFormatter);
        case TIMESTAMP:
          DateTimeFormatter dateTimeFormatter = formatValue == null
            ? DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
          return ZonedDateTime.parse(value, dateTimeFormatter);
        case INTERVAL:
        case INTERVALDAY:
        case INTERVALYEAR:
          return Period.parse(value);
        default:
          logger.warn("Unsupported type {} for default value {}, ignore and return null", type, value);
          return null;
      }
    } catch (IllegalArgumentException | DateTimeParseException e) {
      // Malformed literal: treat as "no default" rather than failing
      // the schema load; the warning preserves diagnosability.
      logger.warn("Error while parsing type {} default value {}, ignore and return null", type, value, e);
      return null;
    }
  }

  /**
   * Converts given value instance into String literal representation based on column metadata type.
   *
   * @param value value instance
   * @return value in string literal representation
   */
  private String valueToString(Object value) {
    if (value == null) {
      return null;
    }
    switch (type) {
      case TIME:
        DateTimeFormatter timeFormatter = formatValue == null
          ? DateTimeFormatter.ISO_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
        return timeFormatter.format((LocalTime) value);
      case DATE:
        DateTimeFormatter dateFormatter = formatValue == null
          ? DateTimeFormatter.ISO_DATE.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
        return dateFormatter.format((LocalDate) value);
      case TIMESTAMP:
        DateTimeFormatter dateTimeFormatter = formatValue == null
          ? DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneOffset.UTC) : DateTimeFormatter.ofPattern(formatValue);
        return dateTimeFormatter.format((ZonedDateTime) value);
      default:
        return value.toString();
    }
  }

}