path: root/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.planner.sql;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.drill.shaded.guava.com.google.common.base.Strings;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.jdbc.DynamicSchema;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCostFactory;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.volcano.VolcanoPlanner;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.ChainedSqlOperatorTable;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
import org.apache.calcite.sql.validate.SqlValidatorImpl;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.tools.RelBuilderFactory;
import org.apache.calcite.util.Util;
import org.apache.commons.collections.ListUtils;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.ops.QueryContext;
import org.apache.drill.exec.ops.UdfUtilities;
import org.apache.drill.exec.planner.cost.DrillCostBase;
import org.apache.drill.exec.planner.logical.DrillConstExecutor;
import org.apache.drill.exec.planner.logical.DrillRelFactories;
import org.apache.drill.exec.planner.logical.DrillTable;
import org.apache.drill.exec.planner.physical.DrillDistributionTraitDef;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.store.dfs.FileSelection;
import static org.apache.calcite.util.Static.RESOURCE;

import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
import org.apache.drill.exec.store.ColumnExplorer;
import org.apache.drill.exec.util.DecimalUtility;

/**
 * Class responsible for managing parsing, validation and toRel conversion of SQL statements.
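 * <p>
 * A minimal usage sketch (mirroring the parse/validate/toRel flow used by the
 * view {@code Expander} below; {@code context} is assumed to be an
 * already-constructed {@link QueryContext}):
 * <pre>{@code
 * SqlConverter converter = new SqlConverter(context);
 * SqlNode parsed = converter.parse("SELECT * FROM dfs.tmp.t");
 * SqlNode validated = converter.validate(parsed);
 * RelRoot rel = converter.toRel(validated);
 * }</pre>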
 */
public class SqlConverter {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SqlConverter.class);

  private static final DrillTypeSystem DRILL_TYPE_SYSTEM = new DrillTypeSystem();

  private final JavaTypeFactory typeFactory;
  private final SqlParser.Config parserConfig;
  // Allow the default config to be modified using immutable configs
  private SqlToRelConverter.Config sqlToRelConverterConfig;
  private final DrillCalciteCatalogReader catalog;
  private final PlannerSettings settings;
  private final SchemaPlus rootSchema;
  private final SchemaPlus defaultSchema;
  private final SqlOperatorTable opTab;
  private final RelOptCostFactory costFactory;
  private final DrillValidator validator;
  private final boolean isInnerQuery;
  private final UdfUtilities util;
  private final FunctionImplementationRegistry functions;
  private final String temporarySchema;
  private final UserSession session;
  private final DrillConfig drillConfig;
  private RelOptCluster cluster;

  private VolcanoPlanner planner;
  private boolean useRootSchema = false;

  public SqlConverter(QueryContext context) {
    this.settings = context.getPlannerSettings();
    this.util = context;
    this.functions = context.getFunctionRegistry();
    this.parserConfig = new DrillParserConfig(settings);
    this.sqlToRelConverterConfig = new SqlToRelConverterConfig();
    this.isInnerQuery = false;
    this.typeFactory = new JavaTypeFactoryImpl(DRILL_TYPE_SYSTEM);
    this.defaultSchema = context.getNewDefaultSchema();
    this.rootSchema = rootSchema(defaultSchema);
    this.temporarySchema = context.getConfig().getString(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE);
    this.session = context.getSession();
    this.drillConfig = context.getConfig();
    this.catalog = new DrillCalciteCatalogReader(
        rootSchema,
        parserConfig.caseSensitive(),
        DynamicSchema.from(defaultSchema).path(null),
        typeFactory,
        drillConfig,
        session);
    this.opTab = new ChainedSqlOperatorTable(Arrays.asList(context.getDrillOperatorTable(), catalog));
    this.costFactory = (settings.useDefaultCosting()) ? null : new DrillCostBase.DrillCostFactory();
    this.validator = new DrillValidator(opTab, catalog, typeFactory, parserConfig.conformance());
    validator.setIdentifierExpansion(true);
    cluster = null;
  }

  private SqlConverter(SqlConverter parent, SchemaPlus defaultSchema, SchemaPlus rootSchema,
      DrillCalciteCatalogReader catalog) {
    this.parserConfig = parent.parserConfig;
    this.sqlToRelConverterConfig = parent.sqlToRelConverterConfig;
    this.defaultSchema = defaultSchema;
    this.functions = parent.functions;
    this.util = parent.util;
    this.isInnerQuery = true;
    this.typeFactory = parent.typeFactory;
    this.costFactory = parent.costFactory;
    this.settings = parent.settings;
    this.rootSchema = rootSchema;
    this.catalog = catalog;
    this.opTab = parent.opTab;
    this.planner = parent.planner;
    this.validator = new DrillValidator(opTab, catalog, typeFactory, parserConfig.conformance());
    this.temporarySchema = parent.temporarySchema;
    this.session = parent.session;
    this.drillConfig = parent.drillConfig;
    validator.setIdentifierExpansion(true);
    this.cluster = parent.cluster;
  }

  public SqlNode parse(String sql) {
    try {
      SqlParser parser = SqlParser.create(sql, parserConfig);
      return parser.parseStmt();
    } catch (SqlParseException e) {
      UserException.Builder builder = UserException
          .parseError(e)
          .addContext("SQL Query", formatSQLParsingError(sql, e.getPos()));
      if (isInnerQuery) {
        builder.message("Failure parsing a view your query is dependent upon.");
      }
      throw builder.build(logger);
    }
  }

  public SqlNode validate(final SqlNode parsedNode) {
    try {
      return validator.validate(parsedNode);
    } catch (RuntimeException e) {
      UserException.Builder builder = UserException
          .validationError(e);
      if (isInnerQuery) {
        builder.message("Failure validating a view your query is dependent upon.");
      }
      throw builder.build(logger);
    }
  }

  public RelDataType getOutputType(SqlNode validatedNode) {
    return validator.getValidatedNodeType(validatedNode);
  }

  public JavaTypeFactory getTypeFactory() {
    return typeFactory;
  }

  public SqlOperatorTable getOpTab() {
    return opTab;
  }

  public RelOptCostFactory getCostFactory() {
    return costFactory;
  }

  public SchemaPlus getRootSchema() {
    return rootSchema;
  }

  public SchemaPlus getDefaultSchema() {
    return defaultSchema;
  }

  /** Disallows the presence of temporary tables in a SQL statement (e.g. in view definitions). */
  public void disallowTemporaryTables() {
    catalog.disallowTemporaryTables();
  }

  /**
   * Sets whether the root schema path should be used as the default schema path.
   *
   * @param useRoot flag
   */
  public void useRootSchemaAsDefault(boolean useRoot) {
    useRootSchema = useRoot;
  }

  private class DrillValidator extends SqlValidatorImpl {

    DrillValidator(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader,
        RelDataTypeFactory typeFactory, SqlConformance conformance) {
      super(opTab, catalogReader, typeFactory, conformance);
    }

    @Override
    protected void validateFrom(
        SqlNode node,
        RelDataType targetRowType,
        SqlValidatorScope scope) {
      switch (node.getKind()) {
        case AS:
          SqlNode sqlNode = ((SqlCall) node).operand(0);
          switch (sqlNode.getKind()) {
            case IDENTIFIER:
              SqlIdentifier tempNode = (SqlIdentifier) sqlNode;
              DrillCalciteCatalogReader catalogReader = (SqlConverter.DrillCalciteCatalogReader) getCatalogReader();

              changeNamesIfTableIsTemporary(tempNode);

              // Check the schema and throw a SchemaNotFound exception instead of a TableNotFound exception.
              if (catalogReader.getTable(tempNode.names) == null) {
                catalogReader.isValidSchema(tempNode.names);
              }
              break;
            case UNNEST:
              if (((SqlCall) node).operandCount() < 3) {
                throw RESOURCE.validationError("Alias table and column name are required for UNNEST").ex();
              }
          }
      }
      super.validateFrom(node, targetRowType, scope);
    }

    @Override
    public String deriveAlias(
        SqlNode node,
        int ordinal) {
      if (node instanceof SqlIdentifier) {
        SqlIdentifier tempNode = ((SqlIdentifier) node);
        changeNamesIfTableIsTemporary(tempNode);
      }
      return SqlValidatorUtil.getAlias(node, ordinal);
    }

    /**
     * Checks that the specified expression is not an implicit column and
     * adds it to the select list, ensuring that its alias does not
     * clash with any existing expression on the list.
     * <p>
     * This method may be used when {@link RelDataType#isDynamicStruct}
     * returns false: every column from the table row type except the
     * implicit ones is added to the specified list, aliases and fieldList.
     * In the opposite case, when {@link RelDataType#isDynamicStruct}
     * returns true, only the dynamic star is added to the specified
     * list, aliases and fieldList.
     */
    @Override
    protected void addToSelectList(
        List<SqlNode> list,
        Set<String> aliases,
        List<Map.Entry<String, RelDataType>> fieldList,
        SqlNode exp,
        SqlValidatorScope scope,
        final boolean includeSystemVars) {
      if (!ColumnExplorer.initImplicitFileColumns(session.getOptions())
          .containsKey(SqlValidatorUtil.getAlias(exp, -1))) {
        super.addToSelectList(list, aliases, fieldList, exp, scope, includeSystemVars);
      }
    }

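    /**
     * If the identifier resolves to a session temporary table, rewrites its
     * component names to the fully qualified name under the temporary workspace,
     * reusing the parser position of the first component for every rewritten name.
     */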
    private void changeNamesIfTableIsTemporary(SqlIdentifier tempNode) {
      List<String> temporaryTableNames = ((SqlConverter.DrillCalciteCatalogReader) getCatalogReader()).getTemporaryNames(tempNode.names);
      if (temporaryTableNames != null) {
        SqlParserPos pos = tempNode.getComponentParserPosition(0);
        List<SqlParserPos> poses = Lists.newArrayList();
        for (int i = 0; i < temporaryTableNames.size(); i++) {
          poses.add(i, pos);
        }
        tempNode.setNames(temporaryTableNames, poses);
      }
    }
  }

  private static class DrillTypeSystem extends RelDataTypeSystemImpl {

    @Override
    public int getDefaultPrecision(SqlTypeName typeName) {
      switch (typeName) {
      case CHAR:
      case BINARY:
      case VARCHAR:
      case VARBINARY:
        return Types.MAX_VARCHAR_LENGTH;
      default:
        return super.getDefaultPrecision(typeName);
      }
    }

    @Override
    public int getMaxNumericScale() {
      return 38;
    }

    @Override
    public int getMaxNumericPrecision() {
      return 38;
    }

    @Override
    public boolean isSchemaCaseSensitive() {
      // Drill uses a case-insensitive, case-preserving policy
      return false;
    }
  }

  public RelRoot toRel(final SqlNode validatedNode) {
    if (planner == null) {
      planner = new VolcanoPlanner(costFactory, settings);
      planner.setExecutor(new DrillConstExecutor(functions, util, settings));
      planner.clearRelTraitDefs();
      planner.addRelTraitDef(ConventionTraitDef.INSTANCE);
      planner.addRelTraitDef(DrillDistributionTraitDef.INSTANCE);
      planner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
    }

    if (cluster == null) {
      initCluster();
    }
    final SqlToRelConverter sqlToRelConverter =
        new SqlToRelConverter(new Expander(), validator, catalog, cluster, DrillConvertletTable.INSTANCE,
            sqlToRelConverterConfig);

    /*
     * Set the value to false to avoid simplifying project expressions
     * when creating new projects, since simplification may change the data
     * mode and cause assertion errors during type validation.
     */
    Hook.REL_BUILDER_SIMPLIFY.add(Hook.propertyJ(false));

    // To avoid unexpected column errors, set the value of top to false
    final RelRoot rel = sqlToRelConverter.convertQuery(validatedNode, false, false);
    return rel.withRel(sqlToRelConverter.flattenTypes(rel.rel, true));
  }

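  /**
   * Expands view definitions during conversion: each view is parsed, validated
   * and converted with a child {@link SqlConverter} that shares this converter's
   * planner and cluster but uses the view's own schema path; temporary tables
   * are disallowed inside view definitions.
   */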
  private class Expander implements RelOptTable.ViewExpander {

    @Override
    public RelRoot expandView(RelDataType rowType, String queryString, List<String> schemaPath, List<String> viewPath) {
      final DrillCalciteCatalogReader catalogReader = new DrillCalciteCatalogReader(
          rootSchema,
          parserConfig.caseSensitive(),
          schemaPath,
          typeFactory,
          drillConfig,
          session);
      final SqlConverter parser = new SqlConverter(SqlConverter.this, defaultSchema, rootSchema, catalogReader);
      return expandView(queryString, parser);
    }

    @Override
    public RelRoot expandView(RelDataType rowType, String queryString, SchemaPlus rootSchema, List<String> schemaPath) {
      final DrillCalciteCatalogReader catalogReader = new DrillCalciteCatalogReader(
          rootSchema,
          parserConfig.caseSensitive(),
          schemaPath,
          typeFactory,
          drillConfig,
          session);
      SchemaPlus schema = rootSchema;
      for (String s : schemaPath) {
        SchemaPlus newSchema = schema.getSubSchema(s);

        if (newSchema == null) {
          throw UserException
              .validationError()
              .message(
              "Failure while attempting to expand view. Requested schema %s not available in schema %s.", s,
                  schema.getName())
              .addContext("View Context", Joiner.on(", ").join(schemaPath))
              .addContext("View SQL", queryString)
              .build(logger);
        }

        schema = newSchema;
      }
      SqlConverter parser = new SqlConverter(SqlConverter.this, schema, rootSchema, catalogReader);
      return expandView(queryString, parser);
    }

    private RelRoot expandView(String queryString, SqlConverter converter) {
      converter.disallowTemporaryTables();
      final SqlNode parsedNode = converter.parse(queryString);
      final SqlNode validatedNode = converter.validate(parsedNode);
      return converter.toRel(validatedNode);
    }
  }

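  /**
   * Drill-specific {@link SqlToRelConverter.Config}: disables table access
   * conversion, unused field trimming and sub-query expansion, and supplies
   * Drill's logical {@link RelBuilderFactory} and the configured IN sub-query
   * threshold.
   */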
  private class SqlToRelConverterConfig implements SqlToRelConverter.Config {

    final int inSubqueryThreshold = (int) settings.getInSubqueryThreshold();

    @Override
    public boolean isConvertTableAccess() {
      return false;
    }

    @Override
    public boolean isDecorrelationEnabled() {
      return SqlToRelConverterConfig.DEFAULT.isDecorrelationEnabled();
    }

    @Override
    public boolean isTrimUnusedFields() {
      return false;
    }

    @Override
    public boolean isCreateValuesRel() {
      return SqlToRelConverterConfig.DEFAULT.isCreateValuesRel();
    }

    @Override
    public boolean isExplain() {
      return SqlToRelConverterConfig.DEFAULT.isExplain();
    }

    @Override
    public boolean isExpand() {
      return false;
    }

    @Override
    public int getInSubQueryThreshold() {
      return inSubqueryThreshold;
    }

    @Override
    public RelBuilderFactory getRelBuilderFactory() {
      return DrillRelFactories.LOGICAL_BUILDER;
    }
  }

  /**
   * Formats a SQL parsing error by appending a line with a {@code ^} marker
   * under the position at which parsing failed.
   *
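   * <p>For example (illustrative only), for the query {@code select * frm t} and
   * a parser position at line 1, column 10, the result is:
   * <pre>
   * select * frm t
   *          ^
   * </pre>
   *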
   * @param sql
   *          the SQL sent to the server
   * @param pos
   *          the position of the error
   * @return The sql with a ^ character under the error
   */
  static String formatSQLParsingError(String sql, SqlParserPos pos) {
    if (pos == null) {
      return sql;
    }
    StringBuilder sb = new StringBuilder();
    String[] lines = sql.split("\n");
    for (int i = 0; i < lines.length; i++) {
      String line = lines[i];
      sb.append(line).append("\n");
      if (i == (pos.getLineNum() - 1)) {
        for (int j = 0; j < pos.getColumnNum() - 1; j++) {
          sb.append(" ");
        }
        sb.append("^\n");
      }
    }
    return sb.toString();
  }

  private static SchemaPlus rootSchema(SchemaPlus schema) {
    while (true) {
      if (schema.getParentSchema() == null) {
        return schema;
      }
      schema = schema.getParentSchema();
    }
  }

  private void initCluster() {
    cluster = RelOptCluster.create(planner, new DrillRexBuilder(typeFactory));
  }

  private static class DrillRexBuilder extends RexBuilder {
    private DrillRexBuilder(RelDataTypeFactory typeFactory) {
      super(typeFactory);
    }

    /**
     * Since Drill has a different mechanism and rules for implicit casting,
     * {@code ensureType()} is overridden to avoid conflicting cast functions
     * being added to the expressions.
     */
    @Override
    public RexNode ensureType(
        RelDataType type,
        RexNode node,
        boolean matchNullability) {
      return node;
    }

    /**
     * Creates a call to the CAST operator, expanding if possible, and optionally
     * also preserving nullability.
     *
     * <p>Tries to expand the cast, and therefore the result may be something
     * other than a {@link org.apache.calcite.rex.RexCall} to the CAST operator, such as a
     * {@link RexLiteral} if {@code matchNullability} is false.
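     *
     * <p>A minimal illustration of the decimal special case handled in this
     * override (assuming {@code rexBuilder} is a {@code DrillRexBuilder} and
     * {@code typeFactory} its associated type factory):
     * <pre>{@code
     * RelDataType target = typeFactory.createSqlType(SqlTypeName.DECIMAL, 5, 2);
     * RexNode literal = rexBuilder.makeExactLiteral(new BigDecimal("12.30"));
     * // the literal's precision (4) differs from the target's (5), so the
     * // cast is kept as an abstract CAST call rather than folded away
     * RexNode cast = rexBuilder.makeCast(target, literal, false);
     * }</pre>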
     *
     * @param type             Type to cast to
     * @param exp              Expression being cast
     * @param matchNullability Whether to ensure the result has the same
     *                         nullability as {@code type}
     * @return Call to CAST operator
     */
    @Override
    public RexNode makeCast(RelDataType type, RexNode exp, boolean matchNullability) {
      if (matchNullability) {
        return makeAbstractCast(type, exp);
      }
      // for the case when BigDecimal literal has a scale or precision
      // that differs from the value from specified RelDataType, cast cannot be removed
      // TODO: remove this code when CALCITE-1468 is fixed
      if (type.getSqlTypeName() == SqlTypeName.DECIMAL && exp instanceof RexLiteral) {
        if (type.getPrecision() < 1) {
          throw UserException.validationError()
              .message("Expected precision greater than 0, but was %s.", type.getPrecision())
              .build(logger);
        }
        if (type.getScale() > type.getPrecision()) {
          throw UserException.validationError()
              .message("Expected scale less than or equal to precision, " +
                  "but was scale %s and precision %s.", type.getScale(), type.getPrecision())
              .build(logger);
        }
        RexLiteral literal = (RexLiteral) exp;
        Comparable value = literal.getValueAs(Comparable.class);
        if (value instanceof BigDecimal) {
          BigDecimal bigDecimal = (BigDecimal) value;
          DecimalUtility.checkValueOverflow(bigDecimal, type.getPrecision(), type.getScale());
          if (bigDecimal.scale() != type.getScale() || bigDecimal.precision() != type.getPrecision()) {
            return makeAbstractCast(type, exp);
          }
        }
      }
      return super.makeCast(type, exp, false);
    }
  }

  /**
   * Extension of {@link CalciteCatalogReader} that adds the ability to check
   * for temporary tables first when no schema is indicated next to the table
   * name during query parsing, or when the indicated workspace is the default
   * temporary workspace.
   */
  private class DrillCalciteCatalogReader extends CalciteCatalogReader {

    private final DrillConfig drillConfig;
    private final UserSession session;
    private boolean allowTemporaryTables;
    private final SchemaPlus rootSchema;

    DrillCalciteCatalogReader(SchemaPlus rootSchema,
                              boolean caseSensitive,
                              List<String> defaultSchema,
                              JavaTypeFactory typeFactory,
                              DrillConfig drillConfig,
                              UserSession session) {
      super(DynamicSchema.from(rootSchema), defaultSchema,
          typeFactory, getConnectionConfig(caseSensitive));
      this.drillConfig = drillConfig;
      this.session = session;
      this.allowTemporaryTables = true;
      this.rootSchema = rootSchema;
    }

    /**
     * Disallows the presence of temporary tables in a SQL statement (e.g. in view definitions).
     */
    void disallowTemporaryTables() {
      this.allowTemporaryTables = false;
    }

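    /**
     * Resolves the given names against the session's temporary tables.
     *
     * @param names list of schema and table names, table name is always the last element
     * @return fully qualified temporary table name (temporary workspace path plus
     *         resolved table name), or null if the table is not temporary
     */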
    private List<String> getTemporaryNames(List<String> names) {
      if (mightBeTemporaryTable(names, session.getDefaultSchemaPath(), drillConfig)) {
        String tableName = FileSelection.removeLeadingSlash(names.get(names.size() - 1));
        String temporaryTableName = session.resolveTemporaryTableName(tableName);
        if (temporaryTableName != null) {
          List<String> temporaryNames = new ArrayList<>(SchemaUtilites.getSchemaPathAsList(temporarySchema));
          temporaryNames.add(temporaryTableName);
          return temporaryNames;
        }
      }
      return null;
    }

    /**
     * If the schema is not indicated (only one element in the list) or the schema
     * is the default temporary workspace, session temporary tables in the default
     * temporary workspace are checked first.
     * If a temporary table is found and temporary table usage is allowed, its
     * table instance is returned; otherwise the search is conducted in the
     * original workspace.
     *
     * @param names list of schema and table names, table name is always the last element
     * @return table instance, null otherwise
     * @throws UserException if temporary tables usage is disallowed
     */
    @Override
    public Prepare.PreparingTable getTable(final List<String> names) {
      String originalTableName = session.getOriginalTableNameFromTemporaryTable(names.get(names.size() - 1));
      if (originalTableName != null) {
        if (!allowTemporaryTables) {
          throw UserException
              .validationError()
              .message("Temporary tables usage is disallowed. Used temporary table name: [%s].", originalTableName)
              .build(logger);
        }
      }

      Prepare.PreparingTable table = super.getTable(names);
      DrillTable unwrap;
      // add session options if the found table is a Drill table
      if (table != null && (unwrap = table.unwrap(DrillTable.class)) != null) {
        unwrap.setOptions(session.getOptions());
      }
      return table;
    }

    @Override
    public List<List<String>> getSchemaPaths() {
      if (useRootSchema) {
        return ImmutableList.of(ImmutableList.of());
      }
      return super.getSchemaPaths();
    }

    /**
     * Checks that the schema indicated in the given names (all elements except
     * the last one, which is the table name) can be resolved, either on its own
     * or relative to the session's default schema.
     *
     * @param names list of schema and table names, table name is always the last element
     * @throws UserException if the schema is not valid
     */
    private void isValidSchema(final List<String> names) throws UserException {
      SchemaPlus defaultSchema = session.getDefaultSchema(this.rootSchema);
      String defaultSchemaCombinedPath = SchemaUtilites.getSchemaPath(defaultSchema);
      List<String> schemaPath = Util.skipLast(names);
      String schemaPathCombined = SchemaUtilites.getSchemaPath(schemaPath);
      String commonPrefix = SchemaUtilites.getPrefixSchemaPath(defaultSchemaCombinedPath,
              schemaPathCombined,
              parserConfig.caseSensitive());
      boolean isPrefixDefaultPath = commonPrefix.length() == defaultSchemaCombinedPath.length();
      List<String> fullSchemaPath = Strings.isNullOrEmpty(defaultSchemaCombinedPath) ? schemaPath :
              isPrefixDefaultPath ? schemaPath : ListUtils.union(SchemaUtilites.getSchemaPathAsList(defaultSchema), schemaPath);
      if (names.size() > 1 && (SchemaUtilites.findSchema(this.rootSchema, fullSchemaPath) == null &&
              SchemaUtilites.findSchema(this.rootSchema, schemaPath) == null)) {
        SchemaUtilites.throwSchemaNotFoundException(defaultSchema, schemaPath);
      }
    }

    /**
     * Checks whether the passed table should be treated as possibly temporary, i.e. if:
     * <ul>
     * <li>schema is not indicated (only one element in the names list)</li>
     * <li>current schema or indicated schema is the default temporary workspace</li>
     * </ul>
     *
     * Examples (where dfs.tmp is the default temporary workspace):
     * <ul>
     * <li>select * from t</li>
     * <li>select * from dfs.tmp.t</li>
     * <li>use dfs; select * from tmp.t</li>
     * </ul>
     *
     * @param names             list of schema and table names, table name is always the last element
     * @param defaultSchemaPath current schema path set using USE command
     * @param drillConfig       drill config
     * @return true if check for temporary table should be done, false otherwise
     */
    private boolean mightBeTemporaryTable(List<String> names, String defaultSchemaPath, DrillConfig drillConfig) {
      if (names.size() == 1) {
        return true;
      }

      String schemaPath = SchemaUtilites.getSchemaPath(names.subList(0, names.size() - 1));
      return SchemaUtilites.isTemporaryWorkspace(schemaPath, drillConfig) ||
          SchemaUtilites.isTemporaryWorkspace(
              SchemaUtilites.SCHEMA_PATH_JOINER.join(defaultSchemaPath, schemaPath), drillConfig);
    }
  }

  /**
   * Creates a {@link CalciteConnectionConfigImpl} instance with the specified
   * caseSensitive property.
   *
   * @param caseSensitive whether identifiers should be treated as case sensitive
   * @return {@link CalciteConnectionConfigImpl} instance
   */
  private static CalciteConnectionConfigImpl getConnectionConfig(boolean caseSensitive) {
    Properties properties = new Properties();
    properties.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
        String.valueOf(caseSensitive));
    return new CalciteConnectionConfigImpl(properties);
  }
}