aboutsummaryrefslogtreecommitdiff
path: root/exec/java-exec/src
diff options
context:
space:
mode:
authorAditya Kishore <aditya@maprtech.com>2014-09-11 10:43:08 -0700
committerAditya Kishore <aditya@maprtech.com>2014-09-11 19:25:28 -0700
commit676f5df6b14b10ccc3603360e0efee9c745c5b97 (patch)
tree592b02f84e8a6da2ace67f8e6c0e46d4237af20b /exec/java-exec/src
parent7ae257c42b2eb4e1db778dca9ba64e2516078b38 (diff)
DRILL-1402: Add check-style rules for trailing space, TABs and blocks without braces
Diffstat (limited to 'exec/java-exec/src')
-rw-r--r--exec/java-exec/src/main/java/io/netty/buffer/DrillBuf.java94
-rw-r--r--exec/java-exec/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/cache/CachedVectorContainer.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java58
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java28
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/cache/local/LocalCache.java43
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java37
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java38
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/AbstractClassCompiler.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/ByteCodeLoader.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java71
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/DrillJavaFileObject.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/JDKClassCompiler.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java77
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/InstructionModifier.java43
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/ValueHolderIden.java54
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java19
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/MappingSet.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java37
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/coord/local/LocalClusterCoordinator.java38
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/coord/zk/ZKClusterCoordinator.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java99
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java30
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/DirectExpression.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java30
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFuncHolder.java20
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFunctionRegistry.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillSimpleFuncHolder.java30
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java103
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/ModifiedUnparseVisitor.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteFunctionHelpers.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteSubstring.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CharSubstring.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java74
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsFalse.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotFalse.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotTrue.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsTrue.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctionUtil.java23
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java145
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/VarHelpers.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/memory/Accountor.java49
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/memory/AtomicRemainder.java18
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java45
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/metrics/DrillMetrics.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java30
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractBase.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractPhysicalVisitor.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/Screen.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/SingleMergeExchange.java3
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/UnionExchange.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/OperatorCreatorRegistry.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java31
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TraceInjector.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java13
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java43
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java78
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java2
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java71
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java173
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java68
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java61
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatchBuilder.java39
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java23
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java75
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java24
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java34
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java60
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/trace/TraceRecordBatch.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java15
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorInjector.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java46
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/MSortTemplate.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java90
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelOptCost.java3
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/Fragment.java69
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/MakeFragmentsVisitor.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/SimpleParallelizer.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillAggregateRel.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjIntoScan.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java40
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExprHelper.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashJoinPrule.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToRandomExchangePrel.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/JoinPruleBase.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java59
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SubsetTransformer.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/UnionExchangePrel.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/PrelSequencer.java83
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java28
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/FinalColumnReorderer.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RelUniqifier.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SelectionVectorPrelVisitor.java13
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/ExpandingConcurrentMap.java31
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java20
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java15
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/UseSchemaHandler.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/DrillParserUtil.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateTable.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateView.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDescribeTable.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropView.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowFiles.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowSchemas.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowTables.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractSingleRecordBatch.java13
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java31
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/MajorTypeSerDe.java33
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java54
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/RawFragmentBatch.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaBuilder.java19
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java99
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java31
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java24
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4Builder.java14
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/resolver/DefaultFunctionResolver.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolver.java2
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolverFactory.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java40
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java4
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractHandshakeHandler.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java24
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFutureImpl.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java23
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ProtobufLengthDecoder.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ReconnectingConnection.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ResettableBarrier.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java44
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConfig.java61
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java43
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java37
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ConnectionManagerRegistry.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlClient.java6
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlConnection.java21
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlServer.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClient.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClientConnection.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataServer.java13
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultBatch.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java20
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java3
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java18
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/server/RemoteServiceSet.java17
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValue.java48
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java23
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java18
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java23
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java40
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java22
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java29
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java44
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceConfig.java3
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java39
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java18
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java8
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java26
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java13
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java20
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java15
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java12
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java25
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumnReaders.java18
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoDataType.java30
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java67
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/Writers.java71
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java3
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStore.java35
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStoreProvider.java10
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordWriter.java11
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java16
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java28
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java96
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java101
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java138
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java9
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java36
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReaderWithState.java5
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java53
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java63
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java28
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/ErrorHelper.java7
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlHandlerImpl.java27
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/IncomingBuffers.java20
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java19
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java79
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/QueryStatus.java50
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java34
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/NonRootFragmentManager.java24
-rw-r--r--exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java23
-rw-r--r--exec/java-exec/src/main/resources/drill-module.conf22
-rw-r--r--exec/java-exec/src/main/resources/rest/profile/profile.ftl12
-rw-r--r--exec/java-exec/src/main/resources/rest/www/graph.js4
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java46
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java9
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java12
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java3
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java14
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java5
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java85
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java173
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java22
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestBroadcastExchange.java17
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestComparisonFunctions.java19
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java7
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestHashToRandomExchange.java8
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java50
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSimpleFragmentRun.java16
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestStringFunctions.java24
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestUnionExchange.java8
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java17
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java296
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java78
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java22
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java37
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java15
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java31
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java9
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java49
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestFragmentChecker.java13
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java4
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java9
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/server/TestBitRpc.java11
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/ByteArrayUtil.java65
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java23
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java12
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java20
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java30
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java10
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/util/MiniZooKeeperCluster.java4
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java17
-rw-r--r--exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java3
-rw-r--r--exec/java-exec/src/test/resources/agg/hashagg/q6.json2
-rw-r--r--exec/java-exec/src/test/resources/agg/hashagg/q7_1.json2
-rw-r--r--exec/java-exec/src/test/resources/agg/hashagg/q7_2.json2
-rw-r--r--exec/java-exec/src/test/resources/agg/hashagg/q7_3.json2
-rw-r--r--exec/java-exec/src/test/resources/agg/hashagg/q8_1.json4
-rw-r--r--exec/java-exec/src/test/resources/agg/test1.json16
-rw-r--r--exec/java-exec/src/test/resources/agg/twokey.json18
-rw-r--r--exec/java-exec/src/test/resources/donuts.json249
-rw-r--r--exec/java-exec/src/test/resources/drill-module.conf18
-rw-r--r--exec/java-exec/src/test/resources/drill-spool-test-module.conf28
-rw-r--r--exec/java-exec/src/test/resources/filter/test1.json16
-rw-r--r--exec/java-exec/src/test/resources/filter/test_sv4.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastBigInt.json26
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastFloat4.json28
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastFloat8.json28
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastInt.json26
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastNested.json24
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastNumException.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastVarBinary.json50
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastVarChar.json38
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testCastVarCharNull.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testICastConstant.json54
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testICastMockCol.json36
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/testICastNullExp.json34
-rw-r--r--exec/java-exec/src/test/resources/functions/cast/two_way_implicit_cast.json12
-rw-r--r--exec/java-exec/src/test/resources/functions/comparisonTest.json24
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testCharLength.json34
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testConcat.json16
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testLeft.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testLike.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testLower.json12
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testLpad.json31
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testLtrim.json22
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testPosition.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testRegexpReplace.json17
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testReplace.json22
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testRight.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testRpad.json29
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testRtrim.json22
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testSimilar.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testStringFuncs.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testSubstr.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testTrim.json8
-rw-r--r--exec/java-exec/src/test/resources/functions/string/testUpper.json12
-rw-r--r--exec/java-exec/src/test/resources/functions/testByteSubstring.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/testSubstring.json14
-rw-r--r--exec/java-exec/src/test/resources/functions/testSubstringNegative.json14
-rw-r--r--exec/java-exec/src/test/resources/join/hash_join.json86
-rw-r--r--exec/java-exec/src/test/resources/join/hj_multi_condition_join.json2
-rw-r--r--exec/java-exec/src/test/resources/join/join_batchsize.json14
-rw-r--r--exec/java-exec/src/test/resources/join/merge_inner_single_batch.json6
-rw-r--r--exec/java-exec/src/test/resources/join/merge_multi_batch.json6
-rw-r--r--exec/java-exec/src/test/resources/join/merge_single_batch.json6
-rw-r--r--exec/java-exec/src/test/resources/jsoninput/input1.json4
-rw-r--r--exec/java-exec/src/test/resources/jsoninput/input2.json8
-rw-r--r--exec/java-exec/src/test/resources/jsoninput/vvtypes.json4
-rw-r--r--exec/java-exec/src/test/resources/limit/limit_exchanges.json4
-rw-r--r--exec/java-exec/src/test/resources/limit/test1.json14
-rw-r--r--exec/java-exec/src/test/resources/limit/test2.json14
-rw-r--r--exec/java-exec/src/test/resources/limit/test3.json14
-rw-r--r--exec/java-exec/src/test/resources/limit/test4.json14
-rw-r--r--exec/java-exec/src/test/resources/mock-scan.json12
-rw-r--r--exec/java-exec/src/test/resources/physical_double_exchange.json22
-rw-r--r--exec/java-exec/src/test/resources/physical_join.json4
-rw-r--r--exec/java-exec/src/test/resources/physical_json_scan_test1.json6
-rw-r--r--exec/java-exec/src/test/resources/physical_repeated_1.json6
-rw-r--r--exec/java-exec/src/test/resources/physical_test1.json14
-rw-r--r--exec/java-exec/src/test/resources/physical_test2.json25
-rw-r--r--exec/java-exec/src/test/resources/project/test1.json16
-rw-r--r--exec/java-exec/src/test/resources/queries/tpch.json7
-rw-r--r--exec/java-exec/src/test/resources/queries/tpch/04.sql2
-rw-r--r--exec/java-exec/src/test/resources/queries/tpch/13.sql6
-rw-r--r--exec/java-exec/src/test/resources/queries/tpch/15.sql6
-rw-r--r--exec/java-exec/src/test/resources/queries/tpch/19_1.sql4
-rw-r--r--exec/java-exec/src/test/resources/remover/test1.json14
-rw-r--r--exec/java-exec/src/test/resources/scan_json_test_3.json18
-rw-r--r--exec/java-exec/src/test/resources/scan_json_test_6.json6
-rw-r--r--exec/java-exec/src/test/resources/scan_screen_logical.json6
-rw-r--r--exec/java-exec/src/test/resources/sender/broadcast_exchange.json6
-rw-r--r--exec/java-exec/src/test/resources/server/options_session_check.json2
-rw-r--r--exec/java-exec/src/test/resources/server/options_set.json3
-rw-r--r--exec/java-exec/src/test/resources/simple_plan.json19
-rw-r--r--exec/java-exec/src/test/resources/sort/one_key_sort.json12
-rw-r--r--exec/java-exec/src/test/resources/sort/two_key_sort.json12
-rw-r--r--exec/java-exec/src/test/resources/store/text/test.json4
-rw-r--r--exec/java-exec/src/test/resources/testRepeatedWrite.json146
-rw-r--r--exec/java-exec/src/test/resources/topN/one_key_sort.json12
-rw-r--r--exec/java-exec/src/test/resources/topN/two_key_sort.json12
-rw-r--r--exec/java-exec/src/test/resources/union/test1.json26
-rw-r--r--exec/java-exec/src/test/resources/xsort/one_key_sort_descending.json4
-rw-r--r--exec/java-exec/src/test/resources/xsort/one_key_sort_descending_sv2.json4
-rw-r--r--exec/java-exec/src/test/resources/xsort/oom_sort_test.json4
371 files changed, 5253 insertions, 4033 deletions
diff --git a/exec/java-exec/src/main/java/io/netty/buffer/DrillBuf.java b/exec/java-exec/src/main/java/io/netty/buffer/DrillBuf.java
index 5399239b9..2f9154db6 100644
--- a/exec/java-exec/src/main/java/io/netty/buffer/DrillBuf.java
+++ b/exec/java-exec/src/main/java/io/netty/buffer/DrillBuf.java
@@ -62,7 +62,7 @@ public final class DrillBuf extends AbstractByteBuf {
this.emptyBuffer = false;
}
- private DrillBuf(ByteBuffer bb){
+ private DrillBuf(ByteBuffer bb) {
super(bb.remaining());
UnpooledUnsafeDirectByteBuf bytebuf = new UnpooledUnsafeDirectByteBuf(UnpooledByteBufAllocator.DEFAULT, bb, bb.remaining());
this.acct = FakeAllocator.FAKE_ACCOUNTOR;
@@ -76,7 +76,7 @@ public final class DrillBuf extends AbstractByteBuf {
this.writerIndex(bb.remaining());
}
- private DrillBuf(BufferAllocator allocator, Accountor a){
+ private DrillBuf(BufferAllocator allocator, Accountor a) {
super(0);
this.b = new EmptyByteBuf(allocator.getUnderlyingAllocator()).order(ByteOrder.LITTLE_ENDIAN);
this.allocator = allocator;
@@ -106,24 +106,26 @@ public final class DrillBuf extends AbstractByteBuf {
this.allocator = buffer.allocator;
}
- public void setOperatorContext(OperatorContext c){
+ public void setOperatorContext(OperatorContext c) {
this.context = c;
}
- public void setFragmentContext(FragmentContext c){
+ public void setFragmentContext(FragmentContext c) {
this.fContext = c;
}
- public BufferAllocator getAllocator(){
+ public BufferAllocator getAllocator() {
return allocator;
}
- public DrillBuf reallocIfNeeded(int size){
- if(this.capacity() >= size) return this;
- if(context != null){
+ public DrillBuf reallocIfNeeded(int size) {
+ if (this.capacity() >= size) {
+ return this;
+ }
+ if (context != null) {
return context.replace(this, size);
- }else if(fContext != null){
+ } else if(fContext != null) {
return fContext.replace(this, size);
- }else{
+ } else {
throw new UnsupportedOperationException("Realloc is only available in the context of an operator's UDFs");
}
@@ -138,25 +140,23 @@ public final class DrillBuf extends AbstractByteBuf {
return addr + index;
}
-
-
private final void checkIndexD(int index) {
- ensureAccessible();
- if (index < 0 || index >= capacity()) {
- throw new IndexOutOfBoundsException(String.format(
- "index: %d (expected: range(0, %d))", index, capacity()));
- }
+ ensureAccessible();
+ if (index < 0 || index >= capacity()) {
+ throw new IndexOutOfBoundsException(String.format(
+ "index: %d (expected: range(0, %d))", index, capacity()));
+ }
}
private final void checkIndexD(int index, int fieldLength) {
- ensureAccessible();
- if (fieldLength < 0) {
- throw new IllegalArgumentException("length: " + fieldLength + " (expected: >= 0)");
- }
- if (index < 0 || index > capacity() - fieldLength) {
- throw new IndexOutOfBoundsException(String.format(
- "index: %d, length: %d (expected: range(0, %d))", index, fieldLength, capacity()));
- }
+ ensureAccessible();
+ if (fieldLength < 0) {
+ throw new IllegalArgumentException("length: " + fieldLength + " (expected: >= 0)");
+ }
+ if (index < 0 || index > capacity() - fieldLength) {
+ throw new IndexOutOfBoundsException(String.format(
+ "index: %d, length: %d (expected: range(0, %d))", index, fieldLength, capacity()));
+ }
}
private void chk(int index, int width) {
@@ -210,7 +210,6 @@ public final class DrillBuf extends AbstractByteBuf {
return length;
}
-
@Override
public synchronized ByteBuf capacity(int newCapacity) {
if (rootBuffer) {
@@ -363,20 +362,20 @@ public final class DrillBuf extends AbstractByteBuf {
@Override
public String toString(int index, int length, Charset charset) {
- if (length == 0) {
- return "";
- }
+ if (length == 0) {
+ return "";
+ }
- ByteBuffer nioBuffer;
- if (nioBufferCount() == 1) {
- nioBuffer = nioBuffer(index, length);
- } else {
- nioBuffer = ByteBuffer.allocate(length);
- getBytes(index, nioBuffer);
- nioBuffer.flip();
- }
+ ByteBuffer nioBuffer;
+ if (nioBufferCount() == 1) {
+ nioBuffer = nioBuffer(index, length);
+ } else {
+ nioBuffer = ByteBuffer.allocate(length);
+ getBytes(index, nioBuffer);
+ nioBuffer.flip();
+ }
- return ByteBufUtil.decodeString(nioBuffer, charset);
+ return ByteBufUtil.decodeString(nioBuffer, charset);
}
@Override
@@ -615,10 +614,10 @@ public final class DrillBuf extends AbstractByteBuf {
@Override
protected int _getUnsignedMedium(int index) {
- long addr = addr(index);
- return (PlatformDependent.getByte(addr) & 0xff) << 16 |
- (PlatformDependent.getByte(addr + 1) & 0xff) << 8 |
- PlatformDependent.getByte(addr + 2) & 0xff;
+ long addr = addr(index);
+ return (PlatformDependent.getByte(addr) & 0xff) << 16 |
+ (PlatformDependent.getByte(addr + 1) & 0xff) << 8 |
+ PlatformDependent.getByte(addr + 2) & 0xff;
}
@Override
@@ -659,20 +658,21 @@ public final class DrillBuf extends AbstractByteBuf {
return PlatformDependent.getByte(addr(index));
}
- public static DrillBuf getEmpty(BufferAllocator allocator, Accountor a){
+ public static DrillBuf getEmpty(BufferAllocator allocator, Accountor a) {
return new DrillBuf(allocator, a);
}
- public boolean isRootBuffer(){
+ public boolean isRootBuffer() {
return rootBuffer;
}
- public static DrillBuf wrapByteBuffer(ByteBuffer b){
- if(!b.isDirect()){
+ public static DrillBuf wrapByteBuffer(ByteBuffer b) {
+ if (!b.isDirect()) {
throw new IllegalStateException("DrillBufs can only refer to direct memory.");
- }else{
+ } else {
return new DrillBuf(b);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java b/exec/java-exec/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java
index 2cca5d76a..dfdc114b2 100644
--- a/exec/java-exec/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java
+++ b/exec/java-exec/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java
@@ -29,7 +29,9 @@ public final class UnsafeDirectLittleEndian extends WrappedByteBuf {
UnsafeDirectLittleEndian(PooledUnsafeDirectByteBuf buf) {
super(buf);
- if(!NATIVE_ORDER || buf.order() != ByteOrder.BIG_ENDIAN) throw new IllegalStateException("Drill only runs on LittleEndian systems.");
+ if (!NATIVE_ORDER || buf.order() != ByteOrder.BIG_ENDIAN) {
+ throw new IllegalStateException("Drill only runs on LittleEndian systems.");
+ }
wrapped = buf;
this.memoryAddress = buf.memoryAddress();
}
@@ -188,6 +190,4 @@ public final class UnsafeDirectLittleEndian extends WrappedByteBuf {
return this;
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/CachedVectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/CachedVectorContainer.java
index da0b186ad..ff6c14b82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/CachedVectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/CachedVectorContainer.java
@@ -86,11 +86,13 @@ public class CachedVectorContainer extends LoopedAbstractDrillSerializable {
}
public void clear() {
- if(container != null) container.clear();
+ if (container != null) {
+ container.clear();
+ }
container = null;
}
- public byte[] getData(){
+ public byte[] getData() {
return data;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
index 1e0c9852c..019f9eedf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
@@ -21,7 +21,6 @@ import org.apache.drill.exec.exception.DrillbitStartupException;
import com.google.protobuf.Message;
-
public interface DistributedCache extends AutoCloseable{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DistributedCache.class);
@@ -38,13 +37,15 @@ public interface DistributedCache extends AutoCloseable{
PROTOBUF(String.class, Message.class);
private final Class<?>[] classes;
- private SerializationMode(Class<?>... classes){
+ private SerializationMode(Class<?>... classes) {
this.classes = classes;
}
- public void checkClass(Class<?> classToCheck){
- for(Class<?> c : classes){
- if(c.isAssignableFrom(classToCheck)) return;
+ public void checkClass(Class<?> classToCheck) {
+ for(Class<?> c : classes) {
+ if(c.isAssignableFrom(classToCheck)) {
+ return;
+ }
}
throw new UnsupportedOperationException(String.format("You are trying to serialize the class %s using the serialization mode %s. This is not allowed.", classToCheck.getName(), this.name()));
@@ -102,34 +103,43 @@ public interface DistributedCache extends AutoCloseable{
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
CacheConfig other = (CacheConfig) obj;
if (keyClass == null) {
- if (other.keyClass != null)
+ if (other.keyClass != null) {
return false;
- } else if (!keyClass.equals(other.keyClass))
+ }
+ } else if (!keyClass.equals(other.keyClass)) {
return false;
- if (mode != other.mode)
+ }
+ if (mode != other.mode) {
return false;
+ }
if (name == null) {
- if (other.name != null)
+ if (other.name != null) {
return false;
- } else if (!name.equals(other.name))
+ }
+ } else if (!name.equals(other.name)) {
return false;
+ }
if (valueClass == null) {
- if (other.valueClass != null)
+ if (other.valueClass != null) {
return false;
- } else if (!valueClass.equals(other.valueClass))
+ }
+ } else if (!valueClass.equals(other.valueClass)) {
return false;
+ }
return true;
}
-
}
public static class CacheConfigBuilder<K, V> {
@@ -145,40 +155,38 @@ public interface DistributedCache extends AutoCloseable{
this.name = keyClass.getName();
}
-
- public CacheConfigBuilder<K, V> mode(SerializationMode mode){
+ public CacheConfigBuilder<K, V> mode(SerializationMode mode) {
this.mode = mode;
return this;
}
- public CacheConfigBuilder<K, V> proto(){
+ public CacheConfigBuilder<K, V> proto() {
this.mode = SerializationMode.PROTOBUF;
return this;
}
- public CacheConfigBuilder<K, V> jackson(){
+ public CacheConfigBuilder<K, V> jackson() {
this.mode = SerializationMode.JACKSON;
return this;
}
- public CacheConfigBuilder<K, V> drill(){
+ public CacheConfigBuilder<K, V> drill() {
this.mode = SerializationMode.DRILL_SERIALIZIABLE;
return this;
}
- public CacheConfigBuilder<K, V> name(String name){
+ public CacheConfigBuilder<K, V> name(String name) {
this.name = name;
return this;
}
- public CacheConfig<K, V> build(){
+ public CacheConfig<K, V> build() {
mode.checkClass(keyClass);
mode.checkClass(valueClass);
return new CacheConfig<K, V>(keyClass, valueClass, name, mode);
}
-
-
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
index fc9775ca7..8e2ce96cc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
@@ -65,7 +65,7 @@ public class VectorAccessibleSerializable extends AbstractStreamSerializable {
this.va = new VectorContainer();
}
- public VectorAccessibleSerializable(WritableBatch batch, BufferAllocator allocator){
+ public VectorAccessibleSerializable(WritableBatch batch, BufferAllocator allocator) {
this(batch, null, allocator);
}
@@ -135,7 +135,6 @@ public class VectorAccessibleSerializable extends AbstractStreamSerializable {
writeToStream(output);
}
-
/**
* Serializes the VectorAccessible va and writes it to an output stream
* @param output the OutputStream to write to
@@ -153,29 +152,25 @@ public class VectorAccessibleSerializable extends AbstractStreamSerializable {
DrillBuf svBuf = null;
Integer svCount = null;
- if (svMode == BatchSchema.SelectionVectorMode.TWO_BYTE)
- {
+ if (svMode == BatchSchema.SelectionVectorMode.TWO_BYTE) {
svCount = sv2.getCount();
svBuf = sv2.getBuffer(); //this calls retain() internally
}
- try
- {
- /* Write the metadata to the file */
+ try {
+ /* Write the metadata to the file */
batchDef.writeDelimitedTo(output);
- /* If we have a selection vector, dump it to file first */
- if (svBuf != null)
- {
+ /* If we have a selection vector, dump it to file first */
+ if (svBuf != null) {
svBuf.getBytes(0, output, svBuf.readableBytes());
sv2.setBuffer(svBuf);
svBuf.release(); // sv2 now owns the buffer
sv2.setRecordCount(svCount);
}
- /* Dump the array of ByteBuf's associated with the value vectors */
- for (DrillBuf buf : incomingBuffers)
- {
+ /* Dump the array of ByteBuf's associated with the value vectors */
+ for (DrillBuf buf : incomingBuffers) {
/* dump the buffer into the OutputStream */
int bufLength = buf.readableBytes();
buf.getBytes(0, output, bufLength);
@@ -184,8 +179,7 @@ public class VectorAccessibleSerializable extends AbstractStreamSerializable {
output.flush();
timerContext.stop();
- } catch (IOException e)
- {
+ } catch (IOException e) {
throw new RuntimeException(e);
} finally {
clear();
@@ -195,7 +189,9 @@ public class VectorAccessibleSerializable extends AbstractStreamSerializable {
public void clear() {
if (!retain) {
batch.clear();
- if(sv2 != null) sv2.clear();
+ if (sv2 != null) {
+ sv2.clear();
+ }
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/local/LocalCache.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/local/LocalCache.java
index 7328257d9..99ead1c81 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/local/LocalCache.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/local/LocalCache.java
@@ -115,12 +115,11 @@ public class LocalCache implements DistributedCache {
}
private static BytesHolder serialize(Object obj, SerializationMode mode) {
- if(obj instanceof String){
+ if (obj instanceof String) {
return new BytesHolder( ((String)obj).getBytes(Charsets.UTF_8));
}
- try{
- switch(mode){
-
+ try{
+ switch (mode) {
case DRILL_SERIALIZIABLE: {
ByteArrayDataOutput out = ByteStreams.newDataOutput();
OutputStream outputStream = DataOutputOutputStream.constructOutputStream(out);
@@ -139,7 +138,7 @@ public class LocalCache implements DistributedCache {
return new BytesHolder(( (Message) obj).toByteArray());
}
- }catch(Exception e){
+ } catch (Exception e) {
throw new RuntimeException(e);
}
@@ -148,14 +147,13 @@ public class LocalCache implements DistributedCache {
private static <V> V deserialize(BytesHolder b, SerializationMode mode, Class<V> clazz) {
byte[] bytes = b.bytes;
- try{
+ try {
if (clazz == String.class) {
return (V) new String(bytes, Charsets.UTF_8);
}
switch (mode) {
-
case DRILL_SERIALIZIABLE: {
InputStream inputStream = new ByteArrayInputStream(bytes);
V obj = clazz.getConstructor(BufferAllocator.class).newInstance(allocator);
@@ -174,13 +172,15 @@ public class LocalCache implements DistributedCache {
parser = (Parser<V>) f.get(null);
}
}
- if (parser == null) throw new UnsupportedOperationException(String.format("Unable to find parser for class %s.", clazz.getName()));
+ if (parser == null) {
+ throw new UnsupportedOperationException(String.format("Unable to find parser for class %s.", clazz.getName()));
+ }
InputStream inputStream = new ByteArrayInputStream(bytes);
return parser.parseFrom(inputStream);
}
}
- }catch(Exception e){
+ } catch (Exception e) {
throw new RuntimeException(e);
}
@@ -189,9 +189,10 @@ public class LocalCache implements DistributedCache {
private static class BytesHolder {
final byte[] bytes;
- public BytesHolder(byte[] bytes){
+ public BytesHolder(byte[] bytes) {
this.bytes = bytes;
}
+
@Override
public int hashCode() {
final int prime = 31;
@@ -199,21 +200,25 @@ public class LocalCache implements DistributedCache {
result = prime * result + Arrays.hashCode(bytes);
return result;
}
+
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
BytesHolder other = (BytesHolder) obj;
- if (!Arrays.equals(bytes, other.bytes))
+ if (!Arrays.equals(bytes, other.bytes)) {
return false;
+ }
return true;
}
-
}
static class LocalDistributedMultiMapImpl<K, V> implements DistributedMultiMap<K, V> {
@@ -288,13 +293,15 @@ public class LocalCache implements DistributedCache {
@Override
public V get(K key) {
BytesHolder b = m.get(serialize(key, config.getMode()));
- if(b == null) return null;
+ if (b == null) {
+ return null;
+ }
return (V) deserialize(b, config.getMode(), config.getValueClass());
}
@Override
public Iterable<Entry<K, V>> getLocalEntries() {
- return new Iterable<Entry<K, V>>(){
+ return new Iterable<Entry<K, V>>() {
@Override
public Iterator<Entry<K, V>> iterator() {
return new DeserializingTransformer(m.entrySet().iterator());
@@ -376,7 +383,6 @@ public class LocalCache implements DistributedCache {
}
-
}
public static class LocalCounterImpl implements Counter {
@@ -397,4 +403,5 @@ public class LocalCache implements DistributedCache {
return al.decrementAndGet();
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
index 510d63714..74cc6a6e7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
@@ -88,11 +88,11 @@ public class DrillClient implements Closeable, ConnectionThrottle{
this(config, null);
}
- public DrillClient(DrillConfig config, ClusterCoordinator coordinator){
+ public DrillClient(DrillConfig config, ClusterCoordinator coordinator) {
this(config, coordinator, null);
}
- public DrillClient(DrillConfig config, ClusterCoordinator coordinator, BufferAllocator allocator){
+ public DrillClient(DrillConfig config, ClusterCoordinator coordinator, BufferAllocator allocator) {
this.ownsZkConnection = coordinator == null;
this.ownsAllocator = allocator == null;
this.allocator = allocator == null ? new TopLevelAllocator(config) : allocator;
@@ -103,7 +103,7 @@ public class DrillClient implements Closeable, ConnectionThrottle{
this.supportComplexTypes = config.getBoolean(ExecConstants.CLIENT_SUPPORT_COMPLEX_TYPES);
}
- public DrillConfig getConfig(){
+ public DrillConfig getConfig() {
return config;
}
@@ -139,7 +139,9 @@ public class DrillClient implements Closeable, ConnectionThrottle{
}
public synchronized void connect(String connect, Properties props) throws RpcException {
- if (connected) return;
+ if (connected) {
+ return;
+ }
if (ownsZkConnection) {
try {
@@ -152,8 +154,9 @@ public class DrillClient implements Closeable, ConnectionThrottle{
if (props != null) {
UserProperties.Builder upBuilder = UserProperties.newBuilder();
- for(String key : props.stringPropertyNames())
+ for (String key : props.stringPropertyNames()) {
upBuilder.addProperties(Property.newBuilder().setKey(key).setValue(props.getProperty(key)));
+ }
this.props = upBuilder.build();
}
@@ -210,10 +213,14 @@ public class DrillClient implements Closeable, ConnectionThrottle{
/**
* Closes this client's connection to the server
*/
- public void close(){
- if(this.client != null) this.client.close();
- if(this.ownsAllocator && allocator != null) allocator.close();
- if(ownsZkConnection){
+ public void close() {
+ if (this.client != null) {
+ this.client.close();
+ }
+ if (this.ownsAllocator && allocator != null) {
+ allocator.close();
+ }
+ if(ownsZkConnection) {
try {
this.clusterCoordinator.close();
} catch (IOException e) {
@@ -240,7 +247,7 @@ public class DrillClient implements Closeable, ConnectionThrottle{
return listener.getResults();
}
- public DrillRpcFuture<Ack> cancelQuery(QueryId id){
+ public DrillRpcFuture<Ack> cancelQuery(QueryId id) {
logger.debug("Cancelling query {}", QueryIdHelper.getQueryId(id));
return client.send(RpcType.CANCEL_QUERY, id, Ack.class);
}
@@ -253,7 +260,7 @@ public class DrillClient implements Closeable, ConnectionThrottle{
* @return a handle for the query result
* @throws RpcException
*/
- public void runQuery(QueryType type, String plan, UserResultsListener resultsListener){
+ public void runQuery(QueryType type, String plan, UserResultsListener resultsListener) {
client.submitQuery(resultsListener, newBuilder().setResultsMode(STREAM_FULL).setType(type).setPlan(plan).build());
}
@@ -294,15 +301,15 @@ public class DrillClient implements Closeable, ConnectionThrottle{
public void resultArrived(QueryResultBatch result, ConnectionThrottle throttle) {
// logger.debug("Result arrived. Is Last Chunk: {}. Full Result: {}", result.getHeader().getIsLastChunk(), result);
results.add(result);
- if(result.getHeader().getIsLastChunk()){
+ if (result.getHeader().getIsLastChunk()) {
future.set(results);
}
}
public List<QueryResultBatch> getResults() throws RpcException{
- try{
+ try {
return future.get();
- }catch(Throwable t){
+ } catch (Throwable t) {
throw RpcException.mapException(t);
}
}
@@ -328,7 +335,7 @@ public class DrillClient implements Closeable, ConnectionThrottle{
getInner().setException(new RpcException(String.format("Failure connecting to server. Failure of type %s.", type.name()), t));
}
- private SettableFuture<Void> getInner(){
+ private SettableFuture<Void> getInner() {
return (SettableFuture<Void>) delegate();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
index 54a5a3ab5..55d9cf3ee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
@@ -87,7 +87,7 @@ public class DumpCat {
public void validate(String name, String value) throws ParameterException {
try {
int batch = Integer.parseInt(value);
- if(batch < 0) {
+ if (batch < 0) {
throw new ParameterException("Parameter " + name + " should be non-negative number.");
}
} catch (NumberFormatException e) {
@@ -140,10 +140,11 @@ public class DumpCat {
@Override
public String toString() {
String avgRecSizeStr = null;
- if (this.rows>0)
+ if (this.rows>0) {
avgRecSizeStr = String.format("Average Record Size : %d ", this.dataSize/this.rows);
- else
+ } else {
avgRecSizeStr = "Average Record Size : 0";
+ }
return String.format("Records : %d / %d \n", this.selectedRows, this.rows) +
avgRecSizeStr +
@@ -175,28 +176,29 @@ public class DumpCat {
while (input.available() > 0) {
VectorAccessibleSerializable vcSerializable = new VectorAccessibleSerializable(DumpCat.allocator);
vcSerializable.readFromStream(input);
- VectorContainer vectorContainer = (VectorContainer) vcSerializable.get();
+ VectorContainer vectorContainer = (VectorContainer) vcSerializable.get();
- aggBatchMetaInfo.add(getBatchMetaInfo(vcSerializable));
+ aggBatchMetaInfo.add(getBatchMetaInfo(vcSerializable));
- if (vectorContainer.getRecordCount() == 0) {
- emptyBatchNum ++;
- }
+ if (vectorContainer.getRecordCount() == 0) {
+ emptyBatchNum ++;
+ }
- if (prevSchema != null && !vectorContainer.getSchema().equals(prevSchema))
- schemaChangeIdx.add(batchNum);
+ if (prevSchema != null && !vectorContainer.getSchema().equals(prevSchema)) {
+ schemaChangeIdx.add(batchNum);
+ }
- prevSchema = vectorContainer.getSchema();
- batchNum ++;
+ prevSchema = vectorContainer.getSchema();
+ batchNum ++;
- vectorContainer.zeroVectors();
+ vectorContainer.zeroVectors();
}
- /* output the summary stat */
- System.out.println(String.format("Total # of batches: %d", batchNum));
- //output: rows, selectedRows, avg rec size, total data size.
- System.out.println(aggBatchMetaInfo.toString());
- System.out.println(String.format("Empty batch : %d", emptyBatchNum));
+ /* output the summary stat */
+ System.out.println(String.format("Total # of batches: %d", batchNum));
+ //output: rows, selectedRows, avg rec size, total data size.
+ System.out.println(aggBatchMetaInfo.toString());
+ System.out.println(String.format("Empty batch : %d", emptyBatchNum));
System.out.println(String.format("Schema changes : %d", schemaChangeIdx.size()));
System.out.println(String.format("Schema change batch index : %s", schemaChangeIdx.toString()));
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
index 3302e7cc6..1ed3cb349 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
@@ -94,7 +94,9 @@ public class PrintingResultsListener implements UserResultsListener {
public int await() throws Exception {
latch.await();
- if(exception != null) throw exception;
+ if (exception != null) {
+ throw exception;
+ }
return count.get();
}
@@ -106,4 +108,5 @@ public class PrintingResultsListener implements UserResultsListener {
public void queryIdArrived(QueryId queryId) {
this.queryId = queryId;
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
index 2f5c02767..3a6dad014 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/QuerySubmitter.java
@@ -106,7 +106,7 @@ public class QuerySubmitter {
RemoteServiceSet serviceSet = null;
Drillbit[] drillbits = null;
- try{
+ try {
if (local) {
serviceSet = RemoteServiceSet.getLocalServiceSet();
drillbits = new Drillbit[bits];
@@ -133,10 +133,12 @@ public class QuerySubmitter {
} catch(Throwable th) {
System.err.println("Query Failed due to : " + th.getMessage());
return -1;
- }finally{
- if(client != null) client.close();
- if(local){
- for(Drillbit b : drillbits){
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ if (local) {
+ for (Drillbit b : drillbits) {
b.close();
}
serviceSet.close();
@@ -151,7 +153,7 @@ public class QuerySubmitter {
String[] queries;
QueryType queryType;
type = type.toLowerCase();
- switch(type) {
+ switch (type) {
case "sql":
queryType = QueryType.SQL;
queries = plan.trim().split(";");
@@ -171,7 +173,7 @@ public class QuerySubmitter {
Format outputFormat;
format = format.toLowerCase();
- switch(format) {
+ switch (format) {
case "csv":
outputFormat = Format.CSV;
break;
@@ -201,4 +203,5 @@ public class QuerySubmitter {
return 0;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/AbstractClassCompiler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/AbstractClassCompiler.java
index 98c51c64e..f4a3cc9ca 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/AbstractClassCompiler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/AbstractClassCompiler.java
@@ -33,14 +33,16 @@ public abstract class AbstractClassCompiler {
public byte[][] getClassByteCode(ClassNames className, String sourceCode)
throws CompileException, IOException, ClassNotFoundException, ClassTransformationException {
- if(getLogger().isDebugEnabled()){
+ if (getLogger().isDebugEnabled()) {
getLogger().debug("Compiling (source size={}):\n{}", DrillStringUtils.readable(sourceCode.length()), prefixLineNumbers(sourceCode));
}
return getByteCode(className, sourceCode);
}
protected String prefixLineNumbers(String code) {
- if (!debug) return code;
+ if (!debug) {
+ return code;
+ }
StringBuilder out = new StringBuilder();
int i = 1;
@@ -49,7 +51,7 @@ public abstract class AbstractClassCompiler {
out.append(i++);
int numLength = out.length() - start;
out.append(":");
- for (int spaces = 0; spaces < 7 - numLength; ++spaces){
+ for (int spaces = 0; spaces < 7 - numLength; ++spaces) {
out.append(" ");
}
out.append(line);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ByteCodeLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ByteCodeLoader.java
index 704a199aa..c11d02d4e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ByteCodeLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ByteCodeLoader.java
@@ -40,8 +40,9 @@ class ByteCodeLoader {
@Override
public byte[] load(String path) throws ClassTransformationException, IOException {
URL u = this.getClass().getResource(path);
- if (u == null)
+ if (u == null) {
throw new ClassTransformationException(String.format("Unable to find TemplateClass at path %s", path));
+ }
return Resources.toByteArray(u);
}
};
@@ -51,11 +52,14 @@ class ByteCodeLoader {
return byteCode.get(path);
} catch (ExecutionException e) {
Throwable c = e.getCause();
- if (c instanceof ClassTransformationException)
+ if (c instanceof ClassTransformationException) {
throw (ClassTransformationException) c;
- if (c instanceof IOException)
+ }
+ if (c instanceof IOException) {
throw (IOException) c;
+ }
throw new ClassTransformationException(c);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
index bb24b57a6..2d69ca319 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
@@ -61,11 +61,11 @@ public class ClassTransformer {
String.format("The new name of a class cannot start with the old name of a class, otherwise class renaming will cause problems. Precompiled class name %s. Generated class name %s", precompiled, generated));
}
- public ClassSet getChild(String precompiled, String generated){
+ public ClassSet getChild(String precompiled, String generated) {
return new ClassSet(this, precompiled, generated);
}
- public ClassSet getChild(String precompiled){
+ public ClassSet getChild(String precompiled) {
return new ClassSet(this, precompiled, precompiled.replace(this.precompiled.dot, this.generated.dot));
}
@@ -81,41 +81,49 @@ public class ClassTransformer {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
ClassSet other = (ClassSet) obj;
if (generated == null) {
- if (other.generated != null)
+ if (other.generated != null) {
return false;
- } else if (!generated.equals(other.generated))
+ }
+ } else if (!generated.equals(other.generated)) {
return false;
+ }
if (parent == null) {
- if (other.parent != null)
+ if (other.parent != null) {
return false;
- } else if (!parent.equals(other.parent))
+ }
+ } else if (!parent.equals(other.parent)) {
return false;
+ }
if (precompiled == null) {
- if (other.precompiled != null)
+ if (other.precompiled != null) {
return false;
- } else if (!precompiled.equals(other.precompiled))
+ }
+ } else if (!precompiled.equals(other.precompiled)) {
return false;
+ }
return true;
}
-
}
- public static class ClassNames{
+ public static class ClassNames {
public final String dot;
public final String slash;
public final String clazz;
- public ClassNames(String className){
+ public ClassNames(String className) {
dot = className;
slash = className.replace('.', FileUtils.separatorChar);
clazz = FileUtils.separatorChar + slash + ".class";
@@ -133,28 +141,37 @@ public class ClassTransformer {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
ClassNames other = (ClassNames) obj;
if (clazz == null) {
- if (other.clazz != null)
+ if (other.clazz != null) {
return false;
- } else if (!clazz.equals(other.clazz))
+ }
+ } else if (!clazz.equals(other.clazz)) {
return false;
+ }
if (dot == null) {
- if (other.dot != null)
+ if (other.dot != null) {
return false;
- } else if (!dot.equals(other.dot))
+ }
+ } else if (!dot.equals(other.dot)) {
return false;
+ }
if (slash == null) {
- if (other.slash != null)
+ if (other.slash != null) {
return false;
- } else if (!slash.equals(other.slash))
+ }
+ } else if (!slash.equals(other.slash)) {
return false;
+ }
return true;
}
}
@@ -179,7 +196,7 @@ public class ClassTransformer {
long totalBytecodeSize = 0;
Map<String, ClassNode> classesToMerge = Maps.newHashMap();
- for(byte[] clazz : implementationClasses) {
+ for (byte[] clazz : implementationClasses) {
totalBytecodeSize += clazz.length;
ClassNode node = getClassNodeFromByteCode(clazz);
classesToMerge.put(node.name, node);
@@ -191,14 +208,16 @@ public class ClassTransformer {
while ( !names.isEmpty() ) {
final ClassSet nextSet = names.removeFirst();
- if (namesCompleted.contains(nextSet)) continue;
+ if (namesCompleted.contains(nextSet)) {
+ continue;
+ }
final ClassNames nextPrecompiled = nextSet.precompiled;
final byte[] precompiledBytes = byteCodeLoader.getClassByteCodeFromPath(nextPrecompiled.clazz);
ClassNames nextGenerated = nextSet.generated;
ClassNode generatedNode = classesToMerge.get(nextGenerated.slash);
MergedClassResult result = MergeAdapter.getMergedClass(nextSet, precompiledBytes, generatedNode);
- for(String s : result.innerClasses) {
+ for (String s : result.innerClasses) {
s = s.replace(FileUtils.separatorChar, '.');
names.add(nextSet.getChild(s));
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/DrillJavaFileObject.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/DrillJavaFileObject.java
index 024fd01b3..acc32b521 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/DrillJavaFileObject.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/DrillJavaFileObject.java
@@ -83,8 +83,9 @@ final class DrillJavaFileObject extends SimpleJavaFileObject {
@Override
public CharSequence getCharContent(final boolean ignoreEncodingErrors) throws IOException {
- if (sourceCode == null)
+ if (sourceCode == null) {
throw new UnsupportedOperationException("This instance of DrillJavaFileObject is not an input object.");
+ }
return sourceCode;
}
@@ -106,4 +107,4 @@ final class DrillJavaFileObject extends SimpleJavaFileObject {
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/JDKClassCompiler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/JDKClassCompiler.java
index f056489d7..2e101dcd2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/JDKClassCompiler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/JDKClassCompiler.java
@@ -74,8 +74,12 @@ class JDKClassCompiler extends AbstractClassCompiler {
Throwable cause = rte.getCause();
if (cause != null) {
cause = cause.getCause();
- if (cause instanceof CompileException) throw (CompileException) cause;
- if (cause instanceof IOException) throw (IOException) cause;
+ if (cause instanceof CompileException) {
+ throw (CompileException) cause;
+ }
+ if (cause instanceof IOException) {
+ throw (IOException) cause;
+ }
}
throw rte;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
index 945c94a22..6a6be5041 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
@@ -64,11 +64,15 @@ class MergeAdapter extends ClassVisitor {
super(Opcodes.ASM4, cv);
this.classToMerge = cn;
this.set = set;
- for(Object o : classToMerge.methods){
+ for (Object o : classToMerge.methods) {
String name = ((MethodNode)o).name;
- if(name.equals("<init>")) continue;
- if(name.equals(SignatureHolder.DRILL_INIT_METHOD)) hasInit = true;
- mergingNames.add( name);
+ if (name.equals("<init>")) {
+ continue;
+ }
+ if (name.equals(SignatureHolder.DRILL_INIT_METHOD)) {
+ hasInit = true;
+ }
+ mergingNames.add(name);
}
}
@@ -99,9 +103,9 @@ class MergeAdapter extends ClassVisitor {
public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
// use the access and names of the impl class.
this.name = name;
- if(name.contains("$")){
+ if (name.contains("$")) {
super.visit(version, access, name, signature, superName, interfaces);
- }else{
+ } else {
super.visit(version, access ^ Modifier.ABSTRACT | Modifier.FINAL, name, signature, superName, interfaces);
}
@@ -111,7 +115,6 @@ class MergeAdapter extends ClassVisitor {
@Override
public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
-
// finalize all methods.
// skip all abstract methods as they should have implementations.
@@ -120,17 +123,17 @@ class MergeAdapter extends ClassVisitor {
// logger.debug("Skipping copy of '{}()' since it is abstract or listed elsewhere.", arg1);
return null;
}
- if(signature != null){
+ if (signature != null) {
signature = signature.replace(set.precompiled.slash, set.generated.slash);
}
- // if( (access & Modifier.PUBLIC) == 0){
+ // if ((access & Modifier.PUBLIC) == 0) {
// access = access ^ Modifier.PUBLIC ^ Modifier.PROTECTED | Modifier.PRIVATE;
// }
MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
if (!name.equals("<init>")) {
access = access | Modifier.FINAL;
- }else{
- if(hasInit){
+ } else {
+ if (hasInit) {
return new DrillInitMethodVisitor(this.name, mv);
}
}
@@ -150,7 +153,9 @@ class MergeAdapter extends ClassVisitor {
for (Iterator<?> it = classToMerge.methods.iterator(); it.hasNext();) {
MethodNode mn = (MethodNode) it.next();
- if (mn.name.equals("<init>")) continue;
+ if (mn.name.equals("<init>")) {
+ continue;
+ }
String[] exceptions = new String[mn.exceptions.size()];
mn.exceptions.toArray(exceptions);
@@ -161,7 +166,9 @@ class MergeAdapter extends ClassVisitor {
// mn.accept(new RemappingMethodAdapter(mn.access, mn.desc, mv, new
// SimpleRemapper("org.apache.drill.exec.compile.ExampleTemplate", "Bunky")));
ClassSet top = set;
- while(top.parent != null) top = top.parent;
+ while (top.parent != null) {
+ top = top.parent;
+ }
mn.accept(new RemappingMethodAdapter(mn.access, mn.desc, mv, new SimpleRemapper(top.precompiled.slash, top.generated.slash)));
}
@@ -173,7 +180,6 @@ class MergeAdapter extends ClassVisitor {
return super.visitField(access, name, desc, signature, value);
}
-
public static class MergedClassResult{
public byte[] bytes;
public Collection<String> innerClasses;
@@ -183,7 +189,6 @@ class MergeAdapter extends ClassVisitor {
this.innerClasses = innerClasses;
}
-
}
public static MergedClassResult getMergedClass(ClassSet set, byte[] precompiledClass, ClassNode generatedClass) throws IOException{
@@ -193,15 +198,15 @@ class MergeAdapter extends ClassVisitor {
ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
RemapClasses re = new RemapClasses(set);
- try{
-// if(generatedClass != null){
+ try {
+// if(generatedClass != null) {
// ClassNode generatedMerged = new ClassNode();
// generatedClass.accept(new ValueHolderReplacementVisitor(generatedMerged));
// generatedClass = generatedMerged;
// }
ClassVisitor remappingAdapter = new RemappingClassAdapter(writer, re);
ClassVisitor visitor = remappingAdapter;
- if(generatedClass != null){
+ if (generatedClass != null) {
visitor = new MergeAdapter(set, remappingAdapter, generatedClass);
}
ClassReader tReader = new ClassReader(precompiledClass);
@@ -212,7 +217,7 @@ class MergeAdapter extends ClassVisitor {
// Files.write(outputClass, new File(String.format("/src/scratch/drill-generated-classes/%s-output.class", set.generated.dot)));
return new MergedClassResult(outputClass, re.getInnerClasses());
- }catch(Error | RuntimeException e){
+ } catch (Error | RuntimeException e) {
logger.error("Failure while merging classes.", e);
throw e;
}
@@ -228,7 +233,9 @@ class MergeAdapter extends ClassVisitor {
super();
this.current = set;
ClassSet top = set;
- while(top.parent != null) top = top.parent;
+ while (top.parent != null) {
+ top = top.parent;
+ }
this.top = top;
}
@@ -239,7 +246,7 @@ class MergeAdapter extends ClassVisitor {
if (typeName.startsWith(top.precompiled.slash)) {
// write down all the sub classes.
- if (typeName.startsWith(current.precompiled.slash + "$")){
+ if (typeName.startsWith(current.precompiled.slash + "$")) {
innerClasses.add(typeName);
}
@@ -258,31 +265,31 @@ class MergeAdapter extends ClassVisitor {
Exception e = null;
String error = "";
- try{
- ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
- ClassVisitor cv = new CheckClassAdapter(cw, true);
- node.accept(cv);
+ try {
+ ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
+ ClassVisitor cv = new CheckClassAdapter(cw, true);
+ node.accept(cv);
- StringWriter sw = new StringWriter();
- PrintWriter pw = new PrintWriter(sw);
- CheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false, pw);
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw);
+ CheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false, pw);
- error = sw.toString();
- }catch(Exception ex){
+ error = sw.toString();
+ } catch (Exception ex) {
e = ex;
}
- if(!error.isEmpty() || e != null){
+ if (!error.isEmpty() || e != null) {
StringWriter sw2 = new StringWriter();
PrintWriter pw2 = new PrintWriter(sw2);
TraceClassVisitor v = new TraceClassVisitor(pw2);
node.accept(v);
- if(e != null){
+ if (e != null) {
throw new RuntimeException("Failure validating class. ByteCode: \n" + sw2.toString() + "\n\n====ERRROR====\n" + error, e);
- }else{
+ } else {
throw new RuntimeException("Failure validating class. ByteCode: \n" + sw2.toString() + "\n\n====ERRROR====\n" + error);
}
-
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
index 398d109ca..e1ac7a8a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
@@ -76,19 +76,21 @@ public class QueryClassLoader extends URLClassLoader {
compilerSelector = new ClassCompilerSelector(config, sessionOptions);
}
- public long getNextClassIndex(){
+ public long getNextClassIndex() {
return index.getAndIncrement();
}
public void injectByteCode(String className, byte[] classBytes) throws IOException {
- if(customClasses.containsKey(className)) throw new IOException(String.format("The class defined {} has already been loaded.", className));
+ if (customClasses.containsKey(className)) {
+ throw new IOException(String.format("The class defined {} has already been loaded.", className));
+ }
customClasses.put(className, classBytes);
}
@Override
protected Class<?> findClass(String className) throws ClassNotFoundException {
byte[] ba = customClasses.get(className);
- if(ba != null){
+ if (ba != null) {
return this.defineClass(className, ba, 0, ba.length);
}else{
return super.findClass(className);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/InstructionModifier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/InstructionModifier.java
index e736aab36..4585bd8b9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/InstructionModifier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/InstructionModifier.java
@@ -61,8 +61,9 @@ public class InstructionModifier extends MethodVisitor {
private ReplacingBasicValue popCurrent(boolean includeReturnVals) {
// for vararg, we could try to pop an empty stack. TODO: handle this better.
- if (list.currentFrame.getStackSize() == 0)
+ if (list.currentFrame.getStackSize() == 0) {
return null;
+ }
Object o = list.currentFrame.pop();
if (o instanceof ReplacingBasicValue) {
@@ -76,8 +77,9 @@ public class InstructionModifier extends MethodVisitor {
private ReplacingBasicValue getReturn() {
Object o = list.nextFrame.getStack(list.nextFrame.getStackSize() - 1);
- if (o instanceof ReplacingBasicValue)
+ if (o instanceof ReplacingBasicValue) {
return (ReplacingBasicValue) o;
+ }
return null;
}
@@ -85,8 +87,9 @@ public class InstructionModifier extends MethodVisitor {
public void visitInsn(int opcode) {
switch (opcode) {
case Opcodes.DUP:
- if (popCurrent() != null)
+ if (popCurrent() != null) {
return;
+ }
}
super.visitInsn(opcode);
}
@@ -111,14 +114,14 @@ public class InstructionModifier extends MethodVisitor {
@Override
public void visitVarInsn(int opcode, int var) {
ReplacingBasicValue v;
- if(opcode == Opcodes.ASTORE && (v = popCurrent(true)) != null){
- if(!v.isFunctionReturn){
+ if (opcode == Opcodes.ASTORE && (v = popCurrent(true)) != null) {
+ if (!v.isFunctionReturn) {
ValueHolderSub from = oldToNew.get(v.getIndex());
ReplacingBasicValue current = local(var);
// if local var is set, then transfer to it to the existing holders in the local position.
- if(current != null){
- if(oldToNew.get(current.getIndex()).iden() == from.iden()){
+ if (current != null) {
+ if (oldToNew.get(current.getIndex()).iden() == from.iden()) {
int targetFirst = oldToNew.get(current.index).first();
from.transfer(this, targetFirst);
return;
@@ -126,9 +129,9 @@ public class InstructionModifier extends MethodVisitor {
}
// if local var is not set, then check map to see if existing holders are mapped to local var.
- if(oldLocalToFirst.containsKey(var)){
+ if (oldLocalToFirst.containsKey(var)) {
ValueHolderSub sub = oldToNew.get(oldLocalToFirst.lget());
- if(sub.iden() == from.iden()){
+ if (sub.iden() == from.iden()) {
// if they are, then transfer to that.
from.transfer(this, oldToNew.get(oldLocalToFirst.lget()).first());
return;
@@ -139,13 +142,13 @@ public class InstructionModifier extends MethodVisitor {
// map from variables to global space for future use.
oldLocalToFirst.put(var, v.getIndex());
- }else{
+ } else {
// this is storage of a function return, we need to map the fields to the holder spots.
int first;
- if(oldLocalToFirst.containsKey(var)){
+ if (oldLocalToFirst.containsKey(var)) {
first = oldToNew.get(oldLocalToFirst.lget()).first();
v.iden.transferToLocal(adder, first);
- }else{
+ } else {
first = v.iden.createLocalAndTrasfer(adder);
}
ValueHolderSub from = v.iden.getHolderSubWithDefinedLocals(first);
@@ -153,14 +156,12 @@ public class InstructionModifier extends MethodVisitor {
v.disableFunctionReturn();
}
- }else if(opcode == Opcodes.ALOAD && (v = getReturn()) != null){
-
+ } else if (opcode == Opcodes.ALOAD && (v = getReturn()) != null) {
// noop.
- }else{
+ } else {
super.visitVarInsn(opcode, var);
}
-
}
void directVarInsn(int opcode, int var) {
@@ -176,10 +177,10 @@ public class InstructionModifier extends MethodVisitor {
// pop twice for put.
v = popCurrent(true);
if (v != null) {
- if(v.isFunctionReturn){
+ if (v.isFunctionReturn) {
super.visitFieldInsn(opcode, owner, name, desc);
return;
- }else{
+ } else {
// we are trying to store a replaced variable in an external context, we need to generate an instance and
// transfer it out.
ValueHolderSub sub = oldToNew.get(v.getIndex());
@@ -197,8 +198,6 @@ public class InstructionModifier extends MethodVisitor {
sub.addInsn(name, this, opcode);
return;
}
-
-
}
super.visitFieldInsn(opcode, owner, name, desc);
@@ -246,14 +245,14 @@ public class InstructionModifier extends MethodVisitor {
}
private void checkArg(String name, ReplacingBasicValue obj) {
- if (obj == null)
+ if (obj == null) {
return;
+ }
throw new IllegalStateException(
String
.format(
"Holder types are not allowed to be passed between methods. Ran across problem attempting to invoke method '%s' on line number %d",
name, lastLineNumber));
-
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/ValueHolderIden.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/ValueHolderIden.java
index 1e10eeb6e..a0ce390e1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/ValueHolderIden.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/bytecode/ValueHolderIden.java
@@ -38,8 +38,8 @@ class ValueHolderIden {
Field[] fields = c.getFields();
List<Field> fldList = Lists.newArrayList();
- for(Field f : fields){
- if(!Modifier.isStatic(f.getModifiers())) {
+ for (Field f : fields) {
+ if (!Modifier.isStatic(f.getModifiers())) {
fldList.add(f);
}
}
@@ -48,7 +48,7 @@ class ValueHolderIden {
this.names = new String[fldList.size()];
fieldMap = new ObjectIntOpenHashMap<String>();
int i =0;
- for(Field f : fldList){
+ for (Field f : fldList) {
types[i] = Type.getType(f.getType());
names[i] = f.getName();
fieldMap.put(f.getName(), i);
@@ -56,8 +56,8 @@ class ValueHolderIden {
}
}
- private static void initType(int index, Type t, DirectSorter v){
- switch(t.getSort()){
+ private static void initType(int index, Type t, DirectSorter v) {
+ switch(t.getSort()) {
case Type.BOOLEAN:
case Type.BYTE:
case Type.CHAR:
@@ -97,30 +97,28 @@ class ValueHolderIden {
}
return new ValueHolderSub(first);
-
}
- public ValueHolderSub getHolderSubWithDefinedLocals(int first){
+ public ValueHolderSub getHolderSubWithDefinedLocals(int first) {
return new ValueHolderSub(first);
}
- private int dup(Type t){
+ private int dup(Type t) {
return t.getSize() == 1 ? Opcodes.DUP : Opcodes.DUP2;
}
- public void transferToLocal(DirectSorter adder, int localVariable){
+ public void transferToLocal(DirectSorter adder, int localVariable) {
for (int i = 0; i < types.length; i++) {
Type t = types[i];
- if(i + 1 < types.length) adder.visitInsn(dup(t)); // don't dup for last value.
+ if (i + 1 < types.length) {
+ adder.visitInsn(dup(t)); // don't dup for last value.
+ }
adder.visitFieldInsn(Opcodes.GETFIELD, type.getInternalName(), names[i], t.getDescriptor());
adder.directVarInsn(t.getOpcode(Opcodes.ISTORE), localVariable+i);
}
}
-
-
-
- public int createLocalAndTrasfer(DirectSorter adder){
+ public int createLocalAndTrasfer(DirectSorter adder) {
int first = 0;
for (int i = 0; i < types.length; i++) {
Type t = types[i];
@@ -141,29 +139,31 @@ class ValueHolderIden {
this.first = first;
}
- public ValueHolderIden iden(){
+ public ValueHolderIden iden() {
return ValueHolderIden.this;
}
- public void init(DirectSorter mv){
+ public void init(DirectSorter mv) {
for (int i = 0; i < types.length; i++) {
initType(first+i, types[i], mv);
}
}
- public int size(){
+ public int size() {
return types.length;
}
- public int first(){
+ public int first() {
return first;
}
- public void updateFirst(int newFirst){
+ public void updateFirst(int newFirst) {
this.first = newFirst;
}
private int field(String name, InstructionModifier mv) {
- if (!fieldMap.containsKey(name)) throw new IllegalArgumentException(String.format("Unknown name '%s' on line %d.", name, mv.lastLineNumber));
+ if (!fieldMap.containsKey(name)) {
+ throw new IllegalArgumentException(String.format("Unknown name '%s' on line %d.", name, mv.lastLineNumber));
+ }
return fieldMap.lget();
}
@@ -178,9 +178,11 @@ class ValueHolderIden {
}
}
- public void transfer(InstructionModifier mv, int newStart){
- if(first == newStart) return;
- for(int i =0; i < types.length; i++){
+ public void transfer(InstructionModifier mv, int newStart) {
+ if (first == newStart) {
+ return;
+ }
+ for (int i =0; i < types.length; i++) {
mv.directVarInsn(types[i].getOpcode(Opcodes.ILOAD), first + i);
mv.directVarInsn(types[i].getOpcode(Opcodes.ISTORE), newStart + i);
}
@@ -193,7 +195,7 @@ class ValueHolderIden {
mv.directVarInsn(t.getOpcode(analogOpcode), first + f);
}
- public void transferToExternal(DirectSorter adder, String owner, String name, String desc){
+ public void transferToExternal(DirectSorter adder, String owner, String name, String desc) {
// create a new object and assign it to the desired field.
adder.visitTypeInsn(Opcodes.NEW, type.getInternalName());
@@ -212,10 +214,8 @@ class ValueHolderIden {
// lastly we save it to the desired field.
adder.visitFieldInsn(Opcodes.PUTFIELD, owner, name, desc);
-
}
}
-
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
index a7b5680b1..9df346cea 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
@@ -26,7 +26,7 @@ import com.google.common.collect.Iterators;
import com.thoughtworks.paranamer.AnnotationParanamer;
import com.thoughtworks.paranamer.Paranamer;
-public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument>{
+public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CodeGeneratorMethod.class);
private final String methodName;
@@ -35,7 +35,7 @@ public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument>{
private final Class<?>[] exs;
private final Method underlyingMethod;
- public CodeGeneratorMethod(String name, Class<?> returnType){
+ public CodeGeneratorMethod(String name, Class<?> returnType) {
this.methodName = name;
this.returnType = returnType;
this.exs = new Class<?>[0];
@@ -43,18 +43,22 @@ public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument>{
this.arguments = new CodeGeneratorArgument[0];
}
- public CodeGeneratorMethod(Method m){
+ public CodeGeneratorMethod(Method m) {
this.underlyingMethod = m;
this.methodName = m.getName();
this.returnType = m.getReturnType();
// Paranamer para = new BytecodeReadingParanamer();
Paranamer para = new AnnotationParanamer();
String[] parameterNames = para.lookupParameterNames(m, true);
- if(parameterNames == null) throw new RuntimeException(String.format("Unable to read the parameter names for method %s. This is likely due to the class files not including the appropriate debugging information. Look up java -g for more information.", m));
+ if (parameterNames == null) {
+ throw new RuntimeException(String.format("Unable to read the parameter names for method %s. This is likely due to the class files not including the appropriate debugging information. Look up java -g for more information.", m));
+ }
Class<?>[] types = m.getParameterTypes();
- if(parameterNames.length != types.length) throw new RuntimeException(String.format("Unexpected number of parameter names %s. Expected %s on method %s.", Arrays.toString(parameterNames), Arrays.toString(types), m.toGenericString()));
+ if (parameterNames.length != types.length) {
+ throw new RuntimeException(String.format("Unexpected number of parameter names %s. Expected %s on method %s.", Arrays.toString(parameterNames), Arrays.toString(types), m.toGenericString()));
+ }
arguments = new CodeGeneratorArgument[parameterNames.length];
- for(int i =0 ; i < parameterNames.length; i++){
+ for (int i = 0 ; i < parameterNames.length; i++) {
arguments[i] = new CodeGeneratorArgument(parameterNames[i], types[i]);
}
exs = m.getExceptionTypes();
@@ -67,7 +71,7 @@ public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument>{
return returnType;
}
- public Iterable<Class<?>> getThrowsIterable(){
+ public Iterable<Class<?>> getThrowsIterable() {
return ImmutableList.copyOf(exs);
}
@@ -81,5 +85,4 @@ public class CodeGeneratorMethod implements Iterable<CodeGeneratorArgument>{
return "CodeGeneratorMethod [" + underlyingMethod.toGenericString() + "]";
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/MappingSet.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/MappingSet.java
index 80aaae840..fbc586f1d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/MappingSet.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/MappingSet.java
@@ -42,7 +42,7 @@ public class MappingSet {
this("inIndex", "outIndex", new GeneratorMapping[] { mapping, mapping });
}
- public boolean hasEmbeddedConstant(){
+ public boolean hasEmbeddedConstant() {
return constant == current;
}
@@ -98,16 +98,18 @@ public class MappingSet {
public void enterChild() {
assert current == mappings[mappingIndex];
mappingIndex++;
- if (mappingIndex >= mappings.length)
+ if (mappingIndex >= mappings.length) {
throw new IllegalStateException("This generator does not support mappings beyond");
+ }
current = mappings[mappingIndex];
}
public void exitChild() {
assert current == mappings[mappingIndex];
mappingIndex--;
- if (mappingIndex < 0)
+ if (mappingIndex < 0) {
throw new IllegalStateException("You tried to traverse higher than the provided mapping provides.");
+ }
current = mappings[mappingIndex];
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
index d397accac..7fe8e3b4e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
@@ -29,7 +29,7 @@ import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
+public class SignatureHolder implements Iterable<CodeGeneratorMethod> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SignatureHolder.class);
private final Class<?> signature;
@@ -40,17 +40,19 @@ public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
public static final String DRILL_INIT_METHOD = "__DRILL_INIT__";
public static final CodeGeneratorMethod DRILL_INIT = new CodeGeneratorMethod(DRILL_INIT_METHOD, void.class);
- public static SignatureHolder getHolder(Class<?> signature){
+ public static SignatureHolder getHolder(Class<?> signature) {
List<SignatureHolder> innerClasses = Lists.newArrayList();
- for(Class<?> inner : signature.getClasses()){
+ for (Class<?> inner : signature.getClasses()) {
SignatureHolder h = getHolder(inner);
- if(h.childHolders.length > 0 || h.methods.length > 0) innerClasses.add(h);
+ if (h.childHolders.length > 0 || h.methods.length > 0) {
+ innerClasses.add(h);
+ }
}
return new SignatureHolder(signature, innerClasses.toArray(new SignatureHolder[innerClasses.size()]));
}
- private SignatureHolder(Class<?> signature, SignatureHolder[] childHolders){
+ private SignatureHolder(Class<?> signature, SignatureHolder[] childHolders) {
this.childHolders = childHolders;
this.signature = signature;
Map<String, Integer> newMap = Maps.newHashMap();
@@ -58,30 +60,33 @@ public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
List<CodeGeneratorMethod> methodHolders = Lists.newArrayList();
Method[] reflectMethods = signature.getDeclaredMethods();
- for(Method m : reflectMethods){
- if( (m.getModifiers() & Modifier.ABSTRACT) == 0 && m.getAnnotation(RuntimeOverridden.class) == null) continue;
+ for (Method m : reflectMethods) {
+ if ( (m.getModifiers() & Modifier.ABSTRACT) == 0 && m.getAnnotation(RuntimeOverridden.class) == null) {
+ continue;
+ }
methodHolders.add(new CodeGeneratorMethod(m));
}
methods = new CodeGeneratorMethod[methodHolders.size()+1];
- for(int i =0; i < methodHolders.size(); i++){
+ for (int i =0; i < methodHolders.size(); i++) {
methods[i] = methodHolders.get(i);
Integer old = newMap.put(methods[i].getMethodName(), i);
- if(old != null) throw new IllegalStateException(String.format("Attempting to add a method with name %s when there is already one method of that name in this class that is set to be runtime generated.", methods[i].getMethodName()));
+ if (old != null) {
+ throw new IllegalStateException(String.format("Attempting to add a method with name %s when there is already one method of that name in this class that is set to be runtime generated.", methods[i].getMethodName()));
+ }
}
methods[methodHolders.size()] = DRILL_INIT;
newMap.put(DRILL_INIT.getMethodName(), methodHolders.size());
methodMap = ImmutableMap.copyOf(newMap);
-
}
- public Class<?> getSignatureClass(){
+ public Class<?> getSignatureClass() {
return signature;
}
- public CodeGeneratorMethod get(int i){
+ public CodeGeneratorMethod get(int i) {
return methods[i];
}
@@ -90,7 +95,7 @@ public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
return Iterators.forArray(methods);
}
- public int size(){
+ public int size() {
return methods.length;
}
@@ -99,10 +104,9 @@ public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
return childHolders;
}
-
- public int get(String method){
+ public int get(String method) {
Integer meth = methodMap.get(method);
- if(meth == null){
+ if (meth == null) {
throw new IllegalStateException(String.format("Unknown method requested of name %s.", method));
}
return meth;
@@ -115,5 +119,4 @@ public class SignatureHolder implements Iterable<CodeGeneratorMethod>{
+ (methods != null ? Arrays.asList(methods).subList(0, Math.min(methods.length, maxLen)) : null) + "]";
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/local/LocalClusterCoordinator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/local/LocalClusterCoordinator.java
index 2ddf280e5..035c1aa2c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/local/LocalClusterCoordinator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/local/LocalClusterCoordinator.java
@@ -31,7 +31,7 @@ import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import com.google.common.collect.Maps;
-public class LocalClusterCoordinator extends ClusterCoordinator{
+public class LocalClusterCoordinator extends ClusterCoordinator {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LocalClusterCoordinator.class);
private volatile Map<RegistrationHandle, DrillbitEndpoint> endpoints = Maps.newConcurrentMap();
@@ -57,7 +57,9 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
@Override
public void unregister(RegistrationHandle handle) {
- if(handle == null) return;
+ if(handle == null) {
+ return;
+ }
endpoints.remove(handle);
}
@@ -67,7 +69,6 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
return endpoints.values();
}
-
private class Handle implements RegistrationHandle{
UUID id = UUID.randomUUID();
@@ -82,14 +83,26 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
@Override
public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
Handle other = (Handle) obj;
- if (!getOuterType().equals(other.getOuterType())) return false;
+ if (!getOuterType().equals(other.getOuterType())) {
+ return false;
+ }
if (id == null) {
- if (other.id != null) return false;
- } else if (!id.equals(other.id)) return false;
+ if (other.id != null) {
+ return false;
+ }
+ } else if (!id.equals(other.id)) {
+ return false;
+ }
return true;
}
@@ -99,7 +112,6 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
}
-
@Override
public DistributedSemaphore getSemaphore(String name, int maximumLeases) {
semaphores.putIfAbsent(name, new LocalSemaphore(maximumLeases));
@@ -111,20 +123,19 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
private final Semaphore inner;
private final LocalLease lease = new LocalLease();
- public LocalSemaphore(int size){
+ public LocalSemaphore(int size) {
inner = new Semaphore(size);
}
@Override
public DistributedLease acquire(long timeout, TimeUnit unit) throws Exception {
- if(!inner.tryAcquire(timeout, unit)){
+ if(!inner.tryAcquire(timeout, unit)) {
return null;
}else{
return lease;
}
}
-
private class LocalLease implements DistributedLease{
@Override
@@ -135,5 +146,4 @@ public class LocalClusterCoordinator extends ClusterCoordinator{
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/zk/ZKClusterCoordinator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/zk/ZKClusterCoordinator.java
index 76ad90b5f..7f538d2cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/zk/ZKClusterCoordinator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/zk/ZKClusterCoordinator.java
@@ -75,7 +75,7 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
// check if this is a complex zk string. If so, parse into components.
Matcher m = ZK_COMPLEX_STRING.matcher(connect);
- if(m.matches()){
+ if(m.matches()) {
connect = m.group(1);
zkRoot = m.group(2);
clusterId = m.group(3);
@@ -101,7 +101,7 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
.build();
}
- public CuratorFramework getCurator(){
+ public CuratorFramework getCurator() {
return curator;
}
@@ -112,9 +112,11 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
serviceCache.start();
serviceCache.addListener(new ZKListener());
- if(millisToWait != 0){
+ if(millisToWait != 0) {
boolean success = this.initialConnection.await(millisToWait, TimeUnit.MILLISECONDS);
- if(!success) throw new IOException(String.format("Failure to connect to the zookeeper cluster service within the allotted time of %d milliseconds.", millisToWait));
+ if (!success) {
+ throw new IOException(String.format("Failure to connect to the zookeeper cluster service within the allotted time of %d milliseconds.", millisToWait));
+ }
}else{
this.initialConnection.await();
}
@@ -126,7 +128,7 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
@Override
public void stateChanged(CuratorFramework client, ConnectionState newState) {
- if(newState == ConnectionState.CONNECTED){
+ if(newState == ConnectionState.CONNECTED) {
ZKClusterCoordinator.this.initialConnection.countDown();
client.getConnectionStateListenable().removeListener(this);
}
@@ -166,7 +168,9 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
@Override
public void unregister(RegistrationHandle handle) {
- if (!(handle instanceof ZKRegistrationHandle)) throw new UnsupportedOperationException("Unknown handle type: " + handle.getClass().getName());
+ if (!(handle instanceof ZKRegistrationHandle)) {
+ throw new UnsupportedOperationException("Unknown handle type: " + handle.getClass().getName());
+ }
ZKRegistrationHandle h = (ZKRegistrationHandle) handle;
try {
@@ -223,4 +227,5 @@ public class ZKClusterCoordinator extends ClusterCoordinator {
.serializer(DrillServiceInstanceHelper.SERIALIZER)
.build();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
index a1132d743..8b6e1e3dd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
@@ -139,8 +139,9 @@ public class View {
@JsonIgnore
public boolean hasStar() {
for (FieldType field : fields) {
- if (StarColumnHelper.isNonPrefixedStarColumn(field.name))
+ if (StarColumnHelper.isNonPrefixedStarColumn(field.name)) {
return true;
+ }
}
return false;
}
@@ -165,6 +166,4 @@ public class View {
return workspaceSchemaPath;
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
index 69d68bbe2..e0c429646 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
@@ -81,7 +81,7 @@ public class ClassGenerator<T>{
private int labelIndex = 0;
private MappingSet mappings;
- public static MappingSet getDefaultMapping(){
+ public static MappingSet getDefaultMapping() {
return new MappingSet("inIndex", "outIndex", DEFAULT_CONSTANT_MAP, DEFAULT_SCALAR_MAP);
}
@@ -94,29 +94,29 @@ public class ClassGenerator<T>{
this.evaluationVisitor = eval;
this.model = model;
blocks = (LinkedList<JBlock>[]) new LinkedList[sig.size()];
- for(int i =0; i < sig.size(); i++){
+ for (int i =0; i < sig.size(); i++) {
blocks[i] = Lists.newLinkedList();
}
rotateBlock();
- for(SignatureHolder child : signature.getChildHolders()){
+ for (SignatureHolder child : signature.getChildHolders()) {
String innerClassName = child.getSignatureClass().getSimpleName();
JDefinedClass innerClazz = clazz._class(Modifier.FINAL + Modifier.PRIVATE, innerClassName);
innerClasses.put(innerClassName, new ClassGenerator<>(codeGenerator, mappingSet, child, eval, innerClazz, model));
}
}
- public ClassGenerator<T> getInnerGenerator(String name){
+ public ClassGenerator<T> getInnerGenerator(String name) {
ClassGenerator<T> inner = innerClasses.get(name);
Preconditions.checkNotNull(inner);
return inner;
}
- public MappingSet getMappingSet(){
+ public MappingSet getMappingSet() {
return mappings;
}
- public void setMappingSet(MappingSet mappings){
+ public void setMappingSet(MappingSet mappings) {
this.mappings = mappings;
}
@@ -124,30 +124,30 @@ public class ClassGenerator<T>{
return codeGenerator;
}
- private GeneratorMapping getCurrentMapping(){
+ private GeneratorMapping getCurrentMapping() {
return mappings.getCurrentMapping();
}
- public JBlock getBlock(String methodName){
+ public JBlock getBlock(String methodName) {
JBlock blk = this.blocks[sig.get(methodName)].getLast();
Preconditions.checkNotNull(blk, "Requested method name of %s was not available for signature %s.", methodName, this.sig);
return blk;
}
- public JBlock getBlock(BlockType type){
+ public JBlock getBlock(BlockType type) {
return getBlock(getCurrentMapping().getMethodName(type));
}
- public JBlock getSetupBlock(){
+ public JBlock getSetupBlock() {
return getBlock(getCurrentMapping().getMethodName(BlockType.SETUP));
}
- public JBlock getEvalBlock(){
+ public JBlock getEvalBlock() {
return getBlock(getCurrentMapping().getMethodName(BlockType.EVAL));
}
- public JBlock getResetBlock(){
+ public JBlock getResetBlock() {
return getBlock(getCurrentMapping().getMethodName(BlockType.RESET));
}
- public JBlock getCleanupBlock(){
+ public JBlock getCleanupBlock() {
return getBlock(getCurrentMapping().getMethodName(BlockType.CLEANUP));
}
@@ -165,11 +165,11 @@ public class ClassGenerator<T>{
return getEvalBlock().label(prefix + labelIndex ++);
}
- public JVar declareVectorValueSetupAndMember(String batchName, TypedFieldId fieldId){
+ public JVar declareVectorValueSetupAndMember(String batchName, TypedFieldId fieldId) {
return declareVectorValueSetupAndMember( DirectExpression.direct(batchName), fieldId);
}
- public JVar declareVectorValueSetupAndMember(DirectExpression batchName, TypedFieldId fieldId){
+ public JVar declareVectorValueSetupAndMember(DirectExpression batchName, TypedFieldId fieldId) {
final ValueVectorSetup setup = new ValueVectorSetup(batchName, fieldId);
// JVar var = this.vvDeclaration.get(setup);
// if(var != null) return var;
@@ -178,7 +178,7 @@ public class ClassGenerator<T>{
JClass vvClass = model.ref(valueVectorClass);
JClass retClass = vvClass;
String vectorAccess = "getValueVector";
- if(fieldId.isHyperReader()){
+ if (fieldId.isHyperReader()) {
retClass = retClass.array();
vectorAccess = "getValueVectors";
}
@@ -191,7 +191,7 @@ public class ClassGenerator<T>{
JVar fieldArr = b.decl(model.INT.array(), "fieldIds" + index++, JExpr.newArray(model.INT, fieldId.getFieldIds().length));
int[] fieldIndices = fieldId.getFieldIds();
- for(int i = 0; i < fieldIndices.length; i++){
+ for (int i = 0; i < fieldIndices.length; i++) {
b.assign(fieldArr.component(JExpr.lit(i)), JExpr.lit(fieldIndices[i]));
}
@@ -213,18 +213,20 @@ public class ClassGenerator<T>{
return vv;
}
- public HoldingContainer addExpr(LogicalExpression ex){
+ public HoldingContainer addExpr(LogicalExpression ex) {
return addExpr(ex, true);
}
- public HoldingContainer addExpr(LogicalExpression ex, boolean rotate){
+ public HoldingContainer addExpr(LogicalExpression ex, boolean rotate) {
// logger.debug("Adding next write {}", ex);
- if(rotate) rotateBlock();
+ if (rotate) {
+ rotateBlock();
+ }
return evaluationVisitor.addExpr(ex, this);
}
- public void rotateBlock(){
- for(LinkedList<JBlock> b : blocks){
+ public void rotateBlock() {
+ for (LinkedList<JBlock> b : blocks) {
b.add(new JBlock(true, true));
}
}
@@ -249,11 +251,11 @@ public class ClassGenerator<T>{
if (blocksInMethod > MAX_BLOCKS_IN_FUNCTION) {
JMethod inner = clazz.method(JMod.PRIVATE, model._ref(method.getReturnType()), method.getMethodName() + methodIndex);
JInvocation methodCall = JExpr.invoke(inner);
- for(CodeGeneratorArgument arg : method){
+ for (CodeGeneratorArgument arg : method) {
inner.param(arg.getType(), arg.getName());
methodCall.arg(JExpr.direct(arg.getName()));
}
- for(Class<?> c : method.getThrowsIterable()){
+ for (Class<?> c : method.getThrowsIterable()) {
inner._throws(model.ref(c));
}
inner._throws(SchemaChangeException.class);
@@ -286,32 +288,32 @@ public class ClassGenerator<T>{
return "v" + index++;
}
- public String getNextVar(String prefix){
+ public String getNextVar(String prefix) {
return prefix + index++;
}
- public JVar declareClassField(String prefix, JType t){
+ public JVar declareClassField(String prefix, JType t) {
return clazz.field(JMod.NONE, t, prefix + index++);
}
- public JVar declareClassField(String prefix, JType t, JExpression init){
+ public JVar declareClassField(String prefix, JType t, JExpression init) {
return clazz.field(JMod.NONE, t, prefix + index++, init);
}
- public HoldingContainer declare(MajorType t){
+ public HoldingContainer declare(MajorType t) {
return declare(t, true);
}
- public HoldingContainer declare(MajorType t, boolean includeNewInstance){
+ public HoldingContainer declare(MajorType t, boolean includeNewInstance) {
JType holderType = getHolderType(t);
JVar var;
- if(includeNewInstance){
+ if (includeNewInstance) {
var = getEvalBlock().decl(holderType, "out" + index, JExpr._new(holderType));
- }else{
+ } else {
var = getEvalBlock().decl(holderType, "out" + index);
}
JFieldRef outputSet = null;
- if(t.getMode() == DataMode.OPTIONAL){
+ if (t.getMode() == DataMode.OPTIONAL) {
outputSet = var.ref("isSet");
}
index++;
@@ -347,23 +349,30 @@ public class ClassGenerator<T>{
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
ValueVectorSetup other = (ValueVectorSetup) obj;
if (batch == null) {
- if (other.batch != null)
+ if (other.batch != null) {
return false;
- } else if (!batch.equals(other.batch))
+ }
+ } else if (!batch.equals(other.batch)) {
return false;
+ }
if (fieldId == null) {
- if (other.fieldId != null)
+ if (other.fieldId != null) {
return false;
- } else if (!fieldId.equals(other.fieldId))
+ }
+ } else if (!fieldId.equals(other.fieldId)) {
return false;
+ }
return true;
}
@@ -396,7 +405,7 @@ public class ClassGenerator<T>{
return this.isReader;
}
- public boolean isSingularRepeated(){
+ public boolean isSingularRepeated() {
return singularRepeated;
}
@@ -405,7 +414,7 @@ public class ClassGenerator<T>{
return this;
}
- public JFieldRef f(String name){
+ public JFieldRef f(String name) {
return holder.ref(name);
}
@@ -421,7 +430,7 @@ public class ClassGenerator<T>{
return value;
}
- public MajorType getMajorType(){
+ public MajorType getMajorType() {
return type;
}
@@ -430,11 +439,11 @@ public class ClassGenerator<T>{
return isSet;
}
- public boolean isOptional(){
+ public boolean isOptional() {
return type.getMode() == DataMode.OPTIONAL;
}
- public boolean isRepeated(){
+ public boolean isRepeated() {
return type.getMode() == DataMode.REPEATED;
}
@@ -443,7 +452,7 @@ public class ClassGenerator<T>{
}
}
- public JType getHolderType(MajorType t){
+ public JType getHolderType(MajorType t) {
return TypeHelper.getHolderType(model, t.getMinorType(), t.getMode());
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
index 73c598007..4495ffeb2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
@@ -93,7 +93,7 @@ public class CodeGenerator<T> {
return generatedCode;
}
- public String getGeneratedCode(){
+ public String getGeneratedCode() {
return generatedCode;
}
@@ -111,12 +111,12 @@ public class CodeGenerator<T> {
}
public static <T> ClassGenerator<T> getRoot(TemplateClassDefinition<T> definition,
- FunctionImplementationRegistry funcRegistry){
+ FunctionImplementationRegistry funcRegistry) {
return get(definition, funcRegistry).getRoot();
}
public static <T> ClassGenerator<T> getRoot(MappingSet mappingSet, TemplateClassDefinition<T> definition,
- FunctionImplementationRegistry funcRegistry){
+ FunctionImplementationRegistry funcRegistry) {
return get(mappingSet, definition, funcRegistry).getRoot();
}
@@ -136,27 +136,31 @@ public class CodeGenerator<T> {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
CodeGenerator other = (CodeGenerator) obj;
if (definition == null) {
- if (other.definition != null)
+ if (other.definition != null) {
return false;
- } else if (!definition.equals(other.definition))
+ }
+ } else if (!definition.equals(other.definition)) {
return false;
+ }
if (generatedCode == null) {
- if (other.generatedCode != null)
+ if (other.generatedCode != null) {
return false;
- } else if (!generatedCode.equals(other.generatedCode))
+ }
+ } else if (!generatedCode.equals(other.generatedCode)) {
return false;
+ }
return true;
}
-
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/DirectExpression.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/DirectExpression.java
index bbf00c72c..c4c3e7a21 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/DirectExpression.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/DirectExpression.java
@@ -48,20 +48,24 @@ public class DirectExpression extends JExpressionImpl{
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
DirectExpression other = (DirectExpression) obj;
if (source == null) {
- if (other.source != null)
+ if (other.source != null) {
return false;
- } else if (!source.equals(other.source))
+ }
+ } else if (!source.equals(other.source)) {
return false;
+ }
return true;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index 1e671583b..a5b7beeb3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -83,9 +83,9 @@ public class EvaluationVisitor {
public HoldingContainer addExpr(LogicalExpression e, ClassGenerator<?> generator) {
Set<LogicalExpression> constantBoundaries;
- if(generator.getMappingSet().hasEmbeddedConstant()){
+ if (generator.getMappingSet().hasEmbeddedConstant()) {
constantBoundaries = Collections.emptySet();
- }else{
+ } else {
constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(e);
}
return e.accept(new ConstantFilter(constantBoundaries), generator);
@@ -104,7 +104,7 @@ public class EvaluationVisitor {
ClassGenerator<?> generator) throws RuntimeException {
if (op.getName().equals("booleanAnd")) {
return visitBooleanAnd(op, generator);
- }else if(op.getName().equals("booleanOr")) {
+ } else if(op.getName().equals("booleanOr")) {
return visitBooleanOr(op, generator);
} else {
throw new UnsupportedOperationException("BooleanOperator can only be booleanAnd, booleanOr. You are using " + op.getName());
@@ -119,8 +119,9 @@ public class EvaluationVisitor {
JVar[] workspaceVars = holder.renderStart(generator, null);
- if (holder.isNested())
+ if (holder.isNested()) {
generator.getMappingSet().enterChild();
+ }
HoldingContainer[] args = new HoldingContainer[holderExpr.args.size()];
for (int i = 0; i < holderExpr.args.size(); i++) {
@@ -129,8 +130,9 @@ public class EvaluationVisitor {
holder.renderMiddle(generator, args, workspaceVars);
- if (holder.isNested())
+ if (holder.isNested()) {
generator.getMappingSet().exitChild();
+ }
return holder.renderEnd(generator, args, workspaceVars);
}
@@ -392,8 +394,9 @@ public class EvaluationVisitor {
if (seg.isArray()) {
// stop once we get to the last segment and the final type is neither complex nor repeated (map, list, repeated list).
// In case of non-complex and non-repeated type, we return Holder, in stead of FieldReader.
- if (seg.isLastPath() && !complex && !repeated)
+ if (seg.isLastPath() && !complex && !repeated) {
break;
+ }
JVar list = generator.declareClassField("list", generator.getModel()._ref(FieldReader.class));
eval.assign(list, expr);
@@ -466,11 +469,13 @@ public class EvaluationVisitor {
*/
private boolean isNullReaderLikely(PathSegment seg, boolean complexOrRepeated) {
while (seg != null) {
- if (seg.isArray() && !seg.isLastPath())
+ if (seg.isArray() && !seg.isLastPath()) {
return true;
+ }
- if (seg.isLastPath() && complexOrRepeated)
+ if (seg.isLastPath() && complexOrRepeated) {
return true;
+ }
seg = seg.getChild();
}
@@ -624,9 +629,9 @@ public class EvaluationVisitor {
JBlock earlyExit = null;
if (arg.isOptional()) {
earlyExit = eval._if(arg.getIsSet().eq(JExpr.lit(1)).cand(arg.getValue().ne(JExpr.lit(1))))._then();
- if(e == null){
+ if (e == null) {
e = arg.getIsSet();
- }else{
+ } else {
e = e.mul(arg.getIsSet());
}
} else {
@@ -687,9 +692,9 @@ public class EvaluationVisitor {
JBlock earlyExit = null;
if (arg.isOptional()) {
earlyExit = eval._if(arg.getIsSet().eq(JExpr.lit(1)).cand(arg.getValue().eq(JExpr.lit(1))))._then();
- if(e == null){
+ if (e == null) {
e = arg.getIsSet();
- }else{
+ } else {
e = e.mul(arg.getIsSet());
}
} else {
@@ -1033,4 +1038,5 @@ public class EvaluationVisitor {
.setConstant(true);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
index 7b4c3d66a..2854c1400 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
@@ -93,9 +93,9 @@ public class ExpressionTreeMaterializer {
out = out.accept(ConditionalExprOptimizer.INSTANCE, null);
}
- if(out instanceof NullExpression){
+ if (out instanceof NullExpression) {
return new TypedNullConstant(Types.optional(MinorType.INT));
- }else{
+ } else {
return out;
}
}
@@ -258,7 +258,7 @@ public class ExpressionTreeMaterializer {
boolean first = true;
for(LogicalExpression e : call.args) {
TypeProtos.MajorType mt = e.getMajorType();
- if(first){
+ if (first) {
first = false;
} else {
sb.append(", ");
@@ -482,7 +482,7 @@ public class ExpressionTreeMaterializer {
}
@Override
- public LogicalExpression visitCastExpression(CastExpression e, FunctionImplementationRegistry value){
+ public LogicalExpression visitCastExpression(CastExpression e, FunctionImplementationRegistry value) {
// if the cast is pointless, remove it.
LogicalExpression input = e.getInput().accept(this, value);
@@ -490,9 +490,11 @@ public class ExpressionTreeMaterializer {
MajorType newMajor = e.getMajorType();
MinorType newMinor = input.getMajorType().getMinorType();
- if(castEqual(e.getPosition(), newMajor, input.getMajorType())) return input; // don't do pointless cast.
+ if (castEqual(e.getPosition(), newMajor, input.getMajorType())) {
+ return input; // don't do pointless cast.
+ }
- if(newMinor == MinorType.LATE){
+ if (newMinor == MinorType.LATE) {
// if the type still isn't fully bound, leave as cast expression.
return new CastExpression(input, e.getMajorType(), e.getPosition());
} else if (newMinor == MinorType.NULL) {
@@ -520,9 +522,11 @@ public class ExpressionTreeMaterializer {
}
}
- private boolean castEqual(ExpressionPosition pos, MajorType from, MajorType to){
- if(!from.getMinorType().equals(to.getMinorType())) return false;
- switch(from.getMinorType()){
+ private boolean castEqual(ExpressionPosition pos, MajorType from, MajorType to) {
+ if (!from.getMinorType().equals(to.getMinorType())) {
+ return false;
+ }
+ switch(from.getMinorType()) {
case FLOAT4:
case FLOAT8:
case INT:
@@ -564,10 +568,10 @@ public class ExpressionTreeMaterializer {
case VAR16CHAR:
case VARBINARY:
case VARCHAR:
- if(to.getWidth() < from.getWidth() && to.getWidth() > 0){
+ if (to.getWidth() < from.getWidth() && to.getWidth() > 0) {
this.errorCollector.addGeneralError(pos, "Casting from a longer variable length type to a shorter variable length type is not currently supported.");
return false;
- }else{
+ } else {
return true;
}
@@ -577,4 +581,5 @@ public class ExpressionTreeMaterializer {
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFuncHolder.java
index fcc0b35d1..185273160 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFuncHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFuncHolder.java
@@ -123,17 +123,17 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
WorkspaceReference ref = workspaceVars[i];
JType jtype = g.getModel()._ref(ref.type);
- if(ScalarReplacementTypes.CLASSES.contains(ref.type)){
+ if (ScalarReplacementTypes.CLASSES.contains(ref.type)) {
workspaceJVars[i] = g.declareClassField("work", jtype);
JBlock b = g.getBlock(SignatureHolder.DRILL_INIT_METHOD);
b.assign(workspaceJVars[i], JExpr._new(jtype));
- }else{
+ } else {
workspaceJVars[i] = g.declareClassField("work", jtype);
}
- if(ref.isInject()){
+ if (ref.isInject()) {
g.getBlock(BlockType.SETUP).assign(workspaceJVars[i], g.getMappingSet().getIncoming().invoke("getContext").invoke("getManagedBuffer"));
- }else{
+ } else {
//g.getBlock(BlockType.SETUP).assign(workspaceJVars[i], JExpr._new(jtype));
}
}
@@ -159,8 +159,9 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
JVar[] workspaceJVars, boolean decConstInputOnly) {
if (inputVariables != null) {
for (int i = 0; i < inputVariables.length; i++) {
- if (decConstInputOnly && !inputVariables[i].isConstant())
+ if (decConstInputOnly && !inputVariables[i].isConstant()) {
continue;
+ }
ValueReference parameter = parameters[i];
HoldingContainer inputVariable = inputVariables[i];
@@ -177,10 +178,9 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
JVar[] internalVars = new JVar[workspaceJVars.length];
for (int i = 0; i < workspaceJVars.length; i++) {
- if(decConstInputOnly){
+ if (decConstInputOnly) {
internalVars[i] = sub.decl(g.getModel()._ref(workspaceVars[i].type), workspaceVars[i].name, workspaceJVars[i]);
- }else{
-
+ } else {
internalVars[i] = sub.decl(g.getModel()._ref(workspaceVars[i].type), workspaceVars[i].name, workspaceJVars[i]);
}
@@ -313,7 +313,6 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
MajorType type = Types.required(MinorType.LATE);
ValueReference ref = new ValueReference(type, name);
ref.isComplexWriter = true;
-
return ref;
}
@@ -342,7 +341,7 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
this.majorType = majorType;
}
- public boolean isInject(){
+ public boolean isInject() {
return inject;
}
}
@@ -358,4 +357,5 @@ public abstract class DrillFuncHolder extends AbstractFuncHolder {
public ValueReference getReturnValue() {
return returnValue;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFunctionRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFunctionRegistry.java
index f53bd2842..399f3023c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFunctionRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillFunctionRegistry.java
@@ -39,16 +39,18 @@ public class DrillFunctionRegistry {
private ArrayListMultimap<String, DrillFuncHolder> methods = ArrayListMultimap.create();
- public DrillFunctionRegistry(DrillConfig config){
+ public DrillFunctionRegistry(DrillConfig config) {
FunctionConverter converter = new FunctionConverter();
Set<Class<? extends DrillFunc>> providerClasses = PathScanner.scanForImplementations(DrillFunc.class, config.getStringList(ExecConstants.FUNCTION_PACKAGES));
for (Class<? extends DrillFunc> clazz : providerClasses) {
DrillFuncHolder holder = converter.getHolder(clazz);
- if(holder != null){
+ if (holder != null) {
// register handle for each name the function can be referred to
String[] names = holder.getRegisteredNames();
- for(String name : names) methods.put(name.toLowerCase(), holder);
- }else{
+ for (String name : names) {
+ methods.put(name.toLowerCase(), holder);
+ }
+ } else {
logger.warn("Unable to initialize function for class {}", clazz.getName());
}
}
@@ -83,4 +85,5 @@ public class DrillFunctionRegistry {
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillSimpleFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillSimpleFuncHolder.java
index 397cf9214..4731200c7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillSimpleFuncHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillSimpleFuncHolder.java
@@ -65,15 +65,15 @@ class DrillSimpleFuncHolder extends DrillFuncHolder{
}
@Override
- public boolean isNested(){
+ public boolean isNested() {
return false;
}
@Override
- public HoldingContainer renderEnd(ClassGenerator<?> g, HoldingContainer[] inputVariables, JVar[] workspaceJVars){
+ public HoldingContainer renderEnd(ClassGenerator<?> g, HoldingContainer[] inputVariables, JVar[] workspaceJVars) {
//If the function's annotation specifies a parameter has to be constant expression, but the HoldingContainer
//for the argument is not, then raise exception.
- for(int i =0; i < inputVariables.length; i++){
+ for (int i =0; i < inputVariables.length; i++) {
if (parameters[i].isConstant && !inputVariables[i].isConstant()) {
throw new DrillRuntimeException(String.format("The argument '%s' of Function '%s' has to be constant!", parameters[i].name, this.getRegisteredNames()[0]));
}
@@ -95,19 +95,19 @@ class DrillSimpleFuncHolder extends DrillFuncHolder{
MajorType returnValueType = returnValue.type;
// add outside null handling if it is defined.
- if(nullHandling == NullHandling.NULL_IF_NULL){
+ if (nullHandling == NullHandling.NULL_IF_NULL) {
JExpression e = null;
- for(HoldingContainer v : inputVariables){
- if(v.isOptional()){
- if(e == null){
+ for (HoldingContainer v : inputVariables) {
+ if (v.isOptional()) {
+ if (e == null) {
e = v.getIsSet();
- }else{
+ } else {
e = e.mul(v.getIsSet());
}
}
}
- if(e != null){
+ if (e != null) {
// if at least one expression must be checked, set up the conditional.
returnValueType = returnValue.type.toBuilder().setMode(DataMode.OPTIONAL).build();
out = g.declare(returnValueType);
@@ -118,7 +118,9 @@ class DrillSimpleFuncHolder extends DrillFuncHolder{
}
}
- if(out == null) out = g.declare(returnValueType);
+ if (out == null) {
+ out = g.declare(returnValueType);
+ }
// add the subblock after the out declaration.
g.getEvalBlock().add(topSub);
@@ -126,9 +128,13 @@ class DrillSimpleFuncHolder extends DrillFuncHolder{
JVar internalOutput = sub.decl(JMod.FINAL, g.getHolderType(returnValueType), returnValue.name, JExpr._new(g.getHolderType(returnValueType)));
addProtectedBlock(g, sub, body, inputVariables, workspaceJVars, false);
- if (sub != topSub) sub.assign(internalOutput.ref("isSet"),JExpr.lit(1));// Assign null if NULL_IF_NULL mode
+ if (sub != topSub) {
+ sub.assign(internalOutput.ref("isSet"),JExpr.lit(1));// Assign null if NULL_IF_NULL mode
+ }
sub.assign(out.getHolder(), internalOutput);
- if (sub != topSub) sub.assign(internalOutput.ref("isSet"),JExpr.lit(1));// Assign null if NULL_IF_NULL mode
+ if (sub != topSub) {
+ sub.assign(internalOutput.ref("isSet"),JExpr.lit(1));// Assign null if NULL_IF_NULL mode
+ }
g.getEvalBlock().directStatement(String.format("//---- end of eval portion of %s function. ----//", registeredNames[0]));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
index 63ddb11d8..402a6fc7f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionConverter.java
@@ -67,12 +67,14 @@ public class FunctionConverter {
path = path.replace(".", FileUtils.separator);
path = "/" + path + ".java";
CompilationUnit cu = functionUnits.get(path);
- if(cu != null) return cu;
+ if(cu != null) {
+ return cu;
+ }
URL u = Resources.getResource(c, path);
InputSupplier<InputStream> supplier = Resources.newInputStreamSupplier(u);
- try(InputStream is = supplier.getInput()){
- if(is == null){
+ try (InputStream is = supplier.getInput()) {
+ if (is == null) {
throw new IOException(String.format("Failure trying to located source code for Class %s, tried to read on classpath location %s", c.getName(), path));
}
String body = IO.toString(is);
@@ -92,9 +94,9 @@ public class FunctionConverter {
}
- public <T extends DrillFunc> DrillFuncHolder getHolder(Class<T> clazz){
+ public <T extends DrillFunc> DrillFuncHolder getHolder(Class<T> clazz) {
FunctionTemplate template = clazz.getAnnotation(FunctionTemplate.class);
- if(template == null){
+ if (template == null) {
return failure("Class does not declare FunctionTemplate annotation.", clazz);
}
@@ -110,7 +112,7 @@ public class FunctionConverter {
ValueReference outputField = null;
- for(Field field : clazz.getDeclaredFields()){
+ for (Field field : clazz.getDeclaredFields()) {
Param param = field.getAnnotation(Param.class);
Output output = field.getAnnotation(Output.class);
@@ -118,17 +120,25 @@ public class FunctionConverter {
Inject inject = field.getAnnotation(Inject.class);
int i =0;
- if(param != null) i++;
- if(output != null) i++;
- if(workspace != null) i++;
- if(inject != null) i++;
- if(i == 0){
+ if (param != null) {
+ i++;
+ }
+ if (output != null) {
+ i++;
+ }
+ if (workspace != null) {
+ i++;
+ }
+ if (inject != null) {
+ i++;
+ }
+ if (i == 0) {
return failure("The field must be either a @Param, @Output, @Inject or @Workspace field.", clazz, field);
- }else if(i > 1){
+ } else if(i > 1) {
return failure("The field must be only one of @Param, @Output, @Inject or @Workspace. It currently has more than one of these annotations.", clazz, field);
}
- if(param != null || output != null){
+ if (param != null || output != null) {
// Special processing for @Param FieldReader
if (param != null && FieldReader.class.isAssignableFrom(field.getType())) {
@@ -138,8 +148,7 @@ public class FunctionConverter {
// Special processing for @Output ComplexWriter
if (output != null && ComplexWriter.class.isAssignableFrom(field.getType())) {
-
- if(outputField != null){
+ if (outputField != null) {
return failure("You've declared more than one @Output field. You must declare one and only @Output field per Function class.", clazz, field);
}else{
outputField = ValueReference.createComplexWriterRef(field.getName());
@@ -148,37 +157,39 @@ public class FunctionConverter {
}
// check that param and output are value holders.
- if(!ValueHolder.class.isAssignableFrom(field.getType())){
+ if (!ValueHolder.class.isAssignableFrom(field.getType())) {
return failure(String.format("The field doesn't holds value of type %s which does not implement the ValueHolder interface. All fields of type @Param or @Output must extend this interface..", field.getType()), clazz, field);
}
// get the type field from the value holder.
MajorType type = null;
- try{
+ try {
type = getStaticFieldValue("TYPE", field.getType(), MajorType.class);
- }catch(Exception e){
+ } catch (Exception e) {
return failure("Failure while trying to access the ValueHolder's TYPE static variable. All ValueHolders must contain a static TYPE variable that defines their MajorType.", e, clazz, field.getName());
}
ValueReference p = new ValueReference(type, field.getName());
- if(param != null){
+ if (param != null) {
if (param.constant()) {
p.setConstant(true);
}
params.add(p);
- }else{
- if(outputField != null){
+ } else {
+ if (outputField != null) {
return failure("You've declared more than one @Output field. You must declare one and only @Output field per Function class.", clazz, field);
- }else{
+ } else {
outputField = p;
}
}
- }else{
+ } else {
// workspace work.
boolean isInject = inject != null;
- if(isInject && !field.getType().equals(DrillBuf.class)) return failure(String.format("Only DrillBuf is allowed to be injected. You attempted to inject %s.", field.getType()), clazz, field);
+ if (isInject && !field.getType().equals(DrillBuf.class)) {
+ return failure(String.format("Only DrillBuf is allowed to be injected. You attempted to inject %s.", field.getType()), clazz, field);
+ }
WorkspaceReference wsReference = new WorkspaceReference(field.getType(), field.getName(), isInject);
if (!isInject && template.scope() == FunctionScope.POINT_AGGREGATE && !ValueHolder.class.isAssignableFrom(field.getType()) ) {
@@ -186,31 +197,32 @@ public class FunctionConverter {
}
//If the workspace var is of Holder type, get its MajorType and assign to WorkspaceReference.
- if(ValueHolder.class.isAssignableFrom(field.getType())){
+ if (ValueHolder.class.isAssignableFrom(field.getType())) {
MajorType majorType = null;
- try{
+ try {
majorType = getStaticFieldValue("TYPE", field.getType(), MajorType.class);
- }catch(Exception e){
+ } catch (Exception e) {
return failure("Failure while trying to access the ValueHolder's TYPE static variable. All ValueHolders must contain a static TYPE variable that defines their MajorType.", e, clazz, field.getName());
}
wsReference.setMajorType(majorType);
}
-
workspaceFields.add(wsReference);
}
-
}
-
- // if(!workspaceFields.isEmpty()) return failure("This function declares one or more workspace fields. However, those have not yet been implemented.", clazz);
- if(outputField == null) return failure("This function declares zero output fields. A function must declare one output field.", clazz);
+ // if (!workspaceFields.isEmpty()) return failure("This function declares one or more workspace fields. However, those have not yet been implemented.", clazz);
+ if (outputField == null) {
+ return failure("This function declares zero output fields. A function must declare one output field.", clazz);
+ }
// get function body.
CompilationUnit cu;
try {
cu = get(clazz);
- if(cu == null) return null;
+ if (cu == null) {
+ return null;
+ }
} catch (IOException e) {
return failure("Failure while getting class body.", e, clazz);
}
@@ -225,7 +237,7 @@ public class FunctionConverter {
String[] registeredNames = ((template.name().isEmpty()) ? template.names() : new String[] {template.name()} );
- switch(template.scope()){
+ switch (template.scope()) {
case POINT_AGGREGATE:
return new DrillAggFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
template.isRandom(), registeredNames, ps, outputField, works, methods, imports, template.costCategory());
@@ -236,16 +248,17 @@ public class FunctionConverter {
return new DrillDecimalSumAggFuncHolder(template.scope(), template.nulls(), template.isBinaryCommutative(),
template.isRandom(), registeredNames, ps, outputField, works, methods, imports);
case SIMPLE:
- if (outputField.isComplexWriter)
+ if (outputField.isComplexWriter) {
return new DrillComplexWriterFuncHolder(template.scope(), template.nulls(),
template.isBinaryCommutative(),
template.isRandom(), registeredNames,
ps, outputField, works, methods, imports);
- else
+ } else {
return new DrillSimpleFuncHolder(template.scope(), template.nulls(),
template.isBinaryCommutative(),
template.isRandom(), registeredNames,
ps, outputField, works, methods, imports, template.costCategory());
+ }
case SC_BOOLEAN_OPERATOR:
return new DrillBooleanOPHolder(template.scope(), template.nulls(),
template.isBinaryCommutative(),
@@ -281,7 +294,7 @@ public class FunctionConverter {
default:
return failure("Unsupported Function Type.", clazz);
}
- }catch(Exception | NoSuchFieldError | AbstractMethodError ex){
+ } catch (Exception | NoSuchFieldError | AbstractMethodError ex) {
return failure("Failure while creating function holder.", ex, clazz);
}
@@ -296,8 +309,8 @@ public class FunctionConverter {
path = "/" + path + ".java";
URL u = Resources.getResource(c, path);
InputSupplier<InputStream> supplier = Resources.newInputStreamSupplier(u);
- try(InputStream is = supplier.getInput()){
- if(is == null){
+ try (InputStream is = supplier.getInput()) {
+ if (is == null) {
throw new IOException(String.format("Failure trying to located source code for Class %s, tried to read on classpath location %s", c.getName(), path));
}
String body = IO.toString(is);
@@ -318,30 +331,28 @@ public class FunctionConverter {
return (T) val;
}
- private static DrillFuncHolder failure(String message, Throwable t, Class<?> clazz, String fieldName){
+ private static DrillFuncHolder failure(String message, Throwable t, Class<?> clazz, String fieldName) {
logger.warn("Failure loading function class {}, field {}. " + message, clazz.getName(), fieldName, t);
return null;
}
- private DrillFuncHolder failure(String message, Class<?> clazz, String fieldName){
+ private DrillFuncHolder failure(String message, Class<?> clazz, String fieldName) {
logger.warn("Failure loading function class {}, field {}. " + message, clazz.getName(), fieldName);
return null;
}
- private DrillFuncHolder failure(String message, Class<?> clazz){
+ private DrillFuncHolder failure(String message, Class<?> clazz) {
logger.warn("Failure loading function class [{}]. Message: {}", clazz.getName(), message);
return null;
}
- private DrillFuncHolder failure(String message, Throwable t, Class<?> clazz){
+ private DrillFuncHolder failure(String message, Throwable t, Class<?> clazz) {
logger.warn("Failure loading function class [{}]. Message: {}", clazz.getName(), message, t);
return null;
}
- private DrillFuncHolder failure(String message, Class<?> clazz, Field field){
+ private DrillFuncHolder failure(String message, Class<?> clazz, Field field) {
return failure(message, clazz, field.getName());
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
index 293fbf7b6..e0f7f9339 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
@@ -45,11 +45,10 @@ public class FunctionGenerationHelper {
return getFunctionExpression(COMPARE_TO, Types.required(MinorType.INT), registry, left, right);
}
- public static FunctionHolderExpression getFunctionExpression(String name, MajorType returnType, FunctionImplementationRegistry registry, HoldingContainer... args){
-
+ public static FunctionHolderExpression getFunctionExpression(String name, MajorType returnType, FunctionImplementationRegistry registry, HoldingContainer... args) {
List<MajorType> argTypes = new ArrayList<MajorType>(args.length);
List<LogicalExpression> argExpressions = new ArrayList<LogicalExpression>(args.length);
- for(HoldingContainer c : args){
+ for(HoldingContainer c : args) {
argTypes.add(c.getMajorType());
argExpressions.add(new HoldingContainerExpression(c));
}
@@ -63,19 +62,22 @@ public class FunctionGenerationHelper {
sb.append("Failure finding function that runtime code generation expected. Signature: ");
sb.append(name);
sb.append("( ");
- for(int i =0; i < args.length; i++){
+ for(int i =0; i < args.length; i++) {
MajorType mt = args[i].getMajorType();
appendType(mt, sb);
- if(i != 0) sb.append(", ");
+ if (i != 0) {
+ sb.append(", ");
+ }
}
sb.append(" ) returns ");
appendType(returnType, sb);
throw new UnsupportedOperationException(sb.toString());
}
- private static final void appendType(MajorType mt, StringBuilder sb){
+ private static final void appendType(MajorType mt, StringBuilder sb) {
sb.append(mt.getMinorType().name());
sb.append(":");
sb.append(mt.getMode().name());
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/ModifiedUnparseVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/ModifiedUnparseVisitor.java
index aea46e8e9..966c46519 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/ModifiedUnparseVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/ModifiedUnparseVisitor.java
@@ -100,7 +100,9 @@ public class ModifiedUnparseVisitor extends UnparseVisitor {
bs instanceof Java.SynchronizedStatement ? 4 :
99
);
- if (state != -1 && state != x) this.pw.println(AutoIndentWriter.CLEAR_TABULATORS);
+ if (state != -1 && state != x) {
+ this.pw.println(AutoIndentWriter.CLEAR_TABULATORS);
+ }
state = x;
this.unparseBlockStatement(bs);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteFunctionHelpers.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteFunctionHelpers.java
index 8c55aa91b..d21add1f5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteFunctionHelpers.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteFunctionHelpers.java
@@ -92,7 +92,7 @@ public class ByteFunctionHelpers {
while (n > 7) {
long leftLong = PlatformDependent.getLong(lPos);
long rightLong = PlatformDependent.getLong(rPos);
- if(leftLong != rightLong){
+ if (leftLong != rightLong) {
return UnsignedLongs.compare(Long.reverseBytes(leftLong), Long.reverseBytes(rightLong));
}
lPos += 8;
@@ -110,7 +110,9 @@ public class ByteFunctionHelpers {
rPos++;
}
- if (lLen == rLen) return 0;
+ if (lLen == rLen) {
+ return 0;
+ }
return lLen > rLen ? 1 : -1;
@@ -134,8 +136,6 @@ public class ByteFunctionHelpers {
long lPos = laddr + lStart;
int rPos = rStart;
-
-
while (n-- != 0) {
byte leftByte = PlatformDependent.getByte(lPos);
byte rightByte = right[rPos];
@@ -146,10 +146,11 @@ public class ByteFunctionHelpers {
rPos++;
}
- if (lLen == rLen) return 0;
+ if (lLen == rLen) {
+ return 0;
+ }
return lLen > rLen ? 1 : -1;
-
}
/*
@@ -201,4 +202,5 @@ public class ByteFunctionHelpers {
public static boolean getSign(byte[] b) {
return ((getInteger(b, 0, false) & 0x80000000) != 0);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteSubstring.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteSubstring.java
index ad2efa22c..55dc35acd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteSubstring.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ByteSubstring.java
@@ -67,17 +67,20 @@ public class ByteSubstring implements DrillSimpleFunc {
} else {
// handle negative and positive offset values
- if (offset.value < 0)
+ if (offset.value < 0) {
out.start = string.end + (int)offset.value;
- else
+ } else {
out.start = (int)offset.value - 1;
+ }
// calculate end position from length and truncate to upper value bounds
- if (out.start + length.value > string.end)
+ if (out.start + length.value > string.end) {
out.end = string.end;
- else
+ } else {
out.end = out.start + (int)length.value;
+ }
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CharSubstring.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CharSubstring.java
index 5fdc6f2f1..62e9d701a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CharSubstring.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CharSubstring.java
@@ -89,8 +89,9 @@ public class CharSubstring implements DrillSimpleFunc {
break;
}
- if (currentByte < 128)
+ if (currentByte < 128) {
++charCount;
+ }
++byteCount;
}
@@ -112,8 +113,9 @@ public class CharSubstring implements DrillSimpleFunc {
// search forward until we find <length> characters
while (byteCount <= endBytePos) {
currentByte = string.buffer.getByte(byteCount);
- if (currentByte < 128)
+ if (currentByte < 128) {
++charCount;
+ }
++byteCount;
if (charCount == (int)length.value) {
out.end = byteCount;
@@ -122,8 +124,9 @@ public class CharSubstring implements DrillSimpleFunc {
}
break;
}
- if (currentByte < 128)
+ if (currentByte < 128) {
--charCount;
+ }
--byteCount;
}
}
@@ -134,4 +137,5 @@ public class CharSubstring implements DrillSimpleFunc {
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java
index 7c847eb4a..7f6d8a5c9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java
@@ -66,10 +66,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(Float.floatToIntBits(in.value)).asInt();
+ }
}
}
@@ -97,10 +98,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(Double.doubleToLongBits(in.value)).asInt();
+ }
}
}
@@ -128,10 +130,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
}
}
@@ -145,10 +148,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
}
}
@@ -162,10 +166,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
}
}
@@ -180,10 +185,11 @@ public class HashFunctions {
public void eval() {
// TODO: implement hash function for other types
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
}
}
@@ -197,10 +203,11 @@ public class HashFunctions {
public void eval() {
// TODO: implement hash function for other types
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(in.value).asInt();
+ }
}
}
@@ -296,10 +303,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
}
}
@@ -325,10 +333,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
}
}
@@ -354,10 +363,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(in.value).asInt();
+ }
}
}
@@ -383,10 +393,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value ^ in.index).asInt();
+ }
}
}
@@ -412,10 +423,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(in.value).asInt();
+ }
}
}
@@ -441,10 +453,11 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
}
}
@@ -475,9 +488,9 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else {
+ } else {
int xor = 0;
for (int i = 0; i < in.nDecimalDigits; i++) {
xor = xor ^ NullableDecimal28SparseHolder.getInteger(i, in.start, in.buffer);
@@ -514,9 +527,9 @@ public class HashFunctions {
}
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else {
+ } else {
int xor = 0;
for (int i = 0; i < in.nDecimalDigits; i++) {
xor = xor ^ NullableDecimal38SparseHolder.getInteger(i, in.start, in.buffer);
@@ -525,4 +538,5 @@ public class HashFunctions {
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsFalse.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsFalse.java
index 52af8cd72..72378e281 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsFalse.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsFalse.java
@@ -37,10 +37,11 @@ public class IsFalse {
public void setup(RecordBatch incoming) { }
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = (in.value == 0 ? 1 : 0);
+ }
}
}
@@ -57,4 +58,4 @@ public class IsFalse {
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotFalse.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotFalse.java
index 65476ae79..07f420cb8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotFalse.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotFalse.java
@@ -37,10 +37,11 @@ public class IsNotFalse {
public void setup(RecordBatch incoming) { }
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 1;
- else
+ } else {
out.value = in.value;
+ }
}
}
@@ -57,4 +58,4 @@ public class IsNotFalse {
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotTrue.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotTrue.java
index 3f389af73..a7a87a3b9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotTrue.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsNotTrue.java
@@ -37,10 +37,11 @@ public class IsNotTrue {
public void setup(RecordBatch incoming) { }
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 1;
- else
+ } else {
out.value = (in.value == 0 ? 1 : 0);
+ }
}
}
@@ -56,4 +57,5 @@ public class IsNotTrue {
out.value = in.value == 0 ? 1 : 0;
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsTrue.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsTrue.java
index 7edf53905..d534b7df5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsTrue.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/IsTrue.java
@@ -37,10 +37,11 @@ public class IsTrue {
public void setup(RecordBatch incoming) { }
public void eval() {
- if (in.isSet == 0)
+ if (in.isSet == 0) {
out.value = 0;
- else
+ } else {
out.value = in.value;
+ }
}
}
@@ -56,4 +57,5 @@ public class IsTrue {
out.value = in.value;
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctionUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctionUtil.java
index 3b7efbdf3..dedd89eba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctionUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctionUtil.java
@@ -42,8 +42,9 @@ public class StringFunctionUtil {
public static int getUTF8CharPosition(ByteBuf buffer, int start, int end, int charLength) {
int charCount = 0;
- if (start >= end)
+ if (start >= end) {
return -1; //wrong input here.
+ }
for (int idx = start, charLen = 0; idx < end; idx += charLen) {
charLen = utf8CharLen(buffer, idx);
@@ -60,8 +61,9 @@ public class StringFunctionUtil {
for (int i = strStart; i <= strEnd - (subEnd - subStart); i++) {
int j = subStart;
for (; j< subEnd; j++) {
- if (str.getByte(i + j - subStart) != substr.getByte(j))
+ if (str.getByte(i + j - subStart) != substr.getByte(j)) {
break;
+ }
}
if (j == subEnd && j!= subStart) { // found a matched substr (non-empty) in str.
@@ -88,18 +90,19 @@ public class StringFunctionUtil {
}
public static int utf8CharLen(byte currentByte) {
- if (currentByte >= 0){ // 1-byte char. First byte is 0xxxxxxx.
- return 1;
+ if (currentByte >= 0) { // 1-byte char. First byte is 0xxxxxxx.
+ return 1;
}
- else if ((currentByte & 0xE0) == 0xC0 ){ // 2-byte char. First byte is 110xxxxx
- return 2;
+ else if ((currentByte & 0xE0) == 0xC0 ) { // 2-byte char. First byte is 110xxxxx
+ return 2;
}
- else if ((currentByte & 0xF0) == 0xE0 ){ // 3-byte char. First byte is 1110xxxx
- return 3;
+ else if ((currentByte & 0xF0) == 0xE0 ) { // 3-byte char. First byte is 1110xxxx
+ return 3;
}
- else if ((currentByte & 0xF8) == 0xF0){ //4-byte char. First byte is 11110xxx
- return 4;
+ else if ((currentByte & 0xF8) == 0xF0) { //4-byte char. First byte is 11110xxx
+ return 4;
}
throw new DrillRuntimeException("Unexpected byte 0x" + Integer.toString((int)currentByte & 0xff, 16) + " encountered while decoding UTF8 string.");
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
index 0fa0da20a..9883f25e4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
@@ -43,7 +43,7 @@ import org.apache.drill.exec.record.RecordBatch;
public class StringFunctions{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StringFunctions.class);
- private StringFunctions(){}
+ private StringFunctions() {}
/*
* String Function Implementation.
@@ -57,12 +57,12 @@ public class StringFunctions{
@Output BitHolder out;
@Workspace java.util.regex.Matcher matcher;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
matcher = java.util.regex.Pattern.compile(org.apache.drill.exec.expr.fn.impl.RegexpUtil.sqlToRegexLike( //
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer))).matcher("");
}
- public void eval(){
+ public void eval() {
String i = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
matcher.reset(i);
out.value = matcher.matches()? 1:0;
@@ -76,12 +76,12 @@ public class StringFunctions{
@Output BitHolder out;
@Workspace java.util.regex.Matcher matcher;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
matcher = java.util.regex.Pattern.compile(org.apache.drill.exec.expr.fn.impl.RegexpUtil.sqlToRegexSimilar(org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer))).matcher("");
}
- public void eval(){
+ public void eval() {
String i = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
matcher.reset(i);
out.value = matcher.matches()? 1:0;
@@ -101,11 +101,11 @@ public class StringFunctions{
@Workspace java.util.regex.Matcher matcher;
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
matcher = java.util.regex.Pattern.compile(org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer)).matcher("");
}
- public void eval(){
+ public void eval() {
out.start = 0;
String i = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
@@ -117,16 +117,15 @@ public class StringFunctions{
}
}
-
@FunctionTemplate(names = {"char_length", "character_length", "length"}, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class CharLength implements DrillSimpleFunc{
@Param VarCharHolder input;
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
out.value = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength(input.buffer, input.start, input.end);
}
}
@@ -137,9 +136,9 @@ public class StringFunctions{
@Param VarBinaryHolder input;
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
out.value = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength(input.buffer, input.start, input.end);
}
}
@@ -150,9 +149,9 @@ public class StringFunctions{
@Param VarCharHolder input;
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
out.value = input.end - input.start;
}
}
@@ -163,9 +162,9 @@ public class StringFunctions{
@Param VarCharHolder input;
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
out.value = (input.end - input.start) * 8;
}
}
@@ -186,9 +185,9 @@ public class StringFunctions{
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
//Do string match.
int pos = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.stringLeftMatchUTF8(str.buffer, str.start, str.end,
substr.buffer, substr.start, substr.end);
@@ -211,9 +210,9 @@ public class StringFunctions{
@Output BigIntHolder out;
- public void setup(RecordBatch incoming){}
+ public void setup(RecordBatch incoming) {}
- public void eval(){
+ public void eval() {
//Do string match.
int pos = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.stringLeftMatchUTF8(str.buffer, str.start, str.end,
substr.buffer, substr.start, substr.end);
@@ -237,10 +236,10 @@ public class StringFunctions{
@Output VarCharHolder out;
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
- public void eval(){
+ public void eval() {
out.buffer = buffer = buffer.reallocIfNeeded(input.end- input.start);
out.start = 0;
out.end = input.end - input.start;
@@ -268,7 +267,7 @@ public class StringFunctions{
@Output VarCharHolder out;
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -380,7 +379,7 @@ public class StringFunctions{
@Output VarCharHolder out;
@Workspace ByteBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -415,7 +414,7 @@ public class StringFunctions{
@Output VarCharHolder out;
@Workspace ByteBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -457,7 +456,7 @@ public class StringFunctions{
@Output VarCharHolder out;
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -479,11 +478,11 @@ public class StringFunctions{
@Inject DrillBuf buffer;
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
buffer = buffer.reallocIfNeeded(8000);
}
- public void eval(){
+ public void eval() {
out.buffer = buffer;
out.start = out.end = 0;
int fromL = from.end - from.start;
@@ -496,8 +495,9 @@ public class StringFunctions{
for (; i<=text.end - fromL; ) {
int j = from.start;
for (; j<from.end; j++) {
- if (text.buffer.getByte(i + j - from.start) != from.buffer.getByte(j))
+ if (text.buffer.getByte(i + j - from.start) != from.buffer.getByte(j)) {
break;
+ }
}
if (j == from.end ) {
@@ -544,7 +544,7 @@ public class StringFunctions{
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -579,8 +579,9 @@ public class StringFunctions{
while (count < length.value - textCharCount) {
for (id = fill.start; id < fill.end; id++) {
- if (count == length.value - textCharCount)
+ if (count == length.value - textCharCount) {
break;
+ }
currentByte = fill.buffer.getByte(id);
if (currentByte < 0x128 || // 1-byte char. First byte is 0xxxxxxx.
@@ -594,8 +595,9 @@ public class StringFunctions{
} // end of while
//copy "text" into "out"
- for (id = text.start; id < text.end; id++)
+ for (id = text.start; id < text.end; id++) {
out.buffer.setByte(out.end++, text.buffer.getByte(id));
+ }
}
} // end of eval
@@ -615,7 +617,7 @@ public class StringFunctions{
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -648,16 +650,18 @@ public class StringFunctions{
out.buffer = buffer;
out.start = out.end = 0;
- for (id = text.start; id < text.end; id++)
+ for (id = text.start; id < text.end; id++) {
out.buffer.setByte(out.end++, text.buffer.getByte(id));
+ }
//copy "fill" on right. Total # of char to copy : length.value - textCharCount
int count = 0;
while (count < length.value - textCharCount) {
for (id = fill.start; id < fill.end; id++) {
- if (count == length.value - textCharCount)
+ if (count == length.value - textCharCount) {
break;
+ }
currentByte = fill.buffer.getByte(id);
if (currentByte < 0x128 || // 1-byte char. First byte is 0xxxxxxx.
@@ -686,7 +690,7 @@ public class StringFunctions{
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -719,7 +723,7 @@ public class StringFunctions{
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -729,7 +733,9 @@ public class StringFunctions{
int bytePerChar = 0;
//Scan from right of "text", stop until find a char not in "from"
for (int id = text.end - 1; id >= text.start; id -= bytePerChar) {
- while ((text.buffer.getByte(id) & 0xC0) == 0x80 && id >= text.start) id--;
+ while ((text.buffer.getByte(id) & 0xC0) == 0x80 && id >= text.start) {
+ id--;
+ }
bytePerChar = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.utf8CharLen(text.buffer, id);
int pos = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.stringLeftMatchUTF8(from.buffer, from.start, from.end,
text.buffer, id, id + bytePerChar);
@@ -752,7 +758,7 @@ public class StringFunctions{
@Output VarCharHolder out;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
public void eval() {
@@ -773,7 +779,9 @@ public class StringFunctions{
//Scan from right of "text", stop until find a char not in "from"
for (int id = text.end - 1; id >= text.start; id -= bytePerChar) {
- while ((text.buffer.getByte(id) & 0xC0) == 0x80 && id >= text.start) id--;
+ while ((text.buffer.getByte(id) & 0xC0) == 0x80 && id >= text.start) {
+ id--;
+ }
bytePerChar = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.utf8CharLen(text.buffer, id);
int pos = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.stringLeftMatchUTF8(from.buffer, from.start, from.end,
text.buffer, id, id + bytePerChar);
@@ -796,19 +804,21 @@ public class StringFunctions{
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
- public void eval(){
+ public void eval() {
out.buffer = buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));
out.start = out.end = 0;
int id = 0;
- for (id = left.start; id < left.end; id++)
+ for (id = left.start; id < left.end; id++) {
out.buffer.setByte(out.end++, left.buffer.getByte(id));
+ }
- for (id = right.start; id < right.end; id++)
+ for (id = right.start; id < right.end; id++) {
out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ }
}
}
@@ -822,20 +832,22 @@ public class StringFunctions{
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
- public void eval(){
+ public void eval() {
out.buffer = buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));;
out.start = out.end = 0;
int id = 0;
- for (id = left.start; id < left.end; id++)
+ for (id = left.start; id < left.end; id++) {
out.buffer.setByte(out.end++, left.buffer.getByte(id));
+ }
if (right.isSet == 1) {
- for (id = right.start; id < right.end; id++)
- out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ for (id = right.start; id < right.end; id++) {
+ out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ }
}
}
}
@@ -849,21 +861,23 @@ public class StringFunctions{
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
- public void eval(){
+ public void eval() {
out.buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));
out.start = out.end = 0;
int id = 0;
if (left.isSet == 1) {
- for (id = left.start; id < left.end; id++)
+ for (id = left.start; id < left.end; id++) {
out.buffer.setByte(out.end++, left.buffer.getByte(id));
+ }
}
- for (id = right.start; id < right.end; id++)
- out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ for (id = right.start; id < right.end; id++) {
+ out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ }
}
}
@@ -876,22 +890,24 @@ public class StringFunctions{
@Inject DrillBuf buffer;
- public void setup(RecordBatch incoming){
+ public void setup(RecordBatch incoming) {
}
- public void eval(){
+ public void eval() {
out.buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));
out.start = out.end = 0;
int id = 0;
if (left.isSet == 1) {
- for (id = left.start; id < left.end; id++)
+ for (id = left.start; id < left.end; id++) {
out.buffer.setByte(out.end++, left.buffer.getByte(id));
+ }
}
if (right.isSet == 1) {
- for (id = right.start; id < right.end; id++)
- out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ for (id = right.start; id < right.end; id++) {
+ out.buffer.setByte(out.end++, right.buffer.getByte(id));
+ }
}
}
}
@@ -974,7 +990,7 @@ public class StringFunctions{
System.out.println(len + ":" + num);
out.start = 0;
out.buffer = buffer = buffer.reallocIfNeeded( len * num );
- for(int i =0; i < num; i++){
+ for (int i =0; i < num; i++) {
in.buffer.getBytes(in.start, out.buffer, i * len, len);
}
out.end = len * num;
@@ -1001,8 +1017,8 @@ public class StringFunctions{
public void eval() {
byte[] bytea = new byte[in.end - in.start];
int index =0;
- for(int i = in.start; i<in.end; i++, index++){
- bytea[index]=in.buffer.getByte(i);
+ for (int i = in.start; i<in.end; i++, index++) {
+ bytea[index]=in.buffer.getByte(i);
}
byte[] outBytea = new String(bytea, inCharset).getBytes(com.google.common.base.Charsets.UTF_8);
out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length);
@@ -1035,10 +1051,10 @@ public class StringFunctions{
int index = in.end;
int innerindex = 0;
- for (int id = in.start; id < in.end; id+=charlen){
+ for (int id = in.start; id < in.end; id+=charlen) {
innerindex = charlen = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.utf8CharLen(in.buffer, id);
- while(innerindex > 0){
+ while (innerindex > 0) {
out.buffer.setByte(index - innerindex, in.buffer.getByte(id + (charlen - innerindex)));
innerindex-- ;
}
@@ -1047,4 +1063,5 @@ public class StringFunctions{
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/VarHelpers.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/VarHelpers.java
index 75b5ecde3..75fec8134 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/VarHelpers.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/VarHelpers.java
@@ -67,8 +67,9 @@ public class VarHelpers {
public static final int compare(NullableVarBinaryHolder left, NullableVarBinaryHolder right) {
if (left.isSet == 0) {
- if (right.isSet == 0)
+ if (right.isSet == 0) {
return 0;
+ }
return -1;
} else if (right.isSet == 0) {
return 1;
@@ -95,8 +96,9 @@ public class VarHelpers {
public static final int compare(NullableVarBinaryHolder left, NullableVarCharHolder right) {
if (left.isSet == 0) {
- if (right.isSet == 0)
+ if (right.isSet == 0) {
return 0;
+ }
return -1;
} else if (right.isSet == 0) {
return 1;
@@ -123,8 +125,9 @@ public class VarHelpers {
public static final int compare(NullableVarCharHolder left, NullableVarCharHolder right) {
if (left.isSet == 0) {
- if (right.isSet == 0)
+ if (right.isSet == 0) {
return 0;
+ }
return -1;
} else if (right.isSet == 0) {
return 1;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/Accountor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/Accountor.java
index 4df964615..d11f224a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/Accountor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/Accountor.java
@@ -58,7 +58,7 @@ public class Accountor {
}
}
- public boolean transferTo(Accountor target, DrillBuf buf, long size){
+ public boolean transferTo(Accountor target, DrillBuf buf, long size) {
boolean withinLimit = target.forceAdditionalReservation(size);
release(buf, size);
@@ -89,18 +89,20 @@ public class Accountor {
}
public boolean forceAdditionalReservation(long size) {
- if(size > 0){
+ if (size > 0) {
return remainder.forceGet(size);
- }else{
+ } else {
return true;
}
}
- public void reserved(long expected, DrillBuf buf){
+ public void reserved(long expected, DrillBuf buf) {
// make sure to take away the additional memory that happened due to rounding.
long additional = buf.capacity() - expected;
- if(additional > 0) remainder.forceGet(additional);
+ if (additional > 0) {
+ remainder.forceGet(additional);
+ }
if (ENABLE_ACCOUNTING) {
buffers.put(buf, new DebugStackTrace(buf.capacity(), Thread.currentThread().getStackTrace()));
@@ -108,14 +110,16 @@ public class Accountor {
}
- public void releasePartial(DrillBuf buf, long size){
+ public void releasePartial(DrillBuf buf, long size) {
remainder.returnAllocation(size);
if (ENABLE_ACCOUNTING) {
- if(buf != null){
+ if (buf != null) {
DebugStackTrace dst = buffers.get(buf);
- if(dst == null) throw new IllegalStateException("Partially releasing a buffer that has already been released. Buffer: " + buf);
+ if (dst == null) {
+ throw new IllegalStateException("Partially releasing a buffer that has already been released. Buffer: " + buf);
+ }
dst.size -= size;
- if(dst.size < 0){
+ if (dst.size < 0) {
throw new IllegalStateException("Partially releasing a buffer that has already been released. Buffer: " + buf);
}
}
@@ -125,7 +129,9 @@ public class Accountor {
public void release(DrillBuf buf, long size) {
remainder.returnAllocation(size);
if (ENABLE_ACCOUNTING) {
- if(buf != null && buffers.remove(buf) == null) throw new IllegalStateException("Releasing a buffer that has already been released. Buffer: " + buf);
+ if (buf != null && buffers.remove(buf) == null) {
+ throw new IllegalStateException("Releasing a buffer that has already been released. Buffer: " + buf);
+ }
}
}
@@ -136,7 +142,7 @@ public class Accountor {
sb.append("Attempted to close accountor with ");
sb.append(buffers.size());
sb.append(" buffer(s) still allocated");
- if(handle != null){
+ if (handle != null) {
sb.append("for QueryId: ");
sb.append(QueryIdHelper.getQueryId(handle.getQueryId()));
sb.append(", MajorFragmentId: ");
@@ -146,7 +152,6 @@ public class Accountor {
}
sb.append(".\n");
-
Multimap<DebugStackTrace, DebugStackTrace> multi = LinkedListMultimap.create();
for (DebugStackTrace t : buffers.values()) {
multi.put(t, t);
@@ -158,7 +163,7 @@ public class Accountor {
sb.append("\n\n\tTotal ");
sb.append(allocs.size());
sb.append(" allocation(s) of byte size(s): ");
- for(DebugStackTrace alloc : allocs){
+ for (DebugStackTrace alloc : allocs) {
sb.append(alloc.size);
sb.append(", ");
}
@@ -167,13 +172,12 @@ public class Accountor {
entry.addToString(sb);
}
IllegalStateException e = new IllegalStateException(sb.toString());
- if(errorOnLeak){
+ if (errorOnLeak) {
throw e;
- }else{
+ } else {
logger.warn("Memory leaked.", e);
}
-
}
remainder.close();
@@ -210,15 +214,19 @@ public class Accountor {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
DebugStackTrace other = (DebugStackTrace) obj;
- if (!Arrays.equals(elements, other.elements))
+ if (!Arrays.equals(elements, other.elements)) {
return false;
+ }
// weird equal where size doesn't matter for multimap purposes.
// if (size != other.size)
// return false;
@@ -226,4 +234,5 @@ public class Accountor {
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AtomicRemainder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AtomicRemainder.java
index 83cd3b748..263caa0f0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AtomicRemainder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AtomicRemainder.java
@@ -64,11 +64,13 @@ public class AtomicRemainder {
* @param size
*/
public boolean forceGet(long size) {
- if(get(size)){
+ if (get(size)) {
return true;
- }else{
+ } else {
availableShared.addAndGet(size);
- if (parent != null) parent.forceGet(size);
+ if (parent != null) {
+ parent.forceGet(size);
+ }
return false;
}
}
@@ -160,17 +162,19 @@ public class AtomicRemainder {
logger.warn("Tried to close remainder, but it has already been closed", new Exception());
return;
}
- if (availablePrivate.get() != initPrivate || availableShared.get() != initShared){
+ if (availablePrivate.get() != initPrivate || availableShared.get() != initShared) {
IllegalStateException e = new IllegalStateException(
String
.format(ERROR, initPrivate, availablePrivate.get(), initPrivate - availablePrivate.get(), initShared, availableShared.get(), initShared - availableShared.get()));
- if(errorOnLeak){
+ if (errorOnLeak) {
throw e;
- }else{
+ } else {
logger.warn("Memory leaked during query.", e);
}
}
- if(parent != null) parent.returnAllocation(initPrivate);
+ if (parent != null) {
+ parent.returnAllocation(initPrivate);
+ }
closed = true;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
index 5a594aaa3..a8e8a283f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
@@ -53,7 +53,7 @@ public class TopLevelAllocator implements BufferAllocator {
this(maximumAllocation, true);
}
- private TopLevelAllocator(long maximumAllocation, boolean errorOnLeak){
+ private TopLevelAllocator(long maximumAllocation, boolean errorOnLeak) {
this.errorOnLeak = errorOnLeak;
this.acct = new Accountor(errorOnLeak, null, null, maximumAllocation, 0);
this.empty = DrillBuf.getEmpty(this, acct);
@@ -72,8 +72,12 @@ public class TopLevelAllocator implements BufferAllocator {
}
public DrillBuf buffer(int min, int max) {
- if(min == 0) return empty;
- if(!acct.reserve(min)) return null;
+ if (min == 0) {
+ return empty;
+ }
+ if(!acct.reserve(min)) {
+ return null;
+ }
UnsafeDirectLittleEndian buffer = innerAllocator.directBuffer(min, max);
DrillBuf wrapped = new DrillBuf(this, acct, buffer);
acct.reserved(min, wrapped);
@@ -97,12 +101,14 @@ public class TopLevelAllocator implements BufferAllocator {
@Override
public BufferAllocator getChildAllocator(FragmentHandle handle, long initialReservation, long maximumReservation) throws OutOfMemoryException {
- if(!acct.reserve(initialReservation)){
+ if(!acct.reserve(initialReservation)) {
throw new OutOfMemoryException(String.format("You attempted to create a new child allocator with initial reservation %d but only %d bytes of memory were available.", initialReservation, acct.getCapacity() - acct.getAllocation()));
};
logger.debug("New child allocator with initial reservation {}", initialReservation);
ChildAllocator allocator = new ChildAllocator(handle, acct, maximumReservation, initialReservation, childrenMap);
- if(ENABLE_ACCOUNTING) childrenMap.put(allocator, Thread.currentThread().getStackTrace());
+ if (ENABLE_ACCOUNTING) {
+ childrenMap.put(allocator, Thread.currentThread().getStackTrace());
+ }
return allocator;
}
@@ -158,8 +164,10 @@ public class TopLevelAllocator implements BufferAllocator {
@Override
public DrillBuf buffer(int size, int max) {
- if(size == 0) return empty;
- if(!childAcct.reserve(size)){
+ if (size == 0) {
+ return empty;
+ }
+ if(!childAcct.reserve(size)) {
logger.warn("Unable to allocate buffer of size {} due to memory limit. Current allocation: {}", size, getAllocatedMemory(), new Exception());
return null;
};
@@ -182,7 +190,7 @@ public class TopLevelAllocator implements BufferAllocator {
@Override
public BufferAllocator getChildAllocator(FragmentHandle handle, long initialReservation, long maximumReservation)
throws OutOfMemoryException {
- if(!childAcct.reserve(initialReservation)){
+ if (!childAcct.reserve(initialReservation)) {
throw new OutOfMemoryException(String.format("You attempted to create a new child allocator with initial reservation %d but only %d bytes of memory were available.", initialReservation, childAcct.getAvailable()));
};
logger.debug("New child allocator with initial reservation {}", initialReservation);
@@ -191,14 +199,16 @@ public class TopLevelAllocator implements BufferAllocator {
return newChildAllocator;
}
- public PreAllocator getNewPreAllocator(){
+ public PreAllocator getNewPreAllocator() {
return new PreAlloc(this, this.childAcct);
}
@Override
public void close() {
if (ENABLE_ACCOUNTING) {
- if(thisMap != null) thisMap.remove(this);
+ if (thisMap != null) {
+ thisMap.remove(this);
+ }
for (ChildAllocator child : children.keySet()) {
if (!child.isClosed()) {
StringBuilder sb = new StringBuilder();
@@ -213,9 +223,9 @@ public class TopLevelAllocator implements BufferAllocator {
IllegalStateException e = new IllegalStateException(String.format(
"Failure while trying to close child allocator: Child level allocators not closed. Fragment %d:%d. Stack trace: \n %s",
handle.getMajorFragmentId(), handle.getMinorFragmentId(), sb.toString()));
- if(errorOnLeak){
+ if (errorOnLeak) {
throw e;
- }else{
+ } else {
logger.warn("Memory leak.", e);
}
}
@@ -242,7 +252,7 @@ public class TopLevelAllocator implements BufferAllocator {
}
- public PreAllocator getNewPreAllocator(){
+ public PreAllocator getNewPreAllocator() {
return new PreAlloc(this, this.acct);
}
@@ -250,7 +260,7 @@ public class TopLevelAllocator implements BufferAllocator {
int bytes = 0;
final Accountor acct;
final BufferAllocator allocator;
- private PreAlloc(BufferAllocator allocator, Accountor acct){
+ private PreAlloc(BufferAllocator allocator, Accountor acct) {
this.acct = acct;
this.allocator = allocator;
}
@@ -258,9 +268,9 @@ public class TopLevelAllocator implements BufferAllocator {
/**
*
*/
- public boolean preAllocate(int bytes){
+ public boolean preAllocate(int bytes) {
- if(!acct.reserve(bytes)){
+ if (!acct.reserve(bytes)) {
return false;
}
this.bytes += bytes;
@@ -269,10 +279,11 @@ public class TopLevelAllocator implements BufferAllocator {
}
- public DrillBuf getAllocation(){
+ public DrillBuf getAllocation() {
DrillBuf b = new DrillBuf(allocator, acct, innerAllocator.directBuffer(bytes, bytes));
acct.reserved(bytes, b);
return b;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/metrics/DrillMetrics.java b/exec/java-exec/src/main/java/org/apache/drill/exec/metrics/DrillMetrics.java
index d3bc93e6f..a9799b22f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/metrics/DrillMetrics.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/metrics/DrillMetrics.java
@@ -65,8 +65,9 @@ public class DrillMetrics {
reporter.start();
return reporter;
- } else
+ } else {
return null;
+ }
}
private static Slf4jReporter getLogReporter() {
@@ -76,8 +77,9 @@ public class DrillMetrics {
reporter.start(config.getInt(ExecConstants.METRICS_LOG_OUTPUT_INTERVAL), TimeUnit.SECONDS);
return reporter;
- } else
+ } else {
return null;
+ }
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
index e54a967dc..a888ea7c5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
@@ -93,15 +93,15 @@ public class FragmentContext implements Closeable {
this.rootFragmentTimeZone = fragment.getTimeZone();
logger.debug("Getting initial memory allocation of {}", fragment.getMemInitial());
logger.debug("Fragment max allocation: {}", fragment.getMemMax());
- try{
+ try {
OptionList list;
- if(!fragment.hasOptionsJson() || fragment.getOptionsJson().isEmpty()){
+ if (!fragment.hasOptionsJson() || fragment.getOptionsJson().isEmpty()) {
list = new OptionList();
- }else{
+ } else {
list = dbContext.getConfig().getMapper().readValue(fragment.getOptionsJson(), OptionList.class);
}
this.fragmentOptions = new FragmentOptionManager(context.getOptionManager(), list);
- }catch(Exception e){
+ } catch (Exception e) {
throw new ExecutionSetupException("Failure while reading plan options.", e);
}
this.allocator = context.getAllocator().getChildAllocator(fragment.getHandle(), fragment.getMemInitial(), fragment.getMemMax());
@@ -130,7 +130,7 @@ public class FragmentContext implements Closeable {
return context;
}
- public SchemaPlus getRootSchema(){
+ public SchemaPlus getRootSchema() {
if (connection == null) {
fail(new UnsupportedOperationException("Schema tree can only be created in root fragment. " +
"This is a non-root fragment."));
@@ -150,7 +150,7 @@ public class FragmentContext implements Closeable {
return context.getEndpoint();
}
- public FragmentStats getStats(){
+ public FragmentStats getStats() {
return this.stats;
}
@@ -254,12 +254,14 @@ public class FragmentContext implements Closeable {
@Override
public void close() {
- for(Thread thread: daemonThreads){
+ for (Thread thread: daemonThreads) {
thread.interrupt();
}
Object[] mbuffers = ((LongObjectOpenHashMap<Object>)(Object)managedBuffers).values;
- for(int i =0; i < mbuffers.length; i++){
- if(managedBuffers.allocated[i]) ((DrillBuf)mbuffers[i]).release();
+ for (int i =0; i < mbuffers.length; i++) {
+ if (managedBuffers.allocated[i]) {
+ ((DrillBuf)mbuffers[i]).release();
+ }
}
if (buffers != null) {
@@ -268,17 +270,19 @@ public class FragmentContext implements Closeable {
allocator.close();
}
- public DrillBuf replace(DrillBuf old, int newSize){
- if(managedBuffers.remove(old.memoryAddress()) == null) throw new IllegalStateException("Tried to remove unmanaged buffer.");
+ public DrillBuf replace(DrillBuf old, int newSize) {
+ if (managedBuffers.remove(old.memoryAddress()) == null) {
+ throw new IllegalStateException("Tried to remove unmanaged buffer.");
+ }
old.release();
return getManagedBuffer(newSize);
}
- public DrillBuf getManagedBuffer(){
+ public DrillBuf getManagedBuffer() {
return getManagedBuffer(256);
}
- public DrillBuf getManagedBuffer(int size){
+ public DrillBuf getManagedBuffer(int size) {
DrillBuf newBuf = allocator.buffer(size);
managedBuffers.put(newBuf.memoryAddress(), newBuf);
newBuf.setFragmentContext(this);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
index c5dea4f53..54edf88d7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
@@ -51,32 +51,36 @@ public class OperatorContext implements Closeable {
this.stats = stats;
}
- public DrillBuf replace(DrillBuf old, int newSize){
- if(managedBuffers.remove(old.memoryAddress()) == null) throw new IllegalStateException("Tried to remove unmanaged buffer.");
+ public DrillBuf replace(DrillBuf old, int newSize) {
+ if (managedBuffers.remove(old.memoryAddress()) == null) {
+ throw new IllegalStateException("Tried to remove unmanaged buffer.");
+ }
old.release();
return getManagedBuffer(newSize);
}
- public DrillBuf getManagedBuffer(){
+ public DrillBuf getManagedBuffer() {
return getManagedBuffer(256);
}
- public DrillBuf getManagedBuffer(int size){
+ public DrillBuf getManagedBuffer(int size) {
DrillBuf newBuf = allocator.buffer(size);
managedBuffers.put(newBuf.memoryAddress(), newBuf);
newBuf.setOperatorContext(this);
return newBuf;
}
- public static int getChildCount(PhysicalOperator popConfig){
+ public static int getChildCount(PhysicalOperator popConfig) {
Iterator<PhysicalOperator> iter = popConfig.iterator();
int i = 0;
- while(iter.hasNext()){
+ while (iter.hasNext()) {
iter.next();
i++;
}
- if(i == 0) i = 1;
+ if (i == 0) {
+ i = 1;
+ }
return i;
}
@@ -101,8 +105,10 @@ public class OperatorContext implements Closeable {
// release managed buffers.
Object[] buffers = ((LongObjectOpenHashMap<Object>)(Object)managedBuffers).values;
- for(int i =0; i < buffers.length; i++){
- if(managedBuffers.allocated[i]) ((DrillBuf)buffers[i]).release();
+ for (int i =0; i < buffers.length; i++) {
+ if (managedBuffers.allocated[i]) {
+ ((DrillBuf)buffers[i]).release();
+ }
}
if (allocator != null) {
@@ -111,7 +117,8 @@ public class OperatorContext implements Closeable {
closed = true;
}
- public OperatorStats getStats(){
+ public OperatorStats getStats() {
return stats;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
index b993d77d2..876ba37af 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/opt/BasicOptimizer.java
@@ -228,7 +228,9 @@ public class BasicOptimizer extends Optimizer{
@Override
public PhysicalOperator visitScan(Scan scan, Object obj) throws OptimizerException {
StoragePluginConfig config = logicalPlan.getStorageEngineConfig(scan.getStorageEngine());
- if(config == null) throw new OptimizerException(String.format("Logical plan referenced the storage engine config %s but the logical plan didn't have that available as a config.", scan.getStorageEngine()));
+ if(config == null) {
+ throw new OptimizerException(String.format("Logical plan referenced the storage engine config %s but the logical plan didn't have that available as a config.", scan.getStorageEngine()));
+ }
StoragePlugin storagePlugin;
try {
storagePlugin = context.getStorage().getPlugin(config);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractBase.java
index e54e67c3d..defb4e4af 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractBase.java
@@ -33,8 +33,10 @@ public abstract class AbstractBase implements PhysicalOperator{
@Override
public void accept(GraphVisitor<PhysicalOperator> visitor) {
visitor.enter(this);
- if(this.iterator() == null) throw new IllegalArgumentException("Null iterator for pop." + this);
- for(PhysicalOperator o : this){
+ if (this.iterator() == null) {
+ throw new IllegalArgumentException("Null iterator for pop." + this);
+ }
+ for (PhysicalOperator o : this) {
Preconditions.checkNotNull(o, String.format("Null in iterator for pop %s.", this));
o.accept(visitor);
}
@@ -46,7 +48,7 @@ public abstract class AbstractBase implements PhysicalOperator{
return true;
}
- public final void setOperatorId(int id){
+ public final void setOperatorId(int id) {
this.id = id;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractPhysicalVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractPhysicalVisitor.java
index 9e7beec47..48b38011f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractPhysicalVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractPhysicalVisitor.java
@@ -90,7 +90,7 @@ public abstract class AbstractPhysicalVisitor<T, X, E extends Throwable> impleme
@Override
public T visitHashAggregate(HashAggregate agg, X value) throws E {
- return visitOp(agg, value);
+ return visitOp(agg, value);
}
@Override
@@ -120,7 +120,7 @@ public abstract class AbstractPhysicalVisitor<T, X, E extends Throwable> impleme
public T visitChildren(PhysicalOperator op, X value) throws E{
- for(PhysicalOperator child : op){
+ for (PhysicalOperator child : op) {
child.accept(this, value);
}
return null;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/Screen.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/Screen.java
index 5f0648da4..980b413de 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/Screen.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/Screen.java
@@ -60,7 +60,9 @@ public class Screen extends AbstractStore {
public void applyAssignments(List<DrillbitEndpoint> endpoints) throws PhysicalOperatorSetupException {
// we actually don't have to do anything since nothing should have changed. we'll check just check that things
// didn't get screwed up.
- if (endpoints.size() != 1) throw new PhysicalOperatorSetupException("A Screen operator can only be assigned to a single node.");
+ if (endpoints.size() != 1) {
+ throw new PhysicalOperatorSetupException("A Screen operator can only be assigned to a single node.");
+ }
DrillbitEndpoint endpoint = endpoints.iterator().next();
// logger.debug("Endpoint this: {}, assignment: {}", this.endpoint, endpoint);
if (!endpoint.equals(this.endpoint)) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/SingleMergeExchange.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/SingleMergeExchange.java
index 26d881dc2..f6e11c479 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/SingleMergeExchange.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/SingleMergeExchange.java
@@ -63,8 +63,9 @@ public class SingleMergeExchange extends AbstractExchange {
protected void setupReceivers(List<CoordinationProtos.DrillbitEndpoint> receiverLocations)
throws PhysicalOperatorSetupException {
- if (receiverLocations.size() != 1)
+ if (receiverLocations.size() != 1) {
throw new PhysicalOperatorSetupException("SingleMergeExchange only supports a single receiver endpoint");
+ }
receiverLocation = receiverLocations.iterator().next();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/UnionExchange.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/UnionExchange.java
index cafdbdd47..bf2b4a150 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/UnionExchange.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/UnionExchange.java
@@ -48,7 +48,9 @@ public class UnionExchange extends AbstractExchange{
@Override
protected void setupReceivers(List<DrillbitEndpoint> receiverLocations) throws PhysicalOperatorSetupException {
- if(receiverLocations.size() != 1) throw new PhysicalOperatorSetupException("A Union Exchange only supports a single receiver endpoint.");
+ if (receiverLocations.size() != 1) {
+ throw new PhysicalOperatorSetupException("A Union Exchange only supports a single receiver endpoint.");
+ }
this.destinationLocation = receiverLocations.iterator().next();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
index 7f9762415..e25f1c08e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
@@ -41,9 +41,9 @@ public class ImplCreator extends AbstractPhysicalVisitor<RecordBatch, FragmentCo
private RootExec root = null;
- private ImplCreator(){}
+ private ImplCreator() {}
- private RootExec getRoot(){
+ private RootExec getRoot() {
return root;
}
@@ -78,7 +78,7 @@ public class ImplCreator extends AbstractPhysicalVisitor<RecordBatch, FragmentCo
public static RootExec getExec(FragmentContext context, FragmentRoot root) throws ExecutionSetupException {
ImplCreator i = new ImplCreator();
- if(AssertionUtil.isAssertionsEnabled()){
+ if (AssertionUtil.isAssertionsEnabled()) {
root = IteratorValidatorInjector.rewritePlanWithIteratorValidator(context, root);
}
@@ -86,9 +86,11 @@ public class ImplCreator extends AbstractPhysicalVisitor<RecordBatch, FragmentCo
watch.start();
root.accept(i, context);
logger.debug("Took {} ms to accept", watch.elapsed(TimeUnit.MILLISECONDS));
- if (i.root == null)
+ if (i.root == null) {
throw new ExecutionSetupException(
"The provided fragment did not have a root node that correctly created a RootExec value.");
+ }
return i.getRoot();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/OperatorCreatorRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/OperatorCreatorRegistry.java
index 8c768e508..82a9a6364 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/OperatorCreatorRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/OperatorCreatorRegistry.java
@@ -42,7 +42,9 @@ public class OperatorCreatorRegistry {
public synchronized Object getOperatorCreator(Class<?> operator) throws ExecutionSetupException {
Object opCreator = instanceRegistry.get(operator);
- if (opCreator != null) return opCreator;
+ if (opCreator != null) {
+ return opCreator;
+ }
Constructor<?> c = constructorRegistry.get(operator);
if(c == null) {
@@ -75,9 +77,9 @@ public class OperatorCreatorRegistry {
Type[] args = ((ParameterizedType)iface).getActualTypeArguments();
interfaceFound = true;
boolean constructorFound = false;
- for(Constructor<?> constructor : operatorClass.getConstructors()){
+ for (Constructor<?> constructor : operatorClass.getConstructors()) {
Class<?>[] params = constructor.getParameterTypes();
- if(params.length == 0){
+ if (params.length == 0) {
Constructor<?> old = constructorRegistry.put((Class<?>) args[0], constructor);
if (old != null) {
throw new RuntimeException(
@@ -88,7 +90,7 @@ public class OperatorCreatorRegistry {
constructorFound = true;
}
}
- if(!constructorFound){
+ if (!constructorFound) {
logger.debug("Skipping registration of OperatorCreator {} as it doesn't have a default constructor",
operatorClass.getCanonicalName());
}
@@ -97,4 +99,5 @@ public class OperatorCreatorRegistry {
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index c2a03b9d4..2712e2735 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -83,8 +83,9 @@ public class ScanBatch implements RecordBatch {
public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context, Iterator<RecordReader> readers, List<String[]> partitionColumns, List<Integer> selectedPartitionColumns) throws ExecutionSetupException {
this.context = context;
this.readers = readers;
- if (!readers.hasNext())
+ if (!readers.hasNext()) {
throw new ExecutionSetupException("A scan batch must contain at least one reader.");
+ }
this.currentReader = readers.next();
this.oContext = new OperatorContext(subScanConfig, context);
this.currentReader.setOperatorContext(this.oContext);
@@ -121,7 +122,7 @@ public class ScanBatch implements RecordBatch {
@Override
public void kill(boolean sendUpstream) {
- if(currentReader != null){
+ if (currentReader != null) {
currentReader.cleanup();
}
@@ -220,8 +221,8 @@ public class ScanBatch implements RecordBatch {
private void addPartitionVectors() throws ExecutionSetupException{
try {
- if(partitionVectors != null){
- for(ValueVector v : partitionVectors){
+ if (partitionVectors != null) {
+ for (ValueVector v : partitionVectors) {
v.clear();
}
}
@@ -290,7 +291,9 @@ public class ScanBatch implements RecordBatch {
if (v == null || v.getClass() != clazz) {
// Field does not exist add it to the map and the output container
v = TypeHelper.getNewVector(field, oContext.getAllocator());
- if(!clazz.isAssignableFrom(v.getClass())) throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
+ if (!clazz.isAssignableFrom(v.getClass())) {
+ throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
+ }
container.add(v);
fieldVectorMap.put(field.key(), v);
@@ -342,9 +345,9 @@ public class ScanBatch implements RecordBatch {
return WritableBatch.get(this);
}
- public void cleanup(){
+ public void cleanup() {
container.clear();
- for(ValueVector v : partitionVectors){
+ for (ValueVector v : partitionVectors) {
v.clear();
}
fieldVectorMap.clear();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
index 2b7fdf3b6..352deaea6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
@@ -79,7 +79,7 @@ public class SingleSenderCreator implements RootCreator<SingleSender>{
@Override
public boolean innerNext() {
- if(!ok){
+ if (!ok) {
incoming.kill(false);
return false;
@@ -93,7 +93,7 @@ public class SingleSenderCreator implements RootCreator<SingleSender>{
out = IterOutcome.NONE;
}
// logger.debug("Outcome of sender next {}", out);
- switch(out){
+ switch (out) {
case STOP:
case NONE:
FragmentWritableBatch b2 = FragmentWritableBatch.getEmptyLastWithSchema(handle.getQueryId(), handle.getMajorFragmentId(),
@@ -158,7 +158,9 @@ public class SingleSenderCreator implements RootCreator<SingleSender>{
@Override
public void success(Ack value, ByteBuf buf) {
sendCount.decrement();
- if(value.getOk()) return;
+ if (value.getOk()) {
+ return;
+ }
logger.error("Downstream fragment was not accepted. Stopping future sends.");
// if we didn't get ack ok, we'll need to kill the query.
@@ -170,5 +172,4 @@ public class SingleSenderCreator implements RootCreator<SingleSender>{
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index 6eede30dc..473e3a3f3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -132,10 +132,10 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
@Override
public IterOutcome innerNext() {
- if(schema != null){
- if(getSelectionVector4().next()){
+ if (schema != null) {
+ if (getSelectionVector4().next()) {
return IterOutcome.OK;
- }else{
+ } else {
return IterOutcome.NONE;
}
}
@@ -156,8 +156,10 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
return upstream;
case OK_NEW_SCHEMA:
// only change in the case that the schema truly changes. Artificial schema changes are ignored.
- if(!incoming.getSchema().equals(schema)){
- if (schema != null) throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ if (!incoming.getSchema().equals(schema)) {
+ if (schema != null) {
+ throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ }
this.schema = incoming.getSchema();
}
// fall through.
@@ -181,7 +183,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
}
}
- if (schema == null){
+ if (schema == null) {
// builder may be null at this point if the first incoming batch is empty
return IterOutcome.NONE;
}
@@ -196,7 +198,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
return IterOutcome.OK_NEW_SCHEMA;
- }catch(SchemaChangeException | ClassTransformationException | IOException ex){
+ } catch(SchemaChangeException | ClassTransformationException | IOException ex) {
kill(false);
logger.error("Failure during query", ex);
context.fail(ex);
@@ -215,7 +217,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
if (copier == null) {
copier = RemovingRecordBatch.getGenerated4Copier(batch, context, oContext.getAllocator(), newContainer, newBatch);
} else {
- for(VectorWrapper<?> i : batch){
+ for (VectorWrapper<?> i : batch) {
ValueVector v = TypeHelper.getNewVector(i.getField(), oContext.getAllocator());
newContainer.add(v);
@@ -227,7 +229,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
int count = selectionVector4.getCount();
int copiedRecords = copier.copyRecords(0, count);
assert copiedRecords == count;
- for(VectorWrapper<?> v : newContainer){
+ for (VectorWrapper<?> v : newContainer) {
ValueVector.Mutator m = v.getValueVector().getMutator();
m.setValueCount(count);
}
@@ -253,11 +255,13 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
ClassGenerator<PriorityQueue> g = cg.getRoot();
g.setMappingSet(mainMapping);
- for(Ordering od : orderings){
+ for (Ordering od : orderings) {
// first, we rewrite the evaluation stack for each side of the comparison.
ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
g.setMappingSet(leftMapping);
HoldingContainer left = g.addExpr(expr, false);
g.setMappingSet(rightMapping);
@@ -269,9 +273,9 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
- if(od.getDirection() == Direction.ASCENDING){
+ if (od.getDirection() == Direction.ASCENDING) {
jc._then()._return(out.getValue());
- }else{
+ } else {
jc._then()._return(out.getValue().minus());
}
g.rotateBlock();
@@ -377,5 +381,4 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TraceInjector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TraceInjector.java
index 58dd247e0..92d1882eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TraceInjector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TraceInjector.java
@@ -82,10 +82,12 @@ public class TraceInjector extends AbstractPhysicalVisitor<PhysicalOperator, Fra
}
/* Inject trace operator */
- if (list.size() > 0)
- newOp = op.getNewWithChildren(list);
- newOp.setOperatorId(op.getOperatorId());
+ if (list.size() > 0) {
+ newOp = op.getNewWithChildren(list);
+ }
+ newOp.setOperatorId(op.getOperatorId());
return newOp;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
index 99eeed374..8c1a4c07b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
@@ -82,8 +82,9 @@ public class WriterRecordBatch extends AbstractRecordBatch<Writer> {
IterOutcome upstream;
do {
upstream = next(incoming);
- if(first && upstream == IterOutcome.OK)
+ if(first && upstream == IterOutcome.OK) {
upstream = IterOutcome.OK_NEW_SCHEMA;
+ }
first = false;
switch(upstream) {
@@ -91,14 +92,15 @@ public class WriterRecordBatch extends AbstractRecordBatch<Writer> {
case NONE:
case STOP:
cleanup();
- if (upstream == IterOutcome.STOP)
+ if (upstream == IterOutcome.STOP) {
return upstream;
+ }
break;
case OK_NEW_SCHEMA:
try{
setupNewSchema();
- }catch(Exception ex){
+ } catch(Exception ex) {
kill(false);
logger.error("Failure during query", ex);
context.fail(ex);
@@ -113,9 +115,9 @@ public class WriterRecordBatch extends AbstractRecordBatch<Writer> {
throw new RuntimeException(ex);
}
- for(VectorWrapper v : incoming)
+ for(VectorWrapper v : incoming) {
v.getValueVector().clear();
-
+ }
break;
default:
@@ -176,4 +178,5 @@ public class WriterRecordBatch extends AbstractRecordBatch<Writer> {
throw new RuntimeException("Failed to close RecordWriter", ex);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
index e9be2ac99..c5228709d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
@@ -82,7 +82,9 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
@Override
public int getRecordCount() {
- if(done) return 0;
+ if (done) {
+ return 0;
+ }
return aggregator.getOutputCount();
}
@@ -102,7 +104,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
case STOP:
return outcome;
case OK_NEW_SCHEMA:
- if (!createAggregator()){
+ if (!createAggregator()) {
done = true;
return IterOutcome.STOP;
}
@@ -131,10 +133,10 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
logger.debug("Starting aggregator doWork; incoming record count = {} ", incoming.getRecordCount());
- while(true){
+ while (true) {
AggOutcome out = aggregator.doWork();
logger.debug("Aggregator response {}, records {}", out, aggregator.getOutputCount());
- switch(out){
+ switch (out) {
case CLEANUP_AND_RETURN:
container.zeroVectors();
aggregator.cleanup();
@@ -150,7 +152,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
return aggregator.getOutcome();
case UPDATE_AGGREGATOR:
aggregator = null;
- if(!createAggregator()){
+ if (!createAggregator()) {
return IterOutcome.STOP;
}
continue;
@@ -168,23 +170,23 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
*/
private boolean createAggregator() {
logger.debug("Creating new aggregator.");
- try{
+ try {
stats.startSetup();
this.aggregator = createAggregatorInternal();
return true;
- }catch(SchemaChangeException | ClassTransformationException | IOException ex){
+ } catch (SchemaChangeException | ClassTransformationException | IOException ex) {
context.fail(ex);
container.clear();
incoming.kill(false);
return false;
- }finally{
+ } finally {
stats.stopSetup();
}
}
private HashAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException{
- CodeGenerator<HashAggregator> top = CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, context.getFunctionRegistry());
- ClassGenerator<HashAggregator> cg = top.getRoot();
+ CodeGenerator<HashAggregator> top = CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, context.getFunctionRegistry());
+ ClassGenerator<HashAggregator> cg = top.getRoot();
ClassGenerator<HashAggregator> cgInner = cg.getInnerGenerator("BatchHolder");
container.clear();
@@ -199,10 +201,12 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
int i;
- for(i = 0; i < numGroupByExprs; i++) {
+ for (i = 0; i < numGroupByExprs; i++) {
NamedExpression ne = popConfig.getGroupByExprs()[i];
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry() );
- if(expr == null) continue;
+ if (expr == null) {
+ continue;
+ }
final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
@@ -211,13 +215,17 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
groupByOutFieldIds[i] = container.add(vv);
}
- for(i = 0; i < numAggrExprs; i++){
+ for (i = 0; i < numAggrExprs; i++) {
NamedExpression ne = popConfig.getAggrExprs()[i];
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry() );
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
- if(expr == null) continue;
+ if (expr == null) {
+ continue;
+ }
final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
@@ -248,7 +256,6 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
return agg;
}
-
private void setupUpdateAggrValues(ClassGenerator<HashAggregator> cg) {
cg.setMappingSet(UpdateAggrValuesMapping);
@@ -260,8 +267,8 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
cg.getBlock(BlockType.EVAL)._return(JExpr.TRUE);
}
- private void setupGetIndex(ClassGenerator<HashAggregator> cg){
- switch(incoming.getSchema().getSelectionVectorMode()){
+ private void setupGetIndex(ClassGenerator<HashAggregator> cg) {
+ switch (incoming.getSchema().getSelectionVectorMode()) {
case FOUR_BYTE: {
JVar var = cg.declareClassField("sv4_", cg.getModel()._ref(SelectionVector4.class));
cg.getBlock("doSetup").assign(var, JExpr.direct("incoming").invoke("getSelectionVector4"));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index b6b887415..d25a95266 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -156,7 +156,9 @@ public abstract class HashAggTemplate implements HashAggregator {
boolean status = true;
for (int i = batchOutputCount; i <= maxOccupiedIdx; i++) {
if (outputRecordValues(i, batchOutputCount) ) {
- if (EXTRA_DEBUG_2) logger.debug("Outputting values to output index: {}", batchOutputCount) ;
+ if (EXTRA_DEBUG_2) {
+ logger.debug("Outputting values to output index: {}", batchOutputCount) ;
+ }
batchOutputCount++;
outNumRecordsHolder.value++;
} else {
@@ -270,31 +272,41 @@ public abstract class HashAggTemplate implements HashAggregator {
outside: while(true) {
// loop through existing records, aggregating the values as necessary.
- if (EXTRA_DEBUG_1) logger.debug ("Starting outer loop of doWork()...");
+ if (EXTRA_DEBUG_1) {
+ logger.debug ("Starting outer loop of doWork()...");
+ }
for (; underlyingIndex < incoming.getRecordCount(); incIndex()) {
- if(EXTRA_DEBUG_2) logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
+ if(EXTRA_DEBUG_2) {
+ logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
+ }
boolean success = checkGroupAndAggrValues(currentIndex);
assert success : "HashAgg couldn't copy values.";
}
- if (EXTRA_DEBUG_1) logger.debug("Processed {} records", underlyingIndex);
+ if (EXTRA_DEBUG_1) {
+ logger.debug("Processed {} records", underlyingIndex);
+ }
- try{
+ try {
- while(true){
+ while (true) {
// Cleanup the previous batch since we are done processing it.
for (VectorWrapper<?> v : incoming) {
v.getValueVector().clear();
}
IterOutcome out = outgoing.next(0, incoming);
- if(EXTRA_DEBUG_1) logger.debug("Received IterOutcome of {}", out);
- switch(out){
+ if (EXTRA_DEBUG_1) {
+ logger.debug("Received IterOutcome of {}", out);
+ }
+ switch (out) {
case NOT_YET:
this.outcome = out;
return AggOutcome.RETURN_OUTCOME;
case OK_NEW_SCHEMA:
- if(EXTRA_DEBUG_1) logger.debug("Received new schema. Batch has {} records.", incoming.getRecordCount());
+ if (EXTRA_DEBUG_1) {
+ logger.debug("Received new schema. Batch has {} records.", incoming.getRecordCount());
+ }
newSchema = true;
this.cleanup();
// TODO: new schema case needs to be handled appropriately
@@ -302,14 +314,16 @@ public abstract class HashAggTemplate implements HashAggregator {
case OK:
resetIndex();
- if(incoming.getRecordCount() == 0){
+ if (incoming.getRecordCount() == 0) {
continue;
} else {
boolean success = checkGroupAndAggrValues(currentIndex);
assert success : "HashAgg couldn't copy values.";
incIndex();
- if(EXTRA_DEBUG_1) logger.debug("Continuing outside loop");
+ if (EXTRA_DEBUG_1) {
+ logger.debug("Continuing outside loop");
+ }
continue outside;
}
@@ -343,8 +357,10 @@ public abstract class HashAggTemplate implements HashAggregator {
// placeholder...
}
}
- } finally{
- if(first) first = !first;
+ } finally {
+ if (first) {
+ first = !first;
+ }
}
}
@@ -373,7 +389,7 @@ public abstract class HashAggTemplate implements HashAggregator {
}
@Override
- public void cleanup(){
+ public void cleanup() {
if (htable != null) {
htable.clear();
htable = null;
@@ -392,28 +408,28 @@ public abstract class HashAggTemplate implements HashAggregator {
}
}
- private final AggOutcome setOkAndReturn(){
- if(first){
+ private final AggOutcome setOkAndReturn() {
+ if (first) {
this.outcome = IterOutcome.OK_NEW_SCHEMA;
- }else{
+ } else {
this.outcome = IterOutcome.OK;
}
- for(VectorWrapper<?> v : outgoing){
+ for (VectorWrapper<?> v : outgoing) {
v.getValueVector().getMutator().setValueCount(outputCount);
}
return AggOutcome.RETURN_OUTCOME;
}
- private final void incIndex(){
+ private final void incIndex() {
underlyingIndex++;
- if(underlyingIndex >= incoming.getRecordCount()){
+ if (underlyingIndex >= incoming.getRecordCount()) {
currentIndex = Integer.MAX_VALUE;
return;
}
currentIndex = getVectorIndex(underlyingIndex);
}
- private final void resetIndex(){
+ private final void resetIndex() {
underlyingIndex = -1;
incIndex();
}
@@ -422,7 +438,9 @@ public abstract class HashAggTemplate implements HashAggregator {
BatchHolder bh = new BatchHolder();
batchHolders.add(bh);
- if (EXTRA_DEBUG_1) logger.debug("HashAggregate: Added new batch; num batches = {}.", batchHolders.size());
+ if (EXTRA_DEBUG_1) {
+ logger.debug("HashAggregate: Added new batch; num batches = {}.", batchHolders.size());
+ }
bh.setup();
}
@@ -465,9 +483,9 @@ public abstract class HashAggTemplate implements HashAggregator {
outputCount += numOutputRecords;
- if(first){
+ if (first) {
this.outcome = IterOutcome.OK_NEW_SCHEMA;
- }else{
+ } else {
this.outcome = IterOutcome.OK;
}
@@ -486,14 +504,14 @@ public abstract class HashAggTemplate implements HashAggregator {
} else {
if (!outputKeysStatus) {
logger.debug("Failed to output keys for current batch index: {} ", outBatchIndex);
- for(VectorWrapper<?> v : outContainer) {
+ for (VectorWrapper<?> v : outContainer) {
logger.debug("At the time of failure, size of valuevector in outContainer = {}.", v.getValueVector().getValueCapacity());
}
context.fail(new Exception("Failed to output keys for current batch !"));
}
if (!outputValuesStatus) {
logger.debug("Failed to output values for current batch index: {} ", outBatchIndex);
- for(VectorWrapper<?> v : outContainer) {
+ for (VectorWrapper<?> v : outContainer) {
logger.debug("At the time of failure, size of valuevector in outContainer = {}.", v.getValueVector().getValueCapacity());
}
context.fail(new Exception("Failed to output values for current batch !"));
@@ -557,7 +575,9 @@ public abstract class HashAggTemplate implements HashAggregator {
if (putStatus == HashTable.PutStatus.KEY_PRESENT) {
- if (EXTRA_DEBUG_2) logger.debug("Group-by key already present in hash table, updating the aggregate values");
+ if (EXTRA_DEBUG_2) {
+ logger.debug("Group-by key already present in hash table, updating the aggregate values");
+ }
// debugging
//if (holder.value == 100018 || holder.value == 100021) {
@@ -566,7 +586,9 @@ public abstract class HashAggTemplate implements HashAggregator {
}
else if (putStatus == HashTable.PutStatus.KEY_ADDED) {
- if (EXTRA_DEBUG_2) logger.debug("Group-by key was added to hash table, inserting new aggregate values") ;
+ if (EXTRA_DEBUG_2) {
+ logger.debug("Group-by key was added to hash table, inserting new aggregate values") ;
+ }
// debugging
// if (holder.value == 100018 || holder.value == 100021) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
index 4277f2306..238242bc1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
@@ -40,7 +40,7 @@ public interface HashAggregator {
public static enum AggOutcome {
RETURN_OUTCOME, CLEANUP_AND_RETURN, UPDATE_AGGREGATOR
- }
+ }
public abstract void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig, FragmentContext context,
OperatorStats stats, BufferAllocator allocator, RecordBatch incoming,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
index 3e6def128..e6900605f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
@@ -34,8 +34,8 @@ public class InternalBatch implements Iterable<VectorWrapper<?>>{
private final SelectionVector2 sv2;
private final SelectionVector4 sv4;
- public InternalBatch(RecordBatch incoming){
- switch(incoming.getSchema().getSelectionVectorMode()){
+ public InternalBatch(RecordBatch incoming) {
+ switch(incoming.getSchema().getSelectionVectorMode()) {
case FOUR_BYTE:
this.sv4 = incoming.getSelectionVector4().createNewWrapperCurrent();
this.sv2 = null;
@@ -69,13 +69,17 @@ public class InternalBatch implements Iterable<VectorWrapper<?>>{
return container.iterator();
}
- public void clear(){
- if(sv2 != null) sv2.clear();
- if(sv4 != null) sv4.clear();
+ public void clear() {
+ if (sv2 != null) {
+ sv2.clear();
+ }
+ if (sv4 != null) {
+ sv4.clear();
+ }
container.clear();
}
- public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int[] fieldIds){
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int[] fieldIds) {
return container.getValueAccessorById(clazz, fieldIds);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index 820f7229b..ced51798f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -67,8 +67,12 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
@Override
public int getRecordCount() {
- if(done) return 0;
- if (aggregator == null) return 0;
+ if (done) {
+ return 0;
+ }
+ if (aggregator == null) {
+ return 0;
+ }
return aggregator.getOutputCount();
}
@@ -88,7 +92,7 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
case STOP:
return outcome;
case OK_NEW_SCHEMA:
- if (!createAggregator()){
+ if (!createAggregator()) {
done = true;
return IterOutcome.STOP;
}
@@ -100,12 +104,14 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
}
}
- while(true){
+ while (true) {
AggOutcome out = aggregator.doWork();
logger.debug("Aggregator response {}, records {}", out, aggregator.getOutputCount());
- switch(out){
+ switch (out) {
case CLEANUP_AND_RETURN:
- if (!first) container.zeroVectors();
+ if (!first) {
+ container.zeroVectors();
+ }
done = true;
// fall through
case RETURN_OUTCOME:
@@ -122,7 +128,7 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
case UPDATE_AGGREGATOR:
first = false;
aggregator = null;
- if(!createAggregator()){
+ if (!createAggregator()) {
return IterOutcome.STOP;
}
continue;
@@ -142,23 +148,20 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
*/
private boolean createAggregator() {
logger.debug("Creating new aggregator.");
- try{
+ try {
stats.startSetup();
this.aggregator = createAggregatorInternal();
return true;
- }catch(SchemaChangeException | ClassTransformationException | IOException ex){
+ } catch (SchemaChangeException | ClassTransformationException | IOException ex) {
context.fail(ex);
container.clear();
incoming.kill(false);
return false;
- }finally{
+ } finally {
stats.stopSetup();
}
}
-
-
-
private StreamingAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException{
ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION, context.getFunctionRegistry());
container.clear();
@@ -169,20 +172,24 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
ErrorCollector collector = new ErrorCollectorImpl();
- for(int i =0; i < keyExprs.length; i++){
+ for (int i =0; i < keyExprs.length; i++) {
NamedExpression ne = popConfig.getKeys()[i];
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector,context.getFunctionRegistry() );
- if(expr == null) continue;
+ if (expr == null) {
+ continue;
+ }
keyExprs[i] = expr;
final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
keyOutputIds[i] = container.add(vector);
}
- for(int i =0; i < valueExprs.length; i++){
+ for (int i =0; i < valueExprs.length; i++) {
NamedExpression ne = popConfig.getExprs()[i];
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
- if(expr == null) continue;
+ if (expr == null) {
+ continue;
+ }
final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
@@ -190,7 +197,9 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
}
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
setupIsSame(cg, keyExprs);
setupIsSameApart(cg, keyExprs);
@@ -207,15 +216,13 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
return agg;
}
-
-
private final GeneratorMapping IS_SAME = GeneratorMapping.create("setupInterior", "isSame", null, null);
private final MappingSet IS_SAME_I1 = new MappingSet("index1", null, IS_SAME, IS_SAME);
private final MappingSet IS_SAME_I2 = new MappingSet("index2", null, IS_SAME, IS_SAME);
- private void setupIsSame(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] keyExprs){
+ private void setupIsSame(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] keyExprs) {
cg.setMappingSet(IS_SAME_I1);
- for(LogicalExpression expr : keyExprs){
+ for (LogicalExpression expr : keyExprs) {
// first, we rewrite the evaluation stack for each side of the comparison.
cg.setMappingSet(IS_SAME_I1);
HoldingContainer first = cg.addExpr(expr, false);
@@ -234,9 +241,9 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
private final MappingSet ISA_B1 = new MappingSet("b1Index", null, "b1", null, IS_SAME_PREV_INTERNAL_BATCH_READ, IS_SAME_PREV_INTERNAL_BATCH_READ);
private final MappingSet ISA_B2 = new MappingSet("b2Index", null, "incoming", null, IS_SAME_PREV, IS_SAME_PREV);
- private void setupIsSameApart(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] keyExprs){
+ private void setupIsSameApart(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] keyExprs) {
cg.setMappingSet(ISA_B1);
- for(LogicalExpression expr : keyExprs){
+ for (LogicalExpression expr : keyExprs) {
// first, we rewrite the evaluation stack for each side of the comparison.
cg.setMappingSet(ISA_B1);
HoldingContainer first = cg.addExpr(expr, false);
@@ -254,9 +261,9 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
private final GeneratorMapping EVAL_OUTSIDE = GeneratorMapping.create("setupInterior", "outputRecordValues", "resetValues", "cleanup");
private final MappingSet EVAL = new MappingSet("index", "outIndex", "incoming", "outgoing", EVAL_INSIDE, EVAL_OUTSIDE, EVAL_INSIDE);
- private void addRecordValues(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] valueExprs){
+ private void addRecordValues(ClassGenerator<StreamingAggregator> cg, LogicalExpression[] valueExprs) {
cg.setMappingSet(EVAL);
- for(LogicalExpression ex : valueExprs){
+ for (LogicalExpression ex : valueExprs) {
HoldingContainer hc = cg.addExpr(ex);
cg.getBlock(BlockType.EVAL)._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
}
@@ -265,9 +272,9 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
private final MappingSet RECORD_KEYS = new MappingSet(GeneratorMapping.create("setupInterior", "outputRecordKeys", null, null));
- private void outputRecordKeys(ClassGenerator<StreamingAggregator> cg, TypedFieldId[] keyOutputIds, LogicalExpression[] keyExprs){
+ private void outputRecordKeys(ClassGenerator<StreamingAggregator> cg, TypedFieldId[] keyOutputIds, LogicalExpression[] keyExprs) {
cg.setMappingSet(RECORD_KEYS);
- for(int i =0; i < keyExprs.length; i++){
+ for (int i =0; i < keyExprs.length; i++) {
HoldingContainer hc = cg.addExpr(new ValueVectorWriteExpression(keyOutputIds[i], keyExprs[i], true));
cg.getBlock(BlockType.EVAL)._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
}
@@ -280,10 +287,10 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
private final GeneratorMapping PREVIOUS_KEYS = GeneratorMapping.create("outputRecordKeysPrev", "outputRecordKeysPrev", null, null);
private final MappingSet RECORD_KEYS_PREV = new MappingSet("previousIndex", "outIndex", "previous", null, PREVIOUS_KEYS, PREVIOUS_KEYS);
- private void outputRecordKeysPrev(ClassGenerator<StreamingAggregator> cg, TypedFieldId[] keyOutputIds, LogicalExpression[] keyExprs){
+ private void outputRecordKeysPrev(ClassGenerator<StreamingAggregator> cg, TypedFieldId[] keyOutputIds, LogicalExpression[] keyExprs) {
cg.setMappingSet(RECORD_KEYS_PREV);
- for(int i =0; i < keyExprs.length; i++){
+ for (int i =0; i < keyExprs.length; i++) {
// IMPORTANT: there is an implicit assertion here that the TypedFieldIds for the previous batch and the current batch are the same. This is possible because InternalBatch guarantees this.
logger.debug("Writing out expr {}", keyExprs[i]);
cg.rotateBlock();
@@ -297,8 +304,8 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
cg.getBlock(BlockType.EVAL)._return(JExpr.TRUE);
}
- private void getIndex(ClassGenerator<StreamingAggregator> g){
- switch(incoming.getSchema().getSelectionVectorMode()){
+ private void getIndex(ClassGenerator<StreamingAggregator> g) {
+ switch (incoming.getSchema().getSelectionVectorMode()) {
case FOUR_BYTE: {
JVar var = g.declareClassField("sv4_", g.getModel()._ref(SelectionVector4.class));
g.getBlock("setupInterior").assign(var, JExpr.direct("incoming").invoke("getSelectionVector4"));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
index 53ac1ed4f..c2a5715cf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
@@ -60,7 +60,7 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
private void allocateOutgoing() {
- for(VectorWrapper<?> w : outgoing){
+ for (VectorWrapper<?> w : outgoing) {
w.getValueVector().allocateNew();
}
}
@@ -75,7 +75,7 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
return outputCount;
}
- private AggOutcome tooBigFailure(){
+ private AggOutcome tooBigFailure() {
context.fail(new Exception(TOO_BIG_ERROR));
this.outcome = IterOutcome.STOP;
return AggOutcome.CLEANUP_AND_RETURN;
@@ -87,11 +87,11 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
outcome = IterOutcome.NONE;
return AggOutcome.CLEANUP_AND_RETURN;
}
- try{ // outside loop to ensure that first is set to false after the first run.
+ try { // outside loop to ensure that first is set to false after the first run.
outputCount = 0;
// if we're in the first state, allocate outgoing.
- if(first){
+ if (first) {
allocateOutgoing();
}
@@ -119,8 +119,10 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
}
// pick up a remainder batch if we have one.
- if(remainderBatch != null){
- if (!outputToBatch( previousIndex )) return tooBigFailure();
+ if (remainderBatch != null) {
+ if (!outputToBatch( previousIndex )) {
+ return tooBigFailure();
+ }
remainderBatch.clear();
remainderBatch = null;
return setOkAndReturn();
@@ -131,38 +133,56 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
if (pendingOutput) {
allocateOutgoing();
pendingOutput = false;
- if(EXTRA_DEBUG) logger.debug("Attempting to output remainder.");
- if (!outputToBatch( previousIndex)) return tooBigFailure();
+ if (EXTRA_DEBUG) {
+ logger.debug("Attempting to output remainder.");
+ }
+ if (!outputToBatch( previousIndex)) {
+ return tooBigFailure();
+ }
}
- if(newSchema){
+ if (newSchema) {
return AggOutcome.UPDATE_AGGREGATOR;
}
- if(lastOutcome != null){
+ if (lastOutcome != null) {
outcome = lastOutcome;
return AggOutcome.CLEANUP_AND_RETURN;
}
- outside: while(true){
+ outside: while(true) {
// loop through existing records, adding as necessary.
for (; underlyingIndex < incoming.getRecordCount(); incIndex()) {
- if(EXTRA_DEBUG) logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
+ if (EXTRA_DEBUG) {
+ logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
+ }
if (previousIndex == -1) {
- if (EXTRA_DEBUG) logger.debug("Adding the initial row's keys and values.");
+ if (EXTRA_DEBUG) {
+ logger.debug("Adding the initial row's keys and values.");
+ }
addRecordInc(currentIndex);
}
else if (isSame( previousIndex, currentIndex )) {
- if(EXTRA_DEBUG) logger.debug("Values were found the same, adding.");
+ if (EXTRA_DEBUG) {
+ logger.debug("Values were found the same, adding.");
+ }
addRecordInc(currentIndex);
} else {
- if(EXTRA_DEBUG) logger.debug("Values were different, outputting previous batch.");
+ if (EXTRA_DEBUG) {
+ logger.debug("Values were different, outputting previous batch.");
+ }
if (outputToBatch(previousIndex)) {
- if(EXTRA_DEBUG) logger.debug("Output successful.");
+ if (EXTRA_DEBUG) {
+ logger.debug("Output successful.");
+ }
addRecordInc(currentIndex);
} else {
- if(EXTRA_DEBUG) logger.debug("Output failed.");
- if(outputCount == 0) return tooBigFailure();
+ if (EXTRA_DEBUG) {
+ logger.debug("Output failed.");
+ }
+ if (outputCount == 0) {
+ return tooBigFailure();
+ }
// mark the pending output but move forward for the next cycle.
pendingOutput = true;
@@ -178,23 +198,29 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
InternalBatch previous = null;
- try{
- while(true){
+ try {
+ while (true) {
if (previous != null) {
previous.clear();
}
previous = new InternalBatch(incoming);
IterOutcome out = outgoing.next(0, incoming);
- if(EXTRA_DEBUG) logger.debug("Received IterOutcome of {}", out);
- switch(out){
+ if (EXTRA_DEBUG) {
+ logger.debug("Received IterOutcome of {}", out);
+ }
+ switch (out) {
case NONE:
done = true;
lastOutcome = out;
if (first && addedRecordCount == 0) {
return setOkAndReturn();
- } else if(addedRecordCount > 0){
- if( !outputToBatchPrev( previous, previousIndex, outputCount) ) remainderBatch = previous;
- if(EXTRA_DEBUG) logger.debug("Received no more batches, returning.");
+ } else if(addedRecordCount > 0) {
+ if ( !outputToBatchPrev( previous, previousIndex, outputCount) ) {
+ remainderBatch = previous;
+ }
+ if (EXTRA_DEBUG) {
+ logger.debug("Received no more batches, returning.");
+ }
return setOkAndReturn();
}else{
if (first && out == IterOutcome.OK) {
@@ -204,17 +230,21 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
return AggOutcome.CLEANUP_AND_RETURN;
}
-
-
case NOT_YET:
this.outcome = out;
return AggOutcome.RETURN_OUTCOME;
case OK_NEW_SCHEMA:
- if(EXTRA_DEBUG) logger.debug("Received new schema. Batch has {} records.", incoming.getRecordCount());
- if(addedRecordCount > 0){
- if( !outputToBatchPrev( previous, previousIndex, outputCount) ) remainderBatch = previous;
- if(EXTRA_DEBUG) logger.debug("Wrote out end of previous batch, returning.");
+ if (EXTRA_DEBUG) {
+ logger.debug("Received new schema. Batch has {} records.", incoming.getRecordCount());
+ }
+ if (addedRecordCount > 0) {
+ if ( !outputToBatchPrev( previous, previousIndex, outputCount) ) {
+ remainderBatch = previous;
+ }
+ if (EXTRA_DEBUG) {
+ logger.debug("Wrote out end of previous batch, returning.");
+ }
newSchema = true;
return setOkAndReturn();
}
@@ -222,21 +252,27 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
return AggOutcome.UPDATE_AGGREGATOR;
case OK:
resetIndex();
- if(incoming.getRecordCount() == 0){
+ if (incoming.getRecordCount() == 0) {
continue;
- }else{
- if(previousIndex != -1 && isSamePrev(previousIndex , previous, currentIndex)){
- if(EXTRA_DEBUG) logger.debug("New value was same as last value of previous batch, adding.");
+ } else {
+ if (previousIndex != -1 && isSamePrev(previousIndex , previous, currentIndex)) {
+ if (EXTRA_DEBUG) {
+ logger.debug("New value was same as last value of previous batch, adding.");
+ }
addRecordInc(currentIndex);
previousIndex = currentIndex;
incIndex();
- if(EXTRA_DEBUG) logger.debug("Continuing outside");
+ if (EXTRA_DEBUG) {
+ logger.debug("Continuing outside");
+ }
continue outside;
- }else{ // not the same
- if(EXTRA_DEBUG) logger.debug("This is not the same as the previous, add record and continue outside.");
+ } else { // not the same
+ if (EXTRA_DEBUG) {
+ logger.debug("This is not the same as the previous, add record and continue outside.");
+ }
previousIndex = currentIndex;
- if(addedRecordCount > 0){
- if( !outputToBatchPrev( previous, previousIndex, outputCount) ){
+ if (addedRecordCount > 0) {
+ if ( !outputToBatchPrev( previous, previousIndex, outputCount) ) {
remainderBatch = previous;
return setOkAndReturn();
}
@@ -251,72 +287,78 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
return AggOutcome.CLEANUP_AND_RETURN;
}
-
}
- }finally{
+ } finally {
// make sure to clear previous if we haven't saved it.
- if(remainderBatch == null && previous != null){
+ if (remainderBatch == null && previous != null) {
previous.clear();
}
}
}
- }finally{
- if(first) first = !first;
+ } finally {
+ if (first) {
+ first = !first;
+ }
}
}
-
- private final void incIndex(){
+ private final void incIndex() {
underlyingIndex++;
- if(underlyingIndex >= incoming.getRecordCount()){
+ if (underlyingIndex >= incoming.getRecordCount()) {
currentIndex = Integer.MAX_VALUE;
return;
}
currentIndex = getVectorIndex(underlyingIndex);
}
- private final void resetIndex(){
+ private final void resetIndex() {
underlyingIndex = -1;
incIndex();
}
- private final AggOutcome setOkAndReturn(){
- if(first){
+ private final AggOutcome setOkAndReturn() {
+ if (first) {
this.outcome = IterOutcome.OK_NEW_SCHEMA;
- }else{
+ } else {
this.outcome = IterOutcome.OK;
}
- for(VectorWrapper<?> v : outgoing){
+ for (VectorWrapper<?> v : outgoing) {
v.getValueVector().getMutator().setValueCount(outputCount);
}
return AggOutcome.RETURN_OUTCOME;
}
- private final boolean outputToBatch(int inIndex){
+ private final boolean outputToBatch(int inIndex) {
- if(!outputRecordKeys(inIndex, outputCount)){
- if(EXTRA_DEBUG) logger.debug("Failure while outputting keys {}", outputCount);
+ if (!outputRecordKeys(inIndex, outputCount)) {
+ if(EXTRA_DEBUG) {
+ logger.debug("Failure while outputting keys {}", outputCount);
+ }
return false;
}
- if(!outputRecordValues(outputCount)){
- if(EXTRA_DEBUG) logger.debug("Failure while outputting values {}", outputCount);
+ if (!outputRecordValues(outputCount)) {
+ if (EXTRA_DEBUG) {
+ logger.debug("Failure while outputting values {}", outputCount);
+ }
return false;
}
- if(EXTRA_DEBUG) logger.debug("{} values output successfully", outputCount);
+ if (EXTRA_DEBUG) {
+ logger.debug("{} values output successfully", outputCount);
+ }
resetValues();
outputCount++;
addedRecordCount = 0;
return true;
}
- private final boolean outputToBatchPrev(InternalBatch b1, int inIndex, int outIndex){
+ private final boolean outputToBatchPrev(InternalBatch b1, int inIndex, int outIndex) {
boolean success = outputRecordKeysPrev(b1, inIndex, outIndex) //
&& outputRecordValues(outIndex) //
&& resetValues();
- if(success){
+ if (success) {
resetValues();
outputCount++;
addedRecordCount = 0;
@@ -325,17 +367,18 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
return success;
}
- private void addRecordInc(int index){
+ private void addRecordInc(int index) {
addRecord(index);
this.addedRecordCount++;
}
@Override
- public void cleanup(){
- if(remainderBatch != null) remainderBatch.clear();
+ public void cleanup() {
+ if (remainderBatch != null) {
+ remainderBatch.clear();
+ }
}
-
public abstract void setupInterior(@Named("incoming") RecordBatch incoming, @Named("outgoing") RecordBatch outgoing) throws SchemaChangeException;
public abstract boolean isSame(@Named("index1") int index1, @Named("index2") int index2);
public abstract boolean isSamePrev(@Named("b1Index") int b1Index, @Named("b1") InternalBatch b1, @Named("b2Index") int b2Index);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
index 8f5f29be1..96da00b46 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
@@ -28,8 +28,8 @@ public interface StreamingAggregator {
public static TemplateClassDefinition<StreamingAggregator> TEMPLATE_DEFINITION = new TemplateClassDefinition<StreamingAggregator>(StreamingAggregator.class, StreamingAggTemplate.class);
public static enum AggOutcome {
- RETURN_OUTCOME, CLEANUP_AND_RETURN, UPDATE_AGGREGATOR;
- }
+ RETURN_OUTCOME, CLEANUP_AND_RETURN, UPDATE_AGGREGATOR;
+ }
public abstract void setup(FragmentContext context, RecordBatch incoming, StreamingAggBatch outgoing) throws SchemaChangeException;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
index 195d24900..f77407eac 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
@@ -53,23 +53,23 @@ public class ChainedHashTable {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ChainedHashTable.class);
private static final GeneratorMapping KEY_MATCH_BUILD =
- GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalBuild" /* eval method */,
+ GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalBuild" /* eval method */,
null /* reset */, null /* cleanup */);
private static final GeneratorMapping KEY_MATCH_PROBE =
- GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalProbe" /* eval method */,
+ GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalProbe" /* eval method */,
null /* reset */, null /* cleanup */);
private static final GeneratorMapping GET_HASH_BUILD =
- GeneratorMapping.create("doSetup" /* setup method */, "getHashBuild" /* eval method */,
+ GeneratorMapping.create("doSetup" /* setup method */, "getHashBuild" /* eval method */,
null /* reset */, null /* cleanup */);
private static final GeneratorMapping GET_HASH_PROBE =
- GeneratorMapping.create("doSetup" /* setup method */, "getHashProbe" /* eval method */,
+ GeneratorMapping.create("doSetup" /* setup method */, "getHashProbe" /* eval method */,
null /* reset */, null /* cleanup */);
private static final GeneratorMapping SET_VALUE =
- GeneratorMapping.create("setupInterior" /* setup method */, "setValue" /* eval method */,
+ GeneratorMapping.create("setupInterior" /* setup method */, "setValue" /* eval method */,
null /* reset */, null /* cleanup */);
private static final GeneratorMapping OUTPUT_KEYS =
@@ -138,8 +138,12 @@ public class ChainedHashTable {
int i = 0;
for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
- if (expr == null) continue;
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
+ if (expr == null) {
+ continue;
+ }
keyExprsBuild[i] = expr;
final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
@@ -155,8 +159,12 @@ public class ChainedHashTable {
i = 0;
for (NamedExpression ne : htConfig.getKeyExprsProbe()) {
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingProbe, collector, context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
- if (expr == null) continue;
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
+ if (expr == null) {
+ continue;
+ }
keyExprsProbe[i] = expr;
i++;
}
@@ -293,4 +301,3 @@ public class ChainedHashTable {
}
}
}
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index b03880cce..6024523e9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -164,10 +164,11 @@ public abstract class HashTableTemplate implements HashTable {
assert (currentIdxWithinBatch < HashTable.BATCH_SIZE);
assert (incomingRowIdx < HashTable.BATCH_SIZE);
- if (isProbe)
+ if (isProbe) {
match = isKeyMatchInternalProbe(incomingRowIdx, currentIdxWithinBatch);
- else
+ } else {
match = isKeyMatchInternalBuild(incomingRowIdx, currentIdxWithinBatch);
+ }
if (! match) {
currentIdxHolder.value = links.getAccessor().get(currentIdxWithinBatch);
@@ -196,7 +197,9 @@ public abstract class HashTableTemplate implements HashTable {
maxOccupiedIdx = Math.max(maxOccupiedIdx, currentIdxWithinBatch);
- if (EXTRA_DEBUG) logger.debug("BatchHolder: inserted key at incomingRowIdx = {}, currentIdx = {}, hash value = {}.", incomingRowIdx, currentIdx, hashValue);
+ if (EXTRA_DEBUG) {
+ logger.debug("BatchHolder: inserted key at incomingRowIdx = {}, currentIdx = {}, hash value = {}.", incomingRowIdx, currentIdx, hashValue);
+ }
return true;
}
@@ -225,7 +228,9 @@ public abstract class HashTableTemplate implements HashTable {
newLinks.getMutator().setSafe(entryIdxWithinBatch, EMPTY_SLOT);
newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
- if (EXTRA_DEBUG) logger.debug("New bucket was empty. bucketIdx = {}, newStartIndices[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, bucketIdx, newStartIndices.getAccessor().get(bucketIdx), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ if (EXTRA_DEBUG) {
+ logger.debug("New bucket was empty. bucketIdx = {}, newStartIndices[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, bucketIdx, newStartIndices.getAccessor().get(bucketIdx), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ }
} else {
// follow the new table's hash chain until we encounter empty slot. Note that the hash chain could
@@ -245,7 +250,9 @@ public abstract class HashTableTemplate implements HashTable {
newLinks.getMutator().setSafe(entryIdxWithinBatch, EMPTY_SLOT);
newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
- if (EXTRA_DEBUG) logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ if (EXTRA_DEBUG) {
+ logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ }
break;
} else if (bh != this && bh.links.getAccessor().get(idxWithinBatch) == EMPTY_SLOT) {
@@ -253,7 +260,9 @@ public abstract class HashTableTemplate implements HashTable {
newLinks.getMutator().setSafe(entryIdxWithinBatch, EMPTY_SLOT); // update the newLink entry in this batch to mark end of the hash chain
newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
- if (EXTRA_DEBUG) logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ if (EXTRA_DEBUG) {
+ logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+ }
break;
}
@@ -381,11 +390,19 @@ public abstract class HashTableTemplate implements HashTable {
float loadf = htConfig.getLoadFactor();
int initialCap = htConfig.getInitialCapacity();
- if (loadf <= 0 || Float.isNaN(loadf)) throw new IllegalArgumentException("Load factor must be a valid number greater than 0");
- if (initialCap <= 0) throw new IllegalArgumentException("The initial capacity must be greater than 0");
- if (initialCap > MAXIMUM_CAPACITY) throw new IllegalArgumentException("The initial capacity must be less than maximum capacity allowed");
+ if (loadf <= 0 || Float.isNaN(loadf)) {
+ throw new IllegalArgumentException("Load factor must be a valid number greater than 0");
+ }
+ if (initialCap <= 0) {
+ throw new IllegalArgumentException("The initial capacity must be greater than 0");
+ }
+ if (initialCap > MAXIMUM_CAPACITY) {
+ throw new IllegalArgumentException("The initial capacity must be less than maximum capacity allowed");
+ }
- if (htConfig.getKeyExprsBuild() == null || htConfig.getKeyExprsBuild().length == 0) throw new IllegalArgumentException("Hash table must have at least 1 key expression");
+ if (htConfig.getKeyExprsBuild() == null || htConfig.getKeyExprsBuild().length == 0) {
+ throw new IllegalArgumentException("Hash table must have at least 1 key expression");
+ }
this.htConfig = htConfig;
this.context = context;
@@ -397,8 +414,9 @@ public abstract class HashTableTemplate implements HashTable {
// round up the initial capacity to nearest highest power of 2
tableSize = roundUpToPowerOf2(initialCap);
- if (tableSize > MAXIMUM_CAPACITY)
+ if (tableSize > MAXIMUM_CAPACITY) {
tableSize = MAXIMUM_CAPACITY;
+ }
threshold = (int) Math.ceil(tableSize * loadf);
@@ -500,7 +518,9 @@ public abstract class HashTableTemplate implements HashTable {
currentIdx = freeIndex++;
addBatchIfNeeded(currentIdx);
- if (EXTRA_DEBUG) logger.debug("Empty bucket index = {}. incomingRowIdx = {}; inserting new entry at currentIdx = {}.", i, incomingRowIdx, currentIdx);
+ if (EXTRA_DEBUG) {
+ logger.debug("Empty bucket index = {}. incomingRowIdx = {}; inserting new entry at currentIdx = {}.", i, incomingRowIdx, currentIdx);
+ }
if (insertEntry(incomingRowIdx, currentIdx, hash, lastEntryBatch, lastEntryIdxWithinBatch)) {
// update the start index array
@@ -543,14 +563,16 @@ public abstract class HashTableTemplate implements HashTable {
currentIdx = freeIndex++;
addBatchIfNeeded(currentIdx);
- if (EXTRA_DEBUG) logger.debug("No match was found for incomingRowIdx = {}; inserting new entry at currentIdx = {}.", incomingRowIdx, currentIdx);
+ if (EXTRA_DEBUG) {
+ logger.debug("No match was found for incomingRowIdx = {}; inserting new entry at currentIdx = {}.", incomingRowIdx, currentIdx);
+ }
if (insertEntry(incomingRowIdx, currentIdx, hash, lastEntryBatch, lastEntryIdxWithinBatch)) {
htIdxHolder.value = currentIdx;
return PutStatus.KEY_ADDED;
- }
- else
+ } else {
return PutStatus.PUT_FAILED;
+ }
}
return found ? PutStatus.KEY_PRESENT : PutStatus.KEY_ADDED ;
@@ -618,7 +640,9 @@ public abstract class HashTableTemplate implements HashTable {
if (currentIdx >= totalBatchSize) {
BatchHolder bh = addBatchHolder();
- if (EXTRA_DEBUG) logger.debug("HashTable: Added new batch. Num batches = {}.", batchHolders.size());
+ if (EXTRA_DEBUG) {
+ logger.debug("HashTable: Added new batch. Num batches = {}.", batchHolders.size());
+ }
return bh;
}
else {
@@ -638,12 +662,15 @@ public abstract class HashTableTemplate implements HashTable {
// in the new table.. the metadata consists of the startIndices, links and hashValues.
// Note that the keys stored in the BatchHolders are not moved around.
private void resizeAndRehashIfNeeded() {
- if (numEntries < threshold)
+ if (numEntries < threshold) {
return;
+ }
long t0 = System.currentTimeMillis();
- if (EXTRA_DEBUG) logger.debug("Hash table numEntries = {}, threshold = {}; resizing the table...", numEntries, threshold);
+ if (EXTRA_DEBUG) {
+ logger.debug("Hash table numEntries = {}, threshold = {}; resizing the table...", numEntries, threshold);
+ }
// If the table size is already MAXIMUM_CAPACITY, don't resize
// the table, but set the threshold to Integer.MAX_VALUE such that
@@ -656,8 +683,9 @@ public abstract class HashTableTemplate implements HashTable {
int newSize = 2 * tableSize;
tableSize = roundUpToPowerOf2(newSize);
- if (tableSize > MAXIMUM_CAPACITY)
+ if (tableSize > MAXIMUM_CAPACITY) {
tableSize = MAXIMUM_CAPACITY;
+ }
// set the new threshold based on the new table size and load factor
threshold = (int) Math.ceil(tableSize * htConfig.getLoadFactor());
@@ -717,5 +745,3 @@ public abstract class HashTableTemplate implements HashTable {
protected abstract int getHashProbe(@Named("incomingRowIdx") int incomingRowIdx) ;
}
-
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
index bf00194aa..f1fcce0d6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
@@ -79,7 +79,7 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
protected void doWork() {
int recordCount = incoming.getRecordCount();
filter.filterBatch(recordCount);
-// for(VectorWrapper<?> v : container){
+// for (VectorWrapper<?> v : container) {
// ValueVector.Mutator m = v.getValueVector().getMutator();
// m.setValueCount(recordCount);
// }
@@ -88,8 +88,12 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
@Override
public void cleanup() {
- if(sv2 != null) sv2.clear();
- if(sv4 != null) sv4.clear();
+ if (sv2 != null) {
+ sv2.clear();
+ }
+ if (sv4 != null) {
+ sv4.clear();
+ }
super.cleanup();
}
@@ -100,7 +104,7 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
sv2.clear();
}
- switch(incoming.getSchema().getSelectionVectorMode()){
+ switch (incoming.getSchema().getSelectionVectorMode()) {
case NONE:
sv2 = new SelectionVector2(oContext.getAllocator());
this.filter = generateSV2Filterer();
@@ -137,13 +141,13 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION4, context.getFunctionRegistry());
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
- if(collector.hasErrors()){
+ if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
cg.addExpr(new ReturnValueExpression(expr));
-// for(VectorWrapper<?> i : incoming){
+// for (VectorWrapper<?> i : incoming) {
// ValueVector v = TypeHelper.getNewVector(i.getField(), context.getAllocator());
// container.add(v);
// allocators.add(getAllocator4(v));
@@ -177,13 +181,13 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION2, context.getFunctionRegistry());
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
- if(collector.hasErrors()){
+ if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
cg.addExpr(new ReturnValueExpression(expr));
- for(VectorWrapper<?> v : incoming){
+ for (VectorWrapper<?> v : incoming) {
TransferPair pair = v.getValueVector().getTransferPair();
container.add(pair.getTo());
transfers.add(pair);
@@ -202,5 +206,4 @@ public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter>{
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index 15044b823..2a08c053a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -457,8 +457,8 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
return hj;
}
- private void allocateVectors(){
- for(VectorWrapper<?> v : container){
+ private void allocateVectors() {
+ for(VectorWrapper<?> v : container) {
v.getValueVector().allocateNew();
}
}
@@ -472,7 +472,9 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
}
private void updateStats(HashTable htable) {
- if(htable == null) return;
+ if (htable == null) {
+ return;
+ }
htable.getStats(htStats);
this.stats.setLongStat(Metric.NUM_BUCKETS, htStats.numBuckets);
this.stats.setLongStat(Metric.NUM_ENTRIES, htStats.numEntries);
@@ -488,7 +490,7 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
@Override
public void cleanup() {
- if(hjHelper != null){
+ if (hjHelper != null) {
hjHelper.clear();
}
@@ -504,4 +506,5 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
left.cleanup();
right.cleanup();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
index 785deae79..133289e08 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
@@ -94,11 +94,13 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
boolean success = true;
while (outputRecords < TARGET_RECORDS_PER_BATCH && recordsProcessed < recordsToProcess) {
success = projectBuildRecord(unmatchedBuildIndexes.get(recordsProcessed), outputRecords);
- if(success){
+ if (success) {
recordsProcessed++;
outputRecords++;
- }else{
- if(outputRecords == 0) throw new IllegalStateException("Too big to fail.");
+ } else {
+ if (outputRecords == 0) {
+ throw new IllegalStateException("Too big to fail.");
+ }
break;
}
}
@@ -166,11 +168,11 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
boolean success = projectBuildRecord(currentCompositeIdx, outputRecords) //
&& projectProbeRecord(recordsProcessed, outputRecords);
- if(!success){
+ if (!success) {
// we failed to project. redo this record.
getNextRecord = false;
return;
- }else{
+ } else {
outputRecords++;
/* Projected single row from the build side with matching key but there
@@ -182,8 +184,7 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
* from the probe side. Drain the next row in the probe side.
*/
recordsProcessed++;
- }
- else {
+ } else {
/* There is more than one row with the same key on the build side
* don't drain more records from the probe side till we have projected
* all the rows with this key
@@ -197,10 +198,10 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
// If we have a left outer join, project the keys
if (joinType == JoinRelType.LEFT || joinType == JoinRelType.FULL) {
boolean success = projectProbeRecord(recordsProcessed, outputRecords);
- if(!success){
- if(outputRecords == 0){
+ if (!success) {
+ if (outputRecords == 0) {
throw new IllegalStateException("Record larger than single batch.");
- }else{
+ } else {
// we've output some records but failed to output this one. return and wait for next call.
return;
}
@@ -214,10 +215,10 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
hjHelper.setRecordMatched(currentCompositeIdx);
boolean success = projectBuildRecord(currentCompositeIdx, outputRecords) //
&& projectProbeRecord(recordsProcessed, outputRecords);
- if(!success){
- if(outputRecords == 0){
+ if (!success) {
+ if (outputRecords == 0) {
throw new IllegalStateException("Record larger than single batch.");
- }else{
+ } else {
// we've output some records but failed to output this one. return and wait for next call.
return;
}
@@ -264,5 +265,7 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
public abstract void doSetup(@Named("context") FragmentContext context, @Named("buildBatch") VectorContainer buildBatch, @Named("probeBatch") RecordBatch probeBatch,
@Named("outgoing") RecordBatch outgoing);
public abstract boolean projectBuildRecord(@Named("buildIndex") int buildIndex, @Named("outIndex") int outIndex);
+
public abstract boolean projectProbeRecord(@Named("probeIndex") int probeIndex, @Named("outIndex") int outIndex);
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
index faca32a97..39bdb9440 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
@@ -66,31 +66,32 @@ public final class JoinStatus {
this.joinType = output.getJoinType();
}
- private final IterOutcome nextLeft(){
+ private final IterOutcome nextLeft() {
return outputBatch.next(LEFT_INPUT, left);
}
- private final IterOutcome nextRight(){
+ private final IterOutcome nextRight() {
return outputBatch.next(RIGHT_INPUT, right);
}
- public final void ensureInitial(){
- if(!initialSet){
+ public final void ensureInitial() {
+ if(!initialSet) {
this.lastLeft = nextLeft();
this.lastRight = nextRight();
initialSet = true;
}
}
- public final void advanceLeft(){
+ public final void advanceLeft() {
leftPosition++;
}
- public final void advanceRight(){
- if (rightSourceMode == RightSourceMode.INCOMING)
+ public final void advanceRight() {
+ if (rightSourceMode == RightSourceMode.INCOMING) {
rightPosition++;
- else
+ } else {
svRightPosition++;
+ }
}
public final int getLeftPosition() {
@@ -101,7 +102,7 @@ public final class JoinStatus {
return (rightSourceMode == RightSourceMode.INCOMING) ? rightPosition : svRightPosition;
}
- public final int getRightCount(){
+ public final int getRightCount() {
return right.getRecordCount();
}
@@ -153,9 +154,10 @@ public final class JoinStatus {
* Check if the left record position can advance by one.
* Side effect: advances to next left batch if current left batch size is exceeded.
*/
- public final boolean isLeftPositionAllowed(){
- if (lastLeft == IterOutcome.NONE)
+ public final boolean isLeftPositionAllowed() {
+ if (lastLeft == IterOutcome.NONE) {
return false;
+ }
if (!isLeftPositionInCurrentBatch()) {
leftPosition = 0;
releaseData(left);
@@ -170,11 +172,13 @@ public final class JoinStatus {
* Check if the right record position can advance by one.
* Side effect: advances to next right batch if current right batch size is exceeded
*/
- public final boolean isRightPositionAllowed(){
- if (rightSourceMode == RightSourceMode.SV4)
+ public final boolean isRightPositionAllowed() {
+ if (rightSourceMode == RightSourceMode.SV4) {
return svRightPosition < sv4.getCount();
- if (lastRight == IterOutcome.NONE)
+ }
+ if (lastRight == IterOutcome.NONE) {
return false;
+ }
if (!isRightPositionInCurrentBatch()) {
rightPosition = 0;
releaseData(right);
@@ -185,11 +189,13 @@ public final class JoinStatus {
return true;
}
- private void releaseData(RecordBatch b){
- for(VectorWrapper<?> v : b){
+ private void releaseData(RecordBatch b) {
+ for (VectorWrapper<?> v : b) {
v.clear();
}
- if(b.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE) b.getSelectionVector2().clear();
+ if (b.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE) {
+ b.getSelectionVector2().clear();
+ }
}
/**
@@ -220,29 +226,34 @@ public final class JoinStatus {
return rightPosition + 1 < right.getRecordCount();
}
- public JoinOutcome getOutcome(){
- if (!ok)
+ public JoinOutcome getOutcome() {
+ if (!ok) {
return JoinOutcome.FAILURE;
+ }
if (bothMatches(IterOutcome.NONE) ||
(joinType == JoinRelType.INNER && eitherMatches(IterOutcome.NONE)) ||
(joinType == JoinRelType.LEFT && lastLeft == IterOutcome.NONE) ||
- (joinType == JoinRelType.RIGHT && lastRight == IterOutcome.NONE))
+ (joinType == JoinRelType.RIGHT && lastRight == IterOutcome.NONE)) {
return JoinOutcome.NO_MORE_DATA;
+ }
if (bothMatches(IterOutcome.OK) ||
- (eitherMatches(IterOutcome.NONE) && eitherMatches(IterOutcome.OK)))
+ (eitherMatches(IterOutcome.NONE) && eitherMatches(IterOutcome.OK))) {
return JoinOutcome.BATCH_RETURNED;
- if (eitherMatches(IterOutcome.OK_NEW_SCHEMA))
+ }
+ if (eitherMatches(IterOutcome.OK_NEW_SCHEMA)) {
return JoinOutcome.SCHEMA_CHANGED;
- if (eitherMatches(IterOutcome.NOT_YET))
+ }
+ if (eitherMatches(IterOutcome.NOT_YET)) {
return JoinOutcome.WAITING;
+ }
return JoinOutcome.FAILURE;
}
- private boolean bothMatches(IterOutcome outcome){
+ private boolean bothMatches(IterOutcome outcome) {
return lastLeft == outcome && lastRight == outcome;
}
- private boolean eitherMatches(IterOutcome outcome){
+ private boolean eitherMatches(IterOutcome outcome) {
return lastLeft == outcome || lastRight == outcome;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
index bb3b9ac6d..c1dffc107 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
@@ -94,8 +94,9 @@ public abstract class JoinTemplate implements JoinWorker {
if (((MergeJoinPOP)status.outputBatch.getPopConfig()).getJoinType() == JoinRelType.LEFT) {
// we've hit the end of the right record batch; copy any remaining values from the left batch
while (status.isLeftPositionAllowed()) {
- if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
+ if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition())) {
return false;
+ }
status.incOutputPos();
status.advanceLeft();
@@ -103,8 +104,9 @@ public abstract class JoinTemplate implements JoinWorker {
}
return true;
}
- if (!status.isLeftPositionAllowed())
+ if (!status.isLeftPositionAllowed()) {
return true;
+ }
int comparison = doCompare(status.getLeftPosition(), status.getRightPosition());
switch (comparison) {
@@ -112,8 +114,9 @@ public abstract class JoinTemplate implements JoinWorker {
case -1:
// left key < right key
if (((MergeJoinPOP)status.outputBatch.getPopConfig()).getJoinType() == JoinRelType.LEFT) {
- if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
+ if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition())) {
return false;
+ }
status.incOutputPos();
}
status.advanceLeft();
@@ -125,25 +128,27 @@ public abstract class JoinTemplate implements JoinWorker {
// check for repeating values on the left side
if (!status.isLeftRepeating() &&
status.isNextLeftPositionInCurrentBatch() &&
- doCompareNextLeftKey(status.getLeftPosition()) == 0)
+ doCompareNextLeftKey(status.getLeftPosition()) == 0) {
// subsequent record(s) in the left batch have the same key
status.notifyLeftRepeating();
-
- else if (status.isLeftRepeating() &&
+ } else if (status.isLeftRepeating() &&
status.isNextLeftPositionInCurrentBatch() &&
- doCompareNextLeftKey(status.getLeftPosition()) != 0)
+ doCompareNextLeftKey(status.getLeftPosition()) != 0) {
// this record marks the end of repeated keys
status.notifyLeftStoppedRepeating();
+ }
boolean crossedBatchBoundaries = false;
int initialRightPosition = status.getRightPosition();
do {
// copy all equal right keys to the output record batch
- if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
+ if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition())) {
return false;
+ }
- if (!doCopyRight(status.getRightPosition(), status.getOutPosition()))
+ if (!doCopyRight(status.getRightPosition(), status.getOutPosition())) {
return false;
+ }
status.incOutputPos();
@@ -159,9 +164,10 @@ public abstract class JoinTemplate implements JoinWorker {
} while ((!status.isLeftRepeating() || status.isRightPositionInCurrentBatch()) && status.isRightPositionAllowed() && doCompare(status.getLeftPosition(), status.getRightPosition()) == 0);
if (status.getRightPosition() > initialRightPosition &&
- (status.isLeftRepeating() || ! status.isNextLeftPositionInCurrentBatch()))
+ (status.isLeftRepeating() || ! status.isNextLeftPositionInCurrentBatch())) {
// more than one matching result from right table; reset position in case of subsequent left match
status.setRightPosition(initialRightPosition);
+ }
status.advanceLeft();
if (status.isLeftRepeating() && doCompareNextLeftKey(status.getLeftPosition()) != 0) {
@@ -233,5 +239,4 @@ public abstract class JoinTemplate implements JoinWorker {
*/
protected abstract int doCompareNextLeftKey(@Named("leftIndex") int leftIndex);
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
index b24b5348a..1d4e353c6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
@@ -144,19 +144,21 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
status.ensureInitial();
// loop so we can start over again if we find a new batch was created.
- while(true){
+ while (true) {
JoinOutcome outcome = status.getOutcome();
// if the previous outcome was a change in schema or we sent a batch, we have to set up a new batch.
if (outcome == JoinOutcome.BATCH_RETURNED ||
- outcome == JoinOutcome.SCHEMA_CHANGED)
+ outcome == JoinOutcome.SCHEMA_CHANGED) {
allocateBatch();
+ }
// reset the output position to zero after our parent iterates this RecordBatch
if (outcome == JoinOutcome.BATCH_RETURNED ||
outcome == JoinOutcome.SCHEMA_CHANGED ||
- outcome == JoinOutcome.NO_MORE_DATA)
+ outcome == JoinOutcome.NO_MORE_DATA) {
status.resetOutputPos();
+ }
if (outcome == JoinOutcome.NO_MORE_DATA) {
logger.debug("NO MORE DATA; returning {} NONE");
@@ -164,7 +166,7 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
}
boolean first = false;
- if(worker == null){
+ if (worker == null) {
try {
logger.debug("Creating New Worker");
stats.startSetup();
@@ -180,11 +182,12 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
}
// join until we have a complete outgoing batch
- if (!worker.doJoin(status))
+ if (!worker.doJoin(status)) {
worker = null;
+ }
// get the outcome of the join.
- switch(status.getOutcome()){
+ switch (status.getOutcome()) {
case BATCH_RETURNED:
// only return new schema if new worker has been setup.
logger.debug("BATCH RETURNED; returning {}", (first ? "OK_NEW_SCHEMA" : "OK"));
@@ -200,7 +203,7 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
return status.getOutPosition() > 0 ? (first ? IterOutcome.OK_NEW_SCHEMA : IterOutcome.OK): (first ? IterOutcome.OK_NEW_SCHEMA : IterOutcome.NONE);
case SCHEMA_CHANGED:
worker = null;
- if(status.getOutPosition() > 0){
+ if (status.getOutPosition() > 0) {
// if we have current data, let's return that.
logger.debug("SCHEMA CHANGED; returning {} ", (first ? "OK_NEW_SCHEMA" : "OK"));
setRecordCountInContainer();
@@ -218,7 +221,7 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
}
private void setRecordCountInContainer() {
- for(VectorWrapper vw : container){
+ for (VectorWrapper vw : container) {
Preconditions.checkArgument(!vw.isHyper());
vw.getValueVector().getMutator().setValueCount(getRecordCount());
}
@@ -257,9 +260,10 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
// materialize value vector readers from join expression
final LogicalExpression materializedLeftExpr = ExpressionTreeMaterializer.materialize(leftFieldExpr, left, collector, context.getFunctionRegistry());
- if (collector.hasErrors())
+ if (collector.hasErrors()) {
throw new ClassTransformationException(String.format(
"Failure while trying to materialize incoming left field. Errors:\n %s.", collector.toErrorString()));
+ }
// generate compareNextLeftKey()
////////////////////////////////
@@ -475,9 +479,10 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
} else {
materializedLeftExpr = new TypedNullConstant(Types.optional(MinorType.INT));
}
- if (collector.hasErrors())
+ if (collector.hasErrors()) {
throw new ClassTransformationException(String.format(
"Failure while trying to materialize incoming left field. Errors:\n %s.", collector.toErrorString()));
+ }
LogicalExpression materializedRightExpr;
if (worker == null || status.isRightPositionAllowed()) {
@@ -485,9 +490,10 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
} else {
materializedRightExpr = new TypedNullConstant(Types.optional(MinorType.INT));
}
- if (collector.hasErrors())
+ if (collector.hasErrors()) {
throw new ClassTransformationException(String.format(
"Failure while trying to materialize incoming right field. Errors:\n %s.", collector.toErrorString()));
+ }
// generate compare()
////////////////////////
@@ -519,4 +525,5 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
//Pass the equality check for all the join conditions. Finally, return 0.
cg.getEvalBlock()._return(JExpr.lit(0));
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatchBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatchBuilder.java
index 904d38cea..1187bd6da 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatchBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatchBuilder.java
@@ -50,15 +50,24 @@ public class MergeJoinBatchBuilder {
}
public boolean add(RecordBatch batch) {
- if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE)
+ if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) {
throw new UnsupportedOperationException("A merge join cannot currently work against a sv4 batch.");
- if (batch.getRecordCount() == 0) return true; // skip over empty record batches.
+ }
+ if (batch.getRecordCount() == 0) {
+ return true; // skip over empty record batches.
+ }
// resource checks
long batchBytes = getSize(batch);
- if (batchBytes + runningBytes > Integer.MAX_VALUE) return false; // TODO: 2GB is arbitrary
- if (runningBatches++ >= Character.MAX_VALUE) return false; // allowed in batch.
- if (!svAllocator.preAllocate(batch.getRecordCount()*4)) return false; // sv allocation available.
+ if (batchBytes + runningBytes > Integer.MAX_VALUE) {
+ return false; // TODO: 2GB is arbitrary
+ }
+ if (runningBatches++ >= Character.MAX_VALUE) {
+ return false; // allowed in batch.
+ }
+ if (!svAllocator.preAllocate(batch.getRecordCount()*4)) {
+ return false; // sv allocation available.
+ }
// transfer VVs to a new RecordBatchData
RecordBatchData bd = new RecordBatchData(batch);
@@ -68,9 +77,9 @@ public class MergeJoinBatchBuilder {
return true;
}
- private long getSize(RecordBatch batch){
+ private long getSize(RecordBatch batch) {
long bytes = 0;
- for(VectorWrapper<?> v : batch){
+ for (VectorWrapper<?> v : batch) {
bytes += v.getValueVector().getBufferSize();
}
return bytes;
@@ -78,18 +87,20 @@ public class MergeJoinBatchBuilder {
public void build() throws SchemaChangeException {
container.clear();
- if (queuedRightBatches.size() > Character.MAX_VALUE) throw new SchemaChangeException("Join cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
+ if (queuedRightBatches.size() > Character.MAX_VALUE) {
+ throw new SchemaChangeException("Join cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
+ }
status.sv4 = new SelectionVector4(svAllocator.getAllocation(), recordCount, Character.MAX_VALUE);
BatchSchema schema = queuedRightBatches.keySet().iterator().next();
List<RecordBatchData> data = queuedRightBatches.get(schema);
// now we're going to generate the sv4 pointers
- switch(schema.getSelectionVectorMode()){
+ switch (schema.getSelectionVectorMode()) {
case NONE: {
int index = 0;
int recordBatchId = 0;
- for(RecordBatchData d : data){
- for(int i =0; i < d.getRecordCount(); i++, index++){
+ for (RecordBatchData d : data) {
+ for (int i =0; i < d.getRecordCount(); i++, index++) {
status.sv4.set(index, recordBatchId, i);
}
recordBatchId++;
@@ -99,8 +110,8 @@ public class MergeJoinBatchBuilder {
case TWO_BYTE: {
int index = 0;
int recordBatchId = 0;
- for(RecordBatchData d : data){
- for(int i =0; i < d.getRecordCount(); i++, index++){
+ for (RecordBatchData d : data) {
+ for (int i =0; i < d.getRecordCount(); i++, index++) {
status.sv4.set(index, recordBatchId, (int) d.getSv2().getIndex(i));
}
// might as well drop the selection vector since we'll stop using it now.
@@ -121,7 +132,7 @@ public class MergeJoinBatchBuilder {
}
}
- for(MaterializedField f : vectors.keySet()){
+ for (MaterializedField f : vectors.keySet()) {
List<ValueVector> v = vectors.get(f);
container.addHyperList(v);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
index cf2e36f9d..29fd80f72 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
@@ -133,7 +133,7 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
stats.startWait();
try {
RawFragmentBatch b = provider.getNext();
- if(b != null){
+ if (b != null) {
stats.addLongStat(Metric.BYTES_RECEIVED, b.getByteCount());
stats.batchReceived(0, b.getHeader().getDef().getRecordCount(), false);
}
@@ -191,7 +191,9 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
emptyBatch = rawBatch;
}
try {
- while ((rawBatch = getNext(provider)) != null && rawBatch.getHeader().getDef().getRecordCount() == 0);
+ while ((rawBatch = getNext(provider)) != null && rawBatch.getHeader().getDef().getRecordCount() == 0) {
+ ;
+ }
if (rawBatch == null && context.isCancelled()) {
return IterOutcome.STOP;
}
@@ -400,14 +402,17 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
batchOffsets[node.batchId] = 0;
// add front value from batch[x] to priority queue
- if (batchLoaders[node.batchId].getRecordCount() != 0)
+ if (batchLoaders[node.batchId].getRecordCount() != 0) {
pqueue.add(new Node(node.batchId, 0));
+ }
} else {
pqueue.add(new Node(node.batchId, node.valueIndex + 1));
}
- if (prevBatchWasFull) break;
+ if (prevBatchWasFull) {
+ break;
+ }
}
// set the value counts in the outgoing vectors
@@ -589,11 +594,13 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
private void generateComparisons(ClassGenerator g, VectorAccessible batch) throws SchemaChangeException {
g.setMappingSet(MAIN_MAPPING);
- for(Ordering od : popConfig.getOrderings()){
+ for (Ordering od : popConfig.getOrderings()) {
// first, we rewrite the evaluation stack for each side of the comparison.
ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector,context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
g.setMappingSet(LEFT_MAPPING);
HoldingContainer left = g.addExpr(expr, false);
g.setMappingSet(RIGHT_MAPPING);
@@ -605,9 +612,9 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
- if(od.getDirection() == Direction.ASCENDING){
+ if (od.getDirection() == Direction.ASCENDING) {
jc._then()._return(out.getValue());
- }else{
+ } else {
jc._then()._return(out.getValue().minus());
}
}
@@ -648,7 +655,7 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
public void cleanup() {
outgoingContainer.clear();
if (batchLoaders != null) {
- for(RecordBatchLoader rbl : batchLoaders){
+ for (RecordBatchLoader rbl : batchLoaders) {
if (rbl != null) {
rbl.clear();
}
@@ -662,4 +669,4 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
index 45f32cff4..aecf3636d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
@@ -189,8 +189,9 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
}
builder.add(incoming);
recordsSampled += incoming.getRecordCount();
- if (upstream == IterOutcome.NONE)
+ if (upstream == IterOutcome.NONE) {
break;
+ }
}
VectorContainer sortedSamples = new VectorContainer();
builder.build(context, sortedSamples);
@@ -258,7 +259,7 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
try {
- if (!saveSamples()){
+ if (!saveSamples()) {
return false;
}
@@ -277,16 +278,17 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
// Wait until sufficient number of fragments have submitted samples, or proceed after xx ms passed
// TODO: this should be polling.
- if (val < fragmentsBeforeProceed)
+ if (val < fragmentsBeforeProceed) {
Thread.sleep(10);
+ }
for (int i = 0; i < 100 && finalTable == null; i++) {
finalTable = tableMap.get(finalTableKey);
- if (finalTable != null){
+ if (finalTable != null) {
break;
}
Thread.sleep(10);
}
- if (finalTable == null){
+ if (finalTable == null) {
buildTable();
}
finalTable = tableMap.get(finalTableKey);
@@ -429,8 +431,9 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
// if we got IterOutcome.NONE while getting partition vectors, and there are no batches on the queue, then we are
// done
- if (upstreamNone && (batchQueue == null || batchQueue.size() == 0))
+ if (upstreamNone && (batchQueue == null || batchQueue.size() == 0)) {
return IterOutcome.NONE;
+ }
// if there are batches on the queue, process them first, rather than calling incoming.next()
if (batchQueue != null && batchQueue.size() > 0) {
@@ -461,7 +464,7 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
// If this is the first iteration, we need to generate the partition vectors before we can proceed
if (this.first && upstream == IterOutcome.OK_NEW_SCHEMA) {
- if (!getPartitionVectors()){
+ if (!getPartitionVectors()) {
cleanup();
return IterOutcome.STOP;
}
@@ -490,8 +493,9 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
// we need to generate a new schema, even if the outcome is IterOutcome.OK After that we can reuse the schema.
if (this.startedUnsampledBatches == false) {
this.startedUnsampledBatches = true;
- if (upstream == IterOutcome.OK)
+ if (upstream == IterOutcome.OK) {
upstream = IterOutcome.OK_NEW_SCHEMA;
+ }
}
switch (upstream) {
case NONE:
@@ -560,8 +564,9 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
int count = 0;
for (Ordering od : popConfig.getOrderings()) {
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
- if (collector.hasErrors())
+ if (collector.hasErrors()) {
throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
cg.setMappingSet(incomingMapping);
ClassGenerator.HoldingContainer left = cg.addExpr(expr, false);
cg.setMappingSet(partitionMapping);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
index 051a590f2..7f3a96637 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
@@ -120,7 +120,9 @@ public class ProducerConsumerBatch extends AbstractRecordBatch {
@Override
public void run() {
try {
- if (stop) return;
+ if (stop) {
+ return;
+ }
outer:
while (true) {
IterOutcome upstream = incoming.next();
@@ -208,4 +210,5 @@ public class ProducerConsumerBatch extends AbstractRecordBatch {
this.failed = failed;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index ec29cac55..a1a834052 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -195,55 +195,64 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
private boolean doAlloc() {
//Allocate vv in the allocationVectors.
- for(ValueVector v : this.allocationVectors){
+ for (ValueVector v : this.allocationVectors) {
//AllocationHelper.allocate(v, remainingRecordCount, 250);
- if (!v.allocateNewSafe())
+ if (!v.allocateNewSafe()) {
return false;
+ }
}
//Allocate vv for complexWriters.
- if (complexWriters == null)
+ if (complexWriters == null) {
return true;
+ }
- for (ComplexWriter writer : complexWriters)
+ for (ComplexWriter writer : complexWriters) {
writer.allocate();
+ }
return true;
}
private void setValueCount(int count) {
- for(ValueVector v : allocationVectors){
+ for (ValueVector v : allocationVectors) {
ValueVector.Mutator m = v.getMutator();
m.setValueCount(count);
}
- if (complexWriters == null)
+ if (complexWriters == null) {
return;
+ }
- for (ComplexWriter writer : complexWriters)
+ for (ComplexWriter writer : complexWriters) {
writer.setValueCount(count);
+ }
}
/** hack to make ref and full work together... need to figure out if this is still necessary. **/
- private FieldReference getRef(NamedExpression e){
+ private FieldReference getRef(NamedExpression e) {
FieldReference ref = e.getRef();
PathSegment seg = ref.getRootSegment();
-// if(seg.isNamed() && "output".contentEquals(seg.getNameSegment().getPath())){
+// if (seg.isNamed() && "output".contentEquals(seg.getNameSegment().getPath())) {
// return new FieldReference(ref.getPath().toString().subSequence(7, ref.getPath().length()), ref.getPosition());
// }
return ref;
}
- private boolean isAnyWildcard(List<NamedExpression> exprs){
- for(NamedExpression e : exprs){
- if(isWildcard(e)) return true;
+ private boolean isAnyWildcard(List<NamedExpression> exprs) {
+ for (NamedExpression e : exprs) {
+ if (isWildcard(e)) {
+ return true;
+ }
}
return false;
}
- private boolean isWildcard(NamedExpression ex){
- if( !(ex.getExpr() instanceof SchemaPath)) return false;
+ private boolean isWildcard(NamedExpression ex) {
+ if ( !(ex.getExpr() instanceof SchemaPath)) {
+ return false;
+ }
NameSegment expr = ((SchemaPath)ex.getExpr()).getRootSegment();
NameSegment ref = ex.getRef().getRootSegment();
return ref.getPath().equals("*") && expr.getPath().equals("*");
@@ -266,7 +275,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
ClassifierResult result = new ClassifierResult();
boolean classify = isClassificationNeeded(exprs);
- for(int i = 0; i < exprs.size(); i++){
+ for (int i = 0; i < exprs.size(); i++) {
final NamedExpression namedExpression = exprs.get(i);
result.clear();
@@ -278,14 +287,16 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
Integer value = result.prefixMap.get(result.prefix);
if (value != null && value.intValue() == 1) {
int k = 0;
- for(VectorWrapper<?> wrapper : incoming) {
+ for (VectorWrapper<?> wrapper : incoming) {
ValueVector vvIn = wrapper.getValueVector();
SchemaPath originalPath = vvIn.getField().getPath();
if (k > result.outputNames.size()-1) {
assert false;
}
String name = result.outputNames.get(k++); // get the renamed column names
- if (name == EMPTY_STRING) continue;
+ if (name == EMPTY_STRING) {
+ continue;
+ }
FieldReference ref = new FieldReference(name);
TransferPair tp = wrapper.getValueVector().getTransferPair(ref);
transfers.add(tp);
@@ -293,17 +304,19 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
}
} else if (value != null && value.intValue() > 1) { // subsequent wildcards should do a copy of incoming valuevectors
int k = 0;
- for(VectorWrapper<?> wrapper : incoming) {
+ for (VectorWrapper<?> wrapper : incoming) {
ValueVector vvIn = wrapper.getValueVector();
SchemaPath originalPath = vvIn.getField().getPath();
if (k > result.outputNames.size()-1) {
assert false;
}
String name = result.outputNames.get(k++); // get the renamed column names
- if (name == EMPTY_STRING) continue;
+ if (name == EMPTY_STRING) {
+ continue;
+ }
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incoming, collector, context.getFunctionRegistry() );
- if(collector.hasErrors()){
+ if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
@@ -333,16 +346,15 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
final MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
- if(collector.hasErrors()){
+ if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
// add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
- if(expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
+ if (expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
&& !((ValueVectorReadExpression) expr).hasReadPath()
&& !isAnyWildcard
- && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])
- ) {
+ && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])) {
ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
TypedFieldId id = vectorRead.getFieldId();
@@ -358,8 +370,9 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
((DrillFuncHolderExpr) expr).isComplexWriterFuncHolder()) {
// Need to process ComplexWriter function evaluation.
// Lazy initialization of the list of complex writers, if not done yet.
- if (complexWriters == null)
+ if (complexWriters == null) {
complexWriters = Lists.newArrayList();
+ }
// The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
((DrillComplexWriterFuncHolder) ((DrillFuncHolderExpr) expr).getHolder()).setReference(namedExpression.getRef());
@@ -419,9 +432,11 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
private boolean isClassificationNeeded(List<NamedExpression> exprs) {
boolean needed = false;
- for(int i = 0; i < exprs.size(); i++){
+ for (int i = 0; i < exprs.size(); i++) {
final NamedExpression ex = exprs.get(i);
- if (!(ex.getExpr() instanceof SchemaPath)) continue;
+ if (!(ex.getExpr() instanceof SchemaPath)) {
+ continue;
+ }
NameSegment expr = ((SchemaPath) ex.getExpr()).getRootSegment();
NameSegment ref = ex.getRef().getRootSegment();
boolean refHasPrefix = ref.getPath().contains(StarColumnHelper.PREFIX_DELIMITER);
@@ -530,7 +545,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
result.outputNames.add(EMPTY_STRING); // initialize
}
- for(VectorWrapper<?> wrapper : incoming) {
+ for (VectorWrapper<?> wrapper : incoming) {
ValueVector vvIn = wrapper.getValueVector();
String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
// get the prefix of the name
@@ -586,7 +601,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
result.outputNames.add(EMPTY_STRING); // initialize
}
- for(VectorWrapper<?> wrapper : incoming) {
+ for (VectorWrapper<?> wrapper : incoming) {
ValueVector vvIn = wrapper.getValueVector();
String name = vvIn.getField().getPath().getRootSegment().getPath();
String[] components = name.split(StarColumnHelper.PREFIX_DELIMITER, 2);
@@ -627,7 +642,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
}
int k = 0;
- for(VectorWrapper<?> wrapper : incoming) {
+ for (VectorWrapper<?> wrapper : incoming) {
ValueVector vvIn = wrapper.getValueVector();
String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
index b36bd92a9..49ad39071 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
@@ -39,27 +39,25 @@ public abstract class ProjectorTemplate implements Projector {
private SelectionVector4 vector4;
private SelectionVectorMode svMode;
- public ProjectorTemplate() throws SchemaChangeException{
+ public ProjectorTemplate() throws SchemaChangeException {
}
@Override
public final int projectRecords(int startIndex, final int recordCount, int firstOutputIndex) {
- switch(svMode){
+ switch (svMode) {
case FOUR_BYTE:
throw new UnsupportedOperationException();
-
case TWO_BYTE:
final int count = recordCount;
- for(int i = 0; i < count; i++, firstOutputIndex++){
- if (!doEval(vector2.getIndex(i), firstOutputIndex))
+ for (int i = 0; i < count; i++, firstOutputIndex++) {
+ if (!doEval(vector2.getIndex(i), firstOutputIndex)) {
return i;
+ }
}
return recordCount;
-
case NONE:
-
final int countN = recordCount;
int i;
for (i = startIndex; i < startIndex + countN; i++, firstOutputIndex++) {
@@ -68,18 +66,16 @@ public abstract class ProjectorTemplate implements Projector {
}
}
if (i < startIndex + recordCount || startIndex > 0) {
- for(TransferPair t : transfers){
+ for (TransferPair t : transfers) {
t.splitAndTransfer(startIndex, i - startIndex);
}
return i - startIndex;
}
- for(TransferPair t : transfers){
+ for (TransferPair t : transfers) {
t.transfer();
}
return recordCount;
-
-
default:
throw new UnsupportedOperationException();
}
@@ -89,7 +85,7 @@ public abstract class ProjectorTemplate implements Projector {
public final void setup(FragmentContext context, RecordBatch incoming, RecordBatch outgoing, List<TransferPair> transfers) throws SchemaChangeException{
this.svMode = incoming.getSchema().getSelectionVectorMode();
- switch(svMode){
+ switch (svMode) {
case FOUR_BYTE:
this.vector4 = incoming.getSelectionVector4();
break;
@@ -104,8 +100,4 @@ public abstract class ProjectorTemplate implements Projector {
public abstract void doSetup(@Named("context") FragmentContext context, @Named("incoming") RecordBatch incoming, @Named("outgoing") RecordBatch outgoing);
public abstract boolean doEval(@Named("inIndex") int inIndex, @Named("outIndex") int outIndex);
-
-
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
index 8116869c4..419dc8587 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
@@ -40,7 +40,7 @@ public class RecordBatchData {
private int recordCount;
VectorContainer container = new VectorContainer();
- public RecordBatchData(VectorAccessible batch){
+ public RecordBatchData(VectorAccessible batch) {
List<ValueVector> vectors = Lists.newArrayList();
if (batch instanceof RecordBatch && batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE) {
this.sv2 = ((RecordBatch)batch).getSelectionVector2().clone();
@@ -48,8 +48,10 @@ public class RecordBatchData {
this.sv2 = null;
}
- for(VectorWrapper<?> v : batch){
- if(v.isHyper()) throw new UnsupportedOperationException("Record batch data can't be created based on a hyper batch.");
+ for (VectorWrapper<?> v : batch) {
+ if (v.isHyper()) {
+ throw new UnsupportedOperationException("Record batch data can't be created based on a hyper batch.");
+ }
TransferPair tp = v.getValueVector().getTransferPair();
tp.transfer();
vectors.add(tp.getTo());
@@ -67,9 +69,10 @@ public class RecordBatchData {
container.buildSchema(mode);
}
- public int getRecordCount(){
+ public int getRecordCount() {
return recordCount;
}
+
public List<ValueVector> getVectors() {
List<ValueVector> vectors = Lists.newArrayList();
for (VectorWrapper w : container) {
@@ -91,4 +94,5 @@ public class RecordBatchData {
public VectorContainer getContainer() {
return container;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
index 3a374910c..19f542302 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
@@ -82,8 +82,6 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
return builder.getSv4();
}
-
-
@Override
public void cleanup() {
builder.clear();
@@ -93,15 +91,14 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
@Override
public IterOutcome innerNext() {
- if(schema != null){
- if(getSelectionVector4().next()){
+ if (schema != null) {
+ if (getSelectionVector4().next()) {
return IterOutcome.OK;
- }else{
+ } else {
return IterOutcome.NONE;
}
}
-
try{
outer: while (true) {
IterOutcome upstream = incoming.next();
@@ -114,13 +111,15 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
return upstream;
case OK_NEW_SCHEMA:
// only change in the case that the schema truly changes. Artificial schema changes are ignored.
- if(!incoming.getSchema().equals(schema)){
- if (schema != null) throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ if (!incoming.getSchema().equals(schema)) {
+ if (schema != null) {
+ throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ }
this.schema = incoming.getSchema();
}
// fall through.
case OK:
- if(!builder.add(incoming)){
+ if (!builder.add(incoming)) {
throw new UnsupportedOperationException("Sort doesn't currently support doing an external sort.");
};
break;
@@ -129,7 +128,7 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
}
}
- if (schema == null || builder.isEmpty()){
+ if (schema == null || builder.isEmpty()) {
// builder may be null at this point if the first incoming batch is empty
return IterOutcome.NONE;
}
@@ -141,7 +140,7 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
return IterOutcome.OK_NEW_SCHEMA;
- }catch(SchemaChangeException | ClassTransformationException | IOException ex){
+ } catch(SchemaChangeException | ClassTransformationException | IOException ex) {
kill(false);
logger.error("Failure during query", ex);
context.fail(ex);
@@ -167,11 +166,13 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
ClassGenerator<Sorter> g = cg.getRoot();
g.setMappingSet(mainMapping);
- for(Ordering od : orderings){
+ for(Ordering od : orderings) {
// first, we rewrite the evaluation stack for each side of the comparison.
ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector,context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
g.setMappingSet(leftMapping);
HoldingContainer left = g.addExpr(expr, false);
g.setMappingSet(rightMapping);
@@ -183,7 +184,7 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
- if(od.getDirection() == Direction.ASCENDING){
+ if (od.getDirection() == Direction.ASCENDING) {
jc._then()._return(out.getValue());
}else{
jc._then()._return(out.getValue().minus());
@@ -193,8 +194,6 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
g.getEvalBlock()._return(JExpr.lit(0));
return context.getImplementationClass(cg);
-
-
}
@Override
@@ -207,7 +206,4 @@ public class SortBatch extends AbstractRecordBatch<Sort> {
incoming.kill(sendUpstream);
}
-
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
index 80b4ef664..707c41c0d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
@@ -49,14 +49,14 @@ public class SortRecordBatchBuilder {
private SelectionVector4 sv4;
final PreAllocator svAllocator;
- public SortRecordBatchBuilder(BufferAllocator a, long maxBytes){
+ public SortRecordBatchBuilder(BufferAllocator a, long maxBytes) {
this.maxBytes = maxBytes;
this.svAllocator = a.getNewPreAllocator();
}
- private long getSize(VectorAccessible batch){
+ private long getSize(VectorAccessible batch) {
long bytes = 0;
- for(VectorWrapper<?> v : batch){
+ for (VectorWrapper<?> v : batch) {
bytes += v.getValueVector().getBufferSize();
}
return bytes;
@@ -68,8 +68,10 @@ public class SortRecordBatchBuilder {
* @return True if the requested add completed successfully. Returns false in the case that this builder is full and cannot receive additional packages.
* @throws SchemaChangeException
*/
- public boolean add(VectorAccessible batch){
- if(batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) throw new UnsupportedOperationException("A sort cannot currently work against a sv4 batch.");
+ public boolean add(VectorAccessible batch) {
+ if (batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) {
+ throw new UnsupportedOperationException("A sort cannot currently work against a sv4 batch.");
+ }
if (batch.getRecordCount() == 0 && batches.size() > 0) {
return true; // skip over empty record batches.
}
@@ -78,9 +80,15 @@ public class SortRecordBatchBuilder {
if (batchBytes == 0 && batches.size() > 0) {
return true;
}
- if(batchBytes + runningBytes > maxBytes) return false; // enough data memory.
- if(runningBatches+1 > Character.MAX_VALUE) return false; // allowed in batch.
- if(!svAllocator.preAllocate(batch.getRecordCount()*4)) return false; // sv allocation available.
+ if (batchBytes + runningBytes > maxBytes) {
+ return false; // enough data memory.
+ }
+ if (runningBatches+1 > Character.MAX_VALUE) {
+ return false; // allowed in batch.
+ }
+ if (!svAllocator.preAllocate(batch.getRecordCount()*4)) {
+ return false; // sv allocation available.
+ }
RecordBatchData bd = new RecordBatchData(batch);
@@ -126,15 +134,19 @@ public class SortRecordBatchBuilder {
}
}
- public boolean isEmpty(){
+ public boolean isEmpty() {
return batches.isEmpty();
}
public void build(FragmentContext context, VectorContainer outputContainer) throws SchemaChangeException{
outputContainer.clear();
- if(batches.keySet().size() > 1) throw new SchemaChangeException("Sort currently only supports a single schema.");
- if(batches.size() > Character.MAX_VALUE) throw new SchemaChangeException("Sort cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
- if(batches.keys().size() < 1){
+ if (batches.keySet().size() > 1) {
+ throw new SchemaChangeException("Sort currently only supports a single schema.");
+ }
+ if (batches.size() > Character.MAX_VALUE) {
+ throw new SchemaChangeException("Sort cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
+ }
+ if (batches.keys().size() < 1) {
assert false : "Invalid to have an empty set of batches with no schemas.";
}
sv4 = new SelectionVector4(svAllocator.getAllocation(), recordCount, Character.MAX_VALUE);
@@ -142,12 +154,12 @@ public class SortRecordBatchBuilder {
List<RecordBatchData> data = batches.get(schema);
// now we're going to generate the sv4 pointers
- switch(schema.getSelectionVectorMode()){
+ switch (schema.getSelectionVectorMode()) {
case NONE: {
int index = 0;
int recordBatchId = 0;
- for(RecordBatchData d : data){
- for(int i =0; i < d.getRecordCount(); i++, index++){
+ for (RecordBatchData d : data) {
+ for (int i =0; i < d.getRecordCount(); i++, index++) {
sv4.set(index, recordBatchId, i);
}
recordBatchId++;
@@ -157,8 +169,8 @@ public class SortRecordBatchBuilder {
case TWO_BYTE: {
int index = 0;
int recordBatchId = 0;
- for(RecordBatchData d : data){
- for(int i =0; i < d.getRecordCount(); i++, index++){
+ for (RecordBatchData d : data) {
+ for (int i =0; i < d.getRecordCount(); i++, index++) {
sv4.set(index, recordBatchId, (int) d.getSv2().getIndex(i));
}
// might as well drop the selection vector since we'll stop using it now.
@@ -173,13 +185,13 @@ public class SortRecordBatchBuilder {
// next, we'll create lists of each of the vector types.
ArrayListMultimap<MaterializedField, ValueVector> vectors = ArrayListMultimap.create();
- for(RecordBatchData rbd : batches.values()){
- for(ValueVector v : rbd.getVectors()){
+ for (RecordBatchData rbd : batches.values()) {
+ for (ValueVector v : rbd.getVectors()) {
vectors.put(v.getField(), v);
}
}
- for(MaterializedField f : schema){
+ for (MaterializedField f : schema) {
List<ValueVector> v = vectors.get(f);
outputContainer.addHyperList(v, false);
}
@@ -191,11 +203,13 @@ public class SortRecordBatchBuilder {
return sv4;
}
- public void clear(){
- for(RecordBatchData d : batches.values()){
+ public void clear() {
+ for (RecordBatchData d : batches.values()) {
d.container.clear();
}
- if(sv4 != null) sv4.clear();
+ if (sv4 != null) {
+ sv4.clear();
+ }
}
public List<VectorContainer> getHeldRecordBatches() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/trace/TraceRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/trace/TraceRecordBatch.java
index 609cb29bb..6d909623a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/trace/TraceRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/trace/TraceRecordBatch.java
@@ -88,10 +88,11 @@ public class TraceRecordBatch extends AbstractSingleRecordBatch<Trace> {
@Override
public int getRecordCount() {
- if (sv == null)
+ if (sv == null) {
return incoming.getRecordCount();
- else
+ } else {
return sv.getCount();
+ }
}
/**
@@ -125,8 +126,9 @@ public class TraceRecordBatch extends AbstractSingleRecordBatch<Trace> {
@Override
protected void setupNewSchema() throws SchemaChangeException {
/* Trace operator does not deal with hyper vectors yet */
- if (incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE)
+ if (incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) {
throw new SchemaChangeException("Trace operator does not work with hyper vectors");
+ }
/*
* we have a new schema, clear our existing container to load the new value vectors
@@ -152,8 +154,9 @@ public class TraceRecordBatch extends AbstractSingleRecordBatch<Trace> {
@Override
public void cleanup() {
/* Release the selection vector */
- if (sv != null)
+ if (sv != null) {
sv.clear();
+ }
/* Close the file descriptors */
try {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
index 0e69bcf71..171d12c9c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
@@ -111,7 +111,9 @@ public class IteratorValidatorBatchIterator implements RecordBatch {
@Override
public IterOutcome next() {
- if(state == IterOutcome.NONE ) throw new IllegalStateException("The incoming iterator has previously moved to a state of NONE. You should not be attempting to call next() again.");
+ if (state == IterOutcome.NONE ) {
+ throw new IllegalStateException("The incoming iterator has previously moved to a state of NONE. You should not be attempting to call next() again.");
+ }
state = incoming.next();
if (first && state == IterOutcome.NONE) {
throw new IllegalStateException("The incoming iterator returned a state of NONE on the first batch. There should always be at least one batch output before returning NONE");
@@ -119,14 +121,16 @@ public class IteratorValidatorBatchIterator implements RecordBatch {
if (first && state == IterOutcome.OK) {
throw new IllegalStateException("The incoming iterator returned a state of OK on the first batch. There should always be a new schema on the first batch. Incoming: " + incoming.getClass().getName());
}
- if (first) first = !first;
+ if (first) {
+ first = !first;
+ }
- if(state == IterOutcome.OK || state == IterOutcome.OK_NEW_SCHEMA) {
+ if (state == IterOutcome.OK || state == IterOutcome.OK_NEW_SCHEMA) {
BatchSchema schema = incoming.getSchema();
- if(schema.getFieldCount() == 0){
+ if (schema.getFieldCount() == 0) {
throw new IllegalStateException ("Incoming batch has an empty schema. This is not allowed.");
}
- if(incoming.getRecordCount() > MAX_BATCH_SIZE){
+ if (incoming.getRecordCount() > MAX_BATCH_SIZE) {
throw new IllegalStateException (String.format("Incoming batch of %s has size %d, which is beyond the limit of %d", incoming.getClass().getName(), incoming.getRecordCount(), MAX_BATCH_SIZE));
}
@@ -157,4 +161,5 @@ public class IteratorValidatorBatchIterator implements RecordBatch {
public VectorContainer getOutgoingContainer() {
throw new UnsupportedOperationException(String.format(" You should not call getOutgoingContainer() for class %s", this.getClass().getCanonicalName()));
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorInjector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorInjector.java
index 428f335a5..2f7f531e8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorInjector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorInjector.java
@@ -36,10 +36,11 @@ public class IteratorValidatorInjector extends
IteratorValidatorInjector inject = new IteratorValidatorInjector();
PhysicalOperator newOp = root.accept(inject, context);
- if( !(newOp instanceof FragmentRoot) ) throw new IllegalStateException("This shouldn't happen.");
+ if ( !(newOp instanceof FragmentRoot) ) {
+ throw new IllegalStateException("This shouldn't happen.");
+ }
return (FragmentRoot) newOp;
-
}
/**
@@ -67,12 +68,11 @@ public class IteratorValidatorInjector extends
}
/* Inject trace operator */
- if (newChildren.size() > 0){
+ if (newChildren.size() > 0) {
newOp = op.getNewWithChildren(newChildren);
newOp.setOperatorId(op.getOperatorId());
}
-
return newOp;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
index 237007046..9359ea188 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
@@ -143,14 +143,24 @@ public class BatchGroup implements VectorAccessible {
}
public void cleanup() throws IOException {
- if (sv2 != null) sv2.clear();
- if (outputStream != null) outputStream.close();
- if (inputStream != null) inputStream.close();
- if (fs != null && fs.exists(path)) fs.delete(path, false);
+ if (sv2 != null) {
+ sv2.clear();
+ }
+ if (outputStream != null) {
+ outputStream.close();
+ }
+ if (inputStream != null) {
+ inputStream.close();
+ }
+ if (fs != null && fs.exists(path)) {
+ fs.delete(path, false);
+ }
}
public void closeOutputStream() throws IOException {
- if (outputStream != null) outputStream.close();
+ if (outputStream != null) {
+ outputStream.close();
+ }
}
@Override
@@ -181,4 +191,5 @@ public class BatchGroup implements VectorAccessible {
public Iterator<VectorWrapper<?>> iterator() {
return currentContainer.iterator();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
index 505f56745..52249e9e7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
@@ -192,12 +192,12 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
@Override
public IterOutcome innerNext() {
- if(schema != null){
+ if (schema != null) {
if (spillCount == 0) {
- if(schema != null){
- if(getSelectionVector4().next()){
+ if (schema != null) {
+ if (getSelectionVector4().next()) {
return IterOutcome.OK;
- }else{
+ } else {
return IterOutcome.NONE;
}
}
@@ -206,12 +206,12 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
w.start();
// int count = selector.next();
int count = copier.next(targetRecordCount);
- if(count > 0){
+ if (count > 0) {
long t = w.elapsed(TimeUnit.MICROSECONDS);
logger.debug("Took {} us to merge {} records", t, count);
container.setRecordCount(count);
return IterOutcome.OK;
- }else{
+ } else {
logger.debug("copier returned 0 records");
return IterOutcome.NONE;
}
@@ -236,8 +236,10 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
return upstream;
case OK_NEW_SCHEMA:
// only change in the case that the schema truly changes. Artificial schema changes are ignored.
- if(!incoming.getSchema().equals(schema)){
- if (schema != null) throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ if (!incoming.getSchema().equals(schema)) {
+ if (schema != null) {
+ throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
+ }
this.schema = incoming.getSchema();
this.sorter = createNewSorter(context, incoming);
}
@@ -249,7 +251,9 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
}
break;
}
- if (first) first = false;
+ if (first) {
+ first = false;
+ }
totalSizeInMemory += getBufferSize(incoming);
SelectionVector2 sv2;
if (incoming.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.TWO_BYTE) {
@@ -291,7 +295,9 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
break;
case OUT_OF_MEMORY:
highWaterMark = totalSizeInMemory;
- if (batchesSinceLastSpill > 2) mergeAndSpill();
+ if (batchesSinceLastSpill > 2) {
+ mergeAndSpill();
+ }
batchesSinceLastSpill = 0;
break;
default:
@@ -348,7 +354,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
return IterOutcome.OK_NEW_SCHEMA;
- }catch(SchemaChangeException | ClassTransformationException | IOException ex){
+ } catch(SchemaChangeException | ClassTransformationException | IOException ex) {
kill(false);
logger.error("Failure during query", ex);
context.fail(ex);
@@ -502,11 +508,13 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
ClassGenerator<MSorter> g = cg.getRoot();
g.setMappingSet(mainMapping);
- for(Ordering od : orderings){
+ for (Ordering od : orderings) {
// first, we rewrite the evaluation stack for each side of the comparison.
ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
g.setMappingSet(leftMapping);
HoldingContainer left = g.addExpr(expr, false);
g.setMappingSet(rightMapping);
@@ -518,7 +526,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
- if(od.getDirection() == Direction.ASCENDING){
+ if (od.getDirection() == Direction.ASCENDING) {
jc._then()._return(out.getValue());
}else{
jc._then()._return(out.getValue().minus());
@@ -547,11 +555,13 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
private void generateComparisons(ClassGenerator g, VectorAccessible batch) throws SchemaChangeException {
g.setMappingSet(MAIN_MAPPING);
- for(Ordering od : popConfig.getOrderings()){
+ for (Ordering od : popConfig.getOrderings()) {
// first, we rewrite the evaluation stack for each side of the comparison.
ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector,context.getFunctionRegistry());
- if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ if (collector.hasErrors()) {
+ throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
+ }
g.setMappingSet(LEFT_MAPPING);
HoldingContainer left = g.addExpr(expr, false);
g.setMappingSet(RIGHT_MAPPING);
@@ -563,7 +573,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
- if(od.getDirection() == Direction.ASCENDING){
+ if (od.getDirection() == Direction.ASCENDING) {
jc._then()._return(out.getValue());
}else{
jc._then()._return(out.getValue().minus());
@@ -590,7 +600,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
}
List<VectorAllocator> allocators = Lists.newArrayList();
- for(VectorWrapper<?> i : batch){
+ for (VectorWrapper<?> i : batch) {
ValueVector v = TypeHelper.getNewVector(i.getField(), copierAllocator);
outputContainer.add(v);
allocators.add(VectorAllocator.getAllocator(v, 110));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/MSortTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/MSortTemplate.java
index df79b1acb..3fd744ff8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/MSortTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/MSortTemplate.java
@@ -84,7 +84,7 @@ public abstract class MSortTemplate implements MSorter, IndexedSortable{
while (l < rightStart) {
aux.set(o++, vector4.get(l++));
}
- while (r < rightEnd){
+ while (r < rightEnd) {
aux.set(o++, vector4.get(r++));
}
assert o == outStart + (rightEnd - leftStart);
@@ -97,7 +97,7 @@ public abstract class MSortTemplate implements MSorter, IndexedSortable{
}
@Override
- public void sort(VectorContainer container){
+ public void sort(VectorContainer container) {
Stopwatch watch = new Stopwatch();
watch.start();
while (runStarts.size() > 1) {
@@ -109,9 +109,13 @@ public abstract class MSortTemplate implements MSorter, IndexedSortable{
int left = runStarts.poll();
int right = runStarts.poll();
Integer end = runStarts.peek();
- if (end == null) end = vector4.getTotalCount();
+ if (end == null) {
+ end = vector4.getTotalCount();
+ }
outIndex = merge(left, right, end, outIndex);
- if (outIndex < vector4.getTotalCount()) newRunStarts.add(outIndex);
+ if (outIndex < vector4.getTotalCount()) {
+ newRunStarts.add(outIndex);
+ }
}
if (outIndex < vector4.getTotalCount()) {
copyRun(outIndex, vector4.getTotalCount());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
index aa2f78665..9beef3945 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
@@ -35,8 +35,9 @@ public class StarColumnHelper {
List<String> fieldNames = type.getFieldNames();
for (String s : fieldNames) {
- if (s.startsWith(STAR_COLUMN))
+ if (s.startsWith(STAR_COLUMN)) {
return true;
+ }
}
return false;
@@ -71,8 +72,9 @@ public class StarColumnHelper {
// Given a set of prefixes, check if a regular column is subsumed by any of the prefixed star column in the set.
public static boolean subsumeRegColumn(Set<String> prefixes, String fieldName) {
- if (isPrefixedStarColumn(fieldName))
+ if (isPrefixedStarColumn(fieldName)) {
return false; // only applies to regular column.
+ }
return prefixes.contains(extractColumnPrefix(fieldName));
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
index c33bb22ce..87a1ea3eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
@@ -130,25 +130,25 @@ public class DrillCostBase implements DrillRelOptCost {
this.memory = memory;
}
- @Override
- public double getRows() {
- return rowCount;
- }
-
- @Override
- public double getCpu() {
- return cpu;
- }
-
- @Override
- public double getIo() {
- return io;
- }
-
- @Override
- public double getNetwork() {
- return network;
- }
+ @Override
+ public double getRows() {
+ return rowCount;
+ }
+
+ @Override
+ public double getCpu() {
+ return cpu;
+ }
+
+ @Override
+ public double getIo() {
+ return io;
+ }
+
+ @Override
+ public double getNetwork() {
+ return network;
+ }
public double getMemory() {
return memory;
@@ -159,31 +159,31 @@ public class DrillCostBase implements DrillRelOptCost {
return Util.hashCode(rowCount) + Util.hashCode(cpu) + Util.hashCode(io) + Util.hashCode(network);
}
- @Override
- public boolean isInfinite() {
+ @Override
+ public boolean isInfinite() {
return (this == INFINITY)
|| (this.cpu == Double.POSITIVE_INFINITY)
|| (this.io == Double.POSITIVE_INFINITY)
|| (this.network == Double.POSITIVE_INFINITY)
|| (this.rowCount == Double.POSITIVE_INFINITY);
- }
-
- @Override
- public boolean equals(RelOptCost other) {
- // here we compare the individual components similar to VolcanoCost, however
- // an alternative would be to add up the components and compare the total.
- // Note that VolcanoPlanner mainly uses isLe() and isLt() for cost comparisons,
- // not equals().
+ }
+
+ @Override
+ public boolean equals(RelOptCost other) {
+ // here we compare the individual components similar to VolcanoCost, however
+ // an alternative would be to add up the components and compare the total.
+ // Note that VolcanoPlanner mainly uses isLe() and isLt() for cost comparisons,
+ // not equals().
return this == other
|| (other instanceof DrillCostBase
&& (this.cpu == ((DrillCostBase) other).cpu)
&& (this.io == ((DrillCostBase) other).io)
&& (this.network == ((DrillCostBase) other).network)
&& (this.rowCount == ((DrillCostBase) other).rowCount));
- }
+ }
- @Override
- public boolean isEqWithEpsilon(RelOptCost other) {
+ @Override
+ public boolean isEqWithEpsilon(RelOptCost other) {
if (!(other instanceof DrillCostBase)) {
return false;
}
@@ -193,7 +193,7 @@ public class DrillCostBase implements DrillRelOptCost {
&& (Math.abs(this.io - that.io) < RelOptUtil.EPSILON)
&& (Math.abs(this.network - that.network) < RelOptUtil.EPSILON)
&& (Math.abs(this.rowCount - that.rowCount) < RelOptUtil.EPSILON));
- }
+ }
@Override
public boolean isLe(RelOptCost other) {
@@ -216,8 +216,8 @@ public class DrillCostBase implements DrillRelOptCost {
);
}
- @Override
- public RelOptCost plus(RelOptCost other) {
+ @Override
+ public RelOptCost plus(RelOptCost other) {
DrillCostBase that = (DrillCostBase) other;
if ((this == INFINITY) || (that == INFINITY)) {
return INFINITY;
@@ -228,10 +228,10 @@ public class DrillCostBase implements DrillRelOptCost {
this.io + that.io,
this.network + that.network,
this.memory + that.memory);
- }
+ }
- @Override
- public RelOptCost minus(RelOptCost other) {
+ @Override
+ public RelOptCost minus(RelOptCost other) {
if (this == INFINITY) {
return this;
}
@@ -242,18 +242,18 @@ public class DrillCostBase implements DrillRelOptCost {
this.io - that.io,
this.network - that.network,
this.memory - that.memory);
- }
+ }
- @Override
- public RelOptCost multiplyBy(double factor) {
+ @Override
+ public RelOptCost multiplyBy(double factor) {
if (this == INFINITY) {
return this;
}
return new DrillCostBase(rowCount * factor, cpu * factor, io * factor, network * factor);
- }
+ }
- @Override
- public double divideBy(RelOptCost cost) {
+ @Override
+ public double divideBy(RelOptCost cost) {
// Compute the geometric average of the ratios of all of the factors
// which are non-zero and finite.
DrillCostBase that = (DrillCostBase) cost;
@@ -292,7 +292,7 @@ public class DrillCostBase implements DrillRelOptCost {
return 1.0;
}
return Math.pow(d, 1 / n);
- }
+ }
@Override
public String toString() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelOptCost.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelOptCost.java
index 88e4e28bf..73c6c72d9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelOptCost.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelOptCost.java
@@ -22,7 +22,6 @@ import org.eigenbase.relopt.RelOptCost;
public interface DrillRelOptCost extends RelOptCost {
- double getNetwork();
+ double getNetwork();
}
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/Fragment.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/Fragment.java
index 47d6f140b..e527960ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/Fragment.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/Fragment.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.physical.base.PhysicalOperator;
import com.google.common.collect.Lists;
-public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
+public class Fragment implements Iterable<Fragment.ExchangeFragmentPair> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Fragment.class);
private PhysicalOperator root;
@@ -34,19 +34,21 @@ public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
private final List<ExchangeFragmentPair> receivingExchangePairs = Lists.newLinkedList();
private Stats stats = new Stats();
- public void addOperator(PhysicalOperator o){
- if(root == null){
+ public void addOperator(PhysicalOperator o) {
+ if (root == null) {
root = o;
}
}
public void addSendExchange(Exchange e) throws FragmentSetupException{
- if(sendingExchange != null) throw new FragmentSetupException("Fragment was trying to add a second SendExchange. ");
+ if (sendingExchange != null) {
+ throw new FragmentSetupException("Fragment was trying to add a second SendExchange. ");
+ }
addOperator(e);
sendingExchange = e;
}
- public void addReceiveExchange(Exchange e, Fragment fragment){
+ public void addReceiveExchange(Exchange e, Fragment fragment) {
this.receivingExchangePairs.add(new ExchangeFragmentPair(e, fragment));
}
@@ -67,28 +69,32 @@ public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
return sendingExchange;
}
-// public <T, V> T accept(FragmentVisitor<T, V> visitor, V extra){
+// public <T, V> T accept(FragmentVisitor<T, V> visitor, V extra) {
// return visitor.visit(this, extra);
// }
- public Stats getStats(){
+ public Stats getStats() {
return stats;
}
public class ExchangeFragmentPair {
private Exchange exchange;
private Fragment node;
+
public ExchangeFragmentPair(Exchange exchange, Fragment node) {
super();
this.exchange = exchange;
this.node = node;
}
+
public Exchange getExchange() {
return exchange;
}
+
public Fragment getNode() {
return node;
}
+
@Override
public int hashCode() {
final int prime = 31;
@@ -97,13 +103,12 @@ public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
result = prime * result + ((node == null) ? 0 : node.hashCode());
return result;
}
+
@Override
public String toString() {
return "ExchangeFragmentPair [exchange=" + exchange + "]";
}
-
-
}
@Override
@@ -119,22 +124,44 @@ public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
@Override
public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
Fragment other = (Fragment) obj;
if (receivingExchangePairs == null) {
- if (other.receivingExchangePairs != null) return false;
- } else if (!receivingExchangePairs.equals(other.receivingExchangePairs)) return false;
+ if (other.receivingExchangePairs != null) {
+ return false;
+ }
+ } else if (!receivingExchangePairs.equals(other.receivingExchangePairs)) {
+ return false;
+ }
if (root == null) {
- if (other.root != null) return false;
- } else if (!root.equals(other.root)) return false;
+ if (other.root != null) {
+ return false;
+ }
+ } else if (!root.equals(other.root)) {
+ return false;
+ }
if (sendingExchange == null) {
- if (other.sendingExchange != null) return false;
- } else if (!sendingExchange.equals(other.sendingExchange)) return false;
+ if (other.sendingExchange != null) {
+ return false;
+ }
+ } else if (!sendingExchange.equals(other.sendingExchange)) {
+ return false;
+ }
if (stats == null) {
- if (other.stats != null) return false;
- } else if (!stats.equals(other.stats)) return false;
+ if (other.stats != null) {
+ return false;
+ }
+ } else if (!stats.equals(other.stats)) {
+ return false;
+ }
return true;
}
@@ -144,6 +171,4 @@ public class Fragment implements Iterable<Fragment.ExchangeFragmentPair>{
+ receivingExchangePairs + ", stats=" + stats + "]";
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/MakeFragmentsVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/MakeFragmentsVisitor.java
index 690fe4582..594356a4e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/MakeFragmentsVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/MakeFragmentsVisitor.java
@@ -30,13 +30,15 @@ public class MakeFragmentsVisitor extends AbstractPhysicalVisitor<Fragment, Frag
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MakeFragmentsVisitor.class);
- public MakeFragmentsVisitor(){
+ public MakeFragmentsVisitor() {
}
@Override
public Fragment visitExchange(Exchange exchange, Fragment value) throws FragmentSetupException {
// logger.debug("Visiting Exchange {}", exchange);
- if(value == null) throw new FragmentSetupException("The simple fragmenter was called without a FragmentBuilder value. This will only happen if the initial call to SimpleFragmenter is by a Exchange node. This should never happen since an Exchange node should never be the root node of a plan.");
+ if (value == null) {
+ throw new FragmentSetupException("The simple fragmenter was called without a FragmentBuilder value. This will only happen if the initial call to SimpleFragmenter is by a Exchange node. This should never happen since an Exchange node should never be the root node of a plan.");
+ }
Fragment next = getNextBuilder();
value.addReceiveExchange(exchange, next);
next.addSendExchange(exchange);
@@ -55,21 +57,21 @@ public class MakeFragmentsVisitor extends AbstractPhysicalVisitor<Fragment, Frag
// logger.debug("Visiting Other {}", op);
value = ensureBuilder(value);
value.addOperator(op);
- for(PhysicalOperator child : op){
+ for (PhysicalOperator child : op) {
child.accept(this, value);
}
return value;
}
private Fragment ensureBuilder(Fragment value) throws FragmentSetupException{
- if(value != null){
+ if (value != null) {
return value;
- }else{
+ } else {
return getNextBuilder();
}
}
- public Fragment getNextBuilder(){
+ public Fragment getNextBuilder() {
return new Fragment();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/SimpleParallelizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/SimpleParallelizer.java
index cea546014..bf4dae704 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/SimpleParallelizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/SimpleParallelizer.java
@@ -59,7 +59,7 @@ public class SimpleParallelizer {
private final int maxGlobalWidth;
private double affinityFactor;
- public SimpleParallelizer(QueryContext context){
+ public SimpleParallelizer(QueryContext context) {
long sliceTarget = context.getOptions().getOption(ExecConstants.SLICE_TARGET).num_val;
this.parallelizationThreshold = sliceTarget > 0 ? sliceTarget : 1;
this.maxWidthPerNode = context.getOptions().getOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY).num_val.intValue();
@@ -67,7 +67,7 @@ public class SimpleParallelizer {
this.affinityFactor = context.getOptions().getOption(ExecConstants.AFFINITY_FACTOR_KEY).float_val.intValue();
}
- public SimpleParallelizer(long parallelizationThreshold, int maxWidthPerNode, int maxGlobalWidth, double affinityFactor){
+ public SimpleParallelizer(long parallelizationThreshold, int maxWidthPerNode, int maxGlobalWidth, double affinityFactor) {
this.parallelizationThreshold = parallelizationThreshold;
this.maxWidthPerNode = maxWidthPerNode;
this.maxGlobalWidth = maxGlobalWidth;
@@ -113,11 +113,12 @@ public class SimpleParallelizer {
final PhysicalOperator physicalOperatorRoot = node.getRoot();
boolean isRootNode = rootNode == node;
- if (isRootNode && wrapper.getWidth() != 1)
+ if (isRootNode && wrapper.getWidth() != 1) {
throw new FragmentSetupException(
String.format(
"Failure while trying to setup fragment. The root fragment must always have parallelization one. In the current case, the width was set to %d.",
wrapper.getWidth()));
+ }
// a fragment is self driven if it doesn't rely on any other exchanges.
boolean isLeafFragment = node.getReceivingExchangePairs().size() == 0;
@@ -187,7 +188,9 @@ public class SimpleParallelizer {
width = Math.min(width, maxWidthPerNode*allNodes.size());
- if (width < 1) width = 1;
+ if (width < 1) {
+ width = 1;
+ }
// logger.debug("Setting width {} on fragment {}", width, wrapper);
wrapper.setWidth(width);
// figure out endpoint assignments. also informs the exchanges about their respective endpoints.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillAggregateRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillAggregateRel.java
index e31aaa7c6..6b0c3b4b4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillAggregateRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillAggregateRel.java
@@ -82,17 +82,16 @@ public class DrillAggregateRel extends DrillAggregateRelBase implements DrillRel
return builder.build();
}
-
-
-
private LogicalExpression toDrill(AggregateCall call, List<String> fn, DrillImplementor implementor) {
List<LogicalExpression> args = Lists.newArrayList();
- for(Integer i : call.getArgList()){
+ for(Integer i : call.getArgList()) {
args.add(new FieldReference(fn.get(i)));
}
// for count(1).
- if(args.isEmpty()) args.add(new ValueExpressions.LongExpression(1l));
+ if (args.isEmpty()) {
+ args.add(new ValueExpressions.LongExpression(1l));
+ }
LogicalExpression expr = FunctionCallFactory.createExpression(call.getAggregation().getName().toLowerCase(), ExpressionPosition.UNKNOWN, args);
return expr;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjIntoScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjIntoScan.java
index 829eb1406..082daccbf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjIntoScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjIntoScan.java
@@ -50,9 +50,11 @@ public class DrillPushProjIntoScan extends RelOptRule {
try {
ProjectPushInfo columnInfo = PrelUtil.getColumns(scan.getRowType(), proj.getProjects());
- if(columnInfo == null || columnInfo.isStarQuery() //
+ if (columnInfo == null || columnInfo.isStarQuery() //
|| !scan.getTable().unwrap(DrillTable.class) //
- .getGroupScan().canPushdownProjects(columnInfo.columns)) return;
+ .getGroupScan().canPushdownProjects(columnInfo.columns)) {
+ return;
+ }
final DrillScanRel newScan =
new DrillScanRel(scan.getCluster(),
@@ -63,7 +65,7 @@ public class DrillPushProjIntoScan extends RelOptRule {
List<RexNode> newProjects = Lists.newArrayList();
- for(RexNode n : proj.getChildExps()){
+ for (RexNode n : proj.getChildExps()) {
newProjects.add(n.accept(columnInfo.getInputRewriter()));
}
@@ -84,4 +86,4 @@ public class DrillPushProjIntoScan extends RelOptRule {
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
index 2e7f9f2f8..c8f872e65 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
@@ -31,8 +31,7 @@ import org.apache.drill.exec.store.StoragePlugin;
import org.eigenbase.rel.RelNode;
import org.eigenbase.relopt.RelOptTable;
-
-public abstract class DrillTable implements Table{
+public abstract class DrillTable implements Table {
private final String storageEngineName;
public final StoragePluginConfig storageEngineConfig;
@@ -50,17 +49,17 @@ public abstract class DrillTable implements Table{
}
public GroupScan getGroupScan() throws IOException{
- if(scan == null){
+ if (scan == null) {
this.scan = plugin.getPhysicalScan(new JSONOptions(selection));
}
return scan;
}
- public StoragePluginConfig getStorageEngineConfig(){
+ public StoragePluginConfig getStorageEngineConfig() {
return storageEngineConfig;
}
- public StoragePlugin getPlugin(){
+ public StoragePlugin getPlugin() {
return plugin;
}
@@ -100,31 +99,38 @@ public abstract class DrillTable implements Table{
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
DrillTable other = (DrillTable) obj;
if (selection == null) {
- if (other.selection != null)
+ if (other.selection != null) {
return false;
- } else if (!selection.equals(other.selection))
+ }
+ } else if (!selection.equals(other.selection)) {
return false;
+ }
if (storageEngineConfig == null) {
- if (other.storageEngineConfig != null)
+ if (other.storageEngineConfig != null) {
return false;
- } else if (!storageEngineConfig.equals(other.storageEngineConfig))
+ }
+ } else if (!storageEngineConfig.equals(other.storageEngineConfig)) {
return false;
+ }
if (storageEngineName == null) {
- if (other.storageEngineName != null)
+ if (other.storageEngineName != null) {
return false;
- } else if (!storageEngineName.equals(other.storageEngineName))
+ }
+ } else if (!storageEngineName.equals(other.storageEngineName)) {
return false;
+ }
return true;
}
-
-
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExprHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExprHelper.java
index d4321bcbe..579683911 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExprHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExprHelper.java
@@ -27,14 +27,17 @@ public class ExprHelper {
private final static String COMPOUND_FAIL_MESSAGE = "The current Optiq based logical plan interpreter does not complicated expressions. For Order By and Filter";
- public static String getAggregateFieldName(FunctionCall c){
+ public static String getAggregateFieldName(FunctionCall c) {
List<LogicalExpression> exprs = c.args;
- if(exprs.size() != 1) throw new UnsupportedOperationException(COMPOUND_FAIL_MESSAGE);
+ if (exprs.size() != 1) {
+ throw new UnsupportedOperationException(COMPOUND_FAIL_MESSAGE);
+ }
return getFieldName(exprs.iterator().next());
}
- public static String getFieldName(LogicalExpression e){
+ public static String getFieldName(LogicalExpression e) {
//if(e instanceof SchemaPath) return ((SchemaPath) e).getPath().toString();
throw new UnsupportedOperationException(COMPOUND_FAIL_MESSAGE);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
index 5e2565930..337bb0935 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
@@ -37,7 +37,7 @@ public class StoragePlugins implements Iterable<Map.Entry<String, StoragePluginC
private Map<String, StoragePluginConfig> storage;
@JsonCreator
- public StoragePlugins(@JsonProperty("storage") Map<String, StoragePluginConfig> storage){
+ public StoragePlugins(@JsonProperty("storage") Map<String, StoragePluginConfig> storage) {
this.storage = storage;
}
@@ -73,8 +73,9 @@ public class StoragePlugins implements Iterable<Map.Entry<String, StoragePluginC
builder.append("[");
int i = 0;
for (Iterator<?> iterator = collection.iterator(); iterator.hasNext() && i < maxLen; i++) {
- if (i > 0)
+ if (i > 0) {
builder.append(", ");
+ }
builder.append(iterator.next());
}
builder.append("]");
@@ -89,5 +90,4 @@ public class StoragePlugins implements Iterable<Map.Entry<String, StoragePluginC
return storage.equals(((StoragePlugins) obj).getStorage());
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
index 3dedb552a..05fb64a9c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
@@ -51,8 +51,7 @@ import org.eigenbase.sql.type.ReturnTypes;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
-
-public abstract class AggPrelBase extends AggregateRelBase implements Prel{
+public abstract class AggPrelBase extends AggregateRelBase implements Prel {
protected static enum OperatorPhase {PHASE_1of1, PHASE_1of2, PHASE_2of2};
@@ -61,7 +60,6 @@ public abstract class AggPrelBase extends AggregateRelBase implements Prel{
protected List<NamedExpression> aggExprs = Lists.newArrayList();
protected List<AggregateCall> phase2AggCallList = Lists.newArrayList();
-
/**
* Specialized aggregate function for SUMing the COUNTs. Since return type of
* COUNT is non-nullable and return type of SUM is nullable, this class enables
@@ -166,12 +164,14 @@ public abstract class AggPrelBase extends AggregateRelBase implements Prel{
protected LogicalExpression toDrill(AggregateCall call, List<String> fn, DrillParseContext pContext) {
List<LogicalExpression> args = Lists.newArrayList();
- for(Integer i : call.getArgList()){
+ for (Integer i : call.getArgList()) {
args.add(new FieldReference(fn.get(i)));
}
// for count(1).
- if(args.isEmpty()) args.add(new ValueExpressions.LongExpression(1l));
+ if (args.isEmpty()) {
+ args.add(new ValueExpressions.LongExpression(1l));
+ }
LogicalExpression expr = new FunctionCall(call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN );
return expr;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
index 0439bbb01..1a43df552 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
@@ -48,7 +48,9 @@ public class HashAggPrule extends AggPruleBase {
@Override
public void onMatch(RelOptRuleCall call) {
- if (!PrelUtil.getPlannerSettings(call.getPlanner()).isHashAggEnabled()) return;
+ if (!PrelUtil.getPlannerSettings(call.getPlanner()).isHashAggEnabled()) {
+ return;
+ }
final DrillAggregateRel aggregate = (DrillAggregateRel) call.rel(0);
final RelNode input = call.rel(1);
@@ -96,7 +98,6 @@ public class HashAggPrule extends AggPruleBase {
}
}
-
private class TwoPhaseSubset extends SubsetTransformer<DrillAggregateRel, InvalidRelException> {
final RelTrait distOnAllKeys;
@@ -124,7 +125,6 @@ public class HashAggPrule extends AggPruleBase {
aggregate.getGroupSet(),
phase1Agg.getPhase2AggCalls(),
OperatorPhase.PHASE_2of2);
-
return phase2Agg;
}
@@ -140,4 +140,5 @@ public class HashAggPrule extends AggPruleBase {
call.transformTo(newAgg);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashJoinPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashJoinPrule.java
index 61321c17a..433405a11 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashJoinPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashJoinPrule.java
@@ -44,7 +44,9 @@ public class HashJoinPrule extends JoinPruleBase {
@Override
public void onMatch(RelOptRuleCall call) {
- if (!PrelUtil.getPlannerSettings(call.getPlanner()).isHashJoinEnabled()) return;
+ if (!PrelUtil.getPlannerSettings(call.getPlanner()).isHashJoinEnabled()) {
+ return;
+ }
final DrillJoinRel join = (DrillJoinRel) call.rel(0);
final RelNode left = join.getLeft();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
index 95ef876f1..7920187f3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
@@ -34,7 +34,6 @@ import org.eigenbase.relopt.RelOptCost;
import org.eigenbase.relopt.RelOptPlanner;
import org.eigenbase.relopt.RelTraitSet;
-
public class HashToMergeExchangePrel extends ExchangePrel {
private final List<DistributionField> distFields;
@@ -52,10 +51,9 @@ public class HashToMergeExchangePrel extends ExchangePrel {
assert input.getConvention() == Prel.DRILL_PHYSICAL;
}
-
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner) {
- if(PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
+ if (PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
return super.computeSelfCost(planner).multiplyBy(.1);
}
RelNode child = this.getChild();
@@ -81,7 +79,9 @@ public class HashToMergeExchangePrel extends ExchangePrel {
PhysicalOperator childPOP = child.getPhysicalOperator(creator);
- if(PrelUtil.getSettings(getCluster()).isSingleMode()) return childPOP;
+ if (PrelUtil.getSettings(getCluster()).isSingleMode()) {
+ return childPOP;
+ }
HashToMergeExchange g = new HashToMergeExchange(childPOP,
PrelUtil.getHashExpression(this.distFields, getChild().getRowType()),
@@ -102,4 +102,5 @@ public class HashToMergeExchangePrel extends ExchangePrel {
public SelectionVectorMode getEncoding() {
return SelectionVectorMode.NONE;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToRandomExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToRandomExchangePrel.java
index 30107bb68..372c75db7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToRandomExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToRandomExchangePrel.java
@@ -89,7 +89,9 @@ public class HashToRandomExchangePrel extends ExchangePrel {
PhysicalOperator childPOP = child.getPhysicalOperator(creator);
- if(PrelUtil.getSettings(getCluster()).isSingleMode()) return childPOP;
+ if (PrelUtil.getSettings(getCluster()).isSingleMode()) {
+ return childPOP;
+ }
HashToRandomExchange g = new HashToRandomExchange(childPOP, PrelUtil.getHashExpression(this.fields, getChild().getRowType()));
return creator.addMetadata(this, g);
@@ -118,5 +120,4 @@ public class HashToRandomExchangePrel extends ExchangePrel {
return SelectionVectorMode.ALL;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/JoinPruleBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/JoinPruleBase.java
index babf00569..afcbf7147 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/JoinPruleBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/JoinPruleBase.java
@@ -108,8 +108,9 @@ public abstract class JoinPruleBase extends Prule {
assert (join.getLeftKeys().size() == join.getRightKeys().size());
- if (!hashSingleKey)
+ if (!hashSingleKey) {
return;
+ }
int numJoinKeys = join.getLeftKeys().size();
if (numJoinKeys > 1) {
@@ -185,7 +186,7 @@ public abstract class JoinPruleBase extends Prule {
final RelNode convertedLeft = convert(left, traitsLeft);
final RelNode convertedRight = convert(right, traitsRight);
- new SubsetTransformer<DrillJoinRel, InvalidRelException>(call){
+ new SubsetTransformer<DrillJoinRel, InvalidRelException>(call) {
@Override
public RelNode convertChild(final DrillJoinRel join, final RelNode rel) throws InvalidRelException {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
index 3d4476091..1adc54f33 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
@@ -92,7 +92,7 @@ public class PrelUtil {
return func;
}
- public static Iterator<Prel> iter(RelNode... nodes){
+ public static Iterator<Prel> iter(RelNode... nodes) {
return (Iterator<Prel>) (Object) Arrays.asList(nodes).iterator();
}
@@ -100,7 +100,7 @@ public class PrelUtil {
return (Iterator<Prel>) (Object) nodes.iterator();
}
- public static PlannerSettings getSettings(RelOptCluster cluster){
+ public static PlannerSettings getSettings(RelOptCluster cluster) {
return cluster.getPlanner().getContext().unwrap(PlannerSettings.class);
}
@@ -108,17 +108,21 @@ public class PrelUtil {
return planner.getContext().unwrap(PlannerSettings.class);
}
- public static Prel removeSvIfRequired(Prel prel, SelectionVectorMode... allowed){
+ public static Prel removeSvIfRequired(Prel prel, SelectionVectorMode... allowed) {
SelectionVectorMode current = prel.getEncoding();
- for(SelectionVectorMode m : allowed){
- if(current == m) return prel;
+ for (SelectionVectorMode m : allowed) {
+ if (current == m) {
+ return prel;
+ }
}
return new SelectionVectorRemoverPrel(prel);
}
public static ProjectPushInfo getColumns(RelDataType rowType, List<RexNode> projects) {
final List<String> fieldNames = rowType.getFieldNames();
- if (fieldNames.isEmpty()) return null;
+ if (fieldNames.isEmpty()) {
+ return null;
+ }
RefFieldsVisitor v = new RefFieldsVisitor(rowType);
for (RexNode exp : projects) {
@@ -154,31 +158,38 @@ public class PrelUtil {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
DesiredField other = (DesiredField) obj;
if (field == null) {
- if (other.field != null)
+ if (other.field != null) {
return false;
- } else if (!field.equals(other.field))
+ }
+ } else if (!field.equals(other.field)) {
return false;
+ }
if (name == null) {
- if (other.name != null)
+ if (other.name != null) {
return false;
- } else if (!name.equals(other.name))
+ }
+ } else if (!name.equals(other.name)) {
return false;
- if (origIndex != other.origIndex)
+ }
+ if (origIndex != other.origIndex) {
return false;
+ }
return true;
}
}
-
public static class ProjectPushInfo {
public final List<SchemaPath> columns;
public final List<DesiredField> desiredFields;
@@ -196,7 +207,7 @@ public class PrelUtil {
IntIntOpenHashMap oldToNewIds = new IntIntOpenHashMap();
int i =0;
- for(DesiredField f : desiredFields){
+ for (DesiredField f : desiredFields) {
fieldNames.add(f.name);
types.add(f.field.getType());
oldToNewIds.put(f.origIndex, i);
@@ -205,7 +216,7 @@ public class PrelUtil {
this.rewriter = new InputRewriter(oldToNewIds);
}
- public InputRewriter getInputRewriter(){
+ public InputRewriter getInputRewriter() {
return rewriter;
}
@@ -242,11 +253,10 @@ public class PrelUtil {
}
}
- public ProjectPushInfo getInfo(){
+ public ProjectPushInfo getInfo() {
return new ProjectPushInfo(ImmutableList.copyOf(columns), ImmutableList.copyOf(desiredFields));
}
-
@Override
public PathSegment visitInputRef(RexInputRef inputRef) {
int index = inputRef.getIndex();
@@ -288,14 +298,14 @@ public class PrelUtil {
}
- public static RelTraitSet fixTraits(RelOptRuleCall call, RelTraitSet set){
+ public static RelTraitSet fixTraits(RelOptRuleCall call, RelTraitSet set) {
return fixTraits(call.getPlanner(), set);
}
- public static RelTraitSet fixTraits(RelOptPlanner cluster, RelTraitSet set){
- if(getPlannerSettings(cluster).isSingleMode()){
+ public static RelTraitSet fixTraits(RelOptPlanner cluster, RelTraitSet set) {
+ if (getPlannerSettings(cluster).isSingleMode()) {
return set.replace(DrillDistributionTrait.ANY);
- }else{
+ } else {
return set;
}
}
@@ -316,10 +326,8 @@ public class PrelUtil {
return newIndex;
}
-
}
-
public static class InputRewriter extends RexShuttle {
final IntIntOpenHashMap map;
@@ -340,4 +348,5 @@ public class PrelUtil {
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
index 16bfd88f6..826450962 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
@@ -61,7 +61,7 @@ public class SingleMergeExchangePrel extends ExchangePrel {
*/
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner) {
- if(PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
+ if (PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
return super.computeSelfCost(planner).multiplyBy(.1);
}
RelNode child = this.getChild();
@@ -85,7 +85,9 @@ public class SingleMergeExchangePrel extends ExchangePrel {
PhysicalOperator childPOP = child.getPhysicalOperator(creator);
- if(PrelUtil.getSettings(getCluster()).isSingleMode()) return childPOP;
+ if (PrelUtil.getSettings(getCluster()).isSingleMode()) {
+ return childPOP;
+ }
SingleMergeExchange g = new SingleMergeExchange(childPOP, PrelUtil.getOrdering(this.collation, getChild().getRowType()));
return creator.addMetadata(this, g);
@@ -109,7 +111,4 @@ public class SingleMergeExchangePrel extends ExchangePrel {
return SelectionVectorMode.NONE;
}
-
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SubsetTransformer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SubsetTransformer.java
index 450b19768..d4cd21f84 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SubsetTransformer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SubsetTransformer.java
@@ -30,13 +30,13 @@ public abstract class SubsetTransformer<T extends RelNode, E extends Exception>
private final RelOptRuleCall call;
- public SubsetTransformer(RelOptRuleCall call){
+ public SubsetTransformer(RelOptRuleCall call) {
this.call = call;
}
- public RelTraitSet newTraitSet(RelTrait... traits){
+ public RelTraitSet newTraitSet(RelTrait... traits) {
RelTraitSet set = call.getPlanner().emptyTraitSet();
- for(RelTrait t : traits){
+ for (RelTrait t : traits) {
set = set.plus(t);
}
return set;
@@ -44,14 +44,16 @@ public abstract class SubsetTransformer<T extends RelNode, E extends Exception>
}
boolean go(T n, RelNode candidateSet) throws E {
- if( !(candidateSet instanceof RelSubset) ) return false;
+ if (!(candidateSet instanceof RelSubset)) {
+ return false;
+ }
boolean transform = false;
for (RelNode rel : ((RelSubset)candidateSet).getRelList()) {
if (!isDefaultDist(rel)) {
RelNode out = convertChild(n, rel);
- if(out != null){
+ if (out != null) {
call.transformTo(out);
transform = true;
@@ -62,7 +64,7 @@ public abstract class SubsetTransformer<T extends RelNode, E extends Exception>
return transform;
}
- private boolean isDefaultDist(RelNode n){
+ private boolean isDefaultDist(RelNode n) {
return n.getTraitSet().getTrait(DrillDistributionTraitDef.INSTANCE).equals(DrillDistributionTrait.DEFAULT);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/UnionExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/UnionExchangePrel.java
index 8b0c2519c..c35328b45 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/UnionExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/UnionExchangePrel.java
@@ -52,7 +52,7 @@ public class UnionExchangePrel extends ExchangePrel {
*/
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner) {
- if(PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
+ if (PrelUtil.getSettings(getCluster()).useDefaultCosting()) {
return super.computeSelfCost(planner).multiplyBy(.1);
}
@@ -75,7 +75,9 @@ public class UnionExchangePrel extends ExchangePrel {
PhysicalOperator childPOP = child.getPhysicalOperator(creator);
- if(PrelUtil.getSettings(getCluster()).isSingleMode()) return childPOP;
+ if (PrelUtil.getSettings(getCluster()).isSingleMode()) {
+ return childPOP;
+ }
UnionExchange g = new UnionExchange(childPOP);
return creator.addMetadata(this, g);
@@ -86,5 +88,4 @@ public class UnionExchangePrel extends ExchangePrel {
return SelectionVectorMode.NONE;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/PrelSequencer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/PrelSequencer.java
index c3020cf2d..cb6d6565e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/PrelSequencer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/PrelSequencer.java
@@ -33,12 +33,11 @@ import org.eigenbase.sql.SqlExplainLevel;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, RuntimeException>{
+public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, RuntimeException> {
private List<Frag> frags = Lists.newLinkedList();
-
- public static String printWithIds(final Prel rel, SqlExplainLevel explainlevel){
+ public static String printWithIds(final Prel rel, SqlExplainLevel explainlevel) {
if (rel == null) {
return null;
}
@@ -49,13 +48,12 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
}
- public static Map<Prel, OpId> getIdMap(Prel rel){
+ public static Map<Prel, OpId> getIdMap(Prel rel) {
PrelSequencer s = new PrelSequencer();
return s.go(rel);
}
-
- static class Frag implements Iterable<Frag>{
+ static class Frag implements Iterable<Frag> {
Prel root;
int majorFragmentId;
final List<Frag> children = Lists.newArrayList();
@@ -81,25 +79,33 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Frag other = (Frag) obj;
if (children == null) {
- if (other.children != null)
+ if (other.children != null) {
return false;
- } else if (!children.equals(other.children))
+ }
+ } else if (!children.equals(other.children)) {
return false;
- if (majorFragmentId != other.majorFragmentId)
+ }
+ if (majorFragmentId != other.majorFragmentId) {
return false;
+ }
if (root == null) {
- if (other.root != null)
+ if (other.root != null) {
return false;
- } else if (!root.equals(other.root))
+ }
+ } else if (!root.equals(other.root)) {
return false;
+ }
return true;
}
@@ -110,7 +116,6 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
+ (children != null ? children.subList(0, Math.min(children.size(), maxLen)) : null) + "]";
}
-
}
public static class OpId{
@@ -122,7 +127,6 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
this.opId = opId;
}
-
public int getFragmentId() {
return fragmentId;
}
@@ -132,7 +136,7 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
return opId;
}
- public int getAsSingleInt(){
+ public int getAsSingleInt() {
return (fragmentId << 16) + opId;
}
@@ -144,30 +148,36 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
result = prime * result + opId;
return result;
}
+
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
OpId other = (OpId) obj;
- if (fragmentId != other.fragmentId)
+ if (fragmentId != other.fragmentId) {
return false;
- if (opId != other.opId)
+ }
+ if (opId != other.opId) {
return false;
+ }
return true;
}
+
@Override
public String toString() {
return fragmentId + ":*:" + opId;
}
-
}
- public Map<Prel, OpId> go(Prel root){
+ public Map<Prel, OpId> go(Prel root) {
// get fragments.
Frag rootFrag = new Frag(root);
@@ -179,12 +189,12 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
q.add(rootFrag);
int majorFragmentId = 0;
- while(!q.isEmpty()){
+ while (!q.isEmpty()) {
Frag frag = q.remove();
frag.majorFragmentId = majorFragmentId++;
- for(Frag child : frag){
+ for (Frag child : frag) {
q.add(child);
}
}
@@ -193,22 +203,21 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
Map<Prel, OpId> ids = Maps.newIdentityHashMap();
ids.put(rootFrag.root, new OpId(0, 0));
- for(Frag f : frags){
+ for (Frag f : frags) {
int id = 1;
Queue<Prel> ops = Lists.newLinkedList();
ops.add(f.root);
- while(!ops.isEmpty()){
+ while (!ops.isEmpty()) {
Prel p = ops.remove();
boolean isExchange = p instanceof ExchangePrel;
- if(p != f.root){ // we account for exchanges as receviers to guarantee unique identifiers.
+ if (p != f.root) { // we account for exchanges as receivers to guarantee unique identifiers.
ids.put(p, new OpId(f.majorFragmentId, id++) );
}
-
- if(!isExchange || p == f.root){
+ if (!isExchange || p == f.root) {
List<Prel> children = Lists.reverse(Lists.newArrayList(p.iterator()));
- for(Prel child : children){
+ for (Prel child : children) {
ops.add(child);
}
}
@@ -217,16 +226,14 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
return ids;
-
}
-
@Override
public Void visitExchange(ExchangePrel prel, Frag value) throws RuntimeException {
Frag newFrag = new Frag(prel);
frags.add(newFrag);
value.children.add(newFrag);
- for(Prel child : prel){
+ for (Prel child : prel) {
child.accept(this, newFrag);
}
@@ -235,14 +242,10 @@ public class PrelSequencer extends BasePrelVisitor<Void, PrelSequencer.Frag, Run
@Override
public Void visitPrel(Prel prel, Frag value) throws RuntimeException {
- for(Prel children : prel){
+ for (Prel children : prel) {
children.accept(this, value);
}
return null;
}
-
-
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
index eb8eab337..82409d85a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
@@ -33,11 +33,11 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
private final long targetSliceSize;
- public ExcessiveExchangeIdentifier(long targetSliceSize){
+ public ExcessiveExchangeIdentifier(long targetSliceSize) {
this.targetSliceSize = targetSliceSize;
}
- public static Prel removeExcessiveEchanges(Prel prel, long targetSliceSize){
+ public static Prel removeExcessiveEchanges(Prel prel, long targetSliceSize) {
ExcessiveExchangeIdentifier exchange = new ExcessiveExchangeIdentifier(targetSliceSize);
return prel.accept(exchange, exchange.getNewStat());
}
@@ -48,9 +48,9 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
MajorFragmentStat newFrag = new MajorFragmentStat();
Prel newChild = ((Prel) prel.getChild()).accept(this, newFrag);
- if(newFrag.isSingular() && parent.isSingular()){
+ if (newFrag.isSingular() && parent.isSingular()) {
return newChild;
- }else{
+ } else {
return (Prel) prel.copy(prel.getTraitSet(), Collections.singletonList((RelNode) newChild));
}
}
@@ -68,44 +68,46 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
return prel;
}
-
@Override
public Prel visitPrel(Prel prel, MajorFragmentStat s) throws RuntimeException {
List<RelNode> children = Lists.newArrayList();
s.add(prel);
- for(Prel p : prel){
+ for (Prel p : prel) {
children.add(p.accept(this, s));
}
return (Prel) prel.copy(prel.getTraitSet(), children);
}
- public MajorFragmentStat getNewStat(){
+ public MajorFragmentStat getNewStat() {
return new MajorFragmentStat();
}
- class MajorFragmentStat{
+ class MajorFragmentStat {
private double maxRows = 0d;
private int maxWidth = Integer.MAX_VALUE;
- public void add(Prel prel){
+ public void add(Prel prel) {
maxRows = Math.max(prel.getRows(), maxRows);
}
- public void setSingular(){
+ public void setSingular() {
maxWidth = 1;
}
- public void addScan(ScanPrel prel){
+ public void addScan(ScanPrel prel) {
maxWidth = Math.min(maxWidth, prel.getGroupScan().getMaxParallelizationWidth());
add(prel);
}
- public boolean isSingular(){
+ public boolean isSingular() {
int suggestedWidth = (int) Math.ceil((maxRows+1)/targetSliceSize);
int w = Math.min(maxWidth, suggestedWidth);
- if(w < 1) w = 1;
+ if (w < 1) {
+ w = 1;
+ }
return w == 1;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/FinalColumnReorderer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/FinalColumnReorderer.java
index c0556e3b8..60a9e4b4e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/FinalColumnReorderer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/FinalColumnReorderer.java
@@ -36,7 +36,7 @@ public class FinalColumnReorderer extends BasePrelVisitor<Prel, Void, RuntimeExc
private static FinalColumnReorderer INSTANCE = new FinalColumnReorderer();
- public static Prel addFinalColumnOrdering(Prel prel){
+ public static Prel addFinalColumnOrdering(Prel prel) {
return prel.accept(INSTANCE, null);
}
@@ -46,7 +46,7 @@ public class FinalColumnReorderer extends BasePrelVisitor<Prel, Void, RuntimeExc
return prel.copy(prel.getTraitSet(), Collections.singletonList( (RelNode) addTrivialOrderedProjectPrel( newChild )));
}
- private Prel addTrivialOrderedProjectPrel(Prel prel){
+ private Prel addTrivialOrderedProjectPrel(Prel prel) {
if ( !prel.needsFinalColumnReordering()) {
return prel;
@@ -59,9 +59,11 @@ public class FinalColumnReorderer extends BasePrelVisitor<Prel, Void, RuntimeExc
int projectCount = t.getFieldList().size();
// no point in reordering if we only have one column
- if(projectCount < 2) return prel;
+ if (projectCount < 2) {
+ return prel;
+ }
- for(int i =0; i < projectCount; i++){
+ for (int i = 0; i < projectCount; i++) {
projections.add(b.makeInputRef(prel, i));
}
return new ProjectPrel(prel.getCluster(), prel.getTraitSet(), prel, projections, prel.getRowType());
@@ -77,17 +79,18 @@ public class FinalColumnReorderer extends BasePrelVisitor<Prel, Void, RuntimeExc
public Prel visitPrel(Prel prel, Void value) throws RuntimeException {
List<RelNode> children = Lists.newArrayList();
boolean changed = false;
- for(Prel p : prel){
+ for (Prel p : prel) {
Prel newP = p.accept(this, null);
- if(newP != p) changed = true;
+ if (newP != p) {
+ changed = true;
+ }
children.add(newP);
}
- if(changed){
+ if (changed) {
return (Prel) prel.copy(prel.getTraitSet(), children);
- }else{
+ } else {
return prel;
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RelUniqifier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RelUniqifier.java
index 4f7f3482e..6ef9776cb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RelUniqifier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RelUniqifier.java
@@ -31,24 +31,28 @@ public class RelUniqifier extends BasePrelVisitor<Prel, Set<Prel>, RuntimeExcept
private static final RelUniqifier INSTANCE = new RelUniqifier();
- public static Prel uniqifyGraph(Prel p){
+ public static Prel uniqifyGraph(Prel p) {
Set<Prel> data = Sets.newIdentityHashSet();
return p.accept(INSTANCE, data);
}
+
@Override
public Prel visitPrel(Prel prel, Set<Prel> data) throws RuntimeException {
List<RelNode> children = Lists.newArrayList();
boolean childrenChanged = false;
- for(Prel child : prel){
+ for (Prel child : prel) {
Prel newChild = visitPrel(child, data);
- if(newChild != child) childrenChanged = true;
+ if (newChild != child) {
+ childrenChanged = true;
+ }
children.add(newChild);
}
- if(data.contains(prel) || childrenChanged){
+ if (data.contains(prel) || childrenChanged) {
return (Prel) prel.copy(prel.getTraitSet(), children);
- }else{
+ } else {
return prel;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SelectionVectorPrelVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SelectionVectorPrelVisitor.java
index 8d0b1559c..4e4d6c113 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SelectionVectorPrelVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SelectionVectorPrelVisitor.java
@@ -31,7 +31,7 @@ public class SelectionVectorPrelVisitor extends BasePrelVisitor<Prel, Void, Runt
private static SelectionVectorPrelVisitor INSTANCE = new SelectionVectorPrelVisitor();
- public static Prel addSelectionRemoversWhereNecessary(Prel prel){
+ public static Prel addSelectionRemoversWhereNecessary(Prel prel) {
return prel.accept(INSTANCE, null);
}
@@ -39,7 +39,7 @@ public class SelectionVectorPrelVisitor extends BasePrelVisitor<Prel, Void, Runt
public Prel visitPrel(Prel prel, Void value) throws RuntimeException {
SelectionVectorMode[] encodings = prel.getSupportedEncodings();
List<RelNode> children = Lists.newArrayList();
- for(Prel child : prel){
+ for (Prel child : prel) {
child = child.accept(this, null);
children.add(convert(encodings, child));
}
@@ -47,12 +47,13 @@ public class SelectionVectorPrelVisitor extends BasePrelVisitor<Prel, Void, Runt
return (Prel) prel.copy(prel.getTraitSet(), children);
}
- private Prel convert(SelectionVectorMode[] encodings, Prel prel){
- for(SelectionVectorMode m : encodings){
- if(prel.getEncoding() == m) return prel;
+ private Prel convert(SelectionVectorMode[] encodings, Prel prel) {
+ for (SelectionVectorMode m : encodings) {
+ if (prel.getEncoding() == m) {
+ return prel;
+ }
}
return new SelectionVectorRemoverPrel(prel);
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
index ddc8cf67a..159099db3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
@@ -44,7 +44,7 @@ public class StarColumnConverter extends BasePrelVisitor<Prel, boolean[], Runtim
private static final AtomicLong tableNumber = new AtomicLong(0);
- public static Prel insertRenameProject(Prel root, RelDataType origRowType){
+ public static Prel insertRenameProject(Prel root, RelDataType origRowType) {
// Insert top project to do rename only when : 1) there is a join
// 2) there is a SCAN with * column. We pass two boolean to keep track of
// these two conditions.
@@ -82,7 +82,7 @@ public class StarColumnConverter extends BasePrelVisitor<Prel, boolean[], Runtim
@Override
public Prel visitPrel(Prel prel, boolean [] renamedForStar) throws RuntimeException {
List<RelNode> children = Lists.newArrayList();
- for(Prel child : prel){
+ for (Prel child : prel) {
child = child.accept(this, renamedForStar);
children.add(child);
}
@@ -171,8 +171,9 @@ public class StarColumnConverter extends BasePrelVisitor<Prel, boolean[], Runtim
if (uniqueNames.contains(s)) {
for (int i = 0; ; i++ ) {
s = s + i;
- if (! origNames.contains(s) && ! uniqueNames.contains(s))
+ if (! origNames.contains(s) && ! uniqueNames.contains(s)) {
break;
+ }
}
}
uniqueNames.add(s);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/ExpandingConcurrentMap.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/ExpandingConcurrentMap.java
index 9caa29b21..44649b980 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/ExpandingConcurrentMap.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/ExpandingConcurrentMap.java
@@ -56,10 +56,12 @@ public class ExpandingConcurrentMap<KEY, VALUE> implements ConcurrentMap<KEY, VA
return internalMap.isEmpty();
}
- public boolean alreadyContainsKey(Object k){
+ public boolean alreadyContainsKey(Object k) {
@SuppressWarnings("unchecked") KEY key = (KEY) k;
- if(internalMap.containsKey(key)) return true;
+ if (internalMap.containsKey(key)) {
+ return true;
+ }
return false;
}
@@ -67,7 +69,9 @@ public class ExpandingConcurrentMap<KEY, VALUE> implements ConcurrentMap<KEY, VA
public boolean containsKey(Object k) {
@SuppressWarnings("unchecked") KEY key = (KEY) k;
- if(internalMap.containsKey(key)) return true;
+ if (internalMap.containsKey(key)) {
+ return true;
+ }
VALUE v = getNewEntry(k);
return v != null;
}
@@ -80,17 +84,23 @@ public class ExpandingConcurrentMap<KEY, VALUE> implements ConcurrentMap<KEY, VA
@Override
public VALUE get(Object key) {
VALUE out = internalMap.get(key);
- if(out != null) return out;
+ if (out != null) {
+ return out;
+ }
return getNewEntry(key);
}
- private VALUE getNewEntry(Object k){
+ private VALUE getNewEntry(Object k) {
@SuppressWarnings("unchecked")
KEY key = (KEY) k;
VALUE v = this.fac.create(key);
- if(v == null) return null;
+ if (v == null) {
+ return null;
+ }
VALUE old = internalMap.putIfAbsent(key, v);
- if(old == null) return v;
+ if (old == null) {
+ return v;
+ }
fac.destroy(v);
return old;
}
@@ -194,8 +204,10 @@ public class ExpandingConcurrentMap<KEY, VALUE> implements ConcurrentMap<KEY, VA
@Override
public boolean containsAll(Collection<?> c) {
- for(Object o : c){
- if(this.contains(o)) return false;
+ for (Object o : c) {
+ if (this.contains(o)) {
+ return false;
+ }
}
return true;
}
@@ -227,4 +239,5 @@ public class ExpandingConcurrentMap<KEY, VALUE> implements ConcurrentMap<KEY, VA
public VALUE create(KEY key);
public void destroy(VALUE value);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
index 8a52796b6..99f597c48 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
@@ -35,10 +35,10 @@ public abstract class AbstractSqlHandler {
public abstract PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException;
- public static <T> T unwrap(Object o, Class<T> clazz) throws RelConversionException{
- if(clazz.isAssignableFrom(o.getClass())){
+ public static <T> T unwrap(Object o, Class<T> clazz) throws RelConversionException {
+ if (clazz.isAssignableFrom(o.getClass())) {
return (T) o;
- }else{
+ } else {
throw new RelConversionException(String.format("Failure trying to treat %s as type %s.", o.getClass().getSimpleName(), clazz.getSimpleName()));
}
}
@@ -47,12 +47,12 @@ public abstract class AbstractSqlHandler {
* From a given SchemaPlus return a Drill schema object of type AbstractSchema if exists.
* Otherwise throw errors.
*/
- public static AbstractSchema getDrillSchema(SchemaPlus schemaPlus) throws Exception{
+ public static AbstractSchema getDrillSchema(SchemaPlus schemaPlus) throws Exception {
AbstractSchema drillSchema;
try {
drillSchema = schemaPlus.unwrap(AbstractSchema.class);
drillSchema = drillSchema.getDefaultSchema();
- } catch(ClassCastException e) {
+ } catch (ClassCastException e) {
throw new Exception("Current schema is not a Drill schema. " +
"Can't create new relations (tables or views) in non-Drill schemas.", e);
}
@@ -66,16 +66,19 @@ public abstract class AbstractSqlHandler {
*/
public static SchemaPlus findSchema(SchemaPlus rootSchema, SchemaPlus defaultSchema, List<String> schemaPath)
throws Exception {
- if (schemaPath.size() == 0)
+ if (schemaPath.size() == 0) {
return defaultSchema;
+ }
SchemaPlus schema;
- if ((schema = searchSchemaTree(defaultSchema, schemaPath)) != null)
+ if ((schema = searchSchemaTree(defaultSchema, schemaPath)) != null) {
return schema;
+ }
- if ((schema = searchSchemaTree(rootSchema, schemaPath)) != null)
+ if ((schema = searchSchemaTree(rootSchema, schemaPath)) != null) {
return schema;
+ }
throw new Exception(String.format("Invalid schema path '%s'.", Joiner.on(".").join(schemaPath)));
}
@@ -93,4 +96,5 @@ public abstract class AbstractSqlHandler {
}
return schema;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
index 66d49630f..708951a27 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
@@ -63,15 +63,17 @@ public class CreateTableHandler extends DefaultSqlHandler {
if (tblFiledNames.size() > 0) {
// Field count should match.
- if (tblFiledNames.size() != queryRowType.getFieldCount())
+ if (tblFiledNames.size() != queryRowType.getFieldCount()) {
return DirectPlan.createDirectPlan(context, false,
"Table's field list and the table's query field list have different counts.");
+ }
// CTAS's query field list shouldn't have "*" when table's field list is specified.
- for(String field : queryRowType.getFieldNames()) {
- if (field.equals("*"))
+ for (String field : queryRowType.getFieldNames()) {
+ if (field.equals("*")) {
return DirectPlan.createDirectPlan(context, false,
"Table's query field list has a '*', which is invalid when table's field list is specified.");
+ }
}
}
@@ -90,9 +92,10 @@ public class CreateTableHandler extends DefaultSqlHandler {
AbstractSchema drillSchema = getDrillSchema(schema);
- if (!drillSchema.isMutable())
+ if (!drillSchema.isMutable()) {
return DirectPlan.createDirectPlan(context, false, String.format("Current schema '%s' is not a mutable schema. " +
"Can't create tables in this schema.", drillSchema.getFullSchemaName()));
+ }
String newTblName = sqlCreateTable.getName();
if (schema.getTable(newTblName) != null) {
@@ -121,11 +124,13 @@ public class CreateTableHandler extends DefaultSqlHandler {
RelNode convertedRelNode = planner.transform(DrillSqlWorker.LOGICAL_RULES,
relNode.getTraitSet().plus(DrillRel.DRILL_LOGICAL), relNode);
- if (convertedRelNode instanceof DrillStoreRel)
+ if (convertedRelNode instanceof DrillStoreRel) {
throw new UnsupportedOperationException();
+ }
DrillWriterRel writerRel = new DrillWriterRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(),
convertedRelNode, schema.createNewTable(tableName));
return new DrillScreenRel(writerRel.getCluster(), writerRel.getTraitSet(), writerRel);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
index 25fa0cb19..f3243212e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
@@ -41,7 +41,7 @@ import org.eigenbase.sql.SqlExplainLevel;
import org.eigenbase.sql.SqlLiteral;
import org.eigenbase.sql.SqlNode;
-public class ExplainHandler extends DefaultSqlHandler{
+public class ExplainHandler extends DefaultSqlHandler {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExplainHandler.class);
private ResultMode mode;
@@ -59,7 +59,7 @@ public class ExplainHandler extends DefaultSqlHandler{
DrillRel drel = convertToDrel(rel);
log("Drill Logical", drel);
- if(mode == ResultMode.LOGICAL){
+ if (mode == ResultMode.LOGICAL) {
LogicalExplain logicalResult = new LogicalExplain(drel, level, context);
return DirectPlan.createDirectPlan(context, logicalResult);
}
@@ -78,8 +78,10 @@ public class ExplainHandler extends DefaultSqlHandler{
SqlExplain node = unwrap(sqlNode, SqlExplain.class);
SqlLiteral op = node.operand(2);
SqlExplain.Depth depth = (SqlExplain.Depth) op.getValue();
- if(node.getDetailLevel() != null) level = node.getDetailLevel();
- switch(depth){
+ if (node.getDetailLevel() != null) {
+ level = node.getDetailLevel();
+ }
+ switch (depth) {
case LOGICAL:
mode = ResultMode.LOGICAL;
break;
@@ -98,7 +100,7 @@ public class ExplainHandler extends DefaultSqlHandler{
public String text;
public String json;
- public LogicalExplain(RelNode node, SqlExplainLevel level, QueryContext context){
+ public LogicalExplain(RelNode node, SqlExplainLevel level, QueryContext context) {
this.text = RelOptUtil.toString(node, level);
DrillImplementor implementor = new DrillImplementor(new DrillParseContext(), ResultMode.LOGICAL);
implementor.go( (DrillRel) node);
@@ -111,12 +113,10 @@ public class ExplainHandler extends DefaultSqlHandler{
public String text;
public String json;
- public PhysicalExplain(RelNode node, PhysicalPlan plan, SqlExplainLevel level, QueryContext context){
+ public PhysicalExplain(RelNode node, PhysicalPlan plan, SqlExplainLevel level, QueryContext context) {
this.text = PrelSequencer.printWithIds((Prel) node, level);
this.json = plan.unparse(context.getConfig().getMapper().writer());
}
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/UseSchemaHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/UseSchemaHandler.java
index 9b23e238d..06ed28bd2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/UseSchemaHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/UseSchemaHandler.java
@@ -28,7 +28,7 @@ import org.apache.drill.exec.planner.sql.DirectPlan;
import org.apache.drill.exec.planner.sql.parser.SqlUseSchema;
import org.eigenbase.sql.SqlNode;
-public class UseSchemaHandler extends AbstractSqlHandler{
+public class UseSchemaHandler extends AbstractSqlHandler {
QueryContext context;
public UseSchemaHandler(QueryContext context) {
@@ -43,8 +43,11 @@ public class UseSchemaHandler extends AbstractSqlHandler{
boolean status = context.getSession().setDefaultSchemaPath(defaultSchema, context.getRootSchema());
String msg;
- if (status) msg = String.format("Default schema changed to '%s'", defaultSchema);
- else msg = String.format("Failed to change default schema to '%s'", defaultSchema);
+ if (status) {
+ msg = String.format("Default schema changed to '%s'", defaultSchema);
+ } else {
+ msg = String.format("Failed to change default schema to '%s'", defaultSchema);
+ }
return DirectPlan.createDirectPlan(context, status, msg);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
index ec5e2c932..8eca21e51 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
@@ -40,7 +40,7 @@ import org.eigenbase.sql.SqlNode;
import com.google.common.collect.ImmutableList;
-public abstract class ViewHandler extends AbstractSqlHandler{
+public abstract class ViewHandler extends AbstractSqlHandler {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewHandler.class);
protected Planner planner;
@@ -68,9 +68,10 @@ public abstract class ViewHandler extends AbstractSqlHandler{
AbstractSchema drillSchema = getDrillSchema(schema);
String schemaPath = drillSchema.getFullSchemaName();
- if (!drillSchema.isMutable())
+ if (!drillSchema.isMutable()) {
return DirectPlan.createDirectPlan(context, false, String.format("Unable to create view. " +
"Schema [%s] is immutable. ", schemaPath));
+ }
// find current workspace schema path
List<String> workspaceSchemaPath = ImmutableList.of();
@@ -89,15 +90,17 @@ public abstract class ViewHandler extends AbstractSqlHandler{
List<String> viewFieldNames = createView.getFieldNames();
if (viewFieldNames.size() > 0) {
// number of fields match.
- if (viewFieldNames.size() != queryRowType.getFieldCount())
+ if (viewFieldNames.size() != queryRowType.getFieldCount()) {
return DirectPlan.createDirectPlan(context, false,
"View's field list and View's query field list have different counts.");
+ }
// make sure View's query field list has no "*"
- for(String field : queryRowType.getFieldNames()) {
- if (field.equals("*"))
+ for (String field : queryRowType.getFieldNames()) {
+ if (field.equals("*")) {
return DirectPlan.createDirectPlan(context, false,
"View's query field list has a '*', which is invalid when View's field list is specified.");
+ }
}
queryRowType = new DrillFixedRelDataTypeImpl(planner.getTypeFactory(), viewFieldNames);
@@ -112,7 +115,7 @@ public abstract class ViewHandler extends AbstractSqlHandler{
return DirectPlan.createDirectPlan(context, false, "View with given name already exists in current schema");
}
replaced = ((WorkspaceSchema) drillSchema).createView(view);
- }else{
+ } else {
return DirectPlan.createDirectPlan(context, false, "Schema provided was not a workspace schema.");
}
@@ -142,13 +145,14 @@ public abstract class ViewHandler extends AbstractSqlHandler{
AbstractSchema drillSchema = getDrillSchema(schema);
String schemaPath = drillSchema.getFullSchemaName();
- if (!drillSchema.isMutable())
+ if (!drillSchema.isMutable()) {
return DirectPlan.createDirectPlan(context, false, String.format("Schema '%s' is not a mutable schema. " +
"Views don't exist in this schema", schemaPath));
+ }
if (drillSchema instanceof WorkspaceSchema) {
((WorkspaceSchema) drillSchema).dropView(dropView.getName());;
- }else{
+ } else {
return DirectPlan.createDirectPlan(context, false, "Schema provided was not a workspace schema.");
}
@@ -160,4 +164,5 @@ public abstract class ViewHandler extends AbstractSqlHandler{
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/DrillParserUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/DrillParserUtil.java
index b3fec9ddc..cba5a1865 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/DrillParserUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/DrillParserUtil.java
@@ -37,8 +37,9 @@ public class DrillParserUtil {
public static SqlNode createCondition(SqlNode left, SqlOperator op, SqlNode right) {
// if one of the operands is null, return the other
- if (left == null || right == null)
+ if (left == null || right == null) {
return left != null ? left : right;
+ }
List<Object> listCondition = Lists.newArrayList();
listCondition.add(left);
@@ -47,4 +48,5 @@ public class DrillParserUtil {
return SqlParserUtil.toTree(listCondition);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateTable.java
index c4180a99d..10db4c49b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateTable.java
@@ -39,7 +39,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
public class SqlCreateTable extends DrillSqlCall {
- public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE_TABLE", SqlKind.OTHER){
+ public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE_TABLE", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlCreateTable(pos, (SqlIdentifier) operands[0], (SqlNodeList) operands[1], operands[2]);
@@ -79,7 +79,7 @@ public class SqlCreateTable extends DrillSqlCall {
if (fieldList != null && fieldList.size() > 0) {
writer.keyword("(");
fieldList.get(0).unparse(writer, leftPrec, rightPrec);
- for(int i=1; i<fieldList.size(); i++) {
+ for (int i=1; i<fieldList.size(); i++) {
writer.keyword(",");
fieldList.get(i).unparse(writer, leftPrec, rightPrec);
}
@@ -103,14 +103,17 @@ public class SqlCreateTable extends DrillSqlCall {
}
public String getName() {
- if (tblName.isSimple())
+ if (tblName.isSimple()) {
return tblName.getSimple();
+ }
return tblName.names.get(tblName.names.size() - 1);
}
public List<String> getFieldNames() {
- if (fieldList == null) return ImmutableList.of();
+ if (fieldList == null) {
+ return ImmutableList.of();
+ }
List<String> columnNames = Lists.newArrayList();
for(SqlNode node : fieldList.getList()) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateView.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateView.java
index ee9680109..ccd08e14c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateView.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlCreateView.java
@@ -39,7 +39,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
public class SqlCreateView extends DrillSqlCall {
- public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE_VIEW", SqlKind.OTHER){
+ public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE_VIEW", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlCreateView(pos, (SqlIdentifier) operands[0], (SqlNodeList) operands[1], operands[2], (SqlLiteral) operands[3]);
@@ -92,7 +92,7 @@ public class SqlCreateView extends DrillSqlCall {
if (fieldList != null && fieldList.size() > 0) {
writer.keyword("(");
fieldList.get(0).unparse(writer, leftPrec, rightPrec);
- for(int i=1; i<fieldList.size(); i++) {
+ for (int i=1; i<fieldList.size(); i++) {
writer.keyword(",");
fieldList.get(i).unparse(writer, leftPrec, rightPrec);
}
@@ -116,17 +116,20 @@ public class SqlCreateView extends DrillSqlCall {
}
public String getName() {
- if (viewName.isSimple())
+ if (viewName.isSimple()) {
return viewName.getSimple();
+ }
return viewName.names.get(viewName.names.size() - 1);
}
public List<String> getFieldNames() {
- if (fieldList == null) return ImmutableList.of();
+ if (fieldList == null) {
+ return ImmutableList.of();
+ }
List<String> fieldNames = Lists.newArrayList();
- for(SqlNode node : fieldList.getList()) {
+ for (SqlNode node : fieldList.getList()) {
fieldNames.add(node.toString());
}
return fieldNames;
@@ -134,4 +137,5 @@ public class SqlCreateView extends DrillSqlCall {
public SqlNode getQuery() { return query; }
public boolean getReplace() { return replaceView; }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDescribeTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDescribeTable.java
index e1cda4f0e..29275d753 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDescribeTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDescribeTable.java
@@ -47,7 +47,7 @@ public class SqlDescribeTable extends DrillSqlCall {
private final SqlNode columnQualifier;
public static final SqlSpecialOperator OPERATOR =
- new SqlSpecialOperator("DESCRIBE_TABLE", SqlKind.OTHER){
+ new SqlSpecialOperator("DESCRIBE_TABLE", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlDescribeTable(pos, (SqlIdentifier) operands[0], (SqlIdentifier) operands[1], operands[2]);
@@ -80,8 +80,12 @@ public class SqlDescribeTable extends DrillSqlCall {
writer.keyword("DESCRIBE");
writer.keyword("TABLE");
table.unparse(writer, leftPrec, rightPrec);
- if (column != null) column.unparse(writer, leftPrec, rightPrec);
- if (columnQualifier != null) columnQualifier.unparse(writer, leftPrec, rightPrec);
+ if (column != null) {
+ column.unparse(writer, leftPrec, rightPrec);
+ }
+ if (columnQualifier != null) {
+ columnQualifier.unparse(writer, leftPrec, rightPrec);
+ }
}
@Override
@@ -92,4 +96,5 @@ public class SqlDescribeTable extends DrillSqlCall {
public SqlIdentifier getTable() { return table; }
public SqlIdentifier getColumn() { return column; }
public SqlNode getColumnQualifier() { return columnQualifier; }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropView.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropView.java
index 9b9e785ca..33b71b74c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropView.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropView.java
@@ -38,7 +38,7 @@ import org.eigenbase.sql.parser.SqlParserPos;
import com.google.common.collect.ImmutableList;
public class SqlDropView extends DrillSqlCall {
- public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("DROP_VIEW", SqlKind.OTHER){
+ public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("DROP_VIEW", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlDropView(pos, (SqlIdentifier) operands[0]);
@@ -83,9 +83,11 @@ public class SqlDropView extends DrillSqlCall {
}
public String getName() {
- if (viewName.isSimple())
+ if (viewName.isSimple()) {
return viewName.getSimple();
+ }
return viewName.names.get(viewName.names.size() - 1);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowFiles.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowFiles.java
index 50bbca3f7..8779969e9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowFiles.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowFiles.java
@@ -44,7 +44,7 @@ public class SqlShowFiles extends DrillSqlCall {
private final SqlIdentifier db;
public static final SqlSpecialOperator OPERATOR =
- new SqlSpecialOperator("SHOW_FILES", SqlKind.OTHER){
+ new SqlSpecialOperator("SHOW_FILES", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlShowFiles(pos, (SqlIdentifier) operands[0]);
@@ -70,12 +70,16 @@ public class SqlShowFiles extends DrillSqlCall {
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("SHOW");
writer.keyword("FILES");
- if (db != null) db.unparse(writer, leftPrec, rightPrec);
+ if (db != null) {
+ db.unparse(writer, leftPrec, rightPrec);
+ }
}
@Override
public AbstractSqlHandler getSqlHandler(Planner planner, QueryContext context) {
return new ShowFileHandler(planner, context);
}
+
public SqlIdentifier getDb() { return db; }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowSchemas.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowSchemas.java
index 7ccc91701..9b4229548 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowSchemas.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowSchemas.java
@@ -45,7 +45,7 @@ public class SqlShowSchemas extends DrillSqlCall {
private final SqlNode whereClause;
public static final SqlSpecialOperator OPERATOR =
- new SqlSpecialOperator("SHOW_SCHEMAS", SqlKind.OTHER){
+ new SqlSpecialOperator("SHOW_SCHEMAS", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlShowSchemas(pos, operands[0], operands[1]);
@@ -79,7 +79,9 @@ public class SqlShowSchemas extends DrillSqlCall {
writer.keyword("LIKE");
likePattern.unparse(writer, leftPrec, rightPrec);
}
- if (whereClause != null) whereClause.unparse(writer, leftPrec, rightPrec);
+ if (whereClause != null) {
+ whereClause.unparse(writer, leftPrec, rightPrec);
+ }
}
@Override
@@ -89,4 +91,5 @@ public class SqlShowSchemas extends DrillSqlCall {
public SqlNode getLikePattern() { return likePattern; }
public SqlNode getWhereClause() { return whereClause; }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowTables.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowTables.java
index 15adb603f..33d20aa4d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowTables.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlShowTables.java
@@ -47,7 +47,7 @@ public class SqlShowTables extends DrillSqlCall {
private final SqlNode whereClause;
public static final SqlSpecialOperator OPERATOR =
- new SqlSpecialOperator("SHOW_TABLES", SqlKind.OTHER){
+ new SqlSpecialOperator("SHOW_TABLES", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlShowTables(pos, (SqlIdentifier) operands[0], operands[1], operands[2]);
@@ -79,12 +79,16 @@ public class SqlShowTables extends DrillSqlCall {
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("SHOW");
writer.keyword("TABLES");
- if (db != null) db.unparse(writer, leftPrec, rightPrec);
+ if (db != null) {
+ db.unparse(writer, leftPrec, rightPrec);
+ }
if (likePattern != null) {
writer.keyword("LIKE");
likePattern.unparse(writer, leftPrec, rightPrec);
}
- if (whereClause != null) whereClause.unparse(writer, leftPrec, rightPrec);
+ if (whereClause != null) {
+ whereClause.unparse(writer, leftPrec, rightPrec);
+ }
}
@Override
@@ -95,4 +99,5 @@ public class SqlShowTables extends DrillSqlCall {
public SqlIdentifier getDb() { return db; }
public SqlNode getLikePattern() { return likePattern; }
public SqlNode getWhereClause() { return whereClause; }
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
index 8df342ecd..d88f883b0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
@@ -41,8 +41,9 @@ public class DrillFixedRelDataTypeImpl extends RelDataTypeImpl {
this.typeFactory = typeFactory;
// Add the initial list of columns.
- for (String column : columnNames)
+ for (String column : columnNames) {
addField(column);
+ }
computeDigest();
}
@@ -76,7 +77,7 @@ public class DrillFixedRelDataTypeImpl extends RelDataTypeImpl {
@Override
public List<String> getFieldNames() {
List<String> fieldNames = Lists.newArrayList();
- for(RelDataTypeField f : fields){
+ for (RelDataTypeField f : fields) {
fieldNames.add(f.getName());
}
@@ -102,4 +103,5 @@ public class DrillFixedRelDataTypeImpl extends RelDataTypeImpl {
public boolean isStruct() {
return true;
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
index ce67cb23b..eae351035 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
@@ -92,19 +92,24 @@ public class RelDataTypeDrillImpl extends RelDataTypeImpl {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
RelDataTypeDrillImpl other = (RelDataTypeDrillImpl) obj;
if (holder == null) {
- if (other.holder != null)
+ if (other.holder != null) {
return false;
- } else if (!holder.equals(other.holder))
+ }
+ } else if (!holder.equals(other.holder)) {
return false;
+ }
return true;
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractSingleRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractSingleRecordBatch.java
index 9054e3f89..0adc09ec1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractSingleRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractSingleRecordBatch.java
@@ -49,8 +49,10 @@ public abstract class AbstractSingleRecordBatch<T extends PhysicalOperator> exte
}
} while ((upstream = next(incoming)) == IterOutcome.OK && incoming.getRecordCount() == 0);
}
- if(first && upstream == IterOutcome.OK) upstream = IterOutcome.OK_NEW_SCHEMA;
- switch(upstream){
+ if (first && upstream == IterOutcome.OK) {
+ upstream = IterOutcome.OK_NEW_SCHEMA;
+ }
+ switch (upstream) {
case NONE:
assert !first;
case NOT_YET:
@@ -60,15 +62,15 @@ public abstract class AbstractSingleRecordBatch<T extends PhysicalOperator> exte
return upstream;
case OK_NEW_SCHEMA:
first = false;
- try{
+ try {
stats.startSetup();
setupNewSchema();
- }catch(SchemaChangeException ex){
+ } catch (SchemaChangeException ex) {
kill(false);
logger.error("Failure during query", ex);
context.fail(ex);
return IterOutcome.STOP;
- }finally{
+ } finally {
stats.stopSetup();
}
// fall through.
@@ -99,4 +101,5 @@ public abstract class AbstractSingleRecordBatch<T extends PhysicalOperator> exte
protected abstract void setupNewSchema() throws SchemaChangeException;
protected abstract void doWork();
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
index 72992747f..da2e9ebb7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
@@ -27,7 +27,6 @@ import com.google.common.collect.Lists;
public class BatchSchema implements Iterable<MaterializedField> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BatchSchema.class);
final SelectionVectorMode selectionVectorMode;
- ;
private final List<MaterializedField> fields;
BatchSchema(SelectionVectorMode selectionVector, List<MaterializedField> fields) {
@@ -39,12 +38,14 @@ public class BatchSchema implements Iterable<MaterializedField> {
return new SchemaBuilder();
}
- public int getFieldCount(){
+ public int getFieldCount() {
return fields.size();
}
- public MaterializedField getColumn(int index){
- if(index < 0 || index >= fields.size()) return null;
+ public MaterializedField getColumn(int index) {
+ if (index < 0 || index >= fields.size()) {
+ return null;
+ }
return fields.get(index);
}
@@ -95,23 +96,27 @@ public class BatchSchema implements Iterable<MaterializedField> {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
BatchSchema other = (BatchSchema) obj;
if (fields == null) {
- if (other.fields != null)
+ if (other.fields != null) {
return false;
- } else if (!fields.equals(other.fields))
+ }
+ } else if (!fields.equals(other.fields)) {
return false;
- if (selectionVectorMode != other.selectionVectorMode)
+ }
+ if (selectionVectorMode != other.selectionVectorMode) {
return false;
+ }
return true;
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
index 7872e081c..9dbb583cc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
@@ -33,11 +33,11 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
private MaterializedField f;
private final boolean releasable;
- public HyperVectorWrapper(MaterializedField f, T[] v){
+ public HyperVectorWrapper(MaterializedField f, T[] v) {
this(f, v, true);
}
- public HyperVectorWrapper(MaterializedField f, T[] v, boolean releasable){
+ public HyperVectorWrapper(MaterializedField f, T[] v, boolean releasable) {
assert(v.length > 0);
this.f = f;
this.vectors = v;
@@ -72,22 +72,26 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
@Override
public void clear() {
- if(!releasable) return;
- for(T x : vectors){
+ if (!releasable) {
+ return;
+ }
+ for (T x : vectors) {
x.clear();
}
}
@Override
public VectorWrapper<?> getChildWrapper(int[] ids) {
- if(ids.length == 1) return this;
+ if (ids.length == 1) {
+ return this;
+ }
ValueVector[] vectors = new ValueVector[this.vectors.length];
int index = 0;
- for(ValueVector v : this.vectors){
+ for (ValueVector v : this.vectors) {
ValueVector vector = v;
- for(int i = 1; i < ids.length; i++){
+ for (int i = 1; i < ids.length; i++) {
MapVector map = (MapVector) vector;
vector = map.getVectorById(ids[i]);
}
@@ -100,9 +104,11 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
@Override
public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath) {
ValueVector v = vectors[0];
- if(!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) return null;
+ if (!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) {
+ return null;
+ }
- if(v instanceof AbstractContainerVector){
+ if (v instanceof AbstractContainerVector) {
// we're looking for a multi path.
AbstractContainerVector c = (AbstractContainerVector) v;
TypedFieldId.Builder builder = TypedFieldId.newBuilder();
@@ -111,7 +117,7 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
builder.addId(id);
return c.getFieldIdIfMatches(builder, true, expectedPath.getRootSegment().getChild());
- }else{
+ } else {
return TypedFieldId.newBuilder() //
.intermediateType(v.getField().getType()) //
.finalType(v.getField().getType()) //
@@ -126,7 +132,7 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
public VectorWrapper<T> cloneAndTransfer() {
return new HyperVectorWrapper<T>(f, vectors, false);
// T[] newVectors = (T[]) Array.newInstance(vectors.getClass().getComponentType(), vectors.length);
-// for(int i =0; i < newVectors.length; i++){
+// for(int i =0; i < newVectors.length; i++) {
// TransferPair tp = vectors[i].getTransferPair();
// tp.transfer();
// newVectors[i] = (T) tp.getTo();
@@ -134,7 +140,7 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
// return new HyperVectorWrapper<T>(f, newVectors);
}
- public static <T extends ValueVector> HyperVectorWrapper<T> create(MaterializedField f, T[] v, boolean releasable){
+ public static <T extends ValueVector> HyperVectorWrapper<T> create(MaterializedField f, T[] v, boolean releasable) {
return new HyperVectorWrapper<T>(f, v, releasable);
}
@@ -146,4 +152,5 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
public void addVectors(ValueVector[] vv) {
vectors = (T[]) ArrayUtils.add(vectors, vv);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MajorTypeSerDe.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MajorTypeSerDe.java
index 54e1136c2..328f6ee70 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MajorTypeSerDe.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MajorTypeSerDe.java
@@ -53,10 +53,8 @@ public class MajorTypeSerDe {
return jp.readValueAs(MajorTypeHolder.class).getMajorType();
}
-
}
-
public static class Se extends StdSerializer<MajorType> {
public Se() {
@@ -90,27 +88,40 @@ public class MajorTypeSerDe {
this.scale = scale;
}
- private MajorTypeHolder(){}
+ private MajorTypeHolder() {}
@JsonIgnore
- public MajorType getMajorType(){
+ public MajorType getMajorType() {
MajorType.Builder b = MajorType.newBuilder();
b.setMode(mode);
b.setMinorType(minorType);
- if(precision != null) b.setPrecision(precision);
- if(width != null) b.setWidth(width);
- if(scale != null) b.setScale(scale);
+ if (precision != null) {
+ b.setPrecision(precision);
+ }
+ if (width != null) {
+ b.setWidth(width);
+ }
+ if (scale != null) {
+ b.setScale(scale);
+ }
return b.build();
}
- public static MajorTypeHolder get(MajorType mt){
+ public static MajorTypeHolder get(MajorType mt) {
MajorTypeHolder h = new MajorTypeHolder();
h.minorType = mt.getMinorType();
h.mode = mt.getMode();
- if(mt.hasPrecision()) h.precision = mt.getPrecision();
- if(mt.hasScale()) h.scale = mt.getScale();
- if(mt.hasWidth()) h.width = mt.getWidth();
+ if (mt.hasPrecision()) {
+ h.precision = mt.getPrecision();
+ }
+ if (mt.hasScale()) {
+ h.scale = mt.getScale();
+ }
+ if (mt.hasWidth()) {
+ h.width = mt.getWidth();
+ }
return h;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
index 540977dec..0ed74fb0e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
@@ -64,7 +64,9 @@ public class MaterializedField {
public String getLastName(){
PathSegment seg = key.path.getRootSegment();
- while(seg.getChild() != null) seg = seg.getChild();
+ while (seg.getChild() != null) {
+ seg = seg.getChild();
+ }
return seg.getNameSegment().getPath();
}
@@ -143,7 +145,7 @@ public class MaterializedField {
public MaterializedField getOtherNullableVersion(){
MajorType mt = key.type;
DataMode newDataMode = null;
- switch(mt.getMode()){
+ switch (mt.getMode()){
case OPTIONAL:
newDataMode = DataMode.REQUIRED;
break;
@@ -161,7 +163,9 @@ public class MaterializedField {
}
public boolean matches(SchemaPath path) {
- if(!path.isSimplePath()) return false;
+ if (!path.isSimplePath()) {
+ return false;
+ }
return key.path.equals(path);
}
@@ -178,23 +182,30 @@ public class MaterializedField {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
MaterializedField other = (MaterializedField) obj;
if (children == null) {
- if (other.children != null)
+ if (other.children != null) {
return false;
- } else if (!children.equals(other.children))
+ }
+ } else if (!children.equals(other.children)) {
return false;
+ }
if (key == null) {
- if (other.key != null)
+ if (other.key != null) {
return false;
- } else if (!key.equals(other.key))
+ }
+ } else if (!key.equals(other.key)) {
return false;
+ }
return true;
}
@@ -237,26 +248,33 @@ public class MaterializedField {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Key other = (Key) obj;
if (path == null) {
- if (other.path != null)
+ if (other.path != null) {
return false;
- } else if (!path.equals(other.path))
+ }
+ } else if (!path.equals(other.path)) {
return false;
+ }
if (type == null) {
- if (other.type != null)
+ if (other.type != null) {
return false;
- } else if (!type.equals(other.type))
+ }
+ } else if (!type.equals(other.type)) {
return false;
+ }
return true;
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RawFragmentBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RawFragmentBatch.java
index dd0f89af5..e1725e693 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RawFragmentBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RawFragmentBatch.java
@@ -38,7 +38,9 @@ public class RawFragmentBatch {
this.body = body;
this.connection = connection;
this.sender = sender;
- if(body != null) body.retain();
+ if (body != null) {
+ body.retain();
+ }
}
public FragmentRecordBatch getHeader() {
@@ -54,8 +56,10 @@ public class RawFragmentBatch {
return "RawFragmentBatch [header=" + header + ", body=" + body + "]";
}
- public void release(){
- if(body != null) body.release();
+ public void release() {
+ if (body != null) {
+ body.release();
+ }
}
public RemoteConnection getConnection() {
@@ -66,7 +70,7 @@ public class RawFragmentBatch {
return sender;
}
- public void sendOk(){
+ public void sendOk() {
sender.send(DataRpcConfig.OK);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaBuilder.java
index 78214db69..8bf346dab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaBuilder.java
@@ -57,8 +57,8 @@ public class SchemaBuilder {
return this;
}
- public SchemaBuilder addFields(Iterable<MaterializedField> fields){
- for(MaterializedField f : fields){
+ public SchemaBuilder addFields(Iterable<MaterializedField> fields) {
+ for (MaterializedField f : fields) {
addField(f);
}
return this;
@@ -86,7 +86,7 @@ public class SchemaBuilder {
// fields.put(f.getFieldId(), f);
// }
- public SchemaBuilder addField(MaterializedField f){
+ public SchemaBuilder addField(MaterializedField f) {
fields.add(f);
return this;
}
@@ -104,12 +104,14 @@ public class SchemaBuilder {
// if (!fields.containsKey(fieldId))
// throw new SchemaChangeException(
// String.format("An attempt was made to replace a field in the schema, however the schema does " +
-// "not currently contain that field id. The offending fieldId was %d", fieldId));
+// "not currently contain that field id. The offending fieldId was %d", fieldId));
// setTypedField(fieldId, type, nullable, mode, valueClass);
// }
- public SchemaBuilder removeField(MaterializedField f) throws SchemaChangeException{
- if(!fields.remove(f)) throw new SchemaChangeException("You attempted to remove an nonexistent field.");
+ public SchemaBuilder removeField(MaterializedField f) throws SchemaChangeException {
+ if (!fields.remove(f)) {
+ throw new SchemaChangeException("You attempted to remove an nonexistent field.");
+ }
return this;
}
@@ -118,8 +120,9 @@ public class SchemaBuilder {
* @return
* @throws SchemaChangeException
*/
- public BatchSchema build(){
+ public BatchSchema build() {
List<MaterializedField> fieldList = Lists.newArrayList(fields);
return new BatchSchema(this.selectionVectorMode, fieldList);
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
index 8a2312a93..5bd3e41d8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
@@ -29,7 +29,7 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
private T v;
- public SimpleVectorWrapper(T v){
+ public SimpleVectorWrapper(T v) {
this.v = v;
}
@@ -72,18 +72,19 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
v.clear();
}
- public static <T extends ValueVector> SimpleVectorWrapper<T> create(T v){
+ public static <T extends ValueVector> SimpleVectorWrapper<T> create(T v) {
return new SimpleVectorWrapper<T>(v);
}
@Override
public VectorWrapper<?> getChildWrapper(int[] ids) {
- if(ids.length == 1) return this;
+ if (ids.length == 1) {
+ return this;
+ }
ValueVector vector = v;
-
- for(int i = 1; i < ids.length; i++){
+ for (int i = 1; i < ids.length; i++) {
MapVector map = (MapVector) vector;
vector = map.getVectorById(ids[i]);
}
@@ -93,10 +94,12 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
@Override
public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath) {
- if(!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) return null;
+ if (!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) {
+ return null;
+ }
PathSegment seg = expectedPath.getRootSegment();
- if(v instanceof AbstractContainerVector){
+ if (v instanceof AbstractContainerVector) {
// we're looking for a multi path.
AbstractContainerVector c = (AbstractContainerVector) v;
TypedFieldId.Builder builder = TypedFieldId.newBuilder();
@@ -104,28 +107,26 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
builder.addId(id);
return c.getFieldIdIfMatches(builder, true, expectedPath.getRootSegment().getChild());
- }else{
+ } else {
TypedFieldId.Builder builder = TypedFieldId.newBuilder();
builder.intermediateType(v.getField().getType());
builder.addId(id);
builder.finalType(v.getField().getType());
- if(seg.isLastPath()){
+ if (seg.isLastPath()) {
return builder.build();
- }else{
+ } else {
PathSegment child = seg.getChild();
- if(child.isArray() && child.isLastPath()){
+ if (child.isArray() && child.isLastPath()) {
builder.remainder(child);
builder.withIndex();
builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
return builder.build();
- }else{
+ } else {
return null;
}
}
-
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
index acb56d6e8..f7cfefa39 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
@@ -36,15 +36,15 @@ public class TypedFieldId {
final boolean isHyperReader;
final PathSegment remainder;
- public TypedFieldId(MajorType type, int... fieldIds){
+ public TypedFieldId(MajorType type, int... fieldIds) {
this(type, type, type, false, null, fieldIds);
}
- public TypedFieldId(MajorType type, IntArrayList breadCrumb, PathSegment remainder){
+ public TypedFieldId(MajorType type, IntArrayList breadCrumb, PathSegment remainder) {
this(type, type, type, false, remainder, breadCrumb.toArray());
}
- public TypedFieldId(MajorType type, boolean isHyper, int... fieldIds){
+ public TypedFieldId(MajorType type, boolean isHyper, int... fieldIds) {
this(type, type, type, isHyper, null, fieldIds);
}
@@ -58,35 +58,35 @@ public class TypedFieldId {
this.remainder = remainder;
}
-
-
- public TypedFieldId cloneWithChild(int id){
+ public TypedFieldId cloneWithChild(int id) {
int[] fieldIds = ArrayUtils.add(this.fieldIds, id);
return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, remainder, fieldIds);
}
- public PathSegment getLastSegment(){
- if(remainder == null) return null;
+ public PathSegment getLastSegment() {
+ if (remainder == null) {
+ return null;
+ }
PathSegment seg = remainder;
- while(seg.getChild() != null){
+ while (seg.getChild() != null) {
seg = seg.getChild();
}
return seg;
}
- public TypedFieldId cloneWithRemainder(PathSegment remainder){
+ public TypedFieldId cloneWithRemainder(PathSegment remainder) {
return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, remainder, fieldIds);
}
- public boolean hasRemainder(){
+ public boolean hasRemainder() {
return remainder != null;
}
- public PathSegment getRemainder(){
+ public PathSegment getRemainder() {
return remainder;
}
- public boolean isHyperReader(){
+ public boolean isHyperReader() {
return isHyperReader;
}
@@ -94,11 +94,11 @@ public class TypedFieldId {
return intermediateType;
}
- public Class<? extends ValueVector> getIntermediateClass(){
+ public Class<? extends ValueVector> getIntermediateClass() {
return (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(intermediateType.getMinorType(), intermediateType.getMode());
}
- public MajorType getFinalType(){
+ public MajorType getFinalType() {
return finalType;
}
@@ -106,13 +106,11 @@ public class TypedFieldId {
return fieldIds;
}
-
-
public MajorType getSecondaryFinal() {
return secondaryFinal;
}
- public static Builder newBuilder(){
+ public static Builder newBuilder() {
return new Builder();
}
@@ -125,27 +123,27 @@ public class TypedFieldId {
boolean hyperReader = false;
boolean withIndex = false;
- public Builder addId(int id){
+ public Builder addId(int id) {
ids.add(id);
return this;
}
- public Builder withIndex(){
+ public Builder withIndex() {
withIndex = true;
return this;
}
- public Builder remainder(PathSegment remainder){
+ public Builder remainder(PathSegment remainder) {
this.remainder = remainder;
return this;
}
- public Builder hyper(){
+ public Builder hyper() {
this.hyperReader = true;
return this;
}
- public Builder finalType(MajorType finalType){
+ public Builder finalType(MajorType finalType) {
this.finalType = finalType;
return this;
}
@@ -155,17 +153,21 @@ public class TypedFieldId {
return this;
}
- public Builder intermediateType(MajorType intermediateType){
+ public Builder intermediateType(MajorType intermediateType) {
this.intermediateType = intermediateType;
return this;
}
- public TypedFieldId build(){
+ public TypedFieldId build() {
Preconditions.checkNotNull(intermediateType);
Preconditions.checkNotNull(finalType);
- if(intermediateType == null) intermediateType = finalType;
- if (secondaryFinal == null) secondaryFinal = finalType;
+ if (intermediateType == null) {
+ intermediateType = finalType;
+ }
+ if (secondaryFinal == null) {
+ secondaryFinal = finalType;
+ }
MajorType actualFinalType = finalType;
//MajorType secondaryFinal = finalType;
@@ -196,37 +198,50 @@ public class TypedFieldId {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
TypedFieldId other = (TypedFieldId) obj;
- if (!Arrays.equals(fieldIds, other.fieldIds))
+ if (!Arrays.equals(fieldIds, other.fieldIds)) {
return false;
+ }
if (finalType == null) {
- if (other.finalType != null)
+ if (other.finalType != null) {
return false;
- } else if (!finalType.equals(other.finalType))
+ }
+ } else if (!finalType.equals(other.finalType)) {
return false;
+ }
if (intermediateType == null) {
- if (other.intermediateType != null)
+ if (other.intermediateType != null) {
return false;
- } else if (!intermediateType.equals(other.intermediateType))
+ }
+ } else if (!intermediateType.equals(other.intermediateType)) {
return false;
- if (isHyperReader != other.isHyperReader)
+ }
+ if (isHyperReader != other.isHyperReader) {
return false;
+ }
if (remainder == null) {
- if (other.remainder != null)
+ if (other.remainder != null) {
return false;
- } else if (!remainder.equals(other.remainder))
+ }
+ } else if (!remainder.equals(other.remainder)) {
return false;
+ }
if (secondaryFinal == null) {
- if (other.secondaryFinal != null)
+ if (other.secondaryFinal != null) {
return false;
- } else if (!secondaryFinal.equals(other.secondaryFinal))
+ }
+ } else if (!secondaryFinal.equals(other.secondaryFinal)) {
return false;
+ }
return true;
}
@@ -238,6 +253,4 @@ public class TypedFieldId {
+ ", remainder=" + remainder + "]";
}
-
-
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index ef09f39b2..e2f4a954b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -74,15 +74,15 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
add(vv, releasable);
}
- public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz){
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
MaterializedField field = MaterializedField.create(name, type);
ValueVector v = TypeHelper.getNewVector(field, this.oContext.getAllocator());
add(v);
- if(clazz.isAssignableFrom(v.getClass())){
+ if (clazz.isAssignableFrom(v.getClass())) {
return (T) v;
- }else{
+ } else {
throw new IllegalStateException(String.format("Vector requested [%s] was different than type stored [%s]. Drill doesn't yet support hetergenous types.", clazz.getSimpleName(), v.getClass().getSimpleName()));
}
}
@@ -107,9 +107,7 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
public static VectorContainer canonicalize(VectorContainer original) {
VectorContainer vc = new VectorContainer();
-
List<VectorWrapper<?>> canonicalWrappers = new ArrayList<VectorWrapper<?>>(original.wrappers);
-
// Sort list of VectorWrapper alphabetically based on SchemaPath.
Collections.sort(canonicalWrappers, new Comparator<VectorWrapper<?>>() {
public int compare(VectorWrapper<?> v1, VectorWrapper<?> v2) {
@@ -123,7 +121,6 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
return vc;
}
-
private void cloneAndTransfer(VectorWrapper<?> wrapper) {
wrappers.add(wrapper.cloneAndTransfer());
}
@@ -145,6 +142,7 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
public void add(ValueVector[] hyperVector) {
add(hyperVector, true);
}
+
public void add(ValueVector[] hyperVector, boolean releasable) {
assert hyperVector.length != 0;
schema = null;
@@ -167,7 +165,6 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
return;
}
}
-
throw new IllegalStateException("You attempted to remove a vector that didn't exist.");
}
@@ -175,7 +172,7 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
for (int i = 0; i < wrappers.size(); i++) {
VectorWrapper<?> va = wrappers.get(i);
TypedFieldId id = va.getFieldIdIfMatches(i, path);
- if(id != null){
+ if (id != null) {
return id;
}
}
@@ -183,15 +180,14 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
return null;
}
-
-
-
@Override
public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... fieldIds) {
Preconditions.checkArgument(fieldIds.length >= 1);
VectorWrapper<?> va = wrappers.get(fieldIds[0]);
- if(va == null) return null;
+ if (va == null) {
+ return null;
+ }
if (fieldIds.length == 1 && clazz != null && !clazz.isAssignableFrom(va.getVectorClass())) {
throw new IllegalStateException(String.format(
@@ -242,7 +238,7 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
return recordCount;
}
- public void zeroVectors(){
+ public void zeroVectors() {
for (VectorWrapper<?> w : wrappers) {
w.clear();
}
@@ -252,17 +248,18 @@ public class VectorContainer extends AbstractMapVector implements Iterable<Vecto
return this.wrappers.size();
}
- public void allocateNew(){
+ public void allocateNew() {
for (VectorWrapper<?> w : wrappers) {
w.getValueVector().allocateNew();
}
}
- public boolean allocateNewSafe(){
+ public boolean allocateNewSafe() {
for (VectorWrapper<?> w : wrappers) {
- if(!w.getValueVector().allocateNewSafe()) return false;
+ if (!w.getValueVector().allocateNewSafe()) {
+ return false;
+ }
}
-
return true;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
index b9690a6ca..308a8bcdb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
@@ -63,7 +63,7 @@ public class WritableBatch {
"Attempted to reconstruct a container from a WritableBatch after it had been cleared");
if (buffers.length > 0) { /* If we have DrillBuf's associated with value vectors */
int len = 0;
- for(DrillBuf b : buffers){
+ for (DrillBuf b : buffers) {
len += b.capacity();
}
@@ -114,7 +114,9 @@ public class WritableBatch {
}
public void clear() {
- if(cleared) return;
+ if(cleared) {
+ return;
+ }
for (DrillBuf buf : buffers) {
buf.release();
}
@@ -157,8 +159,9 @@ public class WritableBatch {
}
public static WritableBatch get(RecordBatch batch) {
- if (batch.getSchema() != null && batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE)
+ if (batch.getSchema() != null && batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) {
throw new UnsupportedOperationException("Only batches without hyper selections vectors are writable.");
+ }
boolean sv2 = (batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE);
return getBatchNoHVWrap(batch.getRecordCount(), batch, sv2);
@@ -175,4 +178,5 @@ public class WritableBatch {
buf.retain(increment);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
index fd0932cbd..69bc78fc7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
@@ -31,18 +31,20 @@ public class SelectionVector4 {
private int length;
public SelectionVector4(ByteBuf vector, int recordCount, int batchRecordCount) throws SchemaChangeException {
- if(recordCount > Integer.MAX_VALUE /4) throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. You requested an allocation of %d bytes.", recordCount * 4));
+ if (recordCount > Integer.MAX_VALUE /4) {
+ throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. You requested an allocation of %d bytes.", recordCount * 4));
+ }
this.recordCount = recordCount;
this.start = 0;
this.length = Math.min(batchRecordCount, recordCount);
this.data = vector;
}
- public int getTotalCount(){
+ public int getTotalCount() {
return recordCount;
}
- public int getCount(){
+ public int getCount() {
return length;
}
@@ -51,14 +53,15 @@ public class SelectionVector4 {
this.recordCount = length;
}
- public void set(int index, int compound){
+ public void set(int index, int compound) {
data.setInt(index*4, compound);
}
- public void set(int index, int recordBatch, int recordIndex){
+
+ public void set(int index, int recordBatch, int recordIndex) {
data.setInt(index*4, (recordBatch << 16) | (recordIndex & 65535));
}
- public int get(int index){
+ public int get(int index) {
return data.getInt( (start+index)*4);
}
@@ -67,7 +70,7 @@ public class SelectionVector4 {
* @return Newly created single batch SelectionVector4.
* @throws SchemaChangeException
*/
- public SelectionVector4 createNewWrapperCurrent(){
+ public SelectionVector4 createNewWrapperCurrent() {
try {
data.retain();
SelectionVector4 sv4 = new SelectionVector4(data, recordCount, length);
@@ -78,10 +81,10 @@ public class SelectionVector4 {
}
}
- public boolean next(){
+ public boolean next() {
// logger.debug("Next called. Start: {}, Length: {}, recordCount: " + recordCount, start, length);
- if(start + length >= recordCount){
+ if (start + length >= recordCount) {
start = recordCount;
length = 0;
@@ -96,7 +99,7 @@ public class SelectionVector4 {
return true;
}
- public void clear(){
+ public void clear() {
start = 0;
length = 0;
if (data != DeadBuf.DEAD_BUFFER) {
@@ -105,5 +108,4 @@ public class SelectionVector4 {
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4Builder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4Builder.java
index 8de9948a3..c0f8ebaa3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4Builder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4Builder.java
@@ -30,15 +30,15 @@ public class SelectionVector4Builder {
private List<BatchSchema> schemas = Lists.newArrayList();
- public void add(RecordBatch batch, boolean newSchema) throws SchemaChangeException{
- if(!schemas.isEmpty() && newSchema) throw new SchemaChangeException("Currently, the sv4 builder doesn't support embedded types");
- if(newSchema){
+ public void add(RecordBatch batch, boolean newSchema) throws SchemaChangeException {
+ if (!schemas.isEmpty() && newSchema) {
+ throw new SchemaChangeException("Currently, the sv4 builder doesn't support embedded types");
+ }
+ if (newSchema) {
schemas.add(batch.getSchema());
}
-
}
-
// deals with managing selection vectors.
// take a four byte int
/**
@@ -48,7 +48,7 @@ public class SelectionVector4Builder {
* we should manage an array of valuevectors
*/
- private class VectorSchemaBuilder{
-
+ private class VectorSchemaBuilder {
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/DefaultFunctionResolver.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/DefaultFunctionResolver.java
index 9f02d74c9..4f128b318 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/DefaultFunctionResolver.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/DefaultFunctionResolver.java
@@ -37,7 +37,7 @@ public class DefaultFunctionResolver implements FunctionResolver {
currcost = TypeCastRules.getCost(call, h);
// if cost is lower than 0, func implementation is not matched, either w/ or w/o implicit casts
- if (currcost < 0 ){
+ if (currcost < 0 ) {
continue;
}
@@ -51,8 +51,9 @@ public class DefaultFunctionResolver implements FunctionResolver {
//did not find a matched func implementation, either w/ or w/o implicit casts
//TODO: raise exception here?
return null;
- } else
+ } else {
return bestmatch;
+ }
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolver.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolver.java
index 2bd80a5bd..14d46c9d8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolver.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolver.java
@@ -25,6 +25,6 @@ import org.apache.drill.exec.expr.fn.DrillFuncHolder;
public interface FunctionResolver {
- public DrillFuncHolder getBestMatch(List<DrillFuncHolder> methods, FunctionCall call);
+ public DrillFuncHolder getBestMatch(List<DrillFuncHolder> methods, FunctionCall call);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolverFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolverFactory.java
index fa5a3ce4b..0205aef9e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolverFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/FunctionResolverFactory.java
@@ -22,8 +22,8 @@ import org.apache.drill.common.expression.FunctionCall;
public class FunctionResolverFactory {
- public static FunctionResolver getResolver(FunctionCall call){
- return new DefaultFunctionResolver();
- }
+ public static FunctionResolver getResolver(FunctionCall call) {
+ return new DefaultFunctionResolver();
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
index 838c49c13..ea3155d9f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
@@ -44,34 +44,34 @@ public class ResolverTypePrecedence {
*/
int i = 0;
precedenceMap = new HashMap<MinorType, Integer>();
- precedenceMap.put(MinorType.NULL, i += 2); // NULL is legal to implicitly be promoted to any other type
- precedenceMap.put(MinorType.FIXEDBINARY, i += 2); // Fixed-length is promoted to var length
- precedenceMap.put(MinorType.VARBINARY, i += 2);
+ precedenceMap.put(MinorType.NULL, i += 2); // NULL is legal to implicitly be promoted to any other type
+ precedenceMap.put(MinorType.FIXEDBINARY, i += 2); // Fixed-length is promoted to var length
+ precedenceMap.put(MinorType.VARBINARY, i += 2);
precedenceMap.put(MinorType.FIXEDCHAR, i += 2);
- precedenceMap.put(MinorType.VARCHAR, i += 2);
+ precedenceMap.put(MinorType.VARCHAR, i += 2);
precedenceMap.put(MinorType.FIXED16CHAR, i += 2);
- precedenceMap.put(MinorType.VAR16CHAR, i += 2);
- precedenceMap.put(MinorType.BIT, i += 2);
- precedenceMap.put(MinorType.TINYINT, i += 2); //type with few bytes is promoted to type with more bytes ==> no data loss.
- precedenceMap.put(MinorType.UINT1, i += 2); //signed is legal to implicitly be promoted to unsigned.
- precedenceMap.put(MinorType.SMALLINT, i += 2);
- precedenceMap.put(MinorType.UINT2, i += 2);
- precedenceMap.put(MinorType.INT, i += 2);
- precedenceMap.put(MinorType.UINT4, i += 2);
- precedenceMap.put(MinorType.BIGINT, i += 2);
- precedenceMap.put(MinorType.UINT8, i += 2);
- precedenceMap.put(MinorType.MONEY, i += 2);
- precedenceMap.put(MinorType.FLOAT4, i += 2);
+ precedenceMap.put(MinorType.VAR16CHAR, i += 2);
+ precedenceMap.put(MinorType.BIT, i += 2);
+ precedenceMap.put(MinorType.TINYINT, i += 2); //type with few bytes is promoted to type with more bytes ==> no data loss.
+ precedenceMap.put(MinorType.UINT1, i += 2); //signed is legal to implicitly be promoted to unsigned.
+ precedenceMap.put(MinorType.SMALLINT, i += 2);
+ precedenceMap.put(MinorType.UINT2, i += 2);
+ precedenceMap.put(MinorType.INT, i += 2);
+ precedenceMap.put(MinorType.UINT4, i += 2);
+ precedenceMap.put(MinorType.BIGINT, i += 2);
+ precedenceMap.put(MinorType.UINT8, i += 2);
+ precedenceMap.put(MinorType.MONEY, i += 2);
+ precedenceMap.put(MinorType.FLOAT4, i += 2);
precedenceMap.put(MinorType.DECIMAL9, i += 2);
precedenceMap.put(MinorType.DECIMAL18, i += 2);
precedenceMap.put(MinorType.DECIMAL28DENSE, i += 2);
precedenceMap.put(MinorType.DECIMAL28SPARSE, i += 2);
precedenceMap.put(MinorType.DECIMAL38DENSE, i += 2);
precedenceMap.put(MinorType.DECIMAL38SPARSE, i += 2);
- precedenceMap.put(MinorType.FLOAT8, i += 2);
- precedenceMap.put(MinorType.TIME, i += 2);
- precedenceMap.put(MinorType.DATE, i += 2);
- precedenceMap.put(MinorType.TIMESTAMP, i += 2);
+ precedenceMap.put(MinorType.FLOAT8, i += 2);
+ precedenceMap.put(MinorType.TIME, i += 2);
+ precedenceMap.put(MinorType.DATE, i += 2);
+ precedenceMap.put(MinorType.TIMESTAMP, i += 2);
precedenceMap.put(MinorType.TIMETZ, i += 2);
precedenceMap.put(MinorType.TIMESTAMPTZ, i += 2);
precedenceMap.put(MinorType.INTERVALDAY, i+= 2);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
index d1ed95e6f..7969d49f6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
@@ -770,7 +770,9 @@ public class TypeCastRules {
}
public static boolean isCastableWithNullHandling(MajorType from, MajorType to, NullHandling nullHandling) {
- if (nullHandling == NullHandling.INTERNAL && from.getMode() != to.getMode()) return false;
+ if (nullHandling == NullHandling.INTERNAL && from.getMode() != to.getMode()) {
+ return false;
+ }
return isCastable(from.getMinorType(), to.getMinorType());
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractHandshakeHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractHandshakeHandler.java
index 78e4c1024..90482417d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractHandshakeHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractHandshakeHandler.java
@@ -42,17 +42,22 @@ public abstract class AbstractHandshakeHandler<T extends MessageLite> extends Me
@Override
protected void decode(ChannelHandlerContext ctx, InboundRpcMessage inbound, List<Object> outputs) throws Exception {
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Received handshake {}", inbound);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Received handshake {}", inbound);
+ }
this.coordinationId = inbound.coordinationId;
ctx.channel().pipeline().remove(this);
- if (inbound.rpcType != handshakeType.getNumber())
+ if (inbound.rpcType != handshakeType.getNumber()) {
throw new RpcException(String.format("Handshake failure. Expected %s[%d] but received number [%d]",
handshakeType, handshakeType.getNumber(), inbound.rpcType));
+ }
T msg = parser.parseFrom(inbound.getProtobufBodyAsIS());
consumeHandshake(ctx, msg);
inbound.pBody.release();
- if(inbound.dBody != null) inbound.dBody.release();
+ if (inbound.dBody != null) {
+ inbound.dBody.release();
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
index f7b396990..8f43b06c2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/CoordinationQueue.java
@@ -39,11 +39,11 @@ public class CoordinationQueue {
}
void channelClosed(Throwable ex) {
- if(ex != null){
+ if (ex != null) {
RpcException e;
- if(ex instanceof RpcException){
+ if (ex instanceof RpcException) {
e = (RpcException) ex;
- }else{
+ } else {
e = new RpcException(ex);
}
for (RpcOutcome<?> f : map.values()) {
@@ -52,13 +52,14 @@ public class CoordinationQueue {
}
}
- public <V> ChannelListenerWithCoordinationId get(RpcOutcomeListener<V> handler, Class<V> clazz, RemoteConnection connection){
+ public <V> ChannelListenerWithCoordinationId get(RpcOutcomeListener<V> handler, Class<V> clazz, RemoteConnection connection) {
int i = circularInt.getNext();
RpcListener<V> future = new RpcListener<V>(handler, clazz, i, connection);
Object old = map.put(i, future);
- if (old != null)
+ if (old != null) {
throw new IllegalStateException(
"You attempted to reuse a coordination id when the previous coordination id has not been removed. This is likely rpc future callback memory leak.");
+ }
return future;
}
@@ -79,11 +80,11 @@ public class CoordinationQueue {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
- if(!future.isSuccess()){
+ if (!future.isSuccess()) {
removeFromMap(coordinationId);
- if(future.channel().isActive()) {
+ if (future.channel().isActive()) {
throw new RpcException("Future failed") ;
- }else{
+ } else {
throw new ChannelClosedException();
}
}
@@ -111,7 +112,6 @@ public class CoordinationQueue {
return coordinationId;
}
-
}
private RpcOutcome<?> removeFromMap(int coordinationId) {
@@ -130,7 +130,6 @@ public class CoordinationQueue {
Class<?> outcomeClass = rpc.getOutcomeType();
if (outcomeClass != clazz) {
-
throw new IllegalStateException(
String
.format(
@@ -149,11 +148,12 @@ public class CoordinationQueue {
public void updateFailedFuture(int coordinationId, RpcFailure failure) {
// logger.debug("Updating failed future.");
- try{
+ try {
RpcOutcome<?> rpc = removeFromMap(coordinationId);
rpc.setException(new RemoteRpcException(failure));
- }catch(Exception ex){
+ } catch(Exception ex) {
logger.warn("Failed to remove from map. Not a problem since we were updating on failed future.", ex);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFutureImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFutureImpl.java
index 291c71a99..19d9c30af 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFutureImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/DrillRpcFutureImpl.java
@@ -76,10 +76,10 @@ class DrillRpcFutureImpl<V> extends AbstractCheckedFuture<V, RpcException> imple
return buffer;
}
- public void release(){
- if(buffer != null) buffer.release();
+ public void release() {
+ if (buffer != null) {
+ buffer.release();
+ }
}
-
-
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
index 012b9e4c1..d73903457 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/InboundRpcMessage.java
@@ -35,16 +35,22 @@ public class InboundRpcMessage extends RpcMessage{
}
@Override
- public int getBodySize(){
+ public int getBodySize() {
int len = pBody.capacity();
- if(dBody != null) len += dBody.capacity();
+ if (dBody != null) {
+ len += dBody.capacity();
+ }
return len;
}
@Override
- void release(){
- if (pBody != null) pBody.release();
- if(dBody != null) dBody.release();
+ void release() {
+ if (pBody != null) {
+ pBody.release();
+ }
+ if (dBody != null) {
+ dBody.release();
+ }
}
@Override
@@ -53,7 +59,8 @@ public class InboundRpcMessage extends RpcMessage{
+ coordinationId + ", dBody=" + dBody + "]";
}
- public InputStream getProtobufBodyAsIS(){
+ public InputStream getProtobufBodyAsIS() {
return new ByteBufInputStream(pBody);
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
index ef966cbcb..edad63e48 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/OutboundRpcMessage.java
@@ -40,10 +40,10 @@ public class OutboundRpcMessage extends RpcMessage {
// Netty doesn't traditionally release the reference on an unreadable buffer. However, we need to so that if we send a empty or unwritable buffer, we still release. otherwise we get weird memory leaks when sending empty vectors.
List<ByteBuf> bufs = Lists.newArrayList();
- for(ByteBuf d : dBodies){
- if(d.readableBytes() == 0){
+ for (ByteBuf d : dBodies) {
+ if (d.readableBytes() == 0) {
d.release();
- }else{
+ } else {
bufs.add(d);
}
}
@@ -58,12 +58,16 @@ public class OutboundRpcMessage extends RpcMessage {
return len;
}
- public int getRawBodySize(){
- if(dBodies == null) return 0;
+ public int getRawBodySize() {
+ if (dBodies == null) {
+ return 0;
+ }
int len = 0;
for (int i = 0; i < dBodies.length; i++) {
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Reader Index {}, Writer Index {}", dBodies[i].readerIndex(), dBodies[i].writerIndex());
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Reader Index {}, Writer Index {}", dBodies[i].readerIndex(), dBodies[i].writerIndex());
+ }
len += dBodies[i].readableBytes();
}
return len;
@@ -76,13 +80,12 @@ public class OutboundRpcMessage extends RpcMessage {
}
@Override
- void release(){
- if(dBodies != null){
- for(ByteBuf b : dBodies){
+ void release() {
+ if (dBodies != null) {
+ for (ByteBuf b : dBodies) {
b.release();
}
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ProtobufLengthDecoder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ProtobufLengthDecoder.java
index 02fb75e13..4f075d305 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ProtobufLengthDecoder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ProtobufLengthDecoder.java
@@ -47,8 +47,9 @@ public class ProtobufLengthDecoder extends ByteToMessageDecoder {
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
if (!ctx.channel().isOpen()) {
- if (in.readableBytes() > 0)
+ if (in.readableBytes() > 0) {
logger.info("Channel is closed, discarding remaining {} byte(s) in buffer.", in.readableBytes());
+ }
in.skipBytes(in.readableBytes());
return;
}
@@ -80,7 +81,7 @@ public class ProtobufLengthDecoder extends ByteToMessageDecoder {
// need to make buffer copy, otherwise netty will try to refill this buffer if we move the readerIndex forward...
// TODO: Can we avoid this copy?
ByteBuf outBuf = allocator.buffer(length);
- if(outBuf == null){
+ if (outBuf == null) {
logger.warn("Failure allocating buffer on incoming stream due to memory limits. Current Allocation: {}.", allocator.getAllocatedMemory());
in.resetReaderIndex();
outOfMemoryHandler.handle();
@@ -90,10 +91,11 @@ public class ProtobufLengthDecoder extends ByteToMessageDecoder {
in.skipBytes(length);
- if (RpcConstants.EXTRA_DEBUGGING)
+ if (RpcConstants.EXTRA_DEBUGGING) {
logger.debug(String.format(
"ReaderIndex is %d after length header of %d bytes and frame body of length %d bytes.",
in.readerIndex(), i + 1, length));
+ }
out.add(outBuf);
return;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ReconnectingConnection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ReconnectingConnection.java
index 1675b521f..f214c4d1e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ReconnectingConnection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ReconnectingConnection.java
@@ -136,7 +136,9 @@ public abstract class ReconnectingConnection<CONNECTION_TYPE extends RemoteConne
break;
}
connection = connectionHolder.get();
- if (connection != null) break;
+ if (connection != null) {
+ break;
+ }
}
if (connection != incoming) {
@@ -218,8 +220,9 @@ public abstract class ReconnectingConnection<CONNECTION_TYPE extends RemoteConne
break;
}
connection = connectionHolder.get();
- if (connection != null)
+ if (connection != null) {
break;
+ }
}
if (connection == incoming) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ResettableBarrier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ResettableBarrier.java
index 615bccc5c..a2a6d2a24 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ResettableBarrier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/ResettableBarrier.java
@@ -49,10 +49,11 @@ public class ResettableBarrier {
while(true) {
int c = getState();
- if (c == 0)
+ if (c == 0) {
return false;
+ }
int nextc = c - 1;
- if (compareAndSetState(c, nextc)){
+ if (compareAndSetState(c, nextc)) {
return nextc == 0;
}
}
@@ -79,7 +80,7 @@ public class ResettableBarrier {
sync.releaseShared(1);
}
- public void closeBarrier(){
+ public void closeBarrier() {
// logger.debug("closing barrier.");
sync.reset();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
index c6979e5b1..918ca0b5a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
@@ -81,10 +81,14 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
public <SEND extends MessageLite, RECEIVE extends MessageLite> void send(RpcOutcomeListener<RECEIVE> listener, C connection, T rpcType,
SEND protobufBody, Class<RECEIVE> clazz, boolean allowInEventLoop, ByteBuf... dataBodies) {
- if(!allowInEventLoop){
- if(connection.inEventLoop()) throw new IllegalStateException("You attempted to send while inside the rpc event thread. This isn't allowed because sending will block if the channel is backed up.");
+ if (!allowInEventLoop) {
+ if (connection.inEventLoop()) {
+ throw new IllegalStateException("You attempted to send while inside the rpc event thread. This isn't allowed because sending will block if the channel is backed up.");
+ }
- if(!connection.blockOnNotWritable(listener)) return;
+ if (!connection.blockOnNotWritable(listener)) {
+ return;
+ }
}
ByteBuf pBuffer = null;
@@ -102,11 +106,13 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
channelFuture.addListener(futureListener);
channelFuture.addListener(ChannelFutureListener.FIRE_EXCEPTION_ON_FAILURE);
completed = true;
- } catch(Exception | AssertionError e){
+ } catch (Exception | AssertionError e) {
listener.failed(new RpcException("Failure sending message.", e));
} finally {
if (!completed) {
- if (pBuffer != null) pBuffer.release();
+ if (pBuffer != null) {
+ pBuffer.release();
+ }
if (dataBodies != null) {
for (ByteBuf b : dataBodies) {
b.release();
@@ -130,7 +136,9 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
}
protected void closeQueueDueToChannelClose() {
- if (this.isClient()) queue.channelClosed(new ChannelClosedException("Queue closed due to channel closure."));
+ if (this.isClient()) {
+ queue.channelClosed(new ChannelClosedException("Queue closed due to channel closure."));
+ }
}
protected GenericFutureListener<ChannelFuture> getCloseHandler(C clientConnection) {
@@ -148,11 +156,13 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
this.coordinationId = coordinationId;
}
- public void send(Response r){
+ public void send(Response r) {
assert rpcConfig.checkResponseSend(r.rpcType, r.pBody.getClass());
OutboundRpcMessage outMessage = new OutboundRpcMessage(RpcMode.RESPONSE, r.rpcType, coordinationId,
r.pBody, r.dBodies);
- if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Adding message to outbound buffer. {}", outMessage);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Adding message to outbound buffer. {}", outMessage);
+ }
connection.getChannel().writeAndFlush(outMessage);
}
@@ -168,8 +178,12 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
@Override
protected void decode(ChannelHandlerContext ctx, InboundRpcMessage msg, List<Object> output) throws Exception {
- if (!ctx.channel().isOpen()) return;
- if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Received message {}", msg);
+ if (!ctx.channel().isOpen()) {
+ return;
+ }
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Received message {}", msg);
+ }
switch (msg.mode) {
case REQUEST: {
// handle message and ack.
@@ -188,8 +202,10 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
Object value = parser.parseFrom(new ByteBufInputStream(msg.pBody, msg.pBody.readableBytes()));
rpcFuture.set(value, msg.dBody);
msg.release(); // we release our ownership. Handle could have taken over ownership.
- if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Updated rpc future {} with value {}", rpcFuture, value);
- }catch(Exception ex){
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Updated rpc future {} with value {}", rpcFuture, value);
+ }
+ }catch(Exception ex) {
logger.error("Failure while handling response.", ex);
throw ex;
}
@@ -199,8 +215,9 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
RpcFailure failure = RpcFailure.parseFrom(new ByteBufInputStream(msg.pBody, msg.pBody.readableBytes()));
queue.updateFailedFuture(msg.coordinationId, failure);
msg.release();
- if (RpcConstants.EXTRA_DEBUGGING)
+ if (RpcConstants.EXTRA_DEBUGGING) {
logger.debug("Updated rpc future with coordinationId {} with failure ", msg.coordinationId, failure);
+ }
break;
default:
@@ -252,4 +269,5 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
throw new RpcException(String.format("Failure while decoding message with parser of type. %s", parser.getClass().getCanonicalName()), e);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConfig.java
index 3010f2b7b..b5974f61e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcConfig.java
@@ -31,7 +31,7 @@ public class RpcConfig {
private final Map<EnumLite, RpcMessageType<?, ?, ?>> sendMap;
private final Map<Integer, RpcMessageType<?, ?, ?>> receiveMap;
- private RpcConfig(String name, Map<EnumLite, RpcMessageType<?, ?, ?>> sendMap, Map<Integer, RpcMessageType<?, ?, ?>> receiveMap){
+ private RpcConfig(String name, Map<EnumLite, RpcMessageType<?, ?, ?>> sendMap, Map<Integer, RpcMessageType<?, ?, ?>> receiveMap) {
this.name = name;
this.sendMap = ImmutableMap.copyOf(sendMap);
this.receiveMap = ImmutableMap.copyOf(receiveMap);
@@ -41,33 +41,51 @@ public class RpcConfig {
return name;
}
- public boolean checkReceive(int rpcType, Class<?> receiveClass){
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug(String.format("Checking reception for rpcType %d and receive class %s.", rpcType, receiveClass));
+ public boolean checkReceive(int rpcType, Class<?> receiveClass) {
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug(String.format("Checking reception for rpcType %d and receive class %s.", rpcType, receiveClass));
+ }
RpcMessageType<?,?,?> type = receiveMap.get(rpcType);
- if(type == null) throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc receive type number of %s.", name, rpcType));
+ if (type == null) {
+ throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc receive type number of %s.", name, rpcType));
+ }
- if(receiveClass != type.getRet()){
+ if (receiveClass != type.getRet()) {
throw new IllegalStateException(String.format("%s: The definition for receive doesn't match implementation code. The definition is %s however the current receive for this type was of type %s.", name, type, receiveClass.getCanonicalName()));
}
return true;
}
- public boolean checkSend(EnumLite send, Class<?> sendClass, Class<?> receiveClass){
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug(String.format("Checking send classes for send RpcType %s. Send Class is %s and Receive class is %s.", send, sendClass, receiveClass));
+ public boolean checkSend(EnumLite send, Class<?> sendClass, Class<?> receiveClass) {
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug(String.format("Checking send classes for send RpcType %s. Send Class is %s and Receive class is %s.", send, sendClass, receiveClass));
+ }
RpcMessageType<?,?,?> type = sendMap.get(send);
- if(type == null) throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc send type of %s.", name, send));
+ if (type == null) {
+ throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc send type of %s.", name, send));
+ }
- if(type.getSend() != sendClass) throw new IllegalStateException(String.format("%s: The definition for send doesn't match implementation code. The definition is %s however the current send is trying to send an object of type %s.", name, type, sendClass.getCanonicalName()));
- if(type.getRet() != receiveClass) throw new IllegalStateException(String.format("%s: The definition for send doesn't match implementation code. The definition is %s however the current send is trying to setup an expected reception of an object of type %s.", name, type, receiveClass.getCanonicalName()));
+ if (type.getSend() != sendClass) {
+ throw new IllegalStateException(String.format("%s: The definition for send doesn't match implementation code. The definition is %s however the current send is trying to send an object of type %s.", name, type, sendClass.getCanonicalName()));
+ }
+ if (type.getRet() != receiveClass) {
+ throw new IllegalStateException(String.format("%s: The definition for send doesn't match implementation code. The definition is %s however the current send is trying to setup an expected reception of an object of type %s.", name, type, receiveClass.getCanonicalName()));
+ }
return true;
}
- public boolean checkResponseSend(EnumLite responseType, Class<?> responseClass){
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug(String.format("Checking responce send of type %s with response class of %s.", responseType, responseClass));
+ public boolean checkResponseSend(EnumLite responseType, Class<?> responseClass) {
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug(String.format("Checking responce send of type %s with response class of %s.", responseType, responseClass));
+ }
RpcMessageType<?,?,?> type = receiveMap.get(responseType.getNumber());
- if(type == null) throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc response of type %s.", name, responseType));
- if(type.getRet() != responseClass) throw new IllegalStateException(String.format("%s: The definition for the response doesn't match implementation code. The definition is %s however the current response is trying to response with an object of type %s.", name, type, responseClass.getCanonicalName()));
+ if (type == null) {
+ throw new IllegalStateException(String.format("%s: There is no defined RpcMessage type for a Rpc response of type %s.", name, responseType));
+ }
+ if (type.getRet() != responseClass) {
+ throw new IllegalStateException(String.format("%s: The definition for the response doesn't match implementation code. The definition is %s however the current response is trying to response with an object of type %s.", name, type, responseClass.getCanonicalName()));
+ }
return true;
}
@@ -114,10 +132,9 @@ public class RpcConfig {
+ ret + "]";
}
-
}
- public static RpcConfigBuilder newBuilder(String name){
+ public static RpcConfigBuilder newBuilder(String name) {
return new RpcConfigBuilder(name);
}
@@ -126,25 +143,21 @@ public class RpcConfig {
private Map<EnumLite, RpcMessageType<?, ?, ?>> sendMap = Maps.newHashMap();
private Map<Integer, RpcMessageType<?, ?, ?>> receiveMap = Maps.newHashMap();
- private RpcConfigBuilder(String name){
+ private RpcConfigBuilder(String name) {
this.name = name;
}
- public <SEND extends MessageLite, RECEIVE extends MessageLite, T extends EnumLite> RpcConfigBuilder add(T sendEnum, Class<SEND> send, T receiveEnum, Class<RECEIVE> rec){
+ public <SEND extends MessageLite, RECEIVE extends MessageLite, T extends EnumLite> RpcConfigBuilder add(T sendEnum, Class<SEND> send, T receiveEnum, Class<RECEIVE> rec) {
RpcMessageType<SEND, RECEIVE, T> type = new RpcMessageType<SEND, RECEIVE, T>(sendEnum, send, receiveEnum, rec);
this.sendMap.put(sendEnum, type);
this.receiveMap.put(receiveEnum.getNumber(), type);
return this;
}
- public RpcConfig build(){
+ public RpcConfig build() {
return new RpcConfig(name, sendMap, receiveMap);
-
}
- }
-
+ }
}
-
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
index f4fe64db0..74a4afb18 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcDecoder.java
@@ -37,18 +37,20 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
private final AtomicLong messageCounter = new AtomicLong();
- public RpcDecoder(String name){
+ public RpcDecoder(String name) {
this.logger = org.slf4j.LoggerFactory.getLogger(RpcDecoder.class.getCanonicalName() + "-" + name);
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
- if(!ctx.channel().isOpen()){
+ if (!ctx.channel().isOpen()) {
return;
}
- if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Inbound rpc message received.");
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Inbound rpc message received.");
+ }
// now, we know the entire message is in the buffer and the buffer is constrained to this message. Additionally,
// this process should avoid reading beyond the end of this buffer so we inform the ByteBufInputStream to throw an
@@ -59,7 +61,9 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
checkTag(is, RpcEncoder.HEADER_TAG);
final RpcHeader header = RpcHeader.parseDelimitedFrom(is);
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug(" post header read index {}", buffer.readerIndex());
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug(" post header read index {}", buffer.readerIndex());
+ }
// read the protobuf body into a buffer.
checkTag(is, RpcEncoder.PROTOBUF_BODY_TAG);
@@ -67,9 +71,13 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
final ByteBuf pBody = buffer.slice(buffer.readerIndex(), pBodyLength);
buffer.skipBytes(pBodyLength);
pBody.retain();
- if (RpcConstants.EXTRA_DEBUGGING) logger.debug("Read protobuf body of length {} into buffer {}.", pBodyLength, pBody);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Read protobuf body of length {} into buffer {}.", pBodyLength, pBody);
+ }
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("post protobufbody read index {}", buffer.readerIndex());
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("post protobufbody read index {}", buffer.readerIndex());
+ }
ByteBuf dBody = null;
int dBodyLength = 0;
@@ -77,16 +85,24 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
// read the data body.
if (buffer.readableBytes() > 0) {
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Reading raw body, buffer has {} bytes available, is available {}.", buffer.readableBytes(), is.available());
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Reading raw body, buffer has {} bytes available, is available {}.", buffer.readableBytes(), is.available());
+ }
checkTag(is, RpcEncoder.RAW_BODY_TAG);
dBodyLength = readRawVarint32(is);
- if(buffer.readableBytes() != dBodyLength) throw new CorruptedFrameException(String.format("Expected to receive a raw body of %d bytes but received a buffer with %d bytes.", dBodyLength, buffer.readableBytes()));
+ if (buffer.readableBytes() != dBodyLength) {
+ throw new CorruptedFrameException(String.format("Expected to receive a raw body of %d bytes but received a buffer with %d bytes.", dBodyLength, buffer.readableBytes()));
+ }
dBody = buffer.slice();
dBody.retain();
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Read raw body of {}", dBody);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Read raw body of {}", dBody);
+ }
}else{
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("No need to read raw body, no readable bytes left.");
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("No need to read raw body, no readable bytes left.");
+ }
}
@@ -97,14 +113,16 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
// move the reader index forward so the next rpc call won't try to work with it.
buffer.skipBytes(dBodyLength);
messageCounter.incrementAndGet();
- if (RpcConstants.SOME_DEBUGGING) logger.debug("Inbound Rpc Message Decoded {}.", m);
+ if (RpcConstants.SOME_DEBUGGING) {
+ logger.debug("Inbound Rpc Message Decoded {}.", m);
+ }
out.add(m);
}
private void checkTag(ByteBufInputStream is, int expectedTag) throws IOException {
int actualTag = readRawVarint32(is);
- if (actualTag != expectedTag){
+ if (actualTag != expectedTag) {
throw new CorruptedFrameException(String.format("Expected to read a tag of %d but actually received a value of %d. Happened after reading %d message.", expectedTag, actualTag, messageCounter.get()));
}
}
@@ -143,4 +161,5 @@ class RpcDecoder extends MessageToMessageDecoder<ByteBuf> {
}
return result;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
index 8bf3483e1..34256f35d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcEncoder.java
@@ -45,22 +45,26 @@ class RpcEncoder extends MessageToMessageEncoder<OutboundRpcMessage>{
static final int PROTOBUF_BODY_TAG_LENGTH = getRawVarintSize(PROTOBUF_BODY_TAG);
static final int RAW_BODY_TAG_LENGTH = getRawVarintSize(RAW_BODY_TAG);
- public RpcEncoder(String name){
+ public RpcEncoder(String name) {
this.logger = org.slf4j.LoggerFactory.getLogger(RpcEncoder.class.getCanonicalName() + "-" + name);
}
@Override
protected void encode(ChannelHandlerContext ctx, OutboundRpcMessage msg, List<Object> out) throws Exception {
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Rpc Encoder called with msg {}", msg);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Rpc Encoder called with msg {}", msg);
+ }
- if(!ctx.channel().isOpen()){
+ if (!ctx.channel().isOpen()) {
//output.add(ctx.alloc().buffer(0));
logger.debug("Channel closed, skipping encode.");
return;
}
try{
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Encoding outbound message {}", msg);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Encoding outbound message {}", msg);
+ }
// first we build the RpcHeader
RpcHeader header = RpcHeader.newBuilder() //
.setMode(msg.mode) //
@@ -75,7 +79,7 @@ class RpcEncoder extends MessageToMessageEncoder<OutboundRpcMessage>{
HEADER_TAG_LENGTH + getRawVarintSize(headerLength) + headerLength + //
PROTOBUF_BODY_TAG_LENGTH + getRawVarintSize(protoBodyLength) + protoBodyLength; //
- if(rawBodyLength > 0){
+ if (rawBodyLength > 0) {
fullLength += (RAW_BODY_TAG_LENGTH + getRawVarintSize(rawBodyLength) + rawBodyLength);
}
@@ -97,8 +101,10 @@ class RpcEncoder extends MessageToMessageEncoder<OutboundRpcMessage>{
msg.pBody.writeTo(cos);
// if exists, write data body and tag.
- if(msg.getRawBodySize() > 0){
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Writing raw body of size {}", msg.getRawBodySize());
+ if (msg.getRawBodySize() > 0) {
+ if(RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Writing raw body of size {}", msg.getRawBodySize());
+ }
cos.writeRawVarint32(RAW_BODY_TAG);
cos.writeRawVarint32(rawBodyLength);
@@ -107,23 +113,24 @@ class RpcEncoder extends MessageToMessageEncoder<OutboundRpcMessage>{
CompositeByteBuf cbb = new CompositeByteBuf(buf.alloc(), true, msg.dBodies.length + 1);
cbb.addComponent(buf);
int bufLength = buf.readableBytes();
- for(ByteBuf b : msg.dBodies){
+ for (ByteBuf b : msg.dBodies) {
cbb.addComponent(b);
bufLength += b.readableBytes();
}
cbb.writerIndex(bufLength);
out.add(cbb);
-
-
- }else{
+ } else {
cos.flush();
out.add(buf);
}
- if(RpcConstants.SOME_DEBUGGING) logger.debug("Wrote message length {}:{} bytes (head:body). Message: " + msg, getRawVarintSize(fullLength), fullLength);
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Sent message. Ending writer index was {}.", buf.writerIndex());
-
- }finally{
+ if (RpcConstants.SOME_DEBUGGING) {
+ logger.debug("Wrote message length {}:{} bytes (head:body). Message: " + msg, getRawVarintSize(fullLength), fullLength);
+ }
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Sent message. Ending writer index was {}.", buf.writerIndex());
+ }
+ } finally {
// make sure to release Rpc Messages underlying byte buffers.
//msg.release();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
index 3d8f02ba7..eb870b336 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcException.java
@@ -49,14 +49,20 @@ public class RpcException extends DrillIOException{
super(cause);
}
- public static RpcException mapException(Throwable t){
- while(t instanceof ExecutionException) t = ((ExecutionException)t).getCause();
- if(t instanceof RpcException) return ((RpcException) t);
+ public static RpcException mapException(Throwable t) {
+ while (t instanceof ExecutionException) {
+ t = ((ExecutionException)t).getCause();
+ }
+ if (t instanceof RpcException) {
+ return ((RpcException) t);
+ }
return new RpcException(t);
}
- public static RpcException mapException(String message, Throwable t){
- while(t instanceof ExecutionException) t = ((ExecutionException)t).getCause();
+ public static RpcException mapException(String message, Throwable t) {
+ while (t instanceof ExecutionException) {
+ t = ((ExecutionException)t).getCause();
+ }
return new RpcException(message, t);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ConnectionManagerRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ConnectionManagerRegistry.java
index 37c9ce2d0..06d6e7783 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ConnectionManagerRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ConnectionManagerRegistry.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.work.batch.ControlMessageHandler;
import com.google.common.collect.Maps;
-public class ConnectionManagerRegistry implements Iterable<ControlConnectionManager>{
+public class ConnectionManagerRegistry implements Iterable<ControlConnectionManager> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConnectionManagerRegistry.class);
private final ConcurrentMap<DrillbitEndpoint, ControlConnectionManager> registry = Maps.newConcurrentMap();
@@ -41,13 +41,15 @@ public class ConnectionManagerRegistry implements Iterable<ControlConnectionMana
this.context = context;
}
- public ControlConnectionManager getConnectionManager(DrillbitEndpoint endpoint){
+ public ControlConnectionManager getConnectionManager(DrillbitEndpoint endpoint) {
assert localEndpoint != null : "DrillbitEndpoint must be set before a connection manager can be retrieved";
ControlConnectionManager m = registry.get(endpoint);
- if(m == null){
+ if (m == null) {
m = new ControlConnectionManager(endpoint, localEndpoint, handler, context);
ControlConnectionManager m2 = registry.putIfAbsent(endpoint, m);
- if(m2 != null) m = m2;
+ if (m2 != null) {
+ m = m2;
+ }
}
return m;
@@ -58,7 +60,7 @@ public class ConnectionManagerRegistry implements Iterable<ControlConnectionMana
return registry.values().iterator();
}
- public void setEndpoint(DrillbitEndpoint endpoint){
+ public void setEndpoint(DrillbitEndpoint endpoint) {
this.localEndpoint = endpoint;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlClient.java
index 879df40f6..d546db30a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlClient.java
@@ -86,7 +86,9 @@ public class ControlClient extends BasicClient<RpcType, ControlConnection, BitCo
@Override
protected void validateHandshake(BitControlHandshake handshake) throws RpcException {
- if(handshake.getRpcVersion() != ControlRpcConfig.RPC_VERSION) throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", handshake.getRpcVersion(), ControlRpcConfig.RPC_VERSION));
+ if (handshake.getRpcVersion() != ControlRpcConfig.RPC_VERSION) {
+ throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", handshake.getRpcVersion(), ControlRpcConfig.RPC_VERSION));
+ }
}
@Override
@@ -94,7 +96,7 @@ public class ControlClient extends BasicClient<RpcType, ControlConnection, BitCo
connection.setEndpoint(handshake.getEndpoint());
}
- public ControlConnection getConnection(){
+ public ControlConnection getConnection() {
return this.connection;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlConnection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlConnection.java
index 6ac6dd553..a7aaa9c3e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlConnection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlConnection.java
@@ -87,28 +87,35 @@ public class ControlConnection extends RemoteConnection {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
ControlConnection other = (ControlConnection) obj;
if (id == null) {
- if (other.id != null)
+ if (other.id != null) {
return false;
- } else if (!id.equals(other.id))
+ }
+ } else if (!id.equals(other.id)) {
return false;
+ }
return true;
}
public void shutdownIfClient() {
- if (bus.isClient())
+ if (bus.isClient()) {
Closeables.closeQuietly(bus);
+ }
}
@Override
public BufferAllocator getAllocator() {
return allocator;
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlServer.java
index 393773d99..43089d33c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/ControlServer.java
@@ -74,13 +74,17 @@ public class ControlServer extends BasicServer<RpcType, ControlConnection>{
@Override
protected ServerHandshakeHandler<BitControlHandshake> getHandshakeHandler(final ControlConnection connection) {
- return new ServerHandshakeHandler<BitControlHandshake>(RpcType.HANDSHAKE, BitControlHandshake.PARSER){
+ return new ServerHandshakeHandler<BitControlHandshake>(RpcType.HANDSHAKE, BitControlHandshake.PARSER) {
@Override
public MessageLite getHandshakeResponse(BitControlHandshake inbound) throws Exception {
// logger.debug("Handling handshake from other bit. {}", inbound);
- if(inbound.getRpcVersion() != ControlRpcConfig.RPC_VERSION) throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", inbound.getRpcVersion(), ControlRpcConfig.RPC_VERSION));
- if(!inbound.hasEndpoint() || inbound.getEndpoint().getAddress().isEmpty() || inbound.getEndpoint().getControlPort() < 1) throw new RpcException(String.format("RPC didn't provide valid counter endpoint information. Received %s.", inbound.getEndpoint()));
+ if (inbound.getRpcVersion() != ControlRpcConfig.RPC_VERSION) {
+ throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", inbound.getRpcVersion(), ControlRpcConfig.RPC_VERSION));
+ }
+ if (!inbound.hasEndpoint() || inbound.getEndpoint().getAddress().isEmpty() || inbound.getEndpoint().getControlPort() < 1) {
+ throw new RpcException(String.format("RPC didn't provide valid counter endpoint information. Received %s.", inbound.getEndpoint()));
+ }
connection.setEndpoint(inbound.getEndpoint());
// add the
@@ -129,5 +133,4 @@ public class ControlServer extends BasicServer<RpcType, ControlConnection>{
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
index 60d2cdf9e..5c126e139 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
@@ -64,9 +64,10 @@ public class WorkEventBus {
public void setFragmentStatusListener(QueryId queryId, FragmentStatusListener listener) throws RpcException {
logger.debug("Adding fragment status listener for queryId {}.", queryId);
FragmentStatusListener old = listeners.putIfAbsent(queryId, listener);
- if (old != null)
+ if (old != null) {
throw new RpcException(
"Failure. The provided handle already exists in the listener pool. You need to remove one listener before adding another.");
+ }
}
public void status(FragmentStatus status) {
@@ -83,12 +84,13 @@ public class WorkEventBus {
public void setRootFragmentManager(RootFragmentManager fragmentManager) {
FragmentManager old = managers.putIfAbsent(fragmentManager.getHandle(), fragmentManager);
- if (old != null)
+ if (old != null) {
throw new IllegalStateException(
"Tried to set fragment manager when has already been set for the provided fragment handle.");
+ }
}
- public FragmentManager getFragmentManager(FragmentHandle handle){
+ public FragmentManager getFragmentManager(FragmentHandle handle) {
return managers.get(handle);
}
@@ -103,9 +105,11 @@ public class WorkEventBus {
return null;
}
FragmentManager manager = managers.get(handle);
- if (manager != null) return manager;
+ if (manager != null) {
+ return manager;
+ }
DistributedMap<FragmentHandle, PlanFragment> planCache = bee.getContext().getCache().getMap(Foreman.FRAGMENT_CACHE);
- for(Map.Entry<FragmentHandle, PlanFragment> e : planCache.getLocalEntries()){
+ for (Map.Entry<FragmentHandle, PlanFragment> e : planCache.getLocalEntries()) {
// logger.debug("Key: {}", e.getKey());
// logger.debug("Value: {}", e.getValue());
}
@@ -130,7 +134,8 @@ public class WorkEventBus {
return manager;
}
- public void removeFragmentManager(FragmentHandle handle){
+ public void removeFragmentManager(FragmentHandle handle) {
managers.remove(handle);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClient.java
index 67856f351..a9eb66fda 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClient.java
@@ -74,20 +74,22 @@ public class DataClient extends BasicClient<RpcType, DataClientConnection, BitCl
throw new UnsupportedOperationException("DataClient is unidirectional by design.");
}
- BufferAllocator getAllocator(){
+ BufferAllocator getAllocator() {
return allocator;
}
@Override
protected void validateHandshake(BitServerHandshake handshake) throws RpcException {
- if(handshake.getRpcVersion() != DataRpcConfig.RPC_VERSION) throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", handshake.getRpcVersion(), DataRpcConfig.RPC_VERSION));
+ if (handshake.getRpcVersion() != DataRpcConfig.RPC_VERSION) {
+ throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", handshake.getRpcVersion(), DataRpcConfig.RPC_VERSION));
+ }
}
@Override
protected void finalizeConnection(BitServerHandshake handshake, DataClientConnection connection) {
}
- public DataClientConnection getConnection(){
+ public DataClientConnection getConnection() {
return this.connection;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClientConnection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClientConnection.java
index ecd10eb7a..3a569db05 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClientConnection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataClientConnection.java
@@ -36,7 +36,7 @@ public class DataClientConnection extends RemoteConnection{
private final DataClient client;
private final UUID id;
- public DataClientConnection(Channel channel, DataClient client){
+ public DataClientConnection(Channel channel, DataClient client) {
super(channel);
this.client = client;
// we use a local listener pool unless a global one is provided.
@@ -49,7 +49,7 @@ public class DataClientConnection extends RemoteConnection{
}
public <SEND extends MessageLite, RECEIVE extends MessageLite> void send(RpcOutcomeListener<RECEIVE> outcomeListener, RpcType rpcType,
- SEND protobufBody, Class<RECEIVE> clazz, ByteBuf... dataBodies){
+ SEND protobufBody, Class<RECEIVE> clazz, ByteBuf... dataBodies) {
client.send(outcomeListener, this, rpcType, protobufBody, clazz, dataBodies);
}
@@ -64,17 +64,28 @@ public class DataClientConnection extends RemoteConnection{
@Override
public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
DataClientConnection other = (DataClientConnection) obj;
if (id == null) {
- if (other.id != null) return false;
- } else if (!id.equals(other.id)) return false;
+ if (other.id != null) {
+ return false;
+ }
+ } else if (!id.equals(other.id)) {
+ return false;
+ }
return true;
}
- public void shutdownIfClient(){
+ public void shutdownIfClient() {
Closeables.closeQuietly(client);
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataServer.java
index 8e503ec6a..2c6e02c81 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/data/DataServer.java
@@ -83,12 +83,14 @@ public class DataServer extends BasicServer<RpcType, BitServerConnection> {
@Override
public MessageLite getHandshakeResponse(BitClientHandshake inbound) throws Exception {
// logger.debug("Handling handshake from other bit. {}", inbound);
- if (inbound.getRpcVersion() != DataRpcConfig.RPC_VERSION)
+ if (inbound.getRpcVersion() != DataRpcConfig.RPC_VERSION) {
throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.",
inbound.getRpcVersion(), DataRpcConfig.RPC_VERSION));
- if (inbound.getChannel() != RpcChannel.BIT_DATA)
+ }
+ if (inbound.getChannel() != RpcChannel.BIT_DATA) {
throw new RpcException(String.format("Invalid NodeMode. Expected BIT_DATA but received %s.",
inbound.getChannel()));
+ }
return BitServerHandshake.newBuilder().setRpcVersion(DataRpcConfig.RPC_VERSION).build();
}
@@ -113,8 +115,8 @@ public class DataServer extends BasicServer<RpcType, BitServerConnection> {
}
}
BufferAllocator allocator = manager.getFragmentContext().getAllocator();
- if(body != null){
- if(!allocator.takeOwnership((DrillBuf) body.unwrap())){
+ if (body != null) {
+ if (!allocator.takeOwnership((DrillBuf) body.unwrap())) {
dataHandler.handle(connection, manager, OOM_FRAGMENT, null, null);
}
}
@@ -142,8 +144,6 @@ public class DataServer extends BasicServer<RpcType, BitServerConnection> {
}
-
-
@Override
public OutOfMemoryHandler getOutOfMemoryHandler() {
return new OutOfMemoryHandler() {
@@ -158,4 +158,5 @@ public class DataServer extends BasicServer<RpcType, BitServerConnection> {
public ProtobufLengthDecoder getDecoder(BufferAllocator allocator, OutOfMemoryHandler outOfMemoryHandler) {
return new DataProtobufLengthDecoder(allocator, outOfMemoryHandler);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultBatch.java
index e36a1c620..ab4c9efbb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultBatch.java
@@ -31,10 +31,11 @@ public class QueryResultBatch {
// logger.debug("New Result Batch with header {} and data {}", header, data);
this.header = header;
this.data = data;
- if(this.data != null) data.retain();
+ if (this.data != null) {
+ data.retain();
+ }
}
-
public QueryResult getHeader() {
return header;
}
@@ -43,13 +44,14 @@ public class QueryResultBatch {
return data;
}
-
- public boolean hasData(){
+ public boolean hasData() {
return data != null;
}
- public void release(){
- if(data != null) data.release();
+ public void release() {
+ if (data != null) {
+ data.release();
+ }
}
@Override
@@ -57,6 +59,4 @@ public class QueryResultBatch {
return "QueryResultBatch [header=" + header + ", data=" + data + "]";
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
index b12a4cfdf..9015a162c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
@@ -49,7 +49,7 @@ public class QueryResultHandler {
private ConcurrentMap<QueryId, UserResultsListener> resultsListener = Maps.newConcurrentMap();
- public RpcOutcomeListener<QueryId> getWrappedListener(UserResultsListener listener){
+ public RpcOutcomeListener<QueryId> getWrappedListener(UserResultsListener listener) {
return new SubmissionListener(listener);
}
@@ -66,7 +66,9 @@ public class QueryResultHandler {
BufferingListener bl = new BufferingListener();
l = resultsListener.putIfAbsent(result.getQueryId(), bl);
      // if we had a successful insert, use that reference. Otherwise, just throw away the new buffering listener.
- if (l == null) l = bl;
+ if (l == null) {
+ l = bl;
+ }
if (result.getQueryId().toString().equals("")) {
failAll();
}
@@ -125,7 +127,7 @@ public class QueryResultHandler {
l.resultArrived(r, throttle);
last = r.getHeader().getIsLastChunk();
}
- if(ex != null){
+ if (ex != null) {
l.submissionFailed(ex);
return true;
}
@@ -136,7 +138,9 @@ public class QueryResultHandler {
@Override
public void resultArrived(QueryResultBatch result, ConnectionThrottle throttle) {
this.throttle = throttle;
- if(result.getHeader().getIsLastChunk()) finished = true;
+ if (result.getHeader().getIsLastChunk()) {
+ finished = true;
+ }
synchronized (this) {
if (output == null) {
@@ -151,7 +155,7 @@ public class QueryResultHandler {
public void submissionFailed(RpcException ex) {
finished = true;
synchronized (this) {
- if (output == null){
+ if (output == null) {
this.ex = ex;
} else{
output.submissionFailed(ex);
@@ -159,7 +163,7 @@ public class QueryResultHandler {
}
}
- public boolean isFinished(){
+ public boolean isFinished() {
return finished;
}
@@ -201,7 +205,9 @@ public class QueryResultHandler {
resultsListener.remove(oldListener);
} else {
boolean replaced = resultsListener.replace(queryId, oldListener, listener);
- if (!replaced) throw new IllegalStateException();
+ if (!replaced) {
+ throw new IllegalStateException();
+ }
}
} else {
throw new IllegalStateException("Trying to replace a non-buffering User Results listener.");
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
index f352a1539..4df6bfea2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
@@ -99,9 +99,10 @@ public class UserClient extends BasicClientWithConnection<RpcType, UserToBitHand
@Override
protected void validateHandshake(BitToUserHandshake inbound) throws RpcException {
// logger.debug("Handling handshake from bit to user. {}", inbound);
- if (inbound.getRpcVersion() != UserRpcConfig.RPC_VERSION)
+ if (inbound.getRpcVersion() != UserRpcConfig.RPC_VERSION) {
throw new RpcException(String.format("Invalid rpc version. Expected %d, actual %d.", inbound.getRpcVersion(),
UserRpcConfig.RPC_VERSION));
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
index 2710837d3..d196743eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
@@ -95,7 +95,7 @@ public class UserSession {
return sessionOptions;
}
- public DrillUser getUser(){
+ public DrillUser getUser() {
return user;
}
@@ -105,9 +105,11 @@ public class UserSession {
* @param schema The root schema to find this path within.
* @return true if the path was set successfully. false if this path was unavailable.
*/
- public boolean setDefaultSchemaPath(String fullPath, SchemaPlus schema){
+ public boolean setDefaultSchemaPath(String fullPath, SchemaPlus schema) {
SchemaPlus newDefault = findSchema(schema, fullPath);
- if(newDefault == null) return false;
+ if (newDefault == null) {
+ return false;
+ }
setProp(SCHEMA, fullPath);
return true;
}
@@ -117,11 +119,11 @@ public class UserSession {
* @param rootSchema
* @return A {@link net.hydromatic.optiq.SchemaPlus} object.
*/
- public SchemaPlus getDefaultSchema(SchemaPlus rootSchema){
+ public SchemaPlus getDefaultSchema(SchemaPlus rootSchema) {
return findSchema(rootSchema, getProp(SCHEMA));
}
- public boolean setSessionOption(String name, String value){
+ public boolean setSessionOption(String name, String value) {
return true;
}
@@ -136,9 +138,11 @@ public class UserSession {
private SchemaPlus findSchema(SchemaPlus rootSchema, String schemaPath) {
String[] paths = schemaPath.split("\\.");
SchemaPlus schema = rootSchema;
- for(String p : paths){
+ for (String p : paths) {
schema = schema.getSubSchema(p);
- if(schema == null) break;
+ if (schema == null) {
+ break;
+ }
}
return schema;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
index a9e11a411..2125166f7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
@@ -118,7 +118,9 @@ public class Drillbit implements Closeable{
}
private void startJetty() throws Exception{
- if(embeddedJetty == null) return;
+ if (embeddedJetty == null) {
+ return;
+ }
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
@@ -155,7 +157,9 @@ public class Drillbit implements Closeable{
}
public void close() {
- if (coord != null && handle != null) coord.unregister(handle);
+ if (coord != null && handle != null) {
+ coord.unregister(handle);
+ }
try {
Thread.sleep(context.getConfig().getInt(ExecConstants.ZK_REFRESH) * 2);
@@ -163,7 +167,9 @@ public class Drillbit implements Closeable{
logger.warn("Interrupted while sleeping during coordination deregistration.");
}
try {
- if(embeddedJetty != null) embeddedJetty.stop();
+ if (embeddedJetty != null) {
+ embeddedJetty.stop();
+ }
} catch (Exception e) {
logger.warn("Failure while shutting down embedded jetty server.");
}
@@ -192,11 +198,12 @@ public class Drillbit implements Closeable{
}
}
- public ClusterCoordinator getCoordinator(){
+ public ClusterCoordinator getCoordinator() {
return coord;
}
- public DrillbitContext getContext(){
+ public DrillbitContext getContext() {
return this.manager.getContext();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/RemoteServiceSet.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/RemoteServiceSet.java
index 906e03db8..96e9d8e9e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/RemoteServiceSet.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/RemoteServiceSet.java
@@ -28,7 +28,7 @@ import org.apache.drill.exec.coord.ClusterCoordinator;
import org.apache.drill.exec.coord.local.LocalClusterCoordinator;
import org.apache.drill.exec.memory.BufferAllocator;
-public class RemoteServiceSet implements Closeable{
+public class RemoteServiceSet implements Closeable {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RemoteServiceSet.class);
private final DistributedCache cache;
@@ -40,7 +40,6 @@ public class RemoteServiceSet implements Closeable{
this.coordinator = coordinator;
}
-
public DistributedCache getCache() {
return cache;
}
@@ -49,19 +48,20 @@ public class RemoteServiceSet implements Closeable{
return coordinator;
}
-
@Override
public void close() throws IOException {
- try{
- cache.close();
- }catch(Exception e){
- if(e instanceof IOException) throw (IOException) e;
+ try {
+ cache.close();
+ } catch(Exception e) {
+ if (e instanceof IOException) {
+ throw (IOException) e;
+ }
throw new IOException("Failure while closing cache", e);
}
coordinator.close();
}
- public static RemoteServiceSet getLocalServiceSet(){
+ public static RemoteServiceSet getLocalServiceSet() {
return new RemoteServiceSet(new LocalCache(), new LocalClusterCoordinator());
}
@@ -69,4 +69,5 @@ public class RemoteServiceSet implements Closeable{
ICache c = new ICache(config, allocator, true);
return new RemoteServiceSet(c, new LocalClusterCoordinator());
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValue.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValue.java
index 7401246c2..13894ef4c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValue.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValue.java
@@ -57,7 +57,7 @@ public class OptionValue{
return new OptionValue(Kind.DOUBLE, type, name, null, null, null, val);
}
- public OptionValue(){}
+ public OptionValue() {}
public static OptionValue createOption(Kind kind, OptionType type, String name, String val) {
switch (kind) {
@@ -118,44 +118,58 @@ public class OptionValue{
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
OptionValue other = (OptionValue) obj;
if (bool_val == null) {
- if (other.bool_val != null)
+ if (other.bool_val != null) {
return false;
- } else if (!bool_val.equals(other.bool_val))
+ }
+ } else if (!bool_val.equals(other.bool_val)) {
return false;
+ }
if (float_val == null) {
- if (other.float_val != null)
+ if (other.float_val != null) {
return false;
- } else if (!float_val.equals(other.float_val))
+ }
+ } else if (!float_val.equals(other.float_val)) {
return false;
- if (kind != other.kind)
+ }
+ if (kind != other.kind) {
return false;
+ }
if (name == null) {
- if (other.name != null)
+ if (other.name != null) {
return false;
- } else if (!name.equals(other.name))
+ }
+ } else if (!name.equals(other.name)) {
return false;
+ }
if (num_val == null) {
- if (other.num_val != null)
+ if (other.num_val != null) {
return false;
- } else if (!num_val.equals(other.num_val))
+ }
+ } else if (!num_val.equals(other.num_val)) {
return false;
+ }
if (string_val == null) {
- if (other.string_val != null)
+ if (other.string_val != null) {
return false;
- } else if (!string_val.equals(other.string_val))
+ }
+ } else if (!string_val.equals(other.string_val)) {
return false;
- if (type != other.type)
+ }
+ if (type != other.type) {
return false;
+ }
return true;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 40e2aafae..4fa61e159 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -36,7 +36,7 @@ import org.eigenbase.sql.SqlLiteral;
import com.google.common.collect.Maps;
-public class SystemOptionManager implements OptionManager{
+public class SystemOptionManager implements OptionManager {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SystemOptionManager.class);
@@ -93,7 +93,7 @@ public class SystemOptionManager implements OptionManager{
private final ConcurrentMap<String, OptionValidator> knownOptions = Maps.newConcurrentMap();
private final PStoreProvider provider;
- public SystemOptionManager(DrillConfig config, PStoreProvider provider){
+ public SystemOptionManager(DrillConfig config, PStoreProvider provider) {
this.provider = provider;
this.config = PStoreConfig //
.newJacksonBuilder(config.getMapper(), OptionValue.class) //
@@ -110,7 +110,7 @@ public class SystemOptionManager implements OptionManager{
private class Iter implements Iterator<OptionValue>{
private Iterator<Map.Entry<String, OptionValue>> inner;
- public Iter(Iterator<Map.Entry<String, OptionValue>> inner){
+ public Iter(Iterator<Map.Entry<String, OptionValue>> inner) {
this.inner = inner;
}
@@ -172,8 +172,8 @@ public class SystemOptionManager implements OptionManager{
private class SystemOptionAdmin implements OptionAdmin{
- public SystemOptionAdmin(){
- for(OptionValidator v : VALIDATORS){
+ public SystemOptionAdmin() {
+ for(OptionValidator v : VALIDATORS) {
knownOptions.put(v.getOptionName(), v);
options.putIfAbsent(v.getOptionName(), v.getDefault());
}
@@ -182,7 +182,7 @@ public class SystemOptionManager implements OptionManager{
@Override
public void registerOptionType(OptionValidator validator) {
- if(null != knownOptions.putIfAbsent(validator.getOptionName(), validator) ){
+ if (null != knownOptions.putIfAbsent(validator.getOptionName(), validator) ) {
throw new IllegalArgumentException("Only one option is allowed to be registered with name: " + validator.getOptionName());
}
}
@@ -190,20 +190,21 @@ public class SystemOptionManager implements OptionManager{
@Override
public void validate(OptionValue v) throws SetOptionException {
OptionValidator validator = knownOptions.get(v.name);
- if(validator == null) throw new SetOptionException("Unknown option " + v.name);
+ if (validator == null) {
+ throw new SetOptionException("Unknown option " + v.name);
+ }
validator.validate(v);
}
@Override
public OptionValue validate(String name, SqlLiteral value) throws SetOptionException {
OptionValidator validator = knownOptions.get(name);
- if(validator == null) throw new SetOptionException("Unknown option: " + name);
+ if (validator == null) {
+ throw new SetOptionException("Unknown option: " + name);
+ }
return validator.validate(value);
}
-
-
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
index 03982151e..a0afd29c3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
@@ -40,9 +40,10 @@ public class TypeValidators {
@Override
public void validate(OptionValue v) throws ExpressionParsingException {
super.validate(v);
- if (v.num_val > max || v.num_val < 0)
+ if (v.num_val > max || v.num_val < 0) {
throw new ExpressionParsingException(String.format("Option %s must be between %d and %d.", getOptionName(), 0,
max));
+ }
}
}
@@ -55,8 +56,9 @@ public class TypeValidators {
@Override
public void validate(OptionValue v) throws ExpressionParsingException {
super.validate(v);
- if (!isPowerOfTwo(v.num_val))
+ if (!isPowerOfTwo(v.num_val)) {
throw new ExpressionParsingException(String.format("Option %s must be a power of two.", getOptionName()));
+ }
}
private boolean isPowerOfTwo(long num) {
@@ -77,36 +79,37 @@ public class TypeValidators {
@Override
public void validate(OptionValue v) throws ExpressionParsingException {
super.validate(v);
- if (v.float_val > max || v.float_val < min)
+ if (v.float_val > max || v.float_val < min) {
throw new ExpressionParsingException(String.format("Option %s must be between %d and %d.", getOptionName(), min,
max));
+ }
}
}
public static class BooleanValidator extends TypeValidator{
- public BooleanValidator(String name, boolean def){
+ public BooleanValidator(String name, boolean def) {
super(name, Kind.BOOLEAN, OptionValue.createBoolean(OptionType.SYSTEM, name, def));
}
}
+
public static class StringValidator extends TypeValidator{
- public StringValidator(String name, String def){
+ public StringValidator(String name, String def) {
super(name, Kind.STRING, OptionValue.createString(OptionType.SYSTEM, name, def));
}
}
+
public static class LongValidator extends TypeValidator{
- public LongValidator(String name, long def){
+ public LongValidator(String name, long def) {
super(name, Kind.LONG, OptionValue.createLong(OptionType.SYSTEM, name, def));
}
}
- public static class DoubleValidator extends TypeValidator{
- public DoubleValidator(String name, double def){
+ public static class DoubleValidator extends TypeValidator{
+ public DoubleValidator(String name, double def) {
super(name, Kind.DOUBLE, OptionValue.createDouble(OptionType.SYSTEM, name, def));
}
-
-
}
public static abstract class TypeValidator extends OptionValidator {
@@ -133,9 +136,10 @@ public class TypeValidators {
@Override
public void validate(OptionValue v) throws ExpressionParsingException {
- if (v.kind != kind)
+ if (v.kind != kind) {
throw new ExpressionParsingException(String.format("Option %s must be of type %s but you tried to set to %s.",
getOptionName(), kind.name(), v.kind.name()));
+ }
}
public void extraValidate(OptionValue v) throws ExpressionParsingException {
@@ -174,4 +178,5 @@ public class TypeValidators {
throw new ExpressionParsingException(String.format(
"Drill doesn't support set option expressions with literals of type %s.", literal.getTypeName()));
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
index 5d7ea2cc5..98e460a43 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
@@ -27,14 +27,10 @@ import org.apache.hadoop.fs.Seekable;
public class ResourceInputStream extends ByteArrayInputStream implements Seekable, PositionedReadable {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ResourceInputStream.class);
-
public ResourceInputStream(byte[] bytes) {
super(bytes);
}
-
-
-
@Override
public void readFully(long position, byte[] buffer) throws IOException {
int l = read(position, buffer, 0, buffer.length);
@@ -64,12 +60,14 @@ public class ResourceInputStream extends ByteArrayInputStream implements Seekabl
}
System.arraycopy(buf, start, b, off, len);
return len;
-}
+ }
@Override
public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
int l = read(position, buffer, offset, length);
- if (l < length) throw new EOFException();
+ if (l < length) {
+ throw new EOFException();
+ }
}
@Override
@@ -77,7 +75,6 @@ public class ResourceInputStream extends ByteArrayInputStream implements Seekabl
return pos;
}
-
@Override
public int read(byte[] b) throws IOException {
int l = read(pos, b, 0, b.length);
@@ -91,14 +88,11 @@ public class ResourceInputStream extends ByteArrayInputStream implements Seekabl
return true;
}
-
-
-
@Override
public void seek(long arg0) throws IOException {
- if(buf.length > arg0){
+ if (buf.length > arg0) {
pos = (int) arg0;
- }else{
+ } else {
throw new EOFException();
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index a876ea51f..e0c14a33b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -206,19 +206,25 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
throw new ExecutionSetupException("Two processes tried to change a plugin at the same time.");
}
- if(persist) pluginSystemTable.put(name, config);
+ if (persist) {
+ pluginSystemTable.put(name, config);
+ }
return newPlugin;
}
public StoragePlugin getPlugin(String name) throws ExecutionSetupException {
StoragePlugin plugin = plugins.get(name);
- if(name.equals(SYS_PLUGIN) || name.equals(INFORMATION_SCHEMA_PLUGIN)) return plugin;
+ if (name.equals(SYS_PLUGIN) || name.equals(INFORMATION_SCHEMA_PLUGIN)) {
+ return plugin;
+ }
// since we lazily manage the list of plugins per server, we need to update this once we know that it is time.
StoragePluginConfig config = this.pluginSystemTable.get(name);
if (config == null) {
- if(plugin != null) plugins.remove(name);
+ if (plugin != null) {
+ plugins.remove(name);
+ }
return null;
} else {
if (plugin == null || !plugin.getConfig().equals(config)) {
@@ -239,7 +245,9 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
public FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig, FormatPluginConfig formatConfig) throws ExecutionSetupException {
StoragePlugin p = getPlugin(storageConfig);
- if(!(p instanceof FileSystemPlugin)) throw new ExecutionSetupException(String.format("You tried to request a format plugin for a storage plugin that wasn't of type FileSystemPlugin. The actual type of plugin was %s.", p.getClass().getName()));
+ if (!(p instanceof FileSystemPlugin)) {
+ throw new ExecutionSetupException(String.format("You tried to request a format plugin for a storage plugin that wasn't of type FileSystemPlugin. The actual type of plugin was %s.", p.getClass().getName()));
+ }
FileSystemPlugin storage = (FileSystemPlugin) p;
return storage.getFormatPlugin(formatConfig);
}
@@ -256,8 +264,9 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
return plugin;
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException) e).getTargetException() : e;
- if (t instanceof ExecutionSetupException)
+ if (t instanceof ExecutionSetupException) {
throw ((ExecutionSetupException) t);
+ }
throw new ExecutionSetupException(String.format(
"Failure setting up new storage plugin configuration for config %s", pluginConfig), t);
}
@@ -304,7 +313,9 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
}
// remove those which are no longer in the registry
for (String pluginName : currentPluginNames) {
- if(pluginName.equals(SYS_PLUGIN) || pluginName.equals(INFORMATION_SCHEMA_PLUGIN)) continue;
+ if (pluginName.equals(SYS_PLUGIN) || pluginName.equals(INFORMATION_SCHEMA_PLUGIN)) {
+ continue;
+ }
plugins.remove(pluginName);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
index dda2dfc36..2ba291004 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
@@ -54,7 +54,7 @@ public class BasicFormatMatcher extends FormatMatcher{
this.codecFactory = null;
}
- public BasicFormatMatcher(FormatPlugin plugin, DrillFileSystem fs, List<String> extensions, boolean compressible){
+ public BasicFormatMatcher(FormatPlugin plugin, DrillFileSystem fs, List<String> extensions, boolean compressible) {
List<Pattern> patterns = Lists.newArrayList();
for (String extension : extensions) {
patterns.add(Pattern.compile(".*\\." + extension));
@@ -74,7 +74,7 @@ public class BasicFormatMatcher extends FormatMatcher{
@Override
public FormatSelection isReadable(FileSelection selection) throws IOException {
- if(isReadable(selection.getFirstPath(fs))){
+ if (isReadable(selection.getFirstPath(fs))) {
if (plugin.getName() != null) {
NamedFormatPluginConfig namedConfig = new NamedFormatPluginConfig();
namedConfig.name = plugin.getName();
@@ -98,13 +98,15 @@ public class BasicFormatMatcher extends FormatMatcher{
} else {
fileName = status.getPath().toString();
}
- for(Pattern p : patterns){
- if(p.matcher(fileName).matches()){
+ for (Pattern p : patterns) {
+ if (p.matcher(fileName).matches()) {
return true;
}
}
- if(matcher.matches(status)) return true;
+ if (matcher.matches(status)) {
+ return true;
+ }
return false;
}
@@ -116,32 +118,37 @@ public class BasicFormatMatcher extends FormatMatcher{
}
- private class MagicStringMatcher{
+ private class MagicStringMatcher {
private List<RangeMagics> ranges;
- public MagicStringMatcher(List<MagicString> magicStrings){
+ public MagicStringMatcher(List<MagicString> magicStrings) {
ranges = Lists.newArrayList();
- for(MagicString ms : magicStrings){
+ for (MagicString ms : magicStrings) {
ranges.add(new RangeMagics(ms));
}
}
public boolean matches(FileStatus status) throws IOException{
- if(ranges.isEmpty()) return false;
+ if (ranges.isEmpty()) {
+ return false;
+ }
final Range<Long> fileRange = Range.closedOpen( 0L, status.getLen());
- try(FSDataInputStream is = fs.open(status.getPath()).getInputStream()){
- for(RangeMagics rMagic : ranges){
+ try (FSDataInputStream is = fs.open(status.getPath()).getInputStream()) {
+ for (RangeMagics rMagic : ranges) {
Range<Long> r = rMagic.range;
- if(!fileRange.encloses(r)) continue;
+ if (!fileRange.encloses(r)) {
+ continue;
+ }
int len = (int) (r.upperEndpoint() - r.lowerEndpoint());
byte[] bytes = new byte[len];
is.readFully(r.lowerEndpoint(), bytes);
- for(byte[] magic : rMagic.magics){
- if(Arrays.equals(magic, bytes)) return true;
+ for (byte[] magic : rMagic.magics) {
+ if (Arrays.equals(magic, bytes)) {
+ return true;
+ }
}
-
}
}
return false;
@@ -151,10 +158,11 @@ public class BasicFormatMatcher extends FormatMatcher{
Range<Long> range;
byte[][] magics;
- public RangeMagics(MagicString ms){
+ public RangeMagics(MagicString ms) {
this.range = Range.closedOpen( ms.getOffset(), (long) ms.getBytes().length);
this.magics = new byte[][]{ms.getBytes()};
}
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
index 76f6be4d9..36e7efe19 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
@@ -53,7 +53,7 @@ public class FileSelection {
this.selectionRoot = selectionRoot;
}
- public FileSelection(List<String> files, boolean dummy){
+ public FileSelection(List<String> files, boolean dummy) {
this.files = files;
}
@@ -73,7 +73,9 @@ public class FileSelection {
public boolean containsDirectories(DrillFileSystem fs) throws IOException {
init(fs);
for (FileStatus p : statuses) {
- if (p.isDir()) return true;
+ if (p.isDir()) {
+ return true;
+ }
}
return false;
}
@@ -99,11 +101,15 @@ public class FileSelection {
return statuses.get(0);
}
- public List<String> getAsFiles(){
- if(!files.isEmpty()) return files;
- if(statuses == null) return Collections.emptyList();
+ public List<String> getAsFiles() {
+ if (!files.isEmpty()) {
+ return files;
+ }
+ if (statuses == null) {
+ return Collections.emptyList();
+ }
List<String> files = Lists.newArrayList();
- for(FileStatus s : statuses){
+ for (FileStatus s : statuses) {
files.add(s.getPath().toString());
}
return files;
@@ -131,7 +137,9 @@ public class FileSelection {
} else {
Path p = new Path(parent,removeLeadingSlash(path));
FileStatus[] status = fs.getUnderlying().globStatus(p);
- if(status == null || status.length == 0) return null;
+ if (status == null || status.length == 0) {
+ return null;
+ }
String[] s = p.toUri().getPath().split("/");
String newPath = StringUtils.join(ArrayUtils.subarray(s, 0, s.length - 1), "/");
Preconditions.checkState(!newPath.contains("*") && !newPath.contains("?"), String.format("Unsupported selection path: %s", p));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index ec9a04e38..b0855c80a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -60,7 +60,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
private final DrillFileSystem fs;
public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name) throws ExecutionSetupException{
- try{
+ try {
this.config = config;
this.context = context;
@@ -72,18 +72,18 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
this.formatsByName = FormatCreator.getFormatPlugins(context, fs, config);
List<FormatMatcher> matchers = Lists.newArrayList();
formatPluginsByConfig = Maps.newHashMap();
- for(FormatPlugin p : formatsByName.values()){
+ for (FormatPlugin p : formatsByName.values()) {
matchers.add(p.getMatcher());
formatPluginsByConfig.put(p.getConfig(), p);
}
List<WorkspaceSchemaFactory> factories;
- if(config.workspaces == null || config.workspaces.isEmpty()){
+ if (config.workspaces == null || config.workspaces.isEmpty()) {
factories = Collections.singletonList(
new WorkspaceSchemaFactory(context.getConfig(), context.getPersistentStoreProvider(), this, "default", name, fs, WorkspaceConfig.DEFAULT, matchers));
- }else{
+ } else {
factories = Lists.newArrayList();
- for(Map.Entry<String, WorkspaceConfig> space : config.workspaces.entrySet()){
+ for (Map.Entry<String, WorkspaceConfig> space : config.workspaces.entrySet()) {
factories.add(new WorkspaceSchemaFactory(context.getConfig(), context.getPersistentStoreProvider(), this, space.getKey(), name, fs, space.getValue(), matchers));
}
@@ -93,7 +93,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
}
}
this.schemaFactory = new FileSystemSchemaFactory(name, factories);
- }catch(IOException e){
+ } catch (IOException e) {
throw new ExecutionSetupException("Failure setting up file system plugin.", e);
}
}
@@ -112,12 +112,14 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
public AbstractGroupScan getPhysicalScan(JSONOptions selection, List<SchemaPath> columns) throws IOException {
FormatSelection formatSelection = selection.getWith(context.getConfig(), FormatSelection.class);
FormatPlugin plugin;
- if(formatSelection.getFormat() instanceof NamedFormatPluginConfig){
+ if (formatSelection.getFormat() instanceof NamedFormatPluginConfig) {
plugin = formatsByName.get( ((NamedFormatPluginConfig) formatSelection.getFormat()).name);
- }else{
+ } else {
plugin = formatPluginsByConfig.get(formatSelection.getFormat());
}
- if(plugin == null) throw new IOException(String.format("Failure getting requested format plugin named '%s'. It was not one of the format plugins registered.", formatSelection.getFormat()));
+ if (plugin == null) {
+ throw new IOException(String.format("Failure getting requested format plugin named '%s'. It was not one of the format plugins registered.", formatSelection.getFormat()));
+ }
return plugin.getGroupScan(formatSelection.getSelection(), columns);
}
@@ -126,15 +128,16 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
schemaFactory.registerSchemas(session, parent);
}
- public FormatPlugin getFormatPlugin(String name){
+ public FormatPlugin getFormatPlugin(String name) {
return formatsByName.get(name);
}
- public FormatPlugin getFormatPlugin(FormatPluginConfig config){
- if(config instanceof NamedFormatPluginConfig){
+ public FormatPlugin getFormatPlugin(FormatPluginConfig config) {
+ if (config instanceof NamedFormatPluginConfig) {
return formatsByName.get(((NamedFormatPluginConfig) config).name);
- }else{
+ } else {
return formatPluginsByConfig.get(config);
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index 0d0d46a54..e5c0487f8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -36,66 +36,64 @@ import com.google.common.collect.Maps;
public class FormatCreator {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatCreator.class);
-
static final ConstructorChecker FORMAT_BASED = new ConstructorChecker(String.class, DrillbitContext.class, DrillFileSystem.class, StoragePluginConfig.class, FormatPluginConfig.class);
static final ConstructorChecker DEFAULT_BASED = new ConstructorChecker(String.class, DrillbitContext.class, DrillFileSystem.class, StoragePluginConfig.class);
- static Map<String, FormatPlugin> getFormatPlugins(DrillbitContext context, DrillFileSystem fileSystem, FileSystemConfig storageConfig){
+ static Map<String, FormatPlugin> getFormatPlugins(DrillbitContext context, DrillFileSystem fileSystem, FileSystemConfig storageConfig) {
final DrillConfig config = context.getConfig();
Map<String, FormatPlugin> plugins = Maps.newHashMap();
Collection<Class<? extends FormatPlugin>> pluginClasses = PathScanner.scanForImplementations(FormatPlugin.class, config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
- if(storageConfig.formats == null || storageConfig.formats.isEmpty()){
-
- for(Class<? extends FormatPlugin> pluginClass: pluginClasses){
- for(Constructor<?> c : pluginClass.getConstructors()){
- try{
+ if (storageConfig.formats == null || storageConfig.formats.isEmpty()) {
- if(!DEFAULT_BASED.check(c)) continue;
+ for (Class<? extends FormatPlugin> pluginClass : pluginClasses) {
+ for (Constructor<?> c : pluginClass.getConstructors()) {
+ try {
+ if (!DEFAULT_BASED.check(c)) {
+ continue;
+ }
FormatPlugin plugin = (FormatPlugin) c.newInstance(null, context, fileSystem, storageConfig);
plugins.put(plugin.getName(), plugin);
- }catch(Exception e){
+ } catch (Exception e) {
logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);
}
}
}
- }else{
-
+ } else {
Map<Class<?>, Constructor<?>> constructors = Maps.newHashMap();
- for(Class<? extends FormatPlugin> pluginClass: pluginClasses){
- for(Constructor<?> c : pluginClass.getConstructors()){
- try{
- if(!FORMAT_BASED.check(c)) continue;
+ for (Class<? extends FormatPlugin> pluginClass : pluginClasses) {
+ for (Constructor<?> c : pluginClass.getConstructors()) {
+ try {
+ if (!FORMAT_BASED.check(c)) {
+ continue;
+ }
Class<? extends FormatPluginConfig> configClass = (Class<? extends FormatPluginConfig>) c.getParameterTypes()[4];
constructors.put(configClass, c);
- }catch(Exception e){
+ } catch (Exception e) {
logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);
}
}
}
- for(Map.Entry<String, FormatPluginConfig> e : storageConfig.formats.entrySet()){
+ for (Map.Entry<String, FormatPluginConfig> e : storageConfig.formats.entrySet()) {
Constructor<?> c = constructors.get(e.getValue().getClass());
- if(c == null){
+ if (c == null) {
logger.warn("Unable to find constructor for storage config named '{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName());
continue;
}
- try{
- plugins.put(e.getKey(), (FormatPlugin) c.newInstance(e.getKey(), context, fileSystem, storageConfig, e.getValue()));
+ try {
+ plugins.put(e.getKey(), (FormatPlugin) c.newInstance(e.getKey(), context, fileSystem, storageConfig, e.getValue()));
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e1) {
logger.warn("Failure initializing storage config named '{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName(), e1);
}
}
-
}
return plugins;
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceConfig.java
index 4e7fb8fd9..2103a96e4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceConfig.java
@@ -57,8 +57,9 @@ public class WorkspaceConfig {
@Override
public boolean equals(Object obj) {
- if (obj == this)
+ if (obj == this) {
return true;
+ }
if (obj == null || !(obj instanceof WorkspaceConfig)) {
return false;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index 4349fe069..03a696613 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -68,8 +68,6 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
private final PStore<String> knownViews;
private final ObjectMapper mapper;
-
-
public WorkspaceSchemaFactory(DrillConfig drillConfig, PStoreProvider provider, FileSystemPlugin plugin, String schemaName, String storageEngineName,
DrillFileSystem fileSystem, WorkspaceConfig config,
List<FormatMatcher> formatMatchers) throws ExecutionSetupException, IOException {
@@ -84,10 +82,10 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
this.schemaName = schemaName;
// setup cache
- if(storageEngineName == null){
+ if (storageEngineName == null) {
this.knownViews = null;
// this.knownPaths = null;
- }else{
+ } else {
this.knownViews = provider.getPStore(PStoreConfig //
.newJacksonBuilder(drillConfig.getMapper(), String.class) //
.name(Joiner.on('.').join("storage.views", storageEngineName, schemaName)) //
@@ -109,7 +107,7 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
}
- private Path getViewPath(String name){
+ private Path getViewPath(String name) {
return new Path(config.getLocation() + '/' + name + ".view.drill");
}
@@ -122,14 +120,17 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
try {
FileSelection fileSelection = FileSelection.create(fs, config.getLocation(), key);
- if(fileSelection == null) return null;
+ if (fileSelection == null) {
+ return null;
+ }
if (fileSelection.containsDirectories(fs)) {
for (FormatMatcher m : dirMatchers) {
try {
Object selection = m.isReadable(fileSelection);
- if (selection != null)
+ if (selection != null) {
return new DynamicDrillTable(plugin, storageEngineName, selection);
+ }
} catch (IOException e) {
logger.debug("File read failed.", e);
}
@@ -139,8 +140,9 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
for (FormatMatcher m : fileMatchers) {
Object selection = m.isReadable(fileSelection);
- if (selection != null)
+ if (selection != null) {
return new DynamicDrillTable(plugin, storageEngineName, selection);
+ }
}
return null;
@@ -160,10 +162,12 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
public boolean createView(View view) throws Exception {
Path viewPath = getViewPath(view.getName());
boolean replaced = fs.getUnderlying().exists(viewPath);
- try(DrillOutputStream stream = fs.create(viewPath)){
+ try (DrillOutputStream stream = fs.create(viewPath)) {
mapper.writeValue(stream.getOuputStream(), view);
}
- if(knownViews != null) knownViews.put(view.getName(), viewPath.toString());
+ if (knownViews != null) {
+ knownViews.put(view.getName(), viewPath.toString());
+ }
return replaced;
}
@@ -174,7 +178,9 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
public void dropView(String viewName) throws IOException {
fs.getUnderlying().delete(getViewPath(viewName), false);
- if(knownViews != null) knownViews.delete(viewName);
+ if (knownViews != null) {
+ knownViews.delete(viewName);
+ }
}
private ExpandingConcurrentMap<String, DrillTable> tables = new ExpandingConcurrentMap<String, DrillTable>(WorkspaceSchemaFactory.this);
@@ -230,7 +236,9 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
@Override
public Table getTable(String name) {
// first check existing tables.
- if(tables.alreadyContainsKey(name)) return tables.get(name);
+ if (tables.alreadyContainsKey(name)) {
+ return tables.get(name);
+ }
// then check known views.
// String path = knownViews.get(name);
@@ -239,8 +247,8 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
List<DotDrillFile> files;
try {
files = DotDrillUtil.getDotDrills(fs, new Path(config.getLocation()), name, DotDrillType.VIEW);
- for(DotDrillFile f : files){
- switch(f.getType()){
+ for (DotDrillFile f : files) {
+ switch (f.getType()) {
case VIEW:
return new DrillViewTable(schemaPath, getView(f));
}
@@ -271,10 +279,11 @@ public class WorkspaceSchemaFactory implements ExpandingConcurrentMap.MapValueFa
public CreateTableEntry createNewTable(String tableName) {
String storage = session.getOptions().getOption(ExecConstants.OUTPUT_FORMAT_OPTION).string_val;
FormatPlugin formatPlugin = plugin.getFormatPlugin(storage);
- if (formatPlugin == null)
+ if (formatPlugin == null) {
throw new UnsupportedOperationException(
String.format("Unsupported format '%s' in workspace '%s'", config.getStorageFormat(),
Joiner.on(".").join(getSchemaPath())));
+ }
return new FileSystemCreateTableEntry(
(FileSystemConfig) plugin.getConfig(),
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 1341fa40f..8efcd2cce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -123,7 +123,7 @@ public class EasyGroupScan extends AbstractGroupScan{
@Override
public ScanStats getScanStats() {
long data =0;
- for(CompleteFileWork work : chunks){
+ for (CompleteFileWork work : chunks) {
data += work.getTotalBytes();
}
@@ -137,13 +137,13 @@ public class EasyGroupScan extends AbstractGroupScan{
}
@JsonProperty("columns")
- public List<SchemaPath> getColumns(){
+ public List<SchemaPath> getColumns() {
return columns;
}
@JsonIgnore
- public FileSelection getFileSelection(){
+ public FileSelection getFileSelection() {
return selection;
}
@@ -183,21 +183,21 @@ public class EasyGroupScan extends AbstractGroupScan{
return new EasySubScan(convert(filesForMinor), formatPlugin, columns, selectionRoot);
}
- private List<FileWorkImpl> convert(List<CompleteFileWork> list){
+ private List<FileWorkImpl> convert(List<CompleteFileWork> list) {
List<FileWorkImpl> newList = Lists.newArrayList();
- for(CompleteFileWork f : list){
+ for (CompleteFileWork f : list) {
newList.add(f.getAsFileWork());
}
return newList;
}
@JsonProperty("storage")
- public StoragePluginConfig getStorageConfig(){
+ public StoragePluginConfig getStorageConfig() {
return formatPlugin.getStorageConfig();
}
@JsonProperty("format")
- public FormatPluginConfig getFormatConfig(){
+ public FormatPluginConfig getFormatConfig() {
return formatPlugin.getConfig();
}
@@ -213,7 +213,9 @@ public class EasyGroupScan extends AbstractGroupScan{
@Override
public GroupScan clone(List<SchemaPath> columns) {
- if(!formatPlugin.supportsPushDown()) throw new IllegalStateException(String.format("%s doesn't support pushdown.", this.getClass().getSimpleName()));
+ if (!formatPlugin.supportsPushDown()) {
+ throw new IllegalStateException(String.format("%s doesn't support pushdown.", this.getClass().getSimpleName()));
+ }
EasyGroupScan newScan = new EasyGroupScan(this);
newScan.columns = columns;
return newScan;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 8cd7cf279..e1165a2da 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -90,12 +90,13 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ } else if (obj == null) {
return false;
- if (getClass() == obj.getClass())
+ } else if (getClass() == obj.getClass()) {
return true;
+ }
return false;
}
@@ -116,5 +117,4 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
return true;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index ff70ccd7e..b64a032d3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -117,15 +117,18 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ } else if (obj == null) {
return false;
- if (!(obj instanceof TextFormatConfig))
+ } else if (!(obj instanceof TextFormatConfig)) {
return false;
+ }
+
TextFormatConfig that = (TextFormatConfig) obj;
- if (this.delimiter.equals(that.delimiter))
+ if (this.delimiter.equals(that.delimiter)) {
return true;
+ }
return false;
}
@@ -145,4 +148,5 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
public boolean supportsPushDown() {
return true;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java
index 21923d846..5736df84b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java
@@ -58,7 +58,7 @@ public class MockGroupScanPOP extends AbstractGroupScan {
this.url = url;
}
- public ScanStats getScanStats(){
+ public ScanStats getScanStats() {
return ScanStats.TRIVIAL_TABLE;
}
@@ -83,7 +83,7 @@ public class MockGroupScanPOP extends AbstractGroupScan {
this.records = records;
this.types = types;
int size = 0;
- for(MockColumn dt : types){
+ for (MockColumn dt : types) {
size += TypeHelper.getSize(dt.getMajorType());
}
this.recordSize = size;
@@ -144,13 +144,19 @@ public class MockGroupScanPOP extends AbstractGroupScan {
}
@JsonIgnore
- public MajorType getMajorType(){
+ public MajorType getMajorType() {
MajorType.Builder b = MajorType.newBuilder();
b.setMode(mode);
b.setMinorType(minorType);
- if(precision != null) b.setPrecision(precision);
- if(width != null) b.setWidth(width);
- if(scale != null) b.setScale(scale);
+ if (precision != null) {
+ b.setPrecision(precision);
+ }
+ if (width != null) {
+ b.setWidth(width);
+ }
+ if (scale != null) {
+ b.setScale(scale);
+ }
return b.build();
}
@@ -174,10 +180,12 @@ public class MockGroupScanPOP extends AbstractGroupScan {
mappings = new LinkedList[endpoints.size()];
int i =0;
- for(MockScanEntry e : this.getReadEntries()){
- if(i == endpoints.size()) i -= endpoints.size();
+ for (MockScanEntry e : this.getReadEntries()) {
+ if (i == endpoints.size()) {
+ i -= endpoints.size();
+ }
LinkedList<MockScanEntry> entries = mappings[i];
- if(entries == null){
+ if (entries == null) {
entries = new LinkedList<MockScanEntry>();
mappings[i] = entries;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
index 66851a998..43e6416d3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
@@ -103,12 +103,14 @@ public class MockRecordReader extends AbstractRecordReader {
@Override
public int next() {
- if(recordsRead >= this.config.getRecords()) return 0;
+ if (recordsRead >= this.config.getRecords()) {
+ return 0;
+ }
int recordSetSize = Math.min(batchRecordCount, this.config.getRecords() - recordsRead);
recordsRead += recordSetSize;
- for(ValueVector v : valueVectors){
+ for (ValueVector v : valueVectors) {
// logger.debug(String.format("MockRecordReader: Generating %d records of random data for VV of type %s.", recordSetSize, v.getClass().getName()));
ValueVector.Mutator m = v.getMutator();
@@ -132,4 +134,5 @@ public class MockRecordReader extends AbstractRecordReader {
@Override
public void cleanup() {
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
index c5661826d..2f7ea18f3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
@@ -44,12 +44,18 @@ public class MockStorageEngineConfig extends StoragePluginConfigBase{
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
MockStorageEngineConfig that = (MockStorageEngineConfig) o;
- if (url != null ? !url.equals(that.url) : that.url != null) return false;
+ if (url != null ? !url.equals(that.url) : that.url != null) {
+ return false;
+ }
return true;
}
@@ -58,4 +64,5 @@ public class MockStorageEngineConfig extends StoragePluginConfigBase{
public int hashCode() {
return url != null ? url.hashCode() : 0;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
index a768fc9c8..86e52240c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
@@ -126,7 +126,9 @@ public class ParquetGroupScan extends AbstractGroupScan {
@JsonProperty("selectionRoot") String selectionRoot //
) throws IOException, ExecutionSetupException {
this.columns = columns;
- if(formatConfig == null) formatConfig = new ParquetFormatConfig();
+ if (formatConfig == null) {
+ formatConfig = new ParquetFormatConfig();
+ }
Preconditions.checkNotNull(storageConfig);
Preconditions.checkNotNull(formatConfig);
this.formatPlugin = (ParquetFormatPlugin) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
@@ -154,7 +156,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
this.fs = formatPlugin.getFileSystem().getUnderlying();
this.entries = Lists.newArrayList();
- for(FileStatus file : files){
+ for (FileStatus file : files) {
entries.add(new ReadEntryWithPath(file.getPath().toString()));
}
@@ -166,7 +168,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
/*
* This is used to clone another copy of the group scan.
*/
- private ParquetGroupScan(ParquetGroupScan that){
+ private ParquetGroupScan(ParquetGroupScan that) {
this.columns = that.columns;
this.endpointAffinities = that.endpointAffinities;
this.entries = that.entries;
@@ -182,7 +184,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
private void readFooterFromEntries() throws IOException {
List<FileStatus> files = Lists.newArrayList();
- for(ReadEntryWithPath e : entries){
+ for (ReadEntryWithPath e : entries) {
files.add(fs.getFileStatus(new Path(e.getPath())));
}
readFooter(files);
@@ -299,7 +301,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
if (this.endpointAffinities == null) {
BlockMapBuilder bmb = new BlockMapBuilder(fs, formatPlugin.getContext().getBits());
- try{
+ try {
for (RowGroupInfo rgi : rowGroupInfos) {
EndpointByteMap ebm = bmb.getEndpointByteMap(rgi);
rgi.setEndpointByteMap(ebm);
@@ -318,7 +320,6 @@ public class ParquetGroupScan extends AbstractGroupScan {
public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) throws PhysicalOperatorSetupException {
this.mappings = AssignmentCreator.getMappings(incomingEndpoints, rowGroupInfos);
-
}
@Override
@@ -335,9 +336,7 @@ public class ParquetGroupScan extends AbstractGroupScan {
return new ParquetRowGroupScan(formatPlugin, convertToReadEntries(rowGroupsForMinor), columns, selectionRoot);
}
-
-
- private List<RowGroupReadEntry> convertToReadEntries(List<RowGroupInfo> rowGroups){
+ private List<RowGroupReadEntry> convertToReadEntries(List<RowGroupInfo> rowGroups) {
List<RowGroupReadEntry> entries = Lists.newArrayList();
for (RowGroupInfo rgi : rowGroups) {
RowGroupReadEntry rgre = new RowGroupReadEntry(rgi.getPath(), rgi.getStart(), rgi.getLength(),
@@ -347,7 +346,6 @@ public class ParquetGroupScan extends AbstractGroupScan {
return entries;
}
-
@Override
public int getMaxParallelizationWidth() {
return rowGroupInfos.size();
@@ -357,7 +355,6 @@ public class ParquetGroupScan extends AbstractGroupScan {
return columns;
}
-
@Override
public ScanStats getScanStats() {
int columnCount = columns == null ? 20 : columns.size();
@@ -403,4 +400,5 @@ public class ParquetGroupScan extends AbstractGroupScan {
public long getColumnValueCount(SchemaPath column) {
return columnValueCounts.containsKey(column) ? columnValueCounts.get(column) : 0;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
index b629ddad7..2424fac7e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
@@ -139,18 +139,21 @@ public abstract class ColumnReader<V extends ValueVector> {
public boolean determineSize(long recordsReadInCurrentPass, Integer lengthVarFieldsInCurrentRecord) throws IOException {
boolean doneReading = readPage();
- if (doneReading)
+ if (doneReading) {
return true;
+ }
doneReading = processPageData((int) recordsReadInCurrentPass);
- if (doneReading)
+ if (doneReading) {
return true;
+ }
lengthVarFieldsInCurrentRecord += dataTypeLengthInBits;
doneReading = checkVectorCapacityReached();
- if (doneReading)
+ if (doneReading) {
return true;
+ }
return false;
}
@@ -189,8 +192,9 @@ public abstract class ColumnReader<V extends ValueVector> {
if (pageReader.currentPage == null
|| totalValuesReadAndReadyToReadInPage() == pageReader.currentPage.getValueCount()) {
readRecords(pageReader.valuesReadyToRead);
- if (pageReader.currentPage != null)
+ if (pageReader.currentPage != null) {
totalValuesRead += pageReader.currentPage.getValueCount();
+ }
if (!pageReader.next()) {
hitRowGroupEnd();
return true;
@@ -215,9 +219,10 @@ public abstract class ColumnReader<V extends ValueVector> {
logger.debug("Reached the capacity of the data vector in a variable length value vector.");
return true;
}
- else if (valuesReadInCurrentPass > valueVec.getValueCapacity()){
+ else if (valuesReadInCurrentPass > valueVec.getValueCapacity()) {
return true;
}
return false;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java
index 2fc3d6ebe..0c4437ab6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java
@@ -91,8 +91,9 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
pageReader.valuesReadyToRead += repeatedValuesInCurrentList;
repeatedGroupsReadInCurrentPass++;
currDictVal = null;
- if ( ! notFishedReadingList)
+ if ( ! notFishedReadingList) {
repeatedValuesInCurrentList = -1;
+ }
}
@Override
@@ -112,8 +113,9 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
public void postPageRead() {
super.postPageRead();
// this is no longer correct as we figured out that lists can reach across pages
- if ( ! notFishedReadingList)
+ if ( ! notFishedReadingList) {
repeatedValuesInCurrentList = -1;
+ }
definitionLevelsRead = 0;
}
@@ -130,12 +132,14 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
@Override
protected boolean checkVectorCapacityReached() {
boolean doneReading = super.checkVectorCapacityReached();
- if (doneReading)
+ if (doneReading) {
return true;
- if (valuesReadInCurrentPass + pageReader.valuesReadyToRead + repeatedValuesInCurrentList >= valueVec.getValueCapacity())
+ }
+ if (valuesReadInCurrentPass + pageReader.valuesReadyToRead + repeatedValuesInCurrentList >= valueVec.getValueCapacity()) {
return true;
- else
+ } else {
return false;
+ }
}
@Override
@@ -163,7 +167,7 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
definitionLevelsRead++;
}
int repLevel;
- if ( columnDescriptor.getMaxDefinitionLevel() == currDefLevel){
+ if ( columnDescriptor.getMaxDefinitionLevel() == currDefLevel) {
if (repeatedValuesInCurrentList == -1 || notFishedReadingList) {
repeatedValuesInCurrentList = 1;
do {
@@ -178,7 +182,7 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
// check that we have not hit the end of the row group (in which case we will not find the repetition level indicating
// the end of this record as there is no next page to check, we have read all the values in this repetition so it is okay
// to add it to the read )
- if (totalValuesRead + pageReader.valuesReadyToRead + repeatedValuesInCurrentList != columnChunkMetaData.getValueCount()){
+ if (totalValuesRead + pageReader.valuesReadyToRead + repeatedValuesInCurrentList != columnChunkMetaData.getValueCount()) {
notFishedReadingList = true;
// if we hit this case, we cut off the current batch at the previous value, these extra values as well
// as those that spill into the next page will be added to the next batch
@@ -188,8 +192,7 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
}
} while (repLevel != 0);
}
- }
- else {
+ } else {
repeatedValuesInCurrentList = 0;
}
int currentValueListLength = repeatedValuesInCurrentList;
@@ -209,7 +212,9 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
@Override
protected void readRecords(int valuesToRead) {
- if (valuesToRead == 0) return;
+ if (valuesToRead == 0) {
+ return;
+ }
// TODO - validate that this works in all cases, it fixes a bug when reading from multiple pages into
// a single vector
dataReader.valuesReadInCurrentPass = 0;
@@ -228,5 +233,5 @@ public class FixedWidthRepeatedReader extends VarLengthColumn {
super.clear();
dataReader.clear();
}
-}
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java
index 47d64bc90..2e24674eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java
@@ -69,7 +69,7 @@ public abstract class NullableVarLengthValuesColumn<V extends ValueVector> exten
if ( currDefLevel == -1 ) {
currDefLevel = pageReader.definitionLevels.readInteger();
}
- if ( columnDescriptor.getMaxDefinitionLevel() > currDefLevel){
+ if ( columnDescriptor.getMaxDefinitionLevel() > currDefLevel) {
nullsRead++;
// set length of zero, each index in the vector defaults to null so no need to set the nullability
variableWidthVector.getMutator().setValueLengthSafe(
@@ -93,14 +93,15 @@ public abstract class NullableVarLengthValuesColumn<V extends ValueVector> exten
// I think this also needs to happen if it is null for the random access
boolean success = setSafe(valuesReadInCurrentPass + pageReader.valuesReadyToRead, pageReader.pageDataByteArray,
(int) pageReader.readyToReadPosInBytes + 4, dataTypeLengthInBits);
- if ( ! success )
+ if ( ! success ) {
return true;
+ }
return false;
}
@Override
public void updateReadyToReadPosition() {
- if (! currentValNull){
+ if (! currentValNull) {
pageReader.readyToReadPosInBytes += dataTypeLengthInBits + 4;
}
pageReader.valuesReadyToRead++;
@@ -109,7 +110,7 @@ public abstract class NullableVarLengthValuesColumn<V extends ValueVector> exten
@Override
public void updatePosition() {
- if (! currentValNull){
+ if (! currentValNull) {
pageReader.readPosInBytes += dataTypeLengthInBits + 4;
bytesReadInCurrentPass += dataTypeLengthInBits;
}
@@ -128,11 +129,12 @@ public abstract class NullableVarLengthValuesColumn<V extends ValueVector> exten
dataTypeLengthInBits = variableWidthVector.getAccessor().getValueLength(valuesReadInCurrentPass);
currentValNull = variableWidthVector.getAccessor().getObject(valuesReadInCurrentPass) == null;
// again, I am re-purposing the unused field here, it is a length n BYTES, not bits
- if (! currentValNull){
+ if (! currentValNull) {
boolean success = setSafe(valuesReadInCurrentPass, pageReader.pageDataByteArray,
(int) pageReader.readPosInBytes + 4, dataTypeLengthInBits);
assert success;
}
updatePosition();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
index bdfc860fd..7d0fc23a2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
@@ -180,7 +180,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
}
}
- private boolean fieldSelected(MaterializedField field){
+ private boolean fieldSelected(MaterializedField field) {
// TODO - not sure if this is how we want to represent this
// for now it makes the existing tests pass, simply selecting
// all available data if no columns are provided
@@ -189,8 +189,8 @@ public class ParquetRecordReader extends AbstractRecordReader {
}
int i = 0;
- for (SchemaPath expr : getColumns()){
- if ( field.matches(expr)){
+ for (SchemaPath expr : getColumns()) {
+ if ( field.matches(expr)) {
columnsFound[i] = true;
return true;
}
@@ -237,7 +237,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
SchemaElement se = schemaElements.get(column.getPath()[0]);
MajorType mt = ParquetToDrillTypeConverter.toMajorType(column.getType(), se.getType_length(), getDataMode(column), se);
field = MaterializedField.create(toFieldName(column.getPath()),mt);
- if ( ! fieldSelected(field)){
+ if ( ! fieldSelected(field)) {
continue;
}
columnsToScan++;
@@ -246,7 +246,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
if (column.getMaxRepetitionLevel() > 0) {
allFieldsFixedLength = false;
}
- if (column.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY){
+ if (column.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) {
bitWidthAllFixedFields += se.getType_length() * 8;
} else {
bitWidthAllFixedFields += getTypeLengthInBits(column.getType());
@@ -278,7 +278,9 @@ public class ParquetRecordReader extends AbstractRecordReader {
MajorType type = ParquetToDrillTypeConverter.toMajorType(column.getType(), schemaElement.getType_length(), getDataMode(column), schemaElement);
field = MaterializedField.create(toFieldName(column.getPath()), type);
// the field was not requested to be read
- if ( ! fieldSelected(field)) continue;
+ if ( ! fieldSelected(field)) {
+ continue;
+ }
fieldFixedLength = column.getType() != PrimitiveType.PrimitiveTypeName.BINARY;
v = output.addField(field, (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
@@ -353,14 +355,14 @@ public class ParquetRecordReader extends AbstractRecordReader {
for (ColumnReader column : columnStatuses) {
column.valuesReadInCurrentPass = 0;
}
- for (VarLengthColumn r : varLengthReader.columns){
+ for (VarLengthColumn r : varLengthReader.columns) {
r.valuesReadInCurrentPass = 0;
}
}
public void readAllFixedFields(long recordsToRead) throws IOException {
- for (ColumnReader crs : columnStatuses){
+ for (ColumnReader crs : columnStatuses) {
crs.processPages(recordsToRead);
}
}
@@ -371,11 +373,11 @@ public class ParquetRecordReader extends AbstractRecordReader {
long recordsToRead = 0;
try {
ColumnReader firstColumnStatus;
- if (columnStatuses.size() > 0){
+ if (columnStatuses.size() > 0) {
firstColumnStatus = columnStatuses.iterator().next();
}
else{
- if (varLengthReader.columns.size() > 0){
+ if (varLengthReader.columns.size() > 0) {
firstColumnStatus = varLengthReader.columns.iterator().next();
}
else{
@@ -437,9 +439,10 @@ public class ParquetRecordReader extends AbstractRecordReader {
}
columnStatuses.clear();
- for (VarLengthColumn r : varLengthReader.columns){
+ for (VarLengthColumn r : varLengthReader.columns) {
r.clear();
}
varLengthReader.columns.clear();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java
index 409f17d68..68a7e2a31 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java
@@ -25,7 +25,7 @@ public class VarLenBinaryReader {
ParquetRecordReader parentReader;
final List<VarLengthColumn> columns;
- public VarLenBinaryReader(ParquetRecordReader parentReader, List<VarLengthColumn> columns){
+ public VarLenBinaryReader(ParquetRecordReader parentReader, List<VarLengthColumn> columns) {
this.parentReader = parentReader;
this.columns = columns;
}
@@ -52,14 +52,15 @@ public class VarLenBinaryReader {
do {
lengthVarFieldsInCurrentRecord = 0;
for (VarLengthColumn columnReader : columns) {
- if ( ! exitLengthDeterminingLoop )
+ if ( !exitLengthDeterminingLoop ) {
exitLengthDeterminingLoop = columnReader.determineSize(recordsReadInCurrentPass, lengthVarFieldsInCurrentRecord);
- else
+ } else {
break;
+ }
}
// check that the next record will fit in the batch
if (exitLengthDeterminingLoop || (recordsReadInCurrentPass + 1) * parentReader.getBitWidthAllFixedFields() + totalVariableLengthData
- + lengthVarFieldsInCurrentRecord > parentReader.getBatchSize()){
+ + lengthVarFieldsInCurrentRecord > parentReader.getBatchSize()) {
break;
}
for (VarLengthColumn columnReader : columns ) {
@@ -78,4 +79,5 @@ public class VarLenBinaryReader {
}
return recordsReadInCurrentPass;
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumnReaders.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumnReaders.java
index 99e6f539a..83f9bde06 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumnReaders.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumnReaders.java
@@ -162,7 +162,6 @@ public class VarLengthColumnReaders {
}
}
-
public static class VarCharColumn extends VarLengthValuesColumn<VarCharVector> {
// store a hard reference to the vector (which is also stored in the superclass) to prevent repetitive casting
@@ -178,7 +177,9 @@ public class VarLengthColumnReaders {
@Override
public boolean setSafe(int index, DrillBuf bytebuf, int start, int length) {
boolean success;
- if(index >= varCharVector.getValueCapacity()) return false;
+ if (index >= varCharVector.getValueCapacity()) {
+ return false;
+ }
if (usingDictionary) {
DrillBuf b = DrillBuf.wrapByteBuffer(currDictValToWrite.toByteBuffer());
@@ -225,7 +226,9 @@ public class VarLengthColumnReaders {
@Override
public boolean setSafe(int index, DrillBuf value, int start, int length) {
boolean success;
- if(index >= vector.getValueCapacity()) return false;
+ if (index >= vector.getValueCapacity()) {
+ return false;
+ }
if (usingDictionary) {
DrillBuf b = DrillBuf.wrapByteBuffer(currDictValToWrite.toByteBuffer());
@@ -258,7 +261,9 @@ public class VarLengthColumnReaders {
@Override
public boolean setSafe(int index, DrillBuf value, int start, int length) {
boolean success;
- if(index >= varBinaryVector.getValueCapacity()) return false;
+ if (index >= varBinaryVector.getValueCapacity()) {
+ return false;
+ }
if (usingDictionary) {
DrillBuf b = DrillBuf.wrapByteBuffer(currDictValToWrite.toByteBuffer());
@@ -303,7 +308,9 @@ public class VarLengthColumnReaders {
@Override
public boolean setSafe(int index, DrillBuf value, int start, int length) {
boolean success;
- if(index >= nullableVarBinaryVector.getValueCapacity()) return false;
+ if (index >= nullableVarBinaryVector.getValueCapacity()) {
+ return false;
+ }
if (usingDictionary) {
DrillBuf b = DrillBuf.wrapByteBuffer(currDictValToWrite.toByteBuffer());
@@ -331,4 +338,5 @@ public class VarLengthColumnReaders {
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoDataType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoDataType.java
index c84f43cac..c1e64e674 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoDataType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoDataType.java
@@ -34,45 +34,45 @@ public class PojoDataType {
public List<SqlTypeName> types = Lists.newArrayList();
public List<String> names = Lists.newArrayList();
- public PojoDataType(Class<?> pojoClass){
+ public PojoDataType(Class<?> pojoClass) {
logger.debug(pojoClass.getName());
Field[] fields = pojoClass.getDeclaredFields();
- for(int i = 0; i < fields.length; i++){
+ for (int i = 0; i < fields.length; i++) {
Field f = fields[i];
- if(Modifier.isStatic(f.getModifiers())) continue;
+ if (Modifier.isStatic(f.getModifiers())) {
+ continue;
+ }
Class<?> type = f.getType();
names.add(f.getName());
- if(type == int.class || type == Integer.class){
+ if (type == int.class || type == Integer.class) {
types.add(SqlTypeName.INTEGER);
- }else if(type == boolean.class || type == Boolean.class){
+ } else if(type == boolean.class || type == Boolean.class) {
types.add(SqlTypeName.BOOLEAN);
- }else if(type == long.class || type == Long.class){
+ } else if(type == long.class || type == Long.class) {
types.add(SqlTypeName.BIGINT);
- }else if(type == double.class || type == Double.class){
+ } else if(type == double.class || type == Double.class) {
types.add(SqlTypeName.DOUBLE);
- }else if(type == String.class){
+ } else if(type == String.class) {
types.add(SqlTypeName.VARCHAR);
- }else if(type.isEnum()){
+ } else if(type.isEnum()) {
types.add(SqlTypeName.VARCHAR);
- }else if (type == Timestamp.class) {
+ } else if (type == Timestamp.class) {
types.add(SqlTypeName.TIMESTAMP);
- }else{
+ } else {
throw new RuntimeException(String.format("PojoRecord reader doesn't yet support conversions from type [%s].", type));
}
}
}
-
- public RelDataType getRowType(RelDataTypeFactory f){
+ public RelDataType getRowType(RelDataTypeFactory f) {
List<RelDataType> fields = Lists.newArrayList();
- for(SqlTypeName n : types){
+ for (SqlTypeName n : types) {
fields.add(f.createSqlType(n));
}
return f.createStructType(fields, names);
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
index 38160df8f..241fa956e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
@@ -59,7 +59,7 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
private T currentPojo;
private OperatorContext operatorContext;
- public PojoRecordReader(Class<T> pojoClass, Iterator<T> iterator){
+ public PojoRecordReader(Class<T> pojoClass, Iterator<T> iterator) {
this.pojoClass = pojoClass;
this.iterator = iterator;
}
@@ -74,40 +74,42 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
@Override
public void setup(OutputMutator output) throws ExecutionSetupException {
- try{
+ try {
Field[] fields = pojoClass.getDeclaredFields();
List<PojoWriter> writers = Lists.newArrayList();
- for(int i = 0; i < fields.length; i++){
+ for (int i = 0; i < fields.length; i++) {
Field f = fields[i];
- if(Modifier.isStatic(f.getModifiers())) continue;
+ if (Modifier.isStatic(f.getModifiers())) {
+ continue;
+ }
Class<?> type = f.getType();
PojoWriter w = null;
- if(type == int.class){
+ if(type == int.class) {
w = new IntWriter(f);
- }else if(type == Integer.class){
+ } else if(type == Integer.class) {
w = new NIntWriter(f);
- }else if(type == Long.class){
+ } else if(type == Long.class) {
w = new NBigIntWriter(f);
- }else if(type == Boolean.class){
+ } else if(type == Boolean.class) {
w = new NBooleanWriter(f);
- }else if(type == double.class){
+ } else if(type == double.class) {
w = new DoubleWriter(f);
- }else if(type == Double.class){
+ } else if(type == Double.class) {
w = new NDoubleWriter(f);
- }else if(type.isEnum()){
+ } else if(type.isEnum()) {
w = new EnumWriter(f, output.getManagedBuffer());
- }else if(type == boolean.class){
+ } else if(type == boolean.class) {
w = new BitWriter(f);
- }else if(type == long.class){
+ } else if(type == long.class) {
w = new LongWriter(f);
- }else if(type == String.class){
+ } else if(type == String.class) {
w = new StringWriter(f, output.getManagedBuffer());
- }else if (type == Timestamp.class) {
+ } else if (type == Timestamp.class) {
w = new NTimeStampWriter(f);
- }else{
+ } else {
throw new ExecutionSetupException(String.format("PojoRecord reader doesn't yet support conversions from type [%s].", type));
}
writers.add(w);
@@ -116,7 +118,7 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
this.writers = writers.toArray(new PojoWriter[writers.size()]);
- }catch(SchemaChangeException e){
+ } catch(SchemaChangeException e) {
throw new ExecutionSetupException("Failure while setting up schema for PojoRecordReader.", e);
}
@@ -130,14 +132,14 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
}
}
- private void allocate(){
- for(PojoWriter writer : writers){
+ private void allocate() {
+ for (PojoWriter writer : writers) {
writer.allocate();
}
}
- private void setValueCount(int i){
- for(PojoWriter writer : writers){
+ private void setValueCount(int i) {
+ for (PojoWriter writer : writers) {
writer.setValueCount(i);
}
}
@@ -146,32 +148,36 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
public int next() {
boolean allocated = false;
- try{
+ try {
int i =0;
outside:
- while(doCurrent || iterator.hasNext()){
- if(doCurrent){
+ while (doCurrent || iterator.hasNext()) {
+ if (doCurrent) {
doCurrent = false;
- }else{
+ } else {
currentPojo = iterator.next();
}
- if(!allocated){
+ if (!allocated) {
allocate();
allocated = true;
}
- for(PojoWriter writer : writers){
- if(!writer.writeField(currentPojo, i)){
+ for (PojoWriter writer : writers) {
+ if (!writer.writeField(currentPojo, i)) {
doCurrent = true;
- if(i == 0) throw new IllegalStateException("Got into a position where we can't write data but the batch is empty.");
+ if (i == 0) {
+ throw new IllegalStateException("Got into a position where we can't write data but the batch is empty.");
+ }
break outside;
};
}
i++;
}
- if(i != 0 ) setValueCount(i);
+ if (i != 0 ) {
+ setValueCount(i);
+ }
return i;
} catch (IllegalArgumentException | IllegalAccessException e) {
throw new RuntimeException("Failure while trying to use PojoRecordReader.", e);
@@ -182,5 +188,4 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
public void cleanup() {
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/Writers.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/Writers.java
index b982c9e1d..fee011a9c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/Writers.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/Writers.java
@@ -41,11 +41,13 @@ import com.google.common.base.Charsets;
public class Writers {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Writers.class);
- public static class IntWriter extends AbstractWriter<IntVector>{
+ public static class IntWriter extends AbstractWriter<IntVector> {
public IntWriter(Field field) {
super(field, Types.required(MinorType.INT));
- if(field.getType() != int.class) throw new IllegalStateException();
+ if (field.getType() != int.class) {
+ throw new IllegalStateException();
+ }
}
@Override
@@ -60,7 +62,9 @@ public class Writers {
public BitWriter(Field field) {
super(field, Types.required(MinorType.BIT));
- if(field.getType() != boolean.class) throw new IllegalStateException();
+ if (field.getType() != boolean.class) {
+ throw new IllegalStateException();
+ }
}
@Override
@@ -75,13 +79,14 @@ public class Writers {
public LongWriter(Field field) {
super(field, Types.required(MinorType.BIGINT));
- if(field.getType() != long.class) throw new IllegalStateException();
+ if (field.getType() != long.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
long l = field.getLong(pojo);
-
return vector.getMutator().setSafe(outboundIndex, l);
}
@@ -91,7 +96,9 @@ public class Writers {
public DoubleWriter(Field field) {
super(field, Types.required(MinorType.FLOAT8));
- if(field.getType() != double.class) throw new IllegalStateException();
+ if (field.getType() != double.class) {
+ throw new IllegalStateException();
+ }
}
@Override
@@ -113,18 +120,18 @@ public class Writers {
ensureLength(100);
}
- void ensureLength(int len){
+ void ensureLength(int len) {
data = data.reallocIfNeeded(len);
}
@Override
- public void cleanup(){
+ public void cleanup() {
}
public boolean writeString(String s, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
- if(s == null){
+ if (s == null) {
return true;
- }else{
+ } else {
h.isSet = 1;
byte[] bytes = s.getBytes(Charsets.UTF_8);
ensureLength(bytes.length);
@@ -134,9 +141,7 @@ public class Writers {
h.start = 0;
h.end = bytes.length;
return vector.getMutator().setSafe(outboundIndex, h);
-
}
-
}
}
@@ -144,13 +149,17 @@ public class Writers {
public static class EnumWriter extends AbstractStringWriter{
public EnumWriter(Field field, DrillBuf managedBuf) {
super(field, managedBuf);
- if(!field.getType().isEnum()) throw new IllegalStateException();
+ if (!field.getType().isEnum()) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Enum<?> e= ((Enum<?>) field.get(pojo));
- if(e == null) return true;
+ if (e == null) {
+ return true;
+ }
return writeString(e.name(), outboundIndex);
}
}
@@ -158,7 +167,9 @@ public class Writers {
public static class StringWriter extends AbstractStringWriter {
public StringWriter(Field field, DrillBuf managedBuf) {
super(field, managedBuf);
- if(field.getType() != String.class) throw new IllegalStateException();
+ if (field.getType() != String.class) {
+ throw new IllegalStateException();
+ }
}
@Override
@@ -172,13 +183,15 @@ public class Writers {
public NIntWriter(Field field) {
super(field, Types.optional(MinorType.INT));
- if(field.getType() != Integer.class) throw new IllegalStateException();
+ if (field.getType() != Integer.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Integer i = (Integer) field.get(pojo);
- if(i != null){
+ if (i != null) {
return vector.getMutator().setSafe(outboundIndex, i);
}
return true;
@@ -190,13 +203,15 @@ public class Writers {
public NBigIntWriter(Field field) {
super(field, Types.optional(MinorType.BIGINT));
- if(field.getType() != Long.class) throw new IllegalStateException();
+ if (field.getType() != Long.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Long o = (Long) field.get(pojo);
- if(o != null){
+ if (o != null) {
return vector.getMutator().setSafe(outboundIndex, o);
}
return true;
@@ -208,13 +223,15 @@ public class Writers {
public NBooleanWriter(Field field) {
super(field, Types.optional(MinorType.BIT));
- if(field.getType() != Boolean.class) throw new IllegalStateException();
+ if (field.getType() != Boolean.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Boolean o = (Boolean) field.get(pojo);
- if(o != null){
+ if (o != null) {
return vector.getMutator().setSafe(outboundIndex, o ? 1 : 0);
}
return true;
@@ -225,13 +242,15 @@ public class Writers {
public NDoubleWriter(Field field) {
super(field, Types.optional(MinorType.FLOAT8));
- if(field.getType() != Double.class) throw new IllegalStateException();
+ if (field.getType() != Double.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Double o = (Double) field.get(pojo);
- if(o != null){
+ if (o != null) {
return vector.getMutator().setSafe(outboundIndex, o);
}
return true;
@@ -243,13 +262,15 @@ public class Writers {
public NTimeStampWriter(Field field) {
super(field, Types.optional(MinorType.TIMESTAMP));
- if(field.getType() != Timestamp.class) throw new IllegalStateException();
+ if (field.getType() != Timestamp.class) {
+ throw new IllegalStateException();
+ }
}
@Override
public boolean writeField(Object pojo, int outboundIndex) throws IllegalArgumentException, IllegalAccessException {
Timestamp o = (Timestamp) field.get(pojo);
- if(o != null){
+ if (o != null) {
return vector.getMutator().setSafe(outboundIndex, o.getTime());
}
return true;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
index 705688ee5..7e9c4c993 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java
@@ -101,8 +101,9 @@ public class AssignmentCreator<T extends CompleteWork> {
final boolean requireAffinity = requiredPercentage > 0;
int maxAssignments = (int) (workunits.size() / endpoints.size());
- if (maxAssignments < 1)
+ if (maxAssignments < 1) {
maxAssignments = 1;
+ }
for (Iterator<T> iter = workunits.iterator(); iter.hasNext();) {
T unit = iter.next();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
index 149f9e1f8..1aac14c36 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
@@ -92,7 +92,9 @@ public class BlockMapBuilder {
private FileStatus status;
public FileStatusWork(FileStatus status) {
- if(status.isDir()) throw new IllegalStateException("FileStatus work only works with files, not directories.");
+ if (status.isDir()) {
+ throw new IllegalStateException("FileStatus work only works with files, not directories.");
+ }
this.status = status;
}
@@ -151,7 +153,7 @@ public class BlockMapBuilder {
private ImmutableRangeMap<Long,BlockLocation> getBlockMap(FileStatus status) throws IOException{
ImmutableRangeMap<Long,BlockLocation> blockMap = blockMapMap.get(status.getPath());
- if(blockMap == null){
+ if (blockMap == null) {
blockMap = buildBlockMap(status);
}
return blockMap;
@@ -194,9 +196,9 @@ public class BlockMapBuilder {
// For each host in the current block location, add the intersecting bytes to the corresponding endpoint
for (String host : hosts) {
DrillbitEndpoint endpoint = getDrillBitEndpoint(host);
- if(endpoint != null){
+ if (endpoint != null) {
endpointByteMap.add(endpoint, bytes);
- }else{
+ } else {
logger.debug("Failure finding Drillbit running on host {}. Skipping affinity to that host.", host);
}
}
@@ -226,4 +228,5 @@ public class BlockMapBuilder {
watch.stop();
logger.debug("Took {} ms to build endpoint map", watch.elapsed(TimeUnit.MILLISECONDS));
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStore.java
index afaaad2c7..35e4aeaee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStore.java
@@ -45,7 +45,7 @@ public class LocalPStore<V> implements PStore<V>{
public LocalPStore(File base, PStoreConfig<V> config) {
super();
this.basePath = new File(base, config.getName());
- if(!basePath.exists()){
+ if (!basePath.exists()) {
basePath.mkdirs();
}
this.config = config;
@@ -54,10 +54,12 @@ public class LocalPStore<V> implements PStore<V>{
@Override
public Iterator<Entry<String, V>> iterator() {
String[] f = basePath.list();
- if(f == null) return Collections.emptyIterator();
+ if (f == null) {
+ return Collections.emptyIterator();
+ }
List<String> files = Lists.newArrayList();
- for(String s : f){
- if(s.endsWith(SUFFIX)){
+ for (String s : f) {
+ if (s.endsWith(SUFFIX)) {
files.add(s.substring(0, s.length() - SUFFIX.length()));
}
}
@@ -81,42 +83,42 @@ public class LocalPStore<V> implements PStore<V>{
@Override
public V get(String key) {
- try(InputStream is = new FileInputStream(p(key))){
+ try (InputStream is = new FileInputStream(p(key))) {
return config.getSerializer().deserialize(IOUtils.toByteArray(is));
- }catch(IOException e){
+ } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void put(String key, V value) {
- try(OutputStream os = new FileOutputStream(p(key))){
+ try (OutputStream os = new FileOutputStream(p(key))) {
IOUtils.write(config.getSerializer().serialize(value), os);
- }catch(IOException e){
+ } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean putIfAbsent(String key, V value) {
- try{
+ try {
File f = p(key);
- if(f.exists()){
+ if (f.exists()) {
return false;
- }else{
+ } else {
put(key, value);
return true;
}
- }catch(IOException e){
+ } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void delete(String key) {
- try{
+ try {
p(key).delete();
- }catch(IOException e){
+ } catch (IOException e) {
throw new RuntimeException(e);
}
}
@@ -148,12 +150,10 @@ public class LocalPStore<V> implements PStore<V>{
keys.remove();
}
-
- private class DeferredEntry implements Entry<String, V>{
+ private class DeferredEntry implements Entry<String, V> {
private String name;
-
public DeferredEntry(String name) {
super();
this.name = name;
@@ -176,4 +176,5 @@ public class LocalPStore<V> implements PStore<V>{
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStoreProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStoreProvider.java
index 5d97b1b82..95fcf148e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStoreProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/local/LocalPStoreProvider.java
@@ -43,7 +43,7 @@ public class LocalPStoreProvider implements PStoreProvider{
public LocalPStoreProvider(DrillConfig config) {
path = new File(config.getString(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH));
enableWrite = config.getBoolean(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE);
- if(!enableWrite){
+ if (!enableWrite) {
pstores = Maps.newConcurrentMap();
}
}
@@ -58,12 +58,14 @@ public class LocalPStoreProvider implements PStoreProvider{
@Override
public <V> PStore<V> getPStore(PStoreConfig<V> storeConfig) throws IOException {
- if(enableWrite){
+ if (enableWrite) {
return new LocalPStore<V>(path, storeConfig);
- }else{
+ } else {
PStore<V> p = new NoWriteLocalPStore<V>();
PStore<?> p2 = pstores.putIfAbsent(storeConfig, p);
- if(p2 != null) return (PStore<V>) p2;
+      if (p2 != null) {
+ return (PStore<V>) p2;
+ }
return p;
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordWriter.java
index b713a0c70..31b1fbe9e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordWriter.java
@@ -51,7 +51,7 @@ public class DrillTextRecordWriter extends StringOutputRecordWriter {
private boolean fRecordStarted = false; // true once the startRecord() is called until endRecord() is called
private StringBuilder currentRecord; // contains the current record separated by field delimiter
- public DrillTextRecordWriter(BufferAllocator allocator){
+ public DrillTextRecordWriter(BufferAllocator allocator) {
super(allocator);
}
@@ -97,16 +97,18 @@ public class DrillTextRecordWriter extends StringOutputRecordWriter {
@Override
public void startRecord() throws IOException {
- if (fRecordStarted)
+ if (fRecordStarted) {
throw new IOException("Previous record is not written completely");
+ }
fRecordStarted = true;
}
@Override
public void endRecord() throws IOException {
- if (!fRecordStarted)
+ if (!fRecordStarted) {
throw new IOException("No record is in writing");
+ }
// remove the extra delimiter at the end
currentRecord.deleteCharAt(currentRecord.length()-fieldDelimiter.length());
@@ -165,4 +167,5 @@ public class DrillTextRecordWriter extends StringOutputRecordWriter {
throw ex;
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
index 94e8300ca..bc3edaae7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
@@ -66,11 +66,11 @@ public abstract class BaseDataValueVector extends BaseValueVector{
@Override
- public DrillBuf[] getBuffers(boolean clear){
+ public DrillBuf[] getBuffers(boolean clear) {
DrillBuf[] out;
- if(valueCount == 0){
+ if (valueCount == 0) {
out = new DrillBuf[0];
- }else{
+ } else {
out = new DrillBuf[]{data};
if (clear) {
data.readerIndex(0);
@@ -84,7 +84,9 @@ public abstract class BaseDataValueVector extends BaseValueVector{
}
public int getBufferSize() {
- if(valueCount == 0) return 0;
+ if (valueCount == 0) {
+ return 0;
+ }
return data.writerIndex();
}
@@ -92,11 +94,11 @@ public abstract class BaseDataValueVector extends BaseValueVector{
public abstract SerializedField getMetadata();
@Override
- public DrillBuf getData(){
+ public DrillBuf getData() {
return data;
}
- public long getDataAddr(){
+ public long getDataAddr() {
return data.memoryAddress();
}
@@ -105,6 +107,4 @@ public abstract class BaseDataValueVector extends BaseValueVector{
return Iterators.emptyIterator();
}
-
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
index a28f91949..6542fe79c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
@@ -67,7 +67,9 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
public void allocateNew() {
- if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ if (!allocateNewSafe()) {
+ throw new OutOfMemoryRuntimeException();
+ }
}
public boolean allocateNewSafe() {
@@ -84,7 +86,9 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
valueCapacity = allocationValueCount;
int valueSize = getSizeFromCount(allocationValueCount);
data = allocator.buffer(valueSize);
- if(data == null) return false;
+ if (data == null) {
+ return false;
+ }
zeroVector();
return true;
}
@@ -125,8 +129,8 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
this.mutator.set(outIndex, from.accessor.get(inIndex));
}
- public boolean copyFromSafe(int inIndex, int outIndex, BitVector from){
- if(outIndex >= this.getValueCapacity()) {
+ public boolean copyFromSafe(int inIndex, int outIndex, BitVector from) {
+ if (outIndex >= this.getValueCapacity()) {
decrementAllocationMonitor();
return false;
}
@@ -154,10 +158,10 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
return new Accessor();
}
- public TransferPair getTransferPair(){
+ public TransferPair getTransferPair() {
return new TransferImpl(getField());
}
- public TransferPair getTransferPair(FieldReference ref){
+ public TransferPair getTransferPair(FieldReference ref) {
return new TransferImpl(getField().clone(ref));
}
@@ -195,15 +199,15 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
for (int i = 0; i < byteSize - 1; i++) {
target.data.setByte(i, (((this.data.getByte(firstByte + i) & 0xFF) >>> offset) + (this.data.getByte(firstByte + i + 1) << (8 - offset))));
}
- if (length % 8 != 0)
+ if (length % 8 != 0) {
target.data.setByte(byteSize - 1, ((this.data.getByte(firstByte + byteSize - 1) & 0xFF) >>> offset));
- else
+ } else {
target.data.setByte(byteSize - 1,
(((this.data.getByte(firstByte + byteSize - 1) & 0xFF) >>> offset) + (this.data.getByte(firstByte + byteSize) << (8 - offset))));
+ }
}
}
-
private class TransferImpl implements TransferPair {
BitVector to;
@@ -260,7 +264,7 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
return Long.bitCount(b & (1L << bitIndex));
}
- public boolean isNull(int index){
+ public boolean isNull(int index) {
return false;
}
@@ -381,4 +385,5 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
index a6df3a36c..096db6edf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
@@ -33,10 +33,10 @@ public abstract class AbstractContainerVector implements ValueVector{
public abstract <T extends ValueVector> T get(String name, Class<T> clazz);
public abstract int size();
- protected <T extends ValueVector> T typeify(ValueVector v, Class<T> clazz){
- if(clazz.isAssignableFrom(v.getClass())){
+ protected <T extends ValueVector> T typeify(ValueVector v, Class<T> clazz) {
+ if (clazz.isAssignableFrom(v.getClass())) {
return (T) v;
- }else{
+ } else {
throw new IllegalStateException(String.format("Vector requested [%s] was different than type stored [%s]. Drill doesn't yet support hetergenous types.", clazz.getSimpleName(), v.getClass().getSimpleName()));
}
}
@@ -45,10 +45,11 @@ public abstract class AbstractContainerVector implements ValueVector{
public abstract VectorWithOrdinal getVectorWithOrdinal(String name);
-
public TypedFieldId getFieldIdIfMatches(TypedFieldId.Builder builder, boolean addToBreadCrumb, PathSegment seg) {
if (seg == null) {
- if(addToBreadCrumb) builder.intermediateType(this.getField().getType());
+ if (addToBreadCrumb) {
+ builder.intermediateType(this.getField().getType());
+ }
return builder.finalType(this.getField().getType()).build();
}
@@ -72,7 +73,9 @@ public abstract class AbstractContainerVector implements ValueVector{
}
VectorWithOrdinal vord = getVectorWithOrdinal(seg.isArray() ? null : seg.getNameSegment().getPath());
- if (vord == null) return null;
+ if (vord == null) {
+ return null;
+ }
ValueVector v = vord.vector;
if (addToBreadCrumb) {
@@ -86,7 +89,9 @@ public abstract class AbstractContainerVector implements ValueVector{
return c.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
} else {
if (seg.isNamed()) {
- if(addToBreadCrumb) builder.intermediateType(v.getField().getType());
+        if (addToBreadCrumb) {
+ builder.intermediateType(v.getField().getType());
+ }
builder.finalType(v.getField().getType());
} else {
builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
@@ -116,8 +121,9 @@ public abstract class AbstractContainerVector implements ValueVector{
this.getField().getType().getMode() == DataMode.REPEATED)) { // Use Repeated scalar type instead of Required List.
VectorWithOrdinal vord = getVectorWithOrdinal(null);
ValueVector v = vord.vector;
- if (! (v instanceof AbstractContainerVector))
+ if (! (v instanceof AbstractContainerVector)) {
return v.getField().getType();
+ }
} else if (this.getField().getType().getMinorType() == MinorType.MAP &&
this.getField().getType().getMode() == DataMode.REPEATED) { // Use Required Map
return this.getField().getType().toBuilder().setMode(DataMode.REQUIRED).build();
@@ -126,7 +132,7 @@ public abstract class AbstractContainerVector implements ValueVector{
return this.getField().getType();
}
- protected boolean supportsDirectRead(){
+ protected boolean supportsDirectRead() {
return false;
}
@@ -134,10 +140,10 @@ public abstract class AbstractContainerVector implements ValueVector{
final ValueVector vector;
final int ordinal;
- public VectorWithOrdinal(ValueVector v, int ordinal){
+ public VectorWithOrdinal(ValueVector v, int ordinal) {
this.vector = v;
this.ordinal = ordinal;
}
}
-}
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
index 85067a1f5..037f1c72f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
@@ -62,17 +62,17 @@ public class MapVector extends AbstractContainerVector {
private MaterializedField field;
private int valueCount;
- public MapVector(String path, BufferAllocator allocator){
+ public MapVector(String path, BufferAllocator allocator) {
this.field = MaterializedField.create(SchemaPath.getSimplePath(path), TYPE);
this.allocator = allocator;
}
- public MapVector(MaterializedField field, BufferAllocator allocator){
+ public MapVector(MaterializedField field, BufferAllocator allocator) {
this.field = field;
this.allocator = allocator;
}
@Override
- public int size(){
+ public int size() {
return vectors.size();
}
@@ -95,15 +95,15 @@ public class MapVector extends AbstractContainerVector {
transient private MapTransferPair ephPair;
transient private MapSingleCopier ephPair2;
- public boolean copyFromSafe(int fromIndex, int thisIndex, MapVector from){
- if(ephPair == null || ephPair.from != from){
+ public boolean copyFromSafe(int fromIndex, int thisIndex, MapVector from) {
+    if (ephPair == null || ephPair.from != from) {
ephPair = (MapTransferPair) from.makeTransferPair(this);
}
return ephPair.copyValueSafe(fromIndex, thisIndex);
}
- public boolean copyFromSafe(int fromSubIndex, int thisIndex, RepeatedMapVector from){
- if(ephPair2 == null || ephPair2.from != from){
+ public boolean copyFromSafe(int fromSubIndex, int thisIndex, RepeatedMapVector from) {
+    if (ephPair2 == null || ephPair2.from != from) {
ephPair2 = from.makeSingularCopier(this);
}
return ephPair2.copySafe(fromSubIndex, thisIndex);
@@ -112,8 +112,7 @@ public class MapVector extends AbstractContainerVector {
@Override
public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
ValueVector v = vectors.get(name);
-
- if(v == null){
+ if (v == null) {
v = TypeHelper.getNewVector(field.getPath(), name, allocator, type);
Preconditions.checkNotNull(v, String.format("Failure to create vector of type %s.", type));
put(name, v);
@@ -122,9 +121,9 @@ public class MapVector extends AbstractContainerVector {
}
- protected void put(String name, ValueVector vv){
+ protected void put(String name, ValueVector vv) {
int ordinal = vectors.size();
- if(vectors.put(name, vv) != null){
+ if (vectors.put(name, vv) != null) {
throw new IllegalStateException();
}
vectorIds.put(name, new VectorWithOrdinal(vv, ordinal));
@@ -138,19 +137,23 @@ public class MapVector extends AbstractContainerVector {
return true;
}
- public Iterator<String> fieldNameIterator(){
+ public Iterator<String> fieldNameIterator() {
return vectors.keySet().iterator();
}
@Override
public void allocateNew() throws OutOfMemoryRuntimeException {
- if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ if (!allocateNewSafe()) {
+ throw new OutOfMemoryRuntimeException();
+ }
}
@Override
public boolean allocateNewSafe() {
- for(ValueVector v : vectors.values()){
- if(!v.allocateNewSafe()) return false;
+ for (ValueVector v : vectors.values()) {
+ if (!v.allocateNewSafe()) {
+ return false;
+ }
}
return true;
}
@@ -158,15 +161,19 @@ public class MapVector extends AbstractContainerVector {
@Override
public <T extends ValueVector> T get(String name, Class<T> clazz) {
ValueVector v = vectors.get(name);
- if(v == null) throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ if (v == null) {
+ throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ }
return typeify(v, clazz);
}
@Override
public int getBufferSize() {
- if(valueCount == 0 || vectors.isEmpty()) return 0;
+ if (valueCount == 0 || vectors.isEmpty()) {
+ return 0;
+ }
long buffer = 0;
- for(ValueVector v : this){
+ for (ValueVector v : this) {
buffer += v.getBufferSize();
}
@@ -175,7 +182,7 @@ public class MapVector extends AbstractContainerVector {
@Override
public void close() {
- for(ValueVector v : this){
+ for (ValueVector v : this) {
v.close();
}
}
@@ -210,11 +217,11 @@ public class MapVector extends AbstractContainerVector {
private TransferPair[] pairs;
private MapVector to;
- public MapTransferPair(SchemaPath path){
+ public MapTransferPair(SchemaPath path) {
MapVector v = new MapVector(MaterializedField.create(path, TYPE), allocator);
pairs = new TransferPair[vectors.size()];
int i =0;
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
TransferPair otherSide = e.getValue().getTransferPair();
v.put(e.getKey(), otherSide.getTo());
pairs[i++] = otherSide;
@@ -222,14 +229,16 @@ public class MapVector extends AbstractContainerVector {
this.to = v;
}
- public MapTransferPair(MapVector to){
+ public MapTransferPair(MapVector to) {
this.to = to;
pairs = new TransferPair[vectors.size()];
int i =0;
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
int preSize = to.vectors.size();
ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
- if(to.vectors.size() != preSize) v.allocateNew();
+ if (to.vectors.size() != preSize) {
+ v.allocateNew();
+ }
pairs[i++] = e.getValue().makeTransferPair(v);
}
}
@@ -237,7 +246,7 @@ public class MapVector extends AbstractContainerVector {
@Override
public void transfer() {
- for(TransferPair p : pairs){
+ for (TransferPair p : pairs) {
p.transfer();
}
to.valueCount = valueCount;
@@ -251,8 +260,10 @@ public class MapVector extends AbstractContainerVector {
@Override
public boolean copyValueSafe(int from, int to) {
- for(TransferPair p : pairs){
- if(!p.copyValueSafe(from, to)) return false;
+ for (TransferPair p : pairs) {
+ if (!p.copyValueSafe(from, to)) {
+ return false;
+ }
}
return true;
}
@@ -266,7 +277,9 @@ public class MapVector extends AbstractContainerVector {
@Override
public int getValueCapacity() {
- if(this.vectors.isEmpty()) return 0;
+ if (this.vectors.isEmpty()) {
+ return 0;
+ }
return vectors.values().iterator().next().getValueCapacity();
}
@@ -278,8 +291,8 @@ public class MapVector extends AbstractContainerVector {
@Override
public DrillBuf[] getBuffers(boolean clear) {
List<DrillBuf> bufs = Lists.newArrayList();
- for(ValueVector v : vectors.values()){
- for(DrillBuf b : v.getBuffers(clear)){
+ for (ValueVector v : vectors.values()) {
+ for (DrillBuf b : v.getBuffers(clear)) {
bufs.add(b);
}
}
@@ -296,12 +309,11 @@ public class MapVector extends AbstractContainerVector {
MaterializedField fieldDef = MaterializedField.create(fmd);
ValueVector v = vectors.get(fieldDef.getLastName());
- if(v == null) {
+ if (v == null) {
// if we arrive here, we didn't have a matching vector.
-
v = TypeHelper.getNewVector(fieldDef, allocator);
}
- if (fmd.getValueCount() == 0){
+ if (fmd.getValueCount() == 0) {
v.clear();
} else {
v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
@@ -319,7 +331,7 @@ public class MapVector extends AbstractContainerVector {
.setValueCount(valueCount);
- for(ValueVector v : vectors.values()){
+    for (ValueVector v : vectors.values()) {
b.addChild(v.getMetadata());
}
return b.build();
@@ -335,18 +347,18 @@ public class MapVector extends AbstractContainerVector {
@Override
public Object getObject(int index) {
Map<String, Object> vv = new JsonStringHashMap();
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
ValueVector v = e.getValue();
String k = e.getKey();
Object value = v.getAccessor().getObject(index);
- if(value != null){
+ if (value != null) {
vv.put(k, value);
}
}
return vv;
}
- public void get(int index, ComplexHolder holder){
+ public void get(int index, ComplexHolder holder) {
reader.setPosition(index);
holder.reader = reader;
}
@@ -370,10 +382,9 @@ public class MapVector extends AbstractContainerVector {
//return new SingleMapReaderImpl(MapVector.this);
return reader;
}
-
}
- public ValueVector getVectorById(int id){
+ public ValueVector getVectorById(int id) {
return vectorsById.get(id);
}
@@ -381,7 +392,7 @@ public class MapVector extends AbstractContainerVector {
@Override
public void setValueCount(int valueCount) {
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
v.getMutator().setValueCount(valueCount);
}
MapVector.this.valueCount = valueCount;
@@ -400,13 +411,14 @@ public class MapVector extends AbstractContainerVector {
@Override
public void clear() {
valueCount = 0;
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
v.clear();;
}
}
@Override
- public VectorWithOrdinal getVectorWithOrdinal(String name){
+ public VectorWithOrdinal getVectorWithOrdinal(String name) {
return vectorIds.get(name);
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
index dd3131659..9870dd583 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
@@ -68,14 +68,14 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
public static MajorType TYPE = Types.repeated(MinorType.LIST);
- public RepeatedListVector(MaterializedField field, BufferAllocator allocator){
+ public RepeatedListVector(MaterializedField field, BufferAllocator allocator) {
this.allocator = allocator;
this.offsets = new UInt4Vector(null, allocator);
this.field = field;
}
@Override
- public int size(){
+ public int size() {
return vector != null ? 1 : 0;
}
@@ -93,36 +93,40 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
return primitiveVectors;
}
- public RepeatedListVector(SchemaPath path, BufferAllocator allocator){
+ public RepeatedListVector(SchemaPath path, BufferAllocator allocator) {
this(MaterializedField.create(path, TYPE), allocator);
}
transient private RepeatedListTransferPair ephPair;
- public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedListVector from){
- if(ephPair == null || ephPair.from != from){
+ public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedListVector from) {
+    if (ephPair == null || ephPair.from != from) {
ephPair = (RepeatedListTransferPair) from.makeTransferPair(this);
}
return ephPair.copyValueSafe(fromIndex, thisIndex);
}
- public Mutator getMutator(){
+ public Mutator getMutator() {
return mutator;
}
@Override
public void allocateNew() throws OutOfMemoryRuntimeException {
- if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ if (!allocateNewSafe()) {
+ throw new OutOfMemoryRuntimeException();
+ }
}
@Override
public boolean allocateNewSafe() {
- if(!offsets.allocateNewSafe()) return false;
+ if (!offsets.allocateNewSafe()) {
+ return false;
+ }
offsets.zeroVector();
- if(vector != null){
+ if (vector != null) {
return vector.allocateNewSafe();
- }else{
+ } else {
return true;
}
@@ -134,13 +138,15 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
offsets.getMutator().set(index+1, offsets.getAccessor().get(index));
}
- public int add(int index){
+ public int add(int index) {
int endOffset = index+1;
int currentChildOffset = offsets.getAccessor().get(endOffset);
int newChildOffset = currentChildOffset + 1;
boolean success = offsets.getMutator().setSafe(endOffset, newChildOffset);
lastSet = index;
- if(!success) return -1;
+ if (!success) {
+ return -1;
+ }
// this is done at beginning so return the currentChildOffset, not the new offset.
return currentChildOffset;
@@ -152,7 +158,7 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
populateEmpties(groupCount);
offsets.getMutator().setValueCount(groupCount+1);
- if(vector != null){
+ if (vector != null) {
int valueCount = offsets.getAccessor().get(groupCount);
vector.getMutator().setValueCount(valueCount);
}
@@ -190,7 +196,7 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
public Object getObject(int index) {
List<Object> l = new JsonStringArrayList();
int end = offsets.getAccessor().get(index+1);
- for(int i = offsets.getAccessor().get(index); i < end; i++){
+ for (int i = offsets.getAccessor().get(index); i < end; i++) {
l.add(vector.getAccessor().getObject(i));
}
return l;
@@ -201,31 +207,30 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
return offsets.getAccessor().getValueCount() - 1;
}
- public void get(int index, RepeatedListHolder holder){
+ public void get(int index, RepeatedListHolder holder) {
assert index <= getValueCapacity();
holder.start = offsets.getAccessor().get(index);
holder.end = offsets.getAccessor().get(index+1);
}
- public void get(int index, ComplexHolder holder){
+ public void get(int index, ComplexHolder holder) {
FieldReader reader = getReader();
reader.setPosition(index);
holder.reader = reader;
}
- public void get(int index, int arrayIndex, ComplexHolder holder){
+ public void get(int index, int arrayIndex, ComplexHolder holder) {
RepeatedListHolder h = new RepeatedListHolder();
get(index, h);
int offset = h.start + arrayIndex;
- if(offset >= h.end){
+ if (offset >= h.end) {
holder.reader = NullReader.INSTANCE;
- }else{
+ } else {
FieldReader r = vector.getAccessor().getReader();
r.setPosition(offset);
holder.reader = r;
}
-
}
@Override
@@ -256,14 +261,18 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
@Override
public void close() {
offsets.close();
- if(vector != null) vector.close();
+ if (vector != null) {
+ vector.close();
+ }
}
@Override
public void clear() {
lastSet = 0;
offsets.clear();
- if(vector != null) vector.clear();
+ if (vector != null) {
+ vector.clear();
+ }
}
@Override
@@ -276,22 +285,21 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
return new RepeatedListTransferPair(field.getPath());
}
-
public class RepeatedListTransferPair implements TransferPair{
private final RepeatedListVector from = RepeatedListVector.this;
private final RepeatedListVector to;
private final TransferPair vectorTransfer;
- private RepeatedListTransferPair(RepeatedListVector to){
+ private RepeatedListTransferPair(RepeatedListVector to) {
this.to = to;
- if(to.vector == null){
+ if (to.vector == null) {
to.vector = to.addOrGet(null, vector.getField().getType(), vector.getClass());
to.vector.allocateNew();
}
this.vectorTransfer = vector.makeTransferPair(to.vector);
}
- private RepeatedListTransferPair(SchemaPath path){
+ private RepeatedListTransferPair(SchemaPath path) {
this.to = new RepeatedListVector(path, allocator);
vectorTransfer = vector.getTransferPair();
this.to.vector = vectorTransfer.getTo();
@@ -315,18 +323,20 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
throw new UnsupportedOperationException();
}
-
@Override
public boolean copyValueSafe(int from, int to) {
RepeatedListHolder holder = new RepeatedListHolder();
accessor.get(from, holder);
int newIndex = this.to.offsets.getAccessor().get(to);
//todo: make this a bulk copy.
- for(int i = holder.start; i < holder.end; i++, newIndex++){
- if(!vectorTransfer.copyValueSafe(i, newIndex)) return false;
+ for (int i = holder.start; i < holder.end; i++, newIndex++) {
+ if (!vectorTransfer.copyValueSafe(i, newIndex)) {
+ return false;
+ }
+ }
+ if (!this.to.offsets.getMutator().setSafe(to + 1, newIndex)) {
+ return false;
}
- if(!this.to.offsets.getMutator().setSafe(to + 1, newIndex)) return false;
-
this.to.lastSet++;
return true;
}
@@ -335,7 +345,9 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
@Override
public TransferPair makeTransferPair(ValueVector to) {
- if(!(to instanceof RepeatedListVector ) ) throw new IllegalArgumentException("You can't make a transfer pair from an incompatible .");
+      if (!(to instanceof RepeatedListVector)) {
+ throw new IllegalArgumentException("You can't make a transfer pair from an incompatible .");
+ }
return new RepeatedListTransferPair( (RepeatedListVector) to);
}
@@ -346,7 +358,9 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
@Override
public int getValueCapacity() {
- if(vector == null) return offsets.getValueCapacity() - 1;
+ if (vector == null) {
+ return offsets.getValueCapacity() - 1;
+ }
return Math.min(offsets.getValueCapacity() - 1, vector.getValueCapacity());
}
@@ -360,7 +374,7 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
return ArrayUtils.addAll(offsets.getBuffers(clear), vector.getBuffers(clear));
}
- private void setVector(ValueVector v){
+ private void setVector(ValueVector v) {
field.addChild(v.getField());
this.vector = v;
}
@@ -372,11 +386,11 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
int bufOffset = offsets.load(metadata.getValueCount()+1, buf);
MaterializedField fieldDef = MaterializedField.create(childField);
- if(vector == null) {
+ if (vector == null) {
setVector(TypeHelper.getNewVector(fieldDef, allocator));
}
- if (childField.getValueCount() == 0){
+ if (childField.getValueCount() == 0) {
vector.clear();
} else {
vector.load(childField, buf.slice(bufOffset, childField.getBufferLength()));
@@ -393,9 +407,9 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
.build();
}
- private void populateEmpties(int groupCount){
+ private void populateEmpties(int groupCount) {
int previousEnd = offsets.getAccessor().get(lastSet + 1);
- for(int i = lastSet + 2; i <= groupCount; i++){
+ for (int i = lastSet + 2; i <= groupCount; i++) {
offsets.getMutator().setSafe(i, previousEnd);
}
lastSet = groupCount - 1;
@@ -410,7 +424,7 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
Preconditions.checkArgument(name == null);
- if(vector == null){
+ if (vector == null) {
vector = TypeHelper.getNewVector(MaterializedField.create(field.getPath().getUnindexedArrayChild(), type), allocator);
}
return typeify(vector, clazz);
@@ -418,7 +432,9 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
@Override
public <T extends ValueVector> T get(String name, Class<T> clazz) {
- if(name != null) return null;
+ if (name != null) {
+ return null;
+ }
return typeify(vector, clazz);
}
@@ -437,9 +453,10 @@ public class RepeatedListVector extends AbstractContainerVector implements Repea
@Override
public VectorWithOrdinal getVectorWithOrdinal(String name) {
- if(name != null) return null;
+ if (name != null) {
+ return null;
+ }
return new VectorWithOrdinal(vector, 0);
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
index df5fdaa5f..7a7b11dda 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
@@ -68,7 +68,7 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
private final MaterializedField field;
private int lastSet = -1;
- public RepeatedMapVector(MaterializedField field, BufferAllocator allocator){
+ public RepeatedMapVector(MaterializedField field, BufferAllocator allocator) {
this.field = field;
this.allocator = allocator;
this.offsets = new UInt4Vector(null, allocator);
@@ -80,19 +80,19 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
clear();
offsets.allocateNew(parentValueCount+1);
offsets.zeroVector();
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
AllocationHelper.allocatePrecomputedChildCount(v, parentValueCount, 50, childValueCount);
}
mutator.reset();
accessor.reset();
}
- public Iterator<String> fieldNameIterator(){
+ public Iterator<String> fieldNameIterator() {
return vectors.keySet().iterator();
}
@Override
- public int size(){
+ public int size() {
return vectors.size();
}
@@ -117,36 +117,38 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
ValueVector v = vectors.get(name);
- if(v == null){
+ if (v == null) {
v = TypeHelper.getNewVector(field.getPath(), name, allocator, type);
Preconditions.checkNotNull(v, String.format("Failure to create vector of type %s.", type));
put(name, v);
}
return typeify(v, clazz);
-
}
@Override
public <T extends ValueVector> T get(String name, Class<T> clazz) {
ValueVector v = vectors.get(name);
- if(v == null) throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ if (v == null) {
+ throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ }
return typeify(v, clazz);
}
@Override
public int getBufferSize() {
- if(accessor.getValueCount() == 0 || vectors.isEmpty()) return 0;
+ if (accessor.getValueCount() == 0 || vectors.isEmpty()) {
+ return 0;
+ }
long buffer = offsets.getBufferSize();
- for(ValueVector v : this){
+ for (ValueVector v : this) {
buffer += v.getBufferSize();
}
-
return (int) buffer;
}
@Override
public void close() {
- for(ValueVector v : this){
+ for (ValueVector v : this) {
v.close();
}
}
@@ -171,29 +173,32 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
return new MapTransferPair( (RepeatedMapVector) to);
}
- MapSingleCopier makeSingularCopier(MapVector to){
+ MapSingleCopier makeSingularCopier(MapVector to) {
return new MapSingleCopier(to);
}
-
- class MapSingleCopier{
+ class MapSingleCopier {
private final TransferPair[] pairs;
final RepeatedMapVector from = RepeatedMapVector.this;
- public MapSingleCopier(MapVector to){
+ public MapSingleCopier(MapVector to) {
pairs = new TransferPair[vectors.size()];
int i =0;
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
int preSize = to.vectors.size();
ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
- if(to.vectors.size() != preSize) v.allocateNew();
+ if (to.vectors.size() != preSize) {
+ v.allocateNew();
+ }
pairs[i++] = e.getValue().makeTransferPair(v);
}
}
- public boolean copySafe(int fromSubIndex, int toIndex){
- for(TransferPair p : pairs){
- if(!p.copyValueSafe(fromSubIndex, toIndex)) return false;
+ public boolean copySafe(int fromSubIndex, int toIndex) {
+ for (TransferPair p : pairs) {
+ if (!p.copyValueSafe(fromSubIndex, toIndex)) {
+ return false;
+ }
}
return true;
}
@@ -206,15 +211,21 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
@Override
public void allocateNew() throws OutOfMemoryRuntimeException {
- if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ if (!allocateNewSafe()) {
+ throw new OutOfMemoryRuntimeException();
+ }
}
@Override
public boolean allocateNewSafe() {
- if(!offsets.allocateNewSafe()) return false;
+ if (!offsets.allocateNewSafe()) {
+ return false;
+ }
offsets.zeroVector();
- for(ValueVector v : vectors.values()){
- if(!v.allocateNewSafe()) return false;
+ for (ValueVector v : vectors.values()) {
+ if (!v.allocateNewSafe()) {
+ return false;
+ }
}
return true;
}
@@ -225,11 +236,11 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
private final RepeatedMapVector to;
private final RepeatedMapVector from = RepeatedMapVector.this;
- public MapTransferPair(SchemaPath path){
+ public MapTransferPair(SchemaPath path) {
RepeatedMapVector v = new RepeatedMapVector(MaterializedField.create(path, TYPE), allocator);
pairs = new TransferPair[vectors.size()];
int i =0;
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
TransferPair otherSide = e.getValue().getTransferPair();
v.put(e.getKey(), otherSide.getTo());
pairs[i++] = otherSide;
@@ -237,14 +248,16 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
this.to = v;
}
- public MapTransferPair(RepeatedMapVector to){
+ public MapTransferPair(RepeatedMapVector to) {
this.to = to;
pairs = new TransferPair[vectors.size()];
int i =0;
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
int preSize = to.vectors.size();
ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
- if(preSize != to.vectors.size()) v.allocateNew();
+ if (preSize != to.vectors.size()) {
+ v.allocateNew();
+ }
pairs[i++] = e.getValue().makeTransferPair(v);
}
}
@@ -253,7 +266,7 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
@Override
public void transfer() {
offsets.transferTo(to.offsets);
- for(TransferPair p : pairs){
+ for (TransferPair p : pairs) {
p.transfer();
}
clear();
@@ -270,12 +283,16 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
accessor.get(from, holder);
int newIndex = this.to.offsets.getAccessor().get(to);
//todo: make these bulk copies
- for(int i = holder.start; i < holder.end; i++, newIndex++){
- for(TransferPair p : pairs){
- if(!p.copyValueSafe(i, newIndex)) return false;
+ for (int i = holder.start; i < holder.end; i++, newIndex++) {
+ for (TransferPair p : pairs) {
+ if (!p.copyValueSafe(i, newIndex)) {
+ return false;
+ }
}
}
- if(!this.to.offsets.getMutator().setSafe(to+1, newIndex)) return false;
+ if (!this.to.offsets.getMutator().setSafe(to+1, newIndex)) {
+ return false;
+ }
this.to.lastSet++;
return true;
}
@@ -290,8 +307,8 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
transient private MapTransferPair ephPair;
- public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedMapVector from){
- if(ephPair == null || ephPair.from != from){
+ public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedMapVector from) {
+ if (ephPair == null || ephPair.from != from) {
ephPair = (MapTransferPair) from.makeTransferPair(this);
}
return ephPair.copyValueSafe(fromIndex, thisIndex);
@@ -311,8 +328,8 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
public DrillBuf[] getBuffers(boolean clear) {
List<DrillBuf> bufs = Lists.newArrayList(offsets.getBuffers(clear));
- for(ValueVector v : vectors.values()){
- for(DrillBuf b : v.getBuffers(clear)){
+ for (ValueVector v : vectors.values()) {
+ for (DrillBuf b : v.getBuffers(clear)) {
bufs.add(b);
}
}
@@ -328,14 +345,12 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
for (SerializedField fmd : fields) {
MaterializedField fieldDef = MaterializedField.create(fmd);
-
ValueVector v = vectors.get(fieldDef.getLastName());
- if(v == null) {
+ if (v == null) {
// if we arrive here, we didn't have a matching vector.
-
v = TypeHelper.getNewVector(fieldDef, allocator);
}
- if (fmd.getValueCount() == 0){
+ if (fmd.getValueCount() == 0) {
v.clear();
} else {
v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
@@ -351,16 +366,15 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
.getAsBuilder() //
.setBufferLength(getBufferSize()) //
.setValueCount(accessor.getValueCount());
-
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
b.addChild(v.getMetadata());
}
return b.build();
}
- protected void put(String name, ValueVector vv){
+ protected void put(String name, ValueVector vv) {
int ordinal = vectors.size();
- if(vectors.put(name, vv) != null){
+ if (vectors.put(name, vv) != null) {
throw new IllegalStateException();
}
vectorIds.put(name, new VectorWithOrdinal(vv, ordinal));
@@ -368,7 +382,6 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
field.addChild(vv.getField());
}
-
@Override
public Mutator getMutator() {
return mutator;
@@ -380,13 +393,13 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
public Object getObject(int index) {
List<Object> l = new JsonStringArrayList();
int end = offsets.getAccessor().get(index+1);
- for(int i = offsets.getAccessor().get(index); i < end; i++){
+ for (int i = offsets.getAccessor().get(index); i < end; i++) {
Map<String, Object> vv = Maps.newLinkedHashMap();
- for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
ValueVector v = e.getValue();
String k = e.getKey();
Object value = v.getAccessor().getObject(i);
- if(value != null){
+ if (value != null) {
vv.put(k,value);
}
}
@@ -400,26 +413,26 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
return offsets.getAccessor().getValueCount() - 1;
}
- public void get(int index, RepeatedMapHolder holder){
+ public void get(int index, RepeatedMapHolder holder) {
assert index <= getValueCapacity();
holder.start = offsets.getAccessor().get(index);
holder.end = offsets.getAccessor().get(index+1);
}
- public void get(int index, ComplexHolder holder){
+ public void get(int index, ComplexHolder holder) {
FieldReader reader = getReader();
reader.setPosition(index);
holder.reader = reader;
}
- public void get(int index, int arrayIndex, ComplexHolder holder){
+ public void get(int index, int arrayIndex, ComplexHolder holder) {
RepeatedMapHolder h = new RepeatedMapHolder();
get(index, h);
int offset = h.start + arrayIndex;
- if(offset >= h.end){
+ if (offset >= h.end) {
holder.reader = NullReader.INSTANCE;
- }else{
+ } else {
reader.setSinglePosition(index, arrayIndex);
holder.reader = reader;
}
@@ -445,9 +458,9 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
}
}
- private void populateEmpties(int groupCount){
+ private void populateEmpties(int groupCount) {
int previousEnd = offsets.getAccessor().get(lastSet + 1);
- for(int i = lastSet + 2; i <= groupCount; i++){
+ for (int i = lastSet + 2; i <= groupCount; i++) {
offsets.getMutator().setSafe(i, previousEnd);
}
lastSet = groupCount - 1;
@@ -461,10 +474,12 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
offsets.getMutator().set(index+1, offsets.getAccessor().get(index));
}
- public int add(int index){
+ public int add(int index) {
int nextOffset = offsets.getAccessor().get(index+1);
boolean success = offsets.getMutator().setSafe(index+1, nextOffset+1);
- if(!success) return -1;
+ if (!success) {
+ return -1;
+ }
return nextOffset;
}
@@ -473,7 +488,7 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
populateEmpties(groupCount);
offsets.getMutator().setValueCount(groupCount+1);
int valueCount = offsets.getAccessor().get(groupCount);
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
v.getMutator().setValueCount(valueCount);
}
}
@@ -508,7 +523,7 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
public void clear() {
lastSet = 0;
offsets.clear();
- for(ValueVector v : vectors.values()){
+ for (ValueVector v : vectors.values()) {
v.clear();;
}
}
@@ -518,7 +533,6 @@ public class RepeatedMapVector extends AbstractContainerVector implements Repeat
throw new UnsupportedOperationException();
}
-
@Override
public VectorWithOrdinal getVectorWithOrdinal(String name) {
return vectorIds.get(name);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
index 99f601056..f5ed3a019 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
@@ -22,10 +22,13 @@ import java.util.Arrays;
public class StateTool {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StateTool.class);
- public static <T extends Enum<?>> void check(T currentState, T... expectedStates){
- for(T s : expectedStates){
- if(s == currentState) return;
+ public static <T extends Enum<?>> void check(T currentState, T... expectedStates) {
+ for (T s : expectedStates) {
+ if (s == currentState) {
+ return;
+ }
}
throw new IllegalArgumentException(String.format("Expected to be in one of these states %s but was actuall in state %s", Arrays.toString(expectedStates), currentState));
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
index eab39b5c8..4e12b8b00 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
@@ -86,19 +86,21 @@ public class JsonReader {
}
private boolean containsStar() {
- for (SchemaPath expr : this.columns){
- if (expr.getRootSegment().getPath().equals("*"))
+ for (SchemaPath expr : this.columns) {
+ if (expr.getRootSegment().getPath().equals("*")) {
return true;
+ }
}
return false;
}
- private boolean fieldSelected(SchemaPath field){
- if (starRequested)
+ private boolean fieldSelected(SchemaPath field) {
+ if (starRequested) {
return true;
+ }
int i = 0;
- for (SchemaPath expr : this.columns){
- if ( expr.contains(field)){
+ for (SchemaPath expr : this.columns) {
+ if (expr.contains(field)) {
columnsFound[i] = true;
return true;
}
@@ -122,7 +124,9 @@ public class JsonReader {
parser = factory.createJsonParser(reader);
reader.mark(MAX_RECORD_SIZE);
JsonToken t = parser.nextToken();
- while(!parser.hasCurrentToken()) t = parser.nextToken();
+ while (!parser.hasCurrentToken()) {
+ t = parser.nextToken();
+ }
switch (t) {
@@ -146,7 +150,7 @@ public class JsonReader {
}
private void consumeEntireNextValue(JsonParser parser) throws IOException {
- switch(parser.nextToken()){
+ switch (parser.nextToken()) {
case START_ARRAY:
case START_OBJECT:
int arrayAndObjectCounter = 1;
@@ -176,9 +180,11 @@ public class JsonReader {
private void writeData(MapWriter map) throws JsonParseException, IOException {
//
map.start();
- outside: while(true){
+ outside: while (true) {
JsonToken t = parser.nextToken();
- if(t == JsonToken.NOT_AVAILABLE || t == JsonToken.END_OBJECT) return;
+ if (t == JsonToken.NOT_AVAILABLE || t == JsonToken.END_OBJECT) {
+ return;
+ }
assert t == JsonToken.FIELD_NAME : String.format("Expected FIELD_NAME but got %s.", t.name());
final String fieldName = parser.getText();
@@ -193,7 +199,7 @@ public class JsonReader {
continue outside;
}
- switch(parser.nextToken()){
+ switch(parser.nextToken()) {
case START_ARRAY:
writeData(map.list(fieldName));
break;
@@ -258,13 +264,12 @@ public class JsonReader {
throw new IllegalStateException("Unexpected token " + parser.getCurrentToken());
}
-
}
map.end();
}
- private void ensure(int length){
+ private void ensure(int length) {
workBuf = workBuf.reallocIfNeeded(length);
}
@@ -295,9 +300,9 @@ public class JsonReader {
private void writeData(ListWriter list) throws JsonParseException, IOException {
list.start();
- outside: while(true){
+ outside: while (true) {
- switch(parser.nextToken()){
+ switch (parser.nextToken()) {
case START_ARRAY:
writeData(list.list());
break;
@@ -364,7 +369,6 @@ public class JsonReader {
}
list.end();
-
}
public DrillBuf getWorkBuf() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReaderWithState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReaderWithState.java
index 0636db6c1..ef995f838 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReaderWithState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReaderWithState.java
@@ -57,9 +57,9 @@ public class JsonReaderWithState {
public WriteState write(ComplexWriter writer) throws JsonParseException, IOException {
if (reader == null) {
reader = splitter.getNextReader();
- if (reader == null)
+ if (reader == null) {
return WriteState.NO_MORE;
-
+ }
}
jsonReader.write(reader, writer);
@@ -72,4 +72,5 @@ public class JsonReaderWithState {
return WriteState.WRITE_SUCCEED;
}
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
index 83e644e19..d21100b39 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
@@ -1,4 +1,3 @@
-
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
@@ -37,27 +36,31 @@ public class RepeatedListReaderImpl extends AbstractFieldReader{
private final RepeatedListVector container;
private FieldReader reader;
- public RepeatedListReaderImpl(String name, RepeatedListVector container){
+ public RepeatedListReaderImpl(String name, RepeatedListVector container) {
super();
this.name = name;
this.container = container;
}
@Override
- public MajorType getType(){
+ public MajorType getType() {
return TYPE;
}
@Override
- public void copyAsValue(ListWriter writer){
- if(currentOffset == NO_VALUES) return;
+ public void copyAsValue(ListWriter writer) {
+ if (currentOffset == NO_VALUES) {
+ return;
+ }
RepeatedListWriter impl = (RepeatedListWriter) writer;
impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), container));
}
@Override
- public void copyAsField(String name, MapWriter writer){
- if(currentOffset == NO_VALUES) return;
+ public void copyAsField(String name, MapWriter writer) {
+ if (currentOffset == NO_VALUES) {
+ return;
+ }
RepeatedListWriter impl = (RepeatedListWriter) writer.list(name);
impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), container));
}
@@ -66,31 +69,35 @@ public class RepeatedListReaderImpl extends AbstractFieldReader{
private int maxOffset;
@Override
- public int size(){
+ public int size() {
return maxOffset - currentOffset;
}
@Override
- public void setPosition(int index){
+ public void setPosition(int index) {
super.setPosition(index);
RepeatedListHolder h = new RepeatedListHolder();
container.getAccessor().get(index, h);
- if(h.start == h.end){
+ if (h.start == h.end) {
currentOffset = NO_VALUES;
- }else{
+ } else {
currentOffset = h.start-1;
maxOffset = h.end;
- if(reader != null) reader.setPosition(currentOffset);
+ if (reader != null) {
+ reader.setPosition(currentOffset);
+ }
}
}
@Override
- public boolean next(){
- if(currentOffset +1 < maxOffset){
+ public boolean next() {
+ if (currentOffset +1 < maxOffset) {
currentOffset++;
- if(reader != null) reader.setPosition(currentOffset);
+ if (reader != null) {
+ reader.setPosition(currentOffset);
+ }
return true;
- }else{
+ } else {
currentOffset = NO_VALUES;
return false;
}
@@ -102,22 +109,20 @@ public class RepeatedListReaderImpl extends AbstractFieldReader{
}
@Override
- public FieldReader reader(){
- if(reader == null){
+ public FieldReader reader() {
+ if (reader == null) {
reader = container.get(name, ValueVector.class).getAccessor().getReader();
- if (currentOffset == NO_VALUES)
+ if (currentOffset == NO_VALUES) {
reader = NullReader.INSTANCE;
- else
+ } else {
reader.setPosition(currentOffset);
+ }
}
return reader;
}
- public boolean isSet(){
+ public boolean isSet() {
return true;
}
-
}
-
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
index faf3508c1..e57e37c3f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
@@ -1,5 +1,3 @@
-
-
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
@@ -20,7 +18,6 @@
******************************************************************************/
package org.apache.drill.exec.vector.complex.impl;
-
import java.util.Map;
import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -43,20 +40,20 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
this.vector = vector;
}
- private void setChildrenPosition(int index){
- for(FieldReader r : fields.values()){
+ private void setChildrenPosition(int index) {
+ for (FieldReader r : fields.values()) {
r.setPosition(index);
}
}
@Override
- public FieldReader reader(String name){
+ public FieldReader reader(String name) {
FieldReader reader = fields.get(name);
- if(reader == null){
+ if (reader == null) {
ValueVector child = vector.get(name, ValueVector.class);
- if(child == null){
+ if (child == null) {
reader = NullReader.INSTANCE;
- }else{
+ } else {
reader = child.getAccessor().getReader();
}
fields.put(name, reader);
@@ -67,8 +64,9 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
@Override
public FieldReader reader() {
- if (currentOffset == NO_VALUES)
+ if (currentOffset == NO_VALUES) {
return NullReader.INSTANCE;
+ }
setChildrenPosition(currentOffset);
return new SingleLikeRepeatedMapReaderImpl(vector, this);
@@ -78,7 +76,7 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
private int maxOffset;
@Override
- public int size(){
+ public int size() {
if (isNull()) {
return 0;
}
@@ -86,26 +84,26 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
}
@Override
- public void setPosition(int index){
+ public void setPosition(int index) {
super.setPosition(index);
RepeatedMapHolder h = new RepeatedMapHolder();
vector.getAccessor().get(index, h);
- if(h.start == h.end){
+ if (h.start == h.end) {
currentOffset = NO_VALUES;
- }else{
+ } else {
currentOffset = h.start-1;
maxOffset = h.end;
setChildrenPosition(currentOffset);
}
}
- public void setSinglePosition(int index, int childIndex){
+ public void setSinglePosition(int index, int childIndex) {
super.setPosition(index);
RepeatedMapHolder h = new RepeatedMapHolder();
vector.getAccessor().get(index, h);
- if(h.start == h.end){
+ if (h.start == h.end) {
currentOffset = NO_VALUES;
- }else{
+ } else {
int singleOffset = h.start + childIndex;
assert singleOffset < h.end;
currentOffset = singleOffset;
@@ -115,11 +113,11 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
}
@Override
- public boolean next(){
- if(currentOffset +1 < maxOffset){
+ public boolean next() {
+ if (currentOffset +1 < maxOffset) {
setChildrenPosition(++currentOffset);
return true;
- }else{
+ } else {
currentOffset = NO_VALUES;
return false;
}
@@ -135,12 +133,12 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
}
@Override
- public MajorType getType(){
+ public MajorType getType() {
return vector.getField().getType();
}
@Override
- public java.util.Iterator<String> iterator(){
+ public java.util.Iterator<String> iterator() {
return vector.fieldNameIterator();
}
@@ -150,26 +148,29 @@ public class RepeatedMapReaderImpl extends AbstractFieldReader{
}
@Override
- public void copyAsValue(MapWriter writer){
- if(currentOffset == NO_VALUES) return;
+ public void copyAsValue(MapWriter writer) {
+ if (currentOffset == NO_VALUES) {
+ return;
+ }
RepeatedMapWriter impl = (RepeatedMapWriter) writer;
impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
}
- public void copyAsValueSingle(MapWriter writer){
- if(currentOffset == NO_VALUES) return;
+ public void copyAsValueSingle(MapWriter writer) {
+ if (currentOffset == NO_VALUES) {
+ return;
+ }
SingleMapWriter impl = (SingleMapWriter) writer;
impl.inform(impl.container.copyFromSafe(currentOffset, impl.idx(), vector));
}
@Override
- public void copyAsField(String name, MapWriter writer){
- if(currentOffset == NO_VALUES) return;
+ public void copyAsField(String name, MapWriter writer) {
+ if (currentOffset == NO_VALUES) {
+ return;
+ }
RepeatedMapWriter impl = (RepeatedMapWriter) writer.map(name);
impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
}
-
}
-
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
index 872c5e32d..c2284ec5b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
@@ -29,13 +29,6 @@ import org.apache.drill.exec.vector.complex.reader.FieldReader;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
-
-
-
-
-
-
-
@SuppressWarnings("unused")
public class SingleListReaderImpl extends AbstractFieldReader{
@@ -44,22 +37,24 @@ public class SingleListReaderImpl extends AbstractFieldReader{
private final AbstractContainerVector container;
private FieldReader reader;
- public SingleListReaderImpl(String name, AbstractContainerVector container){
+ public SingleListReaderImpl(String name, AbstractContainerVector container) {
super();
this.name = name;
this.container = container;
}
@Override
- public MajorType getType(){
+ public MajorType getType() {
return TYPE;
}
@Override
- public void setPosition(int index){
+ public void setPosition(int index) {
super.setPosition(index);
- if(reader != null) reader.setPosition(index);
+ if (reader != null) {
+ reader.setPosition(index);
+ }
}
@Override
@@ -68,8 +63,8 @@ public class SingleListReaderImpl extends AbstractFieldReader{
}
@Override
- public FieldReader reader(){
- if(reader == null){
+ public FieldReader reader() {
+ if (reader == null) {
reader = container.get(name, ValueVector.class).getAccessor().getReader();
setPosition(idx());
}
@@ -82,16 +77,13 @@ public class SingleListReaderImpl extends AbstractFieldReader{
}
@Override
- public void copyAsValue(ListWriter writer){
+ public void copyAsValue(ListWriter writer) {
throw new UnsupportedOperationException("Generic list copying not yet supported. Please resolve to typed list.");
}
@Override
- public void copyAsField(String name, MapWriter writer){
+ public void copyAsField(String name, MapWriter writer) {
throw new UnsupportedOperationException("Generic list copying not yet supported. Please resolve to typed list.");
}
-
-
}
-
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/ErrorHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/ErrorHelper.java
index 5679a4f82..51b4e32fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/ErrorHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/ErrorHelper.java
@@ -35,7 +35,7 @@ public class ErrorHelper {
// }
public static DrillPBError logAndConvertError(DrillbitEndpoint endpoint, String message, Throwable t, Logger logger,
- boolean verbose){
+ boolean verbose) {
String id = UUID.randomUUID().toString();
DrillPBError.Builder builder = DrillPBError.newBuilder();
builder.setEndpoint(endpoint);
@@ -65,7 +65,9 @@ public class ErrorHelper {
while (true) {
rootCause = t;
if (t.getCause() == null || t.getCause() == t
- || (t instanceof SqlParseException && t.getCause() instanceof ParseException)) break;
+ || (t instanceof SqlParseException && t.getCause() instanceof ParseException)) {
+ break;
+ }
t = t.getCause();
}
@@ -78,4 +80,5 @@ public class ErrorHelper {
return builder.build();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlHandlerImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlHandlerImpl.java
index a7f36664b..0ac606c4e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlHandlerImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlHandlerImpl.java
@@ -58,10 +58,11 @@ public class ControlHandlerImpl implements ControlMessageHandler {
this.bee = bee;
}
-
@Override
public Response handle(ControlConnection connection, int rpcType, ByteBuf pBody, ByteBuf dBody) throws RpcException {
- if(RpcConstants.EXTRA_DEBUGGING) logger.debug("Received bit com message of type {}", rpcType);
+ if (RpcConstants.EXTRA_DEBUGGING) {
+ logger.debug("Received bit com message of type {}", rpcType);
+ }
switch (rpcType) {
@@ -112,8 +113,6 @@ public class ControlHandlerImpl implements ControlMessageHandler {
}
-
-
/* (non-Javadoc)
* @see org.apache.drill.exec.work.batch.BitComHandler#startNewRemoteFragment(org.apache.drill.exec.proto.ExecProtos.PlanFragment)
*/
@@ -124,19 +123,18 @@ public class ControlHandlerImpl implements ControlMessageHandler {
ControlTunnel tunnel = bee.getContext().getController().getTunnel(fragment.getForeman());
NonRootStatusReporter listener = new NonRootStatusReporter(context, tunnel);
- try{
+ try {
FragmentRoot rootOperator = bee.getContext().getPlanReader().readFragmentOperator(fragment.getFragmentJson());
FragmentExecutor fr = new FragmentExecutor(context, bee, rootOperator, listener);
bee.addFragmentRunner(fr);
} catch (Exception e) {
listener.fail(fragment.getHandle(), "Failure due to uncaught exception", e);
} catch (OutOfMemoryError t) {
- if(t.getMessage().startsWith("Direct buffer")){
+ if (t.getMessage().startsWith("Direct buffer")) {
listener.fail(fragment.getHandle(), "Failure due to error", t);
- }else{
+ } else {
throw t;
}
-
}
}
@@ -145,16 +143,17 @@ public class ControlHandlerImpl implements ControlMessageHandler {
* @see org.apache.drill.exec.work.batch.BitComHandler#cancelFragment(org.apache.drill.exec.proto.ExecProtos.FragmentHandle)
*/
@Override
- public Ack cancelFragment(FragmentHandle handle){
+ public Ack cancelFragment(FragmentHandle handle) {
FragmentManager manager = bee.getContext().getWorkBus().getFragmentManager(handle);
-
- if(manager != null){
+ if (manager != null) {
// try remote fragment cancel.
manager.cancel();
- }else{
+ } else {
// then try local cancel.
FragmentExecutor runner = bee.getFragmentRunner(handle);
- if(runner != null) runner.cancel();
+ if (runner != null) {
+ runner.cancel();
+ }
}
return Acks.OK;
@@ -164,7 +163,7 @@ public class ControlHandlerImpl implements ControlMessageHandler {
FragmentManager manager = bee.getContext().getWorkBus().getFragmentManager(finishedReceiver.getSender());
FragmentExecutor executor;
- if(manager != null) {
+ if (manager != null) {
executor = manager.getRunnable();
} else {
// then try local cancel.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/IncomingBuffers.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/IncomingBuffers.java
index 1d6a70907..5fa9ce0e3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/IncomingBuffers.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/IncomingBuffers.java
@@ -54,7 +54,7 @@ public class IncomingBuffers implements AutoCloseable {
// Determine the total number of incoming streams that will need to be completed before we are finished.
int totalStreams = 0;
- for(DataCollector bc : fragCounts.values()){
+ for (DataCollector bc : fragCounts.values()) {
totalStreams += bc.getTotalIncomingFragments();
}
assert totalStreams >= remainingRequired.get() : String.format("Total Streams %d should be more than the minimum number of streams to commence (%d). It isn't.", totalStreams, remainingRequired.get());
@@ -74,14 +74,16 @@ public class IncomingBuffers implements AutoCloseable {
}
return false;
}
- if(batch.getHeader().getIsLastBatch()){
+ if (batch.getHeader().getIsLastBatch()) {
streamsRemaining.decrementAndGet();
}
int sendMajorFragmentId = batch.getHeader().getSendingMajorFragmentId();
DataCollector fSet = fragCounts.get(sendMajorFragmentId);
- if (fSet == null) throw new FragmentSetupException(String.format("We received a major fragment id that we were not expecting. The id was %d. %s", sendMajorFragmentId, Arrays.toString(fragCounts.values().toArray())));
+ if (fSet == null) {
+ throw new FragmentSetupException(String.format("We received a major fragment id that we were not expecting. The id was %d. %s", sendMajorFragmentId, Arrays.toString(fragCounts.values().toArray())));
+ }
try {
- synchronized(this){
+ synchronized (this) {
boolean decremented = fSet.batchArrived(batch.getHeader().getSendingMinorFragmentId(), batch);
// we should only return true if remaining required has been decremented and is currently equal to zero.
@@ -94,11 +96,13 @@ public class IncomingBuffers implements AutoCloseable {
public int getRemainingRequired() {
int rem = remainingRequired.get();
- if (rem < 0) return 0;
+ if (rem < 0) {
+ return 0;
+ }
return rem;
}
- public RawBatchBuffer[] getBuffers(int senderMajorFragmentId){
+ public RawBatchBuffer[] getBuffers(int senderMajorFragmentId) {
return fragCounts.get(senderMajorFragmentId).getBuffers();
}
@@ -124,7 +128,7 @@ public class IncomingBuffers implements AutoCloseable {
@Override
public Void visitOp(PhysicalOperator op, Map<Integer, DataCollector> value) throws RuntimeException {
- for(PhysicalOperator o : op){
+ for (PhysicalOperator o : op) {
o.accept(this, value);
}
return null;
@@ -132,7 +136,7 @@ public class IncomingBuffers implements AutoCloseable {
}
- public boolean isDone(){
+ public boolean isDone() {
return streamsRemaining.get() < 1;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
index a4ed4d69c..bb43b1ee6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
@@ -66,10 +66,10 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
return;
}
buffer.add(batch);
- if(buffer.size() == softlimit){
+ if (buffer.size() == softlimit) {
overlimit.set(true);
readController.enqueueResponse(batch.getSender());
- }else{
+ } else {
batch.sendOk();
}
}
@@ -93,14 +93,16 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
}
RawFragmentBatch batch;
while ((batch = buffer.poll()) != null) {
- if (batch.getBody() != null) batch.getBody().release();
+ if (batch.getBody() != null) {
+ batch.getBody().release();
+ }
}
}
}
@Override
public void kill(FragmentContext context) {
- while(!buffer.isEmpty()){
+ while (!buffer.isEmpty()) {
RawFragmentBatch batch = buffer.poll();
batch.getBody().release();
}
@@ -115,7 +117,7 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
}
@Override
- public RawFragmentBatch getNext(){
+ public RawFragmentBatch getNext() {
if (outOfMemory.get() && buffer.size() < 10) {
logger.debug("Setting autoread true");
@@ -128,7 +130,7 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
b = buffer.poll();
// if we didn't get a buffer, block on waiting for buffer.
- if(b == null && (!finished || !buffer.isEmpty())){
+ if (b == null && (!finished || !buffer.isEmpty())) {
try {
b = buffer.take();
} catch (InterruptedException e) {
@@ -143,8 +145,8 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
// if we are in the overlimit condition and aren't finished, check if we've passed the start limit. If so, turn off the overlimit condition and set auto read to true (start reading from socket again).
- if(!finished && overlimit.get()){
- if(buffer.size() == startlimit){
+ if (!finished && overlimit.get()) {
+ if (buffer.size() == startlimit) {
overlimit.set(false);
readController.flushResponses();
}
@@ -167,5 +169,4 @@ public class UnlimitedRawBatchBuffer implements RawBatchBuffer{
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index 05fc2b1bc..1e5d8b8f4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -108,14 +108,14 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
this.queryRequest = queryRequest;
this.context = new QueryContext(connection.getSession(), queryId, dContext);
this.queuingEnabled = context.getOptions().getOption(ExecConstants.ENABLE_QUEUE_KEY).bool_val;
- if(queuingEnabled){
+ if (queuingEnabled) {
int smallQueue = context.getOptions().getOption(ExecConstants.SMALL_QUEUE_KEY).num_val.intValue();
int largeQueue = context.getOptions().getOption(ExecConstants.LARGE_QUEUE_KEY).num_val.intValue();
this.largeSemaphore = dContext.getClusterCoordinator().getSemaphore("query.large", largeQueue);
this.smallSemaphore = dContext.getClusterCoordinator().getSemaphore("query.small", smallQueue);
this.queueThreshold = context.getOptions().getOption(ExecConstants.QUEUE_THRESHOLD_KEY).num_val;
this.queueTimeout = context.getOptions().getOption(ExecConstants.QUEUE_TIMEOUT_KEY).num_val;
- }else{
+ } else {
this.largeSemaphore = null;
this.smallSemaphore = null;
this.queueThreshold = 0;
@@ -138,8 +138,8 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
return context;
}
- private boolean isFinished(){
- switch(state.getState()){
+ private boolean isFinished() {
+ switch(state.getState()) {
case PENDING:
case RUNNING:
return false;
@@ -150,12 +150,13 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
}
private void fail(String message, Throwable t) {
- if(isFinished()){
+ if(isFinished()) {
logger.error("Received a failure message query finished of: {}", message, t);
}
if (!state.updateState(QueryState.RUNNING, QueryState.FAILED)) {
- if (!state.updateState(QueryState.PENDING, QueryState.FAILED))
- logger.warn("Tried to update query state to FAILED, but was not RUNNING");
+ if (!state.updateState(QueryState.PENDING, QueryState.FAILED)) {
+ logger.warn("Tried to update query state to FAILED, but was not RUNNING");
+ }
}
boolean verbose = getContext().getOptions().getOption(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY).bool_val;
@@ -171,7 +172,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
}
public void cancel() {
- if(isFinished()){
+ if (isFinished()) {
return;
}
@@ -182,7 +183,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
cleanupAndSendResult(result);
}
- void cleanupAndSendResult(QueryResult result){
+ void cleanupAndSendResult(QueryResult result) {
bee.retireForeman(this);
initiatingClient.sendResult(new ResponseSendListener(), new QueryWritableBatch(result), true);
state.updateState(QueryState.RUNNING, QueryState.COMPLETED);
@@ -206,7 +207,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
Thread.currentThread().setName(QueryIdHelper.getQueryId(queryId) + ":foreman");
fragmentManager.getStatus().setStartTime(System.currentTimeMillis());
// convert a run query request into action
- try{
+ try {
switch (queryRequest.getType()) {
case LOGICAL:
parseAndRunLogicalPlan(queryRequest.getPlan());
@@ -220,23 +221,23 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
default:
throw new UnsupportedOperationException();
}
- }catch(AssertionError | Exception ex){
+ } catch (AssertionError | Exception ex) {
fail("Failure while setting up Foreman.", ex);
- }catch(OutOfMemoryError e){
+ } catch (OutOfMemoryError e) {
System.out.println("Out of memory, exiting.");
System.out.flush();
System.exit(-1);
- }finally{
+ } finally {
releaseLease();
Thread.currentThread().setName(originalThread);
}
}
- private void releaseLease(){
- if(lease != null){
- try{
+ private void releaseLease() {
+ if (lease != null) {
+ try {
lease.close();
- }catch(Exception e){
+ } catch (Exception e) {
logger.warn("Failure while releasing lease.", e);
};
}
@@ -247,18 +248,22 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
try {
LogicalPlan logicalPlan = context.getPlanReader().readLogicalPlan(json);
- if(logicalPlan.getProperties().resultMode == ResultMode.LOGICAL){
+ if (logicalPlan.getProperties().resultMode == ResultMode.LOGICAL) {
fail("Failure running plan. You requested a result mode of LOGICAL and submitted a logical plan. In this case you're output mode must be PHYSICAL or EXEC.", new Exception());
}
- if(logger.isDebugEnabled()) logger.debug("Logical {}", logicalPlan.unparse(context.getConfig()));
+ if (logger.isDebugEnabled()) {
+ logger.debug("Logical {}", logicalPlan.unparse(context.getConfig()));
+ }
PhysicalPlan physicalPlan = convert(logicalPlan);
- if(logicalPlan.getProperties().resultMode == ResultMode.PHYSICAL){
+ if (logicalPlan.getProperties().resultMode == ResultMode.PHYSICAL) {
returnPhysical(physicalPlan);
return;
}
- if(logger.isDebugEnabled()) logger.debug("Physical {}", context.getConfig().getMapper().writeValueAsString(physicalPlan));
+ if (logger.isDebugEnabled()) {
+ logger.debug("Physical {}", context.getConfig().getMapper().writeValueAsString(physicalPlan));
+ }
runPhysicalPlan(physicalPlan);
} catch (IOException e) {
fail("Failure while parsing logical plan.", e);
@@ -267,7 +272,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
}
}
- private void returnPhysical(PhysicalPlan plan){
+ private void returnPhysical(PhysicalPlan plan) {
String jsonPlan = plan.unparse(context.getConfig().getMapper().writer());
runPhysicalPlan(DirectPlan.createDirectPlan(context, new PhysicalFromLogicalExplain(jsonPlan)));
}
@@ -286,7 +291,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
final SendingAccountor acct;
- public SingleListener(){
+ public SingleListener() {
acct = new SendingAccountor();
acct.increment();
acct.increment();
@@ -304,6 +309,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
}
}
+
private void parseAndRunPhysicalPlan(String json) {
try {
PhysicalPlan plan = context.getPlanReader().readPhysicalPlan(json);
@@ -315,10 +321,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
private void runPhysicalPlan(PhysicalPlan plan) {
-
-
-
- if(plan.getProperties().resultMode != ResultMode.EXEC){
+ if(plan.getProperties().resultMode != ResultMode.EXEC) {
fail(String.format("Failure running plan. You requested a result mode of %s and a physical plan can only be output as EXEC", plan.getProperties().resultMode), new Exception());
}
PhysicalOperator rootOperator = plan.getSortedOperators(false).iterator().next();
@@ -334,7 +337,9 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
int sortCount = 0;
for (PhysicalOperator op : plan.getSortedOperators()) {
- if (op instanceof ExternalSort) sortCount++;
+ if (op instanceof ExternalSort) {
+ sortCount++;
+ }
}
if (sortCount > 0) {
@@ -356,13 +361,13 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
try {
double size = 0;
- for(PhysicalOperator ops : plan.getSortedOperators()){
+ for (PhysicalOperator ops : plan.getSortedOperators()) {
size += ops.getCost();
}
- if(queuingEnabled){
- if(size > this.queueThreshold){
+ if (queuingEnabled) {
+ if (size > this.queueThreshold) {
this.lease = largeSemaphore.acquire(this.queueTimeout, TimeUnit.MILLISECONDS);
- }else{
+ } else {
this.lease = smallSemaphore.acquire(this.queueTimeout, TimeUnit.MILLISECONDS);
}
}
@@ -420,13 +425,15 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
PhysicalPlan plan = sqlWorker.getPlan(sql, textPlan);
fragmentManager.getStatus().setPlanText(textPlan.value);
runPhysicalPlan(plan);
- }catch(Exception e){
+ } catch(Exception e) {
fail("Failure while parsing sql.", e);
}
}
private PhysicalPlan convert(LogicalPlan plan) throws OptimizerException {
- if(logger.isDebugEnabled()) logger.debug("Converting logical plan {}.", plan.toJsonStringSafe(context.getConfig()));
+ if (logger.isDebugEnabled()) {
+ logger.debug("Converting logical plan {}.", plan.toJsonStringSafe(context.getConfig()));
+ }
return new BasicOptimizer(DrillConfig.create(), context, initiatingClient).optimize(new BasicOptimizer.BasicOptimizationContext(context), plan);
}
@@ -443,7 +450,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
public void close() throws IOException {
}
- public QueryState getQueryState(){
+ public QueryState getQueryState() {
return this.state.getState();
}
@@ -457,7 +464,7 @@ public class Foreman implements Runnable, Closeable, Comparable<Object>{
ForemanManagerListener.this.fail(message, t);
}
- void cleanupAndSendResult(QueryResult result){
+ void cleanupAndSendResult(QueryResult result) {
Foreman.this.cleanupAndSendResult(result);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/QueryStatus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/QueryStatus.java
index f89cec929..45a151e9c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/QueryStatus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/QueryStatus.java
@@ -59,7 +59,7 @@ public class QueryStatus {
private final PStore<QueryProfile> profileCache;
- public QueryStatus(RunQuery query, QueryId id, PStoreProvider provider, Foreman foreman){
+ public QueryStatus(RunQuery query, QueryId id, PStoreProvider provider, Foreman foreman) {
this.id = id;
this.query = query;
this.queryId = QueryIdHelper.getQueryId(id);
@@ -75,7 +75,7 @@ public class QueryStatus {
return fragmentDataSet;
}
- public void setPlanText(String planText){
+ public void setPlanText(String planText) {
this.planText = planText;
updateCache();
@@ -98,11 +98,11 @@ public class QueryStatus {
assert finishedFragments <= totalFragments;
}
- void add(FragmentData data){
+ void add(FragmentData data) {
int majorFragmentId = data.getHandle().getMajorFragmentId();
int minorFragmentId = data.getHandle().getMinorFragmentId();
IntObjectOpenHashMap<FragmentData> minorMap = fragmentDataMap.get(majorFragmentId);
- if(minorMap == null){
+ if (minorMap == null) {
minorMap = new IntObjectOpenHashMap<FragmentData>();
fragmentDataMap.put(majorFragmentId, minorMap);
}
@@ -111,7 +111,7 @@ public class QueryStatus {
fragmentDataSet.add(data);
}
- void update(FragmentStatus status, boolean updateCache){
+ void update(FragmentStatus status, boolean updateCache) {
int majorFragmentId = status.getHandle().getMajorFragmentId();
int minorFragmentId = status.getHandle().getMinorFragmentId();
fragmentDataMap.get(majorFragmentId).get(minorFragmentId).setStatus(status);
@@ -120,14 +120,14 @@ public class QueryStatus {
}
}
- public void updateCache(){
+ public void updateCache() {
QueryState queryState = foreman.getQueryState();
boolean fullStatus = queryState == QueryState.COMPLETED || queryState == QueryState.FAILED;
profileCache.put(queryId, getAsProfile(fullStatus));
}
@Override
- public String toString(){
+ public String toString() {
return fragmentDataMap.toString();
}
@@ -135,12 +135,12 @@ public class QueryStatus {
int major;
int minor;
- public FragmentId(FragmentStatus status){
+ public FragmentId(FragmentStatus status) {
this.major = status.getHandle().getMajorFragmentId();
this.minor = status.getHandle().getMinorFragmentId();
}
- public FragmentId(FragmentData data){
+ public FragmentId(FragmentData data) {
this.major = data.getHandle().getMajorFragmentId();
this.minor = data.getHandle().getMinorFragmentId();
}
@@ -162,42 +162,49 @@ public class QueryStatus {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
FragmentId other = (FragmentId) obj;
- if (major != other.major)
+ if (major != other.major) {
return false;
- if (minor != other.minor)
+ }
+ if (minor != other.minor) {
return false;
+ }
return true;
}
@Override
- public String toString(){
+ public String toString() {
return major + ":" + minor;
}
}
- public QueryProfile getAsProfile(boolean fullStatus){
+ public QueryProfile getAsProfile(boolean fullStatus) {
QueryProfile.Builder b = QueryProfile.newBuilder();
b.setQuery(query.getPlan());
b.setType(query.getType());
- if(planText != null) b.setPlan(planText);
+ if (planText != null) {
+ b.setPlan(planText);
+ }
b.setId(id);
if (fullStatus) {
- for(int i = 0; i < fragmentDataMap.allocated.length; i++){
- if(fragmentDataMap.allocated[i]){
+ for (int i = 0; i < fragmentDataMap.allocated.length; i++) {
+ if (fragmentDataMap.allocated[i]) {
int majorFragmentId = fragmentDataMap.keys[i];
IntObjectOpenHashMap<FragmentData> minorMap = (IntObjectOpenHashMap<FragmentData>) ((Object[]) fragmentDataMap.values)[i];
MajorFragmentProfile.Builder fb = MajorFragmentProfile.newBuilder();
fb.setMajorFragmentId(majorFragmentId);
- for(int v = 0; v < minorMap.allocated.length; v++){
- if(minorMap.allocated[v]){
+ for (int v = 0; v < minorMap.allocated.length; v++) {
+ if (minorMap.allocated[v]) {
FragmentData data = (FragmentData) ((Object[]) minorMap.values)[v];
fb.addMinorFragmentProfile(data.getStatus().getProfile());
}
@@ -215,4 +222,5 @@ public class QueryStatus {
b.setFinishedFragments(finishedFragments);
return b.build();
}
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
index 6b4ee9b01..ecc8df2cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
@@ -49,7 +49,7 @@ public class FragmentExecutor implements Runnable, CancelableQuery, StatusProvid
private Thread executionThread;
private AtomicBoolean closed = new AtomicBoolean(false);
- public FragmentExecutor(FragmentContext context, WorkerBee bee, FragmentRoot rootOperator, StatusReporter listener){
+ public FragmentExecutor(FragmentContext context, WorkerBee bee, FragmentRoot rootOperator, StatusReporter listener) {
this.context = context;
this.bee = bee;
this.rootOperator = rootOperator;
@@ -75,7 +75,7 @@ public class FragmentExecutor implements Runnable, CancelableQuery, StatusProvid
root.receivingFragmentFinished(handle);
}
- public UserClientConnection getClient(){
+ public UserClientConnection getClient() {
return context.getConnection();
}
@@ -102,7 +102,7 @@ public class FragmentExecutor implements Runnable, CancelableQuery, StatusProvid
// run the query until root.next returns false.
while (state.get() == FragmentState.RUNNING_VALUE) {
if (!root.next()) {
- if (context.isFailed()){
+ if (context.isFailed()) {
internalFail(context.getFailureCause());
closeOutResources(false);
} else {
@@ -125,32 +125,38 @@ public class FragmentExecutor implements Runnable, CancelableQuery, StatusProvid
}
}
- private void closeOutResources(boolean throwFailure){
- if(closed.get()) return;
+ private void closeOutResources(boolean throwFailure) {
+ if (closed.get()) {
+ return;
+ }
- try{
+ try {
root.stop();
- }catch(RuntimeException e){
- if(throwFailure) throw e;
+ } catch (RuntimeException e) {
+ if (throwFailure) {
+ throw e;
+ }
logger.warn("Failure while closing out resources.", e);
}
- try{
+ try {
context.close();
- }catch(RuntimeException e){
- if(throwFailure) throw e;
+ } catch (RuntimeException e) {
+ if (throwFailure) {
+ throw e;
+ }
logger.warn("Failure while closing out resources.", e);
}
closed.set(true);
}
- private void internalFail(Throwable excep){
+ private void internalFail(Throwable excep) {
state.set(FragmentState.FAILED_VALUE);
listener.fail(context.getHandle(), "Failure while running fragment.", excep);
}
- private void updateState(FragmentState update){
+ private void updateState(FragmentState update) {
state.set(update.getNumber());
listener.stateChanged(context.getHandle(), update);
}
@@ -172,7 +178,7 @@ public class FragmentExecutor implements Runnable, CancelableQuery, StatusProvid
return o.hashCode() - this.hashCode();
}
- public FragmentContext getContext(){
+ public FragmentContext getContext() {
return context;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/NonRootFragmentManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/NonRootFragmentManager.java
index 02dec0af6..979870108 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/NonRootFragmentManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/NonRootFragmentManager.java
@@ -48,7 +48,7 @@ public class NonRootFragmentManager implements FragmentManager {
private List<RemoteConnection> connections = new CopyOnWriteArrayList<>();
public NonRootFragmentManager(PlanFragment fragment, WorkerBee bee) throws FragmentSetupException{
- try{
+ try {
this.fragment = fragment;
DrillbitContext context = bee.getContext();
this.bee = bee;
@@ -58,7 +58,7 @@ public class NonRootFragmentManager implements FragmentManager {
this.context.setBuffers(buffers);
this.runnerListener = new NonRootStatusReporter(this.context, context.getController().getTunnel(fragment.getForeman()));
- }catch(ExecutionSetupException | IOException e){
+ } catch (ExecutionSetupException | IOException e) {
throw new FragmentSetupException("Failure while decoding fragment.", e);
}
}
@@ -75,10 +75,14 @@ public class NonRootFragmentManager implements FragmentManager {
* @see org.apache.drill.exec.work.fragment.FragmentHandler#getRunnable()
*/
@Override
- public FragmentExecutor getRunnable(){
- synchronized(this){
- if(runner != null) throw new IllegalStateException("Get Runnable can only be run once.");
- if(cancel) return null;
+ public FragmentExecutor getRunnable() {
+ synchronized(this) {
+ if (runner != null) {
+ throw new IllegalStateException("Get Runnable can only be run once.");
+ }
+ if (cancel) {
+ return null;
+ }
runner = new FragmentExecutor(context, bee, root, runnerListener);
return this.runner;
}
@@ -89,10 +93,10 @@ public class NonRootFragmentManager implements FragmentManager {
* @see org.apache.drill.exec.work.fragment.FragmentHandler#cancel()
*/
@Override
- public void cancel(){
- synchronized(this){
+ public void cancel() {
+ synchronized(this) {
cancel = true;
- if(runner != null){
+ if (runner != null) {
runner.cancel();
}
}
@@ -125,4 +129,4 @@ public class NonRootFragmentManager implements FragmentManager {
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
index 195a2cd8a..db76057cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
@@ -44,7 +44,7 @@ public class UserWorker{
this.bee = bee;
}
- public QueryId submitWork(UserClientConnection connection, RunQuery query){
+ public QueryId submitWork(UserClientConnection connection, RunQuery query) {
UUID uuid = UUID.randomUUID();
QueryId id = QueryId.newBuilder().setPart1(uuid.getMostSignificantBits()).setPart2(uuid.getLeastSignificantBits()).build();
Foreman foreman = new Foreman(bee, bee.getContext(), connection, id, query);
@@ -52,31 +52,36 @@ public class UserWorker{
return id;
}
- public QueryResult getResult(UserClientConnection connection, RequestResults req){
+ public QueryResult getResult(UserClientConnection connection, RequestResults req) {
Foreman foreman = bee.getForemanForQueryId(req.getQueryId());
- if(foreman == null) return QueryResult.newBuilder().setQueryState(QueryState.UNKNOWN_QUERY).build();
+ if (foreman == null) {
+ return QueryResult.newBuilder().setQueryState(QueryState.UNKNOWN_QUERY).build();
+ }
return foreman.getResult(connection, req);
}
- public Ack cancelQuery(QueryId query){
+ public Ack cancelQuery(QueryId query) {
Foreman foreman = bee.getForemanForQueryId(query);
- if(foreman != null){
+ if(foreman != null) {
foreman.cancel();
}
return Acks.OK;
}
- public Ack cancelFragment(FragmentHandle handle){
+ public Ack cancelFragment(FragmentHandle handle) {
FragmentExecutor runner = bee.getFragmentRunner(handle);
- if(runner != null) runner.cancel();
+ if (runner != null) {
+ runner.cancel();
+ }
return Acks.OK;
}
- public SchemaFactory getSchemaFactory(){
+ public SchemaFactory getSchemaFactory() {
return bee.getContext().getSchemaFactory();
}
- public OptionManager getSystemOptions(){
+ public OptionManager getSystemOptions() {
return bee.getContext().getOptionManager();
}
+
}
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index f99c2fa7d..44ef03285 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -13,8 +13,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-// This file tells Drill to consider this module when class path scanning.
-// This file can also include any supplementary configuration information.
+// This file tells Drill to consider this module when class path scanning.
+// This file can also include any supplementary configuration information.
// This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
drill.logical.function.packages += "org.apache.drill.exec.expr.fn.impl"
@@ -45,7 +45,7 @@ drill.exec: {
threads: 10
}
},
- use.ip : false
+ use.ip : false
},
operator: {
packages += "org.apache.drill.exec.physical.config"
@@ -75,14 +75,14 @@ drill.exec: {
}
},
zk: {
- connect: "localhost:2181",
- root: "drill",
- refresh: 500,
- timeout: 5000,
- retry: {
- count: 7200,
- delay: 500
- }
+ connect: "localhost:2181",
+ root: "drill",
+ refresh: 500,
+ timeout: 5000,
+ retry: {
+ count: 7200,
+ delay: 500
+ }
},
http: {
enabled: true,
diff --git a/exec/java-exec/src/main/resources/rest/profile/profile.ftl b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
index 3cd214c4f..8ca21faa0 100644
--- a/exec/java-exec/src/main/resources/rest/profile/profile.ftl
+++ b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
@@ -75,10 +75,10 @@
</form>
</div>
</div>
-
+
<div class="page-header"></div>
<h3>Fragment Profiles</h3>
-
+
<div class="panel-group" id="fragment-accordion">
<div class="panel panel-default">
<div class="panel-heading">
@@ -112,10 +112,10 @@
</div>
</#list>
</div>
-
+
<div class="page-header"></div>
<h3>Operator Profiles</h3>
-
+
<div class="panel-group" id="operator-accordion">
<div class="panel panel-default">
<div class="panel-heading">
@@ -149,10 +149,10 @@
</div>
</#list>
</div>
-
+
<div class="page-header"></div>
<h3>Full JSON Profile</h3>
-
+
<div class="span4 collapse-group" id="full-json-profile">
<a class="btn btn-default" data-toggle="collapse" data-target="#full-json-profile-json">JSON profile</a>
<br> <br>
diff --git a/exec/java-exec/src/main/resources/rest/www/graph.js b/exec/java-exec/src/main/resources/rest/www/graph.js
index b65c4165f..55ca7f44f 100644
--- a/exec/java-exec/src/main/resources/rest/www/graph.js
+++ b/exec/java-exec/src/main/resources/rest/www/graph.js
@@ -71,7 +71,7 @@ $(window).load(function () {
return r1.end - r1.start > r2.end - r2.start ? 1 : -1;
}
else return r1.category > r2.category ? 1 : -1;
-
+
});
return timetable;
}
@@ -106,7 +106,7 @@ $(window).load(function () {
fragment: parseInt(ps[i][0].split("-")[0])
});
}
-
+
// edges
var st = [ps[0]];
for (var i = 1; i < ps.length; i++) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
index f504de480..99c4da503 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
@@ -95,9 +95,9 @@ public class BaseTestQuery extends ExecTest{
public static void openClient() throws Exception{
config = DrillConfig.create(TEST_CONFIGURATIONS);
allocator = new TopLevelAllocator(config);
- if(config.hasPath(ENABLE_FULL_CACHE) && config.getBoolean(ENABLE_FULL_CACHE)){
+ if (config.hasPath(ENABLE_FULL_CACHE) && config.getBoolean(ENABLE_FULL_CACHE)) {
serviceSet = RemoteServiceSet.getServiceSetWithFullCache(config, allocator);
- }else{
+ } else {
serviceSet = RemoteServiceSet.getLocalServiceSet();
}
bit = new Drillbit(config, serviceSet);
@@ -105,23 +105,31 @@ public class BaseTestQuery extends ExecTest{
client = new DrillClient(config, serviceSet.getCoordinator());
client.connect();
List<QueryResultBatch> results = client.runQuery(QueryType.SQL, String.format("alter session set `%s` = 2", ExecConstants.MAX_WIDTH_PER_NODE_KEY));
- for(QueryResultBatch b : results){
+ for (QueryResultBatch b : results) {
b.release();
}
}
- protected BufferAllocator getAllocator(){
+ protected BufferAllocator getAllocator() {
return allocator;
}
@AfterClass
public static void closeClient() throws IOException{
- if(client != null) client.close();
- if(bit != null) bit.close();
- if(serviceSet != null) serviceSet.close();
- if(allocator != null) allocator.close();
+ if (client != null) {
+ client.close();
+ }
+ if (bit != null) {
+ bit.close();
+ }
+ if(serviceSet != null) {
+ serviceSet.close();
+ }
+ if (allocator != null) {
+ allocator.close();
+ }
}
protected void runSQL(String sql) throws Exception {
@@ -154,7 +162,7 @@ public class BaseTestQuery extends ExecTest{
return resultListener.await();
}
- protected void testWithListener(QueryType type, String query, UserResultsListener resultListener){
+ protected void testWithListener(QueryType type, String query, UserResultsListener resultListener) {
query = query.replace("[WORKING_PATH]", TestTools.getWorkingPath());
client.runQuery(type, query, resultListener);
}
@@ -176,8 +184,10 @@ public class BaseTestQuery extends ExecTest{
protected void test(String query) throws Exception{
String[] queries = query.split(";");
- for(String q : queries){
- if(q.trim().isEmpty()) continue;
+ for (String q : queries) {
+ if (q.trim().isEmpty()) {
+ continue;
+ }
testRunAndPrint(QueryType.SQL, q);
}
}
@@ -197,19 +207,22 @@ public class BaseTestQuery extends ExecTest{
protected void testPhysicalFromFile(String file) throws Exception{
testPhysical(getFile(file));
}
+
protected List<QueryResultBatch> testPhysicalFromFileWithResults(String file) throws Exception {
return testRunAndReturn(QueryType.PHYSICAL, getFile(file));
}
+
protected void testLogicalFromFile(String file) throws Exception{
testLogical(getFile(file));
}
+
protected void testSqlFromFile(String file) throws Exception{
test(getFile(file));
}
protected String getFile(String resource) throws IOException{
URL url = Resources.getResource(resource);
- if(url == null){
+ if (url == null) {
throw new IOException(String.format("Unable to find path %s.", resource));
}
return Resources.toString(url, Charsets.UTF_8);
@@ -245,7 +258,9 @@ public class BaseTestQuery extends ExecTest{
public int waitForCompletion() throws Exception {
latch.await();
- if(exception != null) throw exception;
+ if (exception != null) {
+ throw exception;
+ }
return count.get();
}
}
@@ -261,7 +276,7 @@ public class BaseTestQuery extends ExecTest{
protected int printResult(List<QueryResultBatch> results) throws SchemaChangeException {
int rowCount = 0;
RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
- for(QueryResultBatch result : results){
+    for (QueryResultBatch result : results) {
rowCount += result.getHeader().getRowCount();
loader.load(result.getHeader().getDef(), result.getData());
if (loader.getRecordCount() <= 0) {
@@ -279,7 +294,7 @@ public class BaseTestQuery extends ExecTest{
StringBuilder formattedResults = new StringBuilder();
boolean includeHeader = true;
RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
- for(QueryResultBatch result : results){
+    for (QueryResultBatch result : results) {
loader.load(result.getHeader().getDef(), result.getData());
if (loader.getRecordCount() <= 0) {
break;
@@ -294,4 +309,5 @@ public class BaseTestQuery extends ExecTest{
return formattedResults.toString();
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
index 6331116b9..0c7564054 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
@@ -101,7 +101,6 @@ public class PlanTestBase extends BaseTestQuery {
for (String substr : expectedSubstrs) {
assertTrue(planStr.contains(substr));
}
-
}
/**
@@ -120,7 +119,6 @@ public class PlanTestBase extends BaseTestQuery {
}
}
-
/**
* This method will take a SQL string statement, get the PHYSICAL plan in
* Optiq RelNode format. Then check the physical plan against the list
@@ -149,7 +147,6 @@ public class PlanTestBase extends BaseTestQuery {
}
}
-
/*
* This will get the plan (either logical or physical) in Optiq RelNode
* format, based on SqlExplainLevel and Depth.
@@ -205,8 +202,9 @@ public class PlanTestBase extends BaseTestQuery {
StringBuilder builder = new StringBuilder();
for (QueryResultBatch b : results) {
- if (!b.hasData())
+ if (!b.hasData()) {
continue;
+ }
loader.load(b.getHeader().getDef(), b.getData());
@@ -247,8 +245,9 @@ public class PlanTestBase extends BaseTestQuery {
Stack<Integer> s = new Stack<Integer>();
for (String line : planLines) {
- if (line.trim().isEmpty())
+ if (line.trim().isEmpty()) {
continue;
+ }
if (line.contains(joinKeyWord)) {
builder.append(Strings.repeat(" ", 2 * s.size()));
builder.append(joinKeyWord + "\n");
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index aca7d06c6..7fc7d6bac 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -63,11 +63,11 @@ public class PlanningBase extends ExecTest{
private final DrillConfig config = DrillConfig.create();
- protected void testSqlPlanFromFile(String file) throws Exception{
+ protected void testSqlPlanFromFile(String file) throws Exception {
testSqlPlan(getFile(file));
}
- protected void testSqlPlan(String sqlCommands) throws Exception{
+ protected void testSqlPlan(String sqlCommands) throws Exception {
String[] sqlStrings = sqlCommands.split(";");
final DistributedCache cache = new LocalCache();
@@ -133,8 +133,10 @@ public class PlanningBase extends ExecTest{
}
};
- for(String sql : sqlStrings){
- if(sql.trim().isEmpty()) continue;
+ for (String sql : sqlStrings) {
+ if (sql.trim().isEmpty()) {
+ continue;
+ }
DrillSqlWorker worker = new DrillSqlWorker(context);
PhysicalPlan p = worker.getPlan(sql);
}
@@ -143,7 +145,7 @@ public class PlanningBase extends ExecTest{
protected String getFile(String resource) throws IOException{
URL url = Resources.getResource(resource);
- if(url == null){
+ if (url == null) {
throw new IOException(String.format("Unable to find path %s.", resource));
}
return Resources.toString(url, Charsets.UTF_8);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
index e45b24832..828ffe946 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
@@ -136,8 +136,9 @@ public class TestProjectPushDown extends PlanTestBase {
String query = getFile(fileName);
String[] queries = query.split(";");
for (String q : queries) {
- if (q.trim().isEmpty())
+ if (q.trim().isEmpty()) {
continue;
+ }
testPhysicalPlan(q, expectedSubstrs);
}
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
index 29502be31..05105fc09 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
@@ -55,7 +55,7 @@ public class TestOpSerialization {
screen.setOperatorId(0);
boolean reversed = false;
- while(true){
+ while (true) {
List<PhysicalOperator> pops = Lists.newArrayList();
pops.add(s);
@@ -63,7 +63,9 @@ public class TestOpSerialization {
pops.add(f);
pops.add(screen);
- if(reversed) pops = Lists.reverse(pops);
+ if (reversed) {
+ pops = Lists.reverse(pops);
+ }
PhysicalPlan plan1 = new PhysicalPlan(PlanProperties.builder().build(), pops);
String json = plan1.unparse(c.getMapper().writer());
System.out.println(json);
@@ -78,12 +80,12 @@ public class TestOpSerialization {
assertEquals(1, o1.getOperatorId());
PhysicalOperator o2 = o1.iterator().next();
assertEquals(2, o2.getOperatorId());
- if(reversed) break;
+      if (reversed) {
+ break;
+ }
reversed = !reversed;
}
-
-
-
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
index 82da48311..09248dacd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
@@ -84,7 +84,9 @@ public class TestWriteToDisk extends ExecTest{
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
FileSystem fs = FileSystem.get(conf);
Path path = new Path("/tmp/drillSerializable");
- if (fs.exists(path)) fs.delete(path, false);
+ if (fs.exists(path)) {
+ fs.delete(path, false);
+ }
FSDataOutputStream out = fs.create(path);
wrap.writeToStream(out);
@@ -109,4 +111,5 @@ public class TestWriteToDisk extends ExecTest{
}
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
index 5e57dc776..6a3d2f1ac 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
@@ -39,51 +39,52 @@ import com.google.common.io.Files;
public class TestAggregateFunction extends PopUnitTestBase {
- public void runTest(Object[] values, String planPath, String dataPath) throws Throwable {
-
- try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
-
- // run query.
- bit.run();
- client.connect();
- List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile(planPath), Charsets.UTF_8).replace("#{TEST_FILE}", dataPath));
-
- RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
-
- QueryResultBatch batch = results.get(0);
- assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
-
- int i = 0;
- for (VectorWrapper<?> v : batchLoader) {
- ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertEquals(values[i++], (accessor.getObject(0)));
- }
-
- batchLoader.clear();
- for(QueryResultBatch b : results){
- b.release();
- }
- }
- }
+ public void runTest(Object[] values, String planPath, String dataPath) throws Throwable {
+
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ Drillbit bit = new Drillbit(CONFIG, serviceSet);
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+
+ // run query.
+ bit.run();
+ client.connect();
+ List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL,
+ Files.toString(FileUtils.getResourceAsFile(planPath), Charsets.UTF_8).replace("#{TEST_FILE}", dataPath));
+
+ RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
- @Test
- public void testSortDate() throws Throwable {
- String planPath = "/functions/test_stddev_variance.json";
- String dataPath = "/simple_stddev_variance_input.json";
- Double expectedValues[] = {2.0d, 2.138089935299395d, 2.138089935299395d, 4.0d, 4.571428571428571d, 4.571428571428571d};
+ QueryResultBatch batch = results.get(0);
+ assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
- runTest(expectedValues, planPath, dataPath);
+ int i = 0;
+ for (VectorWrapper<?> v : batchLoader) {
+ ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+ assertEquals(values[i++], (accessor.getObject(0)));
+ }
+
+ batchLoader.clear();
+      for (QueryResultBatch b : results) {
+ b.release();
+ }
}
+ }
+
+ @Test
+ public void testSortDate() throws Throwable {
+ String planPath = "/functions/test_stddev_variance.json";
+ String dataPath = "/simple_stddev_variance_input.json";
+ Double expectedValues[] = {2.0d, 2.138089935299395d, 2.138089935299395d, 4.0d, 4.571428571428571d, 4.571428571428571d};
+
+ runTest(expectedValues, planPath, dataPath);
+ }
+
+ @Test
+ public void testCovarianceCorrelation() throws Throwable {
+ String planPath = "/functions/test_covariance.json";
+ String dataPath = "/covariance_input.json";
+ Double expectedValues[] = {4.571428571428571d, 4.857142857142857d, -6.000000000000002d, 4.0d , 4.25d, -5.250000000000002d, 1.0d, 0.9274260335029677d, -1.0000000000000004d};
- @Test
- public void testCovarianceCorrelation() throws Throwable {
- String planPath = "/functions/test_covariance.json";
- String dataPath = "/covariance_input.json";
- Double expectedValues[] = {4.571428571428571d, 4.857142857142857d, -6.000000000000002d, 4.0d , 4.25d, -5.250000000000002d, 1.0d, 0.9274260335029677d, -1.0000000000000004d};
+ runTest(expectedValues, planPath, dataPath);
+ }
- runTest(expectedValues, planPath, dataPath);
}
-} \ No newline at end of file
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
index 28e667e3d..54c1700f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
@@ -51,86 +51,93 @@ import com.google.common.io.Resources;
public class TestNewMathFunctions {
-
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestNewMathFunctions.class);
-
- DrillConfig c = DrillConfig.create();
- PhysicalPlanReader reader;
- FunctionImplementationRegistry registry;
- FragmentContext context;
-
- public Object[] getRunResult(SimpleRootExec exec) {
- int size = 0;
- for (ValueVector v : exec) {
- size++;
- }
-
- Object[] res = new Object [size];
- int i = 0;
- for (ValueVector v : exec) {
- if (v instanceof VarCharVector) {
- res[i++] = new String( ((VarCharVector) v).getAccessor().get(0));
- } else
- res[i++] = v.getAccessor().getObject(0);
- }
- return res;
- }
-
- public void runTest(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection, Object[] expectedResults, String planPath) throws Throwable {
-
- new NonStrictExpectations(){{
- bitContext.getMetrics(); result = new MetricRegistry();
- bitContext.getAllocator(); result = new TopLevelAllocator();
- bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
- bitContext.getConfig(); result = c;
- bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
- }};
-
- String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
- if(reader == null) reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
- if(registry == null) registry = new FunctionImplementationRegistry(c);
- if(context == null) context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry); //new FragmentContext(bitContext, ExecProtos.FragmentHandle.getDefaultInstance(), connection, registry);
- PhysicalPlan plan = reader.readPhysicalPlan(planString);
- SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
-
- while(exec.next()){
- Object [] res = getRunResult(exec);
- assertEquals("return count does not match", res.length, expectedResults.length);
-
- for (int i = 0; i<res.length; i++) {
- assertEquals(String.format("column %s does not match", i), res[i], expectedResults[i]);
- }
- }
-
- if(context.getFailureCause() != null){
- throw context.getFailureCause();
- }
-
- assertTrue(!context.isFailed());
- }
-
- @Test
- public void testTrigoMathFunc(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable{
- Object [] expected = new Object[] {Math.sin(45), Math.cos(45), Math.tan(45),Math.asin(45), Math.acos(45), Math.atan(45),Math.sinh(45), Math.cosh(45), Math.tanh(45)};
- runTest(bitContext, connection, expected, "functions/testTrigoMathFunctions.json");
- }
-
- @Test
- public void testExtendedMathFunc(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable{
- BigDecimal d = new BigDecimal("100111111111111111111111111111111111.00000000000000000000000000000000000000000000000000001");
-
- Object [] expected = new Object[] {Math.cbrt(1000), Math.log(10), (Math.log(64.0)/Math.log(2.0)), Math.exp(10), Math.toDegrees(0.5), Math.toRadians(45.0), Math.PI, Math.cbrt(d.doubleValue()), Math.log(d.doubleValue()), (Math.log(d.doubleValue())/Math.log(2)), Math.exp(d.doubleValue()), Math.toDegrees(d.doubleValue()), Math.toRadians(d.doubleValue())};
-
- runTest(bitContext, connection, expected, "functions/testExtendedMathFunctions.json");
- }
-
- @Test
- public void testTruncDivMod(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable{
- Object [] expected = new Object[] {101.0, 0, 101, 1010.0, 101, 481.0, 0.001099999999931267};
- runTest(bitContext, connection, expected, "functions/testDivModTruncFunctions.json");
- }
-} \ No newline at end of file
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestNewMathFunctions.class);
+
+ DrillConfig c = DrillConfig.create();
+ PhysicalPlanReader reader;
+ FunctionImplementationRegistry registry;
+ FragmentContext context;
+
+ public Object[] getRunResult(SimpleRootExec exec) {
+ int size = 0;
+ for (ValueVector v : exec) {
+ size++;
+ }
+
+ Object[] res = new Object [size];
+ int i = 0;
+ for (ValueVector v : exec) {
+ if (v instanceof VarCharVector) {
+ res[i++] = new String( ((VarCharVector) v).getAccessor().get(0));
+ } else {
+ res[i++] = v.getAccessor().getObject(0);
+ }
+ }
+ return res;
+ }
+
+ public void runTest(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection, Object[] expectedResults, String planPath) throws Throwable {
+
+ new NonStrictExpectations() {{
+ bitContext.getMetrics(); result = new MetricRegistry();
+ bitContext.getAllocator(); result = new TopLevelAllocator();
+ bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+ bitContext.getConfig(); result = c;
+ bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
+ }};
+
+ String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
+ if (reader == null) {
+ reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+ }
+ if (registry == null) {
+ registry = new FunctionImplementationRegistry(c);
+ }
+ if (context == null) {
+ context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry); //new FragmentContext(bitContext, ExecProtos.FragmentHandle.getDefaultInstance(), connection, registry);
+ }
+ PhysicalPlan plan = reader.readPhysicalPlan(planString);
+ SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
+
+ while (exec.next()) {
+ Object [] res = getRunResult(exec);
+ assertEquals("return count does not match", res.length, expectedResults.length);
+
+ for (int i = 0; i<res.length; i++) {
+ assertEquals(String.format("column %s does not match", i), res[i], expectedResults[i]);
+ }
+ }
+
+ if (context.getFailureCause() != null) {
+ throw context.getFailureCause();
+ }
+
+ assertTrue(!context.isFailed());
+ }
+
+ @Test
+ public void testTrigoMathFunc(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable{
+ Object [] expected = new Object[] {Math.sin(45), Math.cos(45), Math.tan(45),Math.asin(45), Math.acos(45), Math.atan(45),Math.sinh(45), Math.cosh(45), Math.tanh(45)};
+ runTest(bitContext, connection, expected, "functions/testTrigoMathFunctions.json");
+ }
+
+ @Test
+ public void testExtendedMathFunc(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable{
+ BigDecimal d = new BigDecimal("100111111111111111111111111111111111.00000000000000000000000000000000000000000000000000001");
+
+ Object [] expected = new Object[] {Math.cbrt(1000), Math.log(10), (Math.log(64.0)/Math.log(2.0)), Math.exp(10), Math.toDegrees(0.5), Math.toRadians(45.0), Math.PI, Math.cbrt(d.doubleValue()), Math.log(d.doubleValue()), (Math.log(d.doubleValue())/Math.log(2)), Math.exp(d.doubleValue()), Math.toDegrees(d.doubleValue()), Math.toRadians(d.doubleValue())};
+
+ runTest(bitContext, connection, expected, "functions/testExtendedMathFunctions.json");
+ }
+
+ @Test
+ public void testTruncDivMod(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable{
+ Object [] expected = new Object[] {101.0, 0, 101, 1010.0, 101, 481.0, 0.001099999999931267};
+ runTest(bitContext, connection, expected, "functions/testDivModTruncFunctions.json");
+ }
+
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
index a15b50386..f878bcb69 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
@@ -39,37 +39,37 @@ public class SimpleRootExec implements RootExec, Iterable<ValueVector>{
private RecordBatch incoming;
private ScreenRoot screenRoot;
- public SimpleRootExec(RootExec e){
- if(e instanceof ScreenRoot){
+ public SimpleRootExec(RootExec e) {
+ if (e instanceof ScreenRoot) {
incoming = ((ScreenRoot)e).getIncoming();
screenRoot = (ScreenRoot) e;
- }else{
+ } else {
throw new UnsupportedOperationException();
}
}
- public FragmentContext getContext(){
+ public FragmentContext getContext() {
return incoming.getContext();
}
- public SelectionVector2 getSelectionVector2(){
+ public SelectionVector2 getSelectionVector2() {
return incoming.getSelectionVector2();
}
- public SelectionVector4 getSelectionVector4(){
+ public SelectionVector4 getSelectionVector4() {
return incoming.getSelectionVector4();
}
@SuppressWarnings("unchecked")
- public <T extends ValueVector> T getValueVectorById(SchemaPath path, Class<?> vvClass){
+ public <T extends ValueVector> T getValueVectorById(SchemaPath path, Class<?> vvClass) {
TypedFieldId tfid = incoming.getValueVectorId(path);
return (T) incoming.getValueAccessorById(vvClass, tfid.getFieldIds()).getValueVector();
}
@Override
public boolean next() {
- switch(incoming.next()){
+ switch (incoming.next()) {
case NONE:
case STOP:
incoming.cleanup();
@@ -92,19 +92,19 @@ public class SimpleRootExec implements RootExec, Iterable<ValueVector>{
@Override
public Iterator<ValueVector> iterator() {
List<ValueVector> vv = Lists.newArrayList();
- for(VectorWrapper<?> vw : incoming){
+ for (VectorWrapper<?> vw : incoming) {
vv.add(vw.getValueVector());
}
return vv.iterator();
}
- public int getRecordCount(){
+ public int getRecordCount() {
return incoming.getRecordCount();
}
/// Temporary: for exposing the incoming batch to TestHashTable
public RecordBatch getIncoming() {
- return incoming;
+ return incoming;
}
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestBroadcastExchange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestBroadcastExchange.java
index 826ebf532..521212519 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestBroadcastExchange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestBroadcastExchange.java
@@ -38,7 +38,7 @@ public class TestBroadcastExchange extends PopUnitTestBase {
public void TestSingleBroadcastExchangeWithTwoScans() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
@@ -52,8 +52,10 @@ public class TestBroadcastExchange extends PopUnitTestBase {
.replace("#{RIGHT_FILE}", FileUtils.getResourceAsFile("/join/merge_single_batch.right.json").toURI().toString());
List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL, physicalPlan);
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0) count += b.getHeader().getRowCount();
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
+ count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(25, count);
@@ -64,7 +66,7 @@ public class TestBroadcastExchange extends PopUnitTestBase {
public void TestMultipleSendLocationBroadcastExchange() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
@@ -76,11 +78,14 @@ public class TestBroadcastExchange extends PopUnitTestBase {
FileUtils.getResourceAsFile("/sender/broadcast_exchange_long_run.json"), Charsets.UTF_8);
List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL, physicalPlan);
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0) count += b.getHeader().getRowCount();
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
+ count += b.getHeader().getRowCount();
+ }
b.release();
}
System.out.println(count);
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestComparisonFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestComparisonFunctions.java
index 421c3f59f..609bc14c3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestComparisonFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestComparisonFunctions.java
@@ -53,7 +53,7 @@ public class TestComparisonFunctions extends ExecTest {
public void runTest(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection, String expression, int expectedResults) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
@@ -62,13 +62,19 @@ public class TestComparisonFunctions extends ExecTest {
}};
String planString = Resources.toString(Resources.getResource(COMPARISON_TEST_PHYSICAL_PLAN), Charsets.UTF_8).replaceAll("EXPRESSION", expression);
- if(reader == null) reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
- if(registry == null) registry = new FunctionImplementationRegistry(c);
- if(context == null) context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+ if (reader == null) {
+ reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+ }
+ if (registry == null) {
+ registry = new FunctionImplementationRegistry(c);
+ }
+    if (context == null) {
+ context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+ }
PhysicalPlan plan = reader.readPhysicalPlan(planString);
SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
- while(exec.next()){
+    while (exec.next()) {
assertEquals(String.format("Expression: %s;", expression), expectedResults, exec.getSelectionVector2().getCount());
// for (ValueVector vv: exec) {
// vv.close();
@@ -79,8 +85,7 @@ public class TestComparisonFunctions extends ExecTest {
context.close();
-
- if(context.getFailureCause() != null){
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
index 5111a499a..c64c7a330 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
@@ -306,7 +306,7 @@ public class TestConvertFunctions extends BaseTestQuery {
List<QueryResultBatch> results = testLogicalWithResults(logicalPlan);
int count = 0;
RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
- for(QueryResultBatch result : results){
+    for (QueryResultBatch result : results) {
count += result.getHeader().getRowCount();
loader.load(result.getHeader().getDef(), result.getData());
if (loader.getRecordCount() > 0) {
@@ -365,7 +365,9 @@ public class TestConvertFunctions extends BaseTestQuery {
protected <T> void verifyPhysicalPlan(String expression, T expectedResults) throws Throwable {
expression = expression.replace("\\", "\\\\\\\\"); // "\\\\\\\\" => Java => "\\\\" => JsonParser => "\\" => AntlrParser "\"
- if (textFileContent == null) textFileContent = Resources.toString(Resources.getResource(CONVERSION_TEST_PHYSICAL_PLAN), Charsets.UTF_8);
+ if (textFileContent == null) {
+ textFileContent = Resources.toString(Resources.getResource(CONVERSION_TEST_PHYSICAL_PLAN), Charsets.UTF_8);
+ }
String planString = textFileContent.replace("__CONVERT_EXPRESSION__", expression);
verifyResults(expression, expectedResults, getRunResult(QueryType.PHYSICAL, planString));
@@ -432,4 +434,5 @@ public class TestConvertFunctions extends BaseTestQuery {
expected.getClass().getName(), (actual == null ? "null" : actual.getClass().getName())));
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestHashToRandomExchange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestHashToRandomExchange.java
index e3fca8576..10ee46a01 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestHashToRandomExchange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestHashToRandomExchange.java
@@ -32,7 +32,6 @@ import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
-
public class TestHashToRandomExchange extends PopUnitTestBase {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestHashToRandomExchange.class);
@@ -40,7 +39,7 @@ public class TestHashToRandomExchange extends PopUnitTestBase {
public void twoBitTwoExchangeTwoEntryRun() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
@@ -51,9 +50,10 @@ public class TestHashToRandomExchange extends PopUnitTestBase {
Files.toString(FileUtils.getResourceAsFile("/sender/hash_exchange.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(200, count);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
index 13c9364cf..141c9cd64 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
@@ -67,7 +67,7 @@ public class TestImplicitCastFunctions extends ExecTest {
public void runTest(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection, Object[] expectedResults, String planPath) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
@@ -76,14 +76,20 @@ public class TestImplicitCastFunctions extends ExecTest {
}};
String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
- if(reader == null) reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
- if(registry == null) registry = new FunctionImplementationRegistry(c);
- if(context == null) context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+ if (reader == null) {
+ reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+ }
+ if (registry == null) {
+ registry = new FunctionImplementationRegistry(c);
+ }
+ if (context == null) {
+ context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+ }
PhysicalPlan plan = reader.readPhysicalPlan(planString);
SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
- while(exec.next()){
+ while (exec.next()) {
Object [] res = getRunResult(exec);
assertEquals("return count does not match", res.length, expectedResults.length);
@@ -92,7 +98,7 @@ public class TestImplicitCastFunctions extends ExecTest {
}
}
- if(context.getFailureCause() != null){
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
}
@@ -144,24 +150,24 @@ public class TestImplicitCastFunctions extends ExecTest {
runTest(bitContext, connection, expected, "functions/cast/testICastMockCol.json");
}
- @Test
- public void testImplicitCastWithNullExpression(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable{
- Object [] expected = new Object[10];
+ @Test
+ public void testImplicitCastWithNullExpression(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
+ Object [] expected = new Object[10];
- expected [0] = Boolean.TRUE;
- expected [1] = Boolean.FALSE;
- expected [2] = Boolean.FALSE;
- expected [3] = Boolean.TRUE;
+ expected [0] = Boolean.TRUE;
+ expected [1] = Boolean.FALSE;
+ expected [2] = Boolean.FALSE;
+ expected [3] = Boolean.TRUE;
- expected [4] = null;
- expected [5] = null;
- expected [6] = null;
- expected [7] = null;
- expected [8] = null;
- expected [9] = null;
+ expected [4] = null;
+ expected [5] = null;
+ expected [6] = null;
+ expected [7] = null;
+ expected [8] = null;
+ expected [9] = null;
- runTest(bitContext, connection, expected, "functions/cast/testICastNullExp.json");
- }
+ runTest(bitContext, connection, expected, "functions/cast/testICastNullExp.json");
+ }
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSimpleFragmentRun.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSimpleFragmentRun.java
index 5dd64e10f..68e211264 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSimpleFragmentRun.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSimpleFragmentRun.java
@@ -44,17 +44,16 @@ public class TestSimpleFragmentRun extends PopUnitTestBase {
private static final Charset UTF_8 = Charset.forName("UTF-8");
-
@Test
public void runNoExchangeFragment() throws Exception {
- try(RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());){
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
// run query.
bit.run();
client.connect();
- String path = "/physical_test2.json";
+ String path = "/physical_test2.json";
// String path = "/filter/test1.json";
List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile(path), Charsets.UTF_8));
@@ -62,7 +61,6 @@ public class TestSimpleFragmentRun extends PopUnitTestBase {
RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
int recordCount = 0;
for (QueryResultBatch batch : results) {
-
boolean schemaChanged = batchLoader.load(batch.getHeader().getDef(), batch.getData());
boolean firstColumn = true;
@@ -95,7 +93,9 @@ public class TestSimpleFragmentRun extends PopUnitTestBase {
}
System.out.print(value.getValueVector().getAccessor().getObject(i));
}
- if(!first) System.out.println();
+ if (!first) {
+ System.out.println();
+ }
}
batchLoader.clear();
batch.release();
@@ -170,7 +170,9 @@ public class TestSimpleFragmentRun extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
System.out.print(accessor.getObject(r));
}
- if (!first) System.out.println();
+ if (!first) {
+ System.out.println();
+ }
}
batchLoader.clear();
batch.release();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestStringFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestStringFunctions.java
index a48510fa1..aa3548d74 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestStringFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestStringFunctions.java
@@ -62,15 +62,16 @@ public class TestStringFunctions extends ExecTest {
for (ValueVector v : exec) {
if (v instanceof VarCharVector) {
res[i++] = new String( ((VarCharVector) v).getAccessor().get(0), Charsets.UTF_8);
- } else
+ } else {
res[i++] = v.getAccessor().getObject(0);
+ }
}
return res;
}
public void runTest(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection, Object[] expectedResults, String planPath) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
@@ -79,14 +80,19 @@ public class TestStringFunctions extends ExecTest {
}};
String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
- if(reader == null) reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
- if(registry == null) registry = new FunctionImplementationRegistry(c);
- if(context == null) context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry); //new FragmentContext(bitContext, ExecProtos.FragmentHandle.getDefaultInstance(), connection, registry);
+ if (reader == null) {
+ reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+ }
+ if (registry == null) {
+ registry = new FunctionImplementationRegistry(c);
+ }
+ if (context == null) {
+ context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry); //new FragmentContext(bitContext, ExecProtos.FragmentHandle.getDefaultInstance(), connection, registry);
+ }
PhysicalPlan plan = reader.readPhysicalPlan(planString);
SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
-
- while(exec.next()){
+ while (exec.next()) {
Object [] res = getRunResult(exec);
assertEquals("return count does not match", expectedResults.length, res.length);
@@ -95,10 +101,9 @@ public class TestStringFunctions extends ExecTest {
}
}
- if(context.getFailureCause() != null){
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
}
-
assertTrue(!context.isFailed());
}
@@ -247,4 +252,5 @@ public class TestStringFunctions extends ExecTest {
Object [] expected = new Object[] {97, 65, -32, "A", "btrim", "Peace Peace Peace ", "हकुना मताता हकुना मताता ", "katcit", "\u00C3\u00A2pple", "नदम"};
runTest(bitContext, connection, expected, "functions/string/testStringFuncs.json");
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestUnionExchange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestUnionExchange.java
index 4ad215d9a..271af72b1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestUnionExchange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestUnionExchange.java
@@ -32,7 +32,6 @@ import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
-
public class TestUnionExchange extends PopUnitTestBase {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestUnionExchange.class);
@@ -40,7 +39,7 @@ public class TestUnionExchange extends PopUnitTestBase {
public void twoBitTwoExchangeTwoEntryRun() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
@@ -51,9 +50,10 @@ public class TestUnionExchange extends PopUnitTestBase {
Files.toString(FileUtils.getResourceAsFile("/sender/union_exchange.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(150, count);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
index 430c5056b..ccc052dad 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
@@ -42,12 +42,11 @@ public class TestSimpleTopN extends PopUnitTestBase {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSimpleTopN.class);
DrillConfig c = DrillConfig.create();
-
@Test
public void sortOneKeyAscending() throws Throwable{
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
@@ -58,9 +57,10 @@ public class TestSimpleTopN extends PopUnitTestBase {
Files.toString(FileUtils.getResourceAsFile("/topN/one_key_sort.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
}
assertEquals(100, count);
@@ -70,7 +70,9 @@ public class TestSimpleTopN extends PopUnitTestBase {
int batchCount = 0;
for (QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() == 0) break;
+ if (b.getHeader().getRowCount() == 0) {
+ break;
+ }
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
@@ -80,7 +82,7 @@ public class TestSimpleTopN extends PopUnitTestBase {
BigIntVector.Accessor a1 = c1.getAccessor();
// IntVector.Accessor a2 = c2.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+ for (int i = 0; i < c1.getAccessor().getValueCount(); i++) {
recordCount++;
assertTrue(previousBigInt <= a1.get(i));
previousBigInt = a1.get(i);
@@ -88,13 +90,10 @@ public class TestSimpleTopN extends PopUnitTestBase {
loader.clear();
b.release();
}
-
System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
}
-
}
-
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
index 1f0d89e35..f46617186 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
@@ -62,181 +62,181 @@ import com.google.common.base.Charsets;
import com.google.common.io.Files;
-public class TestHashJoin extends PopUnitTestBase{
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergeJoin.class);
-
- @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(100000);
-
- DrillConfig c = DrillConfig.create();
-
- private void testHJMockScanCommon(final DrillbitContext bitContext, UserServer.UserClientConnection connection, String physicalPlan, int expectedRows) throws Throwable {
- final LocalPStoreProvider provider = new LocalPStoreProvider(c);
- provider.start();
- final SystemOptionManager opt = new SystemOptionManager(c, provider);
- opt.init();
- new NonStrictExpectations(){{
- bitContext.getMetrics(); result = new MetricRegistry();
- bitContext.getAllocator(); result = new TopLevelAllocator();
- bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
- bitContext.getConfig(); result = c;
- bitContext.getOptionManager(); result = opt;
- bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
- }};
-
- PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
- PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8));
- FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
- FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
- SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
-
- int totalRecordCount = 0;
- while (exec.next()) {
- totalRecordCount += exec.getRecordCount();
- }
- exec.stop();
- assertEquals(expectedRows, totalRecordCount);
- System.out.println("Total Record Count: " + totalRecordCount);
- if (context.getFailureCause() != null)
- throw context.getFailureCause();
- assertTrue(!context.isFailed());
+public class TestHashJoin extends PopUnitTestBase {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergeJoin.class);
+
+ @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(100000);
+
+ DrillConfig c = DrillConfig.create();
+
+ private void testHJMockScanCommon(final DrillbitContext bitContext, UserServer.UserClientConnection connection, String physicalPlan, int expectedRows) throws Throwable {
+ final LocalPStoreProvider provider = new LocalPStoreProvider(c);
+ provider.start();
+ final SystemOptionManager opt = new SystemOptionManager(c, provider);
+ opt.init();
+ new NonStrictExpectations() {{
+ bitContext.getMetrics(); result = new MetricRegistry();
+ bitContext.getAllocator(); result = new TopLevelAllocator();
+ bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+ bitContext.getConfig(); result = c;
+ bitContext.getOptionManager(); result = opt;
+ bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
+ }};
+
+ PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+ PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8));
+ FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
+ FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+ SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
+
+ int totalRecordCount = 0;
+ while (exec.next()) {
+ totalRecordCount += exec.getRecordCount();
}
-
- @Test
- public void multiBatchEqualityJoin(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable {
-
- testHJMockScanCommon(bitContext, connection, "/join/hash_join_multi_batch.json", 200000);
+ exec.stop();
+ assertEquals(expectedRows, totalRecordCount);
+ System.out.println("Total Record Count: " + totalRecordCount);
+ if (context.getFailureCause() != null) {
+ throw context.getFailureCause();
}
+ assertTrue(!context.isFailed());
+ }
- @Test
- public void multiBatchRightOuterJoin(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable {
+ @Test
+ public void multiBatchEqualityJoin(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
- testHJMockScanCommon(bitContext, connection, "/join/hj_right_outer_multi_batch.json", 100000);
- }
+ testHJMockScanCommon(bitContext, connection, "/join/hash_join_multi_batch.json", 200000);
+ }
- @Test
- public void multiBatchLeftOuterJoin(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable {
+ @Test
+ public void multiBatchRightOuterJoin(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
+ testHJMockScanCommon(bitContext, connection, "/join/hj_right_outer_multi_batch.json", 100000);
+ }
- testHJMockScanCommon(bitContext, connection, "/join/hj_left_outer_multi_batch.json", 100000);
- }
+ @Test
+ public void multiBatchLeftOuterJoin(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
- @Test
- public void simpleEqualityJoin() throws Throwable {
+ testHJMockScanCommon(bitContext, connection, "/join/hj_left_outer_multi_batch.json", 100000);
+ }
- // Function checks hash join with single equality condition
- try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+ @Test
+ public void simpleEqualityJoin() throws Throwable {
+ // Function checks hash join with single equality condition
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ Drillbit bit = new Drillbit(CONFIG, serviceSet);
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
- // run query.
- bit.run();
- client.connect();
- List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile("/join/hash_join.json"), Charsets.UTF_8)
- .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
- .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
+ // run query.
+ bit.run();
+ client.connect();
+ List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+ Files.toString(FileUtils.getResourceAsFile("/join/hash_join.json"), Charsets.UTF_8)
+ .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
+ .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
- RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+ RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
- QueryResultBatch batch = results.get(0);
- assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+ QueryResultBatch batch = results.get(0);
+ assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
- Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+ Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
- // Just test the join key
- long colA[] = {1, 1, 2, 2, 1, 1};
+ // Just test the join key
+ long colA[] = {1, 1, 2, 2, 1, 1};
- // Check the output of decimal9
- ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
+ // Check the output of decimal9
+ ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
- for (int i = 0; i < intAccessor1.getValueCount(); i++) {
- assertEquals(intAccessor1.getObject(i), colA[i]);
- }
- assertEquals(6, intAccessor1.getValueCount());
+ for (int i = 0; i < intAccessor1.getValueCount(); i++) {
+ assertEquals(intAccessor1.getObject(i), colA[i]);
+ }
+ assertEquals(6, intAccessor1.getValueCount());
- batchLoader.clear();
- for (QueryResultBatch result : results) {
- result.release();
- }
- }
+ batchLoader.clear();
+ for (QueryResultBatch result : results) {
+ result.release();
+ }
}
+ }
+
+ @Test
+ public void hjWithExchange(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
+
+ // Function tests with hash join with exchanges
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ Drillbit bit = new Drillbit(CONFIG, serviceSet);
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
- @Test
- public void hjWithExchange(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable {
-
- // Function tests with hash join with exchanges
- try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
-
- // run query.
- bit.run();
- client.connect();
- List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile("/join/hj_exchanges.json"), Charsets.UTF_8));
-
- int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
- count += b.getHeader().getRowCount();
- b.release();
- }
-
- System.out.println("Total records: " + count);
- assertEquals(25, count);
+ // run query.
+ bit.run();
+ client.connect();
+ List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+ Files.toString(FileUtils.getResourceAsFile("/join/hj_exchanges.json"), Charsets.UTF_8));
+
+ int count = 0;
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
+ count += b.getHeader().getRowCount();
}
+ b.release();
+ }
+
+ System.out.println("Total records: " + count);
+ assertEquals(25, count);
}
+ }
- @Test
- public void multipleConditionJoin(@Injectable final DrillbitContext bitContext,
- @Injectable UserServer.UserClientConnection connection) throws Throwable {
+ @Test
+ public void multipleConditionJoin(@Injectable final DrillbitContext bitContext,
+ @Injectable UserServer.UserClientConnection connection) throws Throwable {
- // Function tests hash join with multiple join conditions
- try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+ // Function tests hash join with multiple join conditions
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ Drillbit bit = new Drillbit(CONFIG, serviceSet);
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
- // run query.
- bit.run();
- client.connect();
- List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile("/join/hj_multi_condition_join.json"), Charsets.UTF_8)
- .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
- .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
+ // run query.
+ bit.run();
+ client.connect();
+ List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+ Files.toString(FileUtils.getResourceAsFile("/join/hj_multi_condition_join.json"), Charsets.UTF_8)
+ .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
+ .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
- RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+ RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
- QueryResultBatch batch = results.get(0);
- assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+ QueryResultBatch batch = results.get(0);
+ assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
- Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
+ Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
- // Just test the join key
- long colA[] = {1, 2, 1};
- long colC[] = {100, 200, 500};
+ // Just test the join key
+ long colA[] = {1, 2, 1};
+ long colC[] = {100, 200, 500};
- // Check the output of decimal9
- ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
- ValueVector.Accessor intAccessor2 = itr.next().getValueVector().getAccessor();
+ // Check the output of decimal9
+ ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
+ ValueVector.Accessor intAccessor2 = itr.next().getValueVector().getAccessor();
- for (int i = 0; i < intAccessor1.getValueCount(); i++) {
- assertEquals(intAccessor1.getObject(i), colA[i]);
- assertEquals(intAccessor2.getObject(i), colC[i]);
- }
- assertEquals(3, intAccessor1.getValueCount());
+ for (int i = 0; i < intAccessor1.getValueCount(); i++) {
+ assertEquals(intAccessor1.getObject(i), colA[i]);
+ assertEquals(intAccessor2.getObject(i), colC[i]);
+ }
+ assertEquals(3, intAccessor1.getValueCount());
- batchLoader.clear();
- for (QueryResultBatch result : results) {
- result.release();
- }
- }
+ batchLoader.clear();
+ for (QueryResultBatch result : results) {
+ result.release();
+ }
}
+ }
@Test
@@ -255,9 +255,10 @@ public class TestHashJoin extends PopUnitTestBase{
Files.toString(FileUtils.getResourceAsFile("/join/hj_exchanges1.json"), Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
@@ -271,7 +272,7 @@ public class TestHashJoin extends PopUnitTestBase{
public void testHashJoinExprInCondition() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -279,13 +280,14 @@ public class TestHashJoin extends PopUnitTestBase{
List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
Files.toString(FileUtils.getResourceAsFile("/join/hashJoinExpr.json"), Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(10, count);
}
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
index 86584bb2e..a6a186639 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
@@ -69,7 +69,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void simpleEqualityJoin(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
@@ -86,8 +86,9 @@ public class TestMergeJoin extends PopUnitTestBase {
int totalRecordCount = 0;
while (exec.next()) {
totalRecordCount += exec.getRecordCount();
- for (ValueVector v : exec)
+ for (ValueVector v : exec) {
System.out.print("[" + v.getField().toExpr() + "] ");
+ }
System.out.println("\n");
for (int valueIdx = 0; valueIdx < exec.getRecordCount(); valueIdx++) {
List<Object> row = new ArrayList();
@@ -101,8 +102,9 @@ public class TestMergeJoin extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell);
- for (int i = 0; i < (14 - len); ++i)
+ for (int i = 0; i < (14 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -110,8 +112,9 @@ public class TestMergeJoin extends PopUnitTestBase {
}
assertEquals(100, totalRecordCount);
System.out.println("Total Record Count: " + totalRecordCount);
- if (context.getFailureCause() != null)
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
+ }
assertTrue(!context.isFailed());
}
@@ -121,7 +124,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void orderedEqualityLeftJoin(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getConfig(); result = c;
@@ -147,8 +150,9 @@ public class TestMergeJoin extends PopUnitTestBase {
for (int valueIdx = 0; valueIdx < exec.getRecordCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
- for (ValueVector v : exec)
+ for (ValueVector v : exec) {
row.add(v.getField().toExpr() + ":" + v.getAccessor().getObject(valueIdx));
+ }
for (Object cell : row) {
if (cell == null) {
System.out.print("<null> ");
@@ -156,8 +160,9 @@ public class TestMergeJoin extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (10 - len); ++i)
+ for (int i = 0; i < (10 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -165,8 +170,9 @@ public class TestMergeJoin extends PopUnitTestBase {
System.out.println("Total Record Count: " + totalRecordCount);
assertEquals(25, totalRecordCount);
- if (context.getFailureCause() != null)
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
+ }
assertTrue(!context.isFailed());
}
@@ -176,7 +182,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void orderedEqualityInnerJoin(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getConfig(); result = c;
@@ -202,8 +208,9 @@ public class TestMergeJoin extends PopUnitTestBase {
for (int valueIdx = 0; valueIdx < exec.getRecordCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
- for (ValueVector v : exec)
+ for (ValueVector v : exec) {
row.add(v.getField().toExpr() + ":" + v.getAccessor().getObject(valueIdx));
+ }
for (Object cell : row) {
if (cell == null) {
System.out.print("<null> ");
@@ -211,8 +218,9 @@ public class TestMergeJoin extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (10 - len); ++i)
+ for (int i = 0; i < (10 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -220,8 +228,9 @@ public class TestMergeJoin extends PopUnitTestBase {
System.out.println("Total Record Count: " + totalRecordCount);
assertEquals(23, totalRecordCount);
- if (context.getFailureCause() != null)
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
+ }
assertTrue(!context.isFailed());
}
@@ -231,7 +240,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void orderedEqualityMultiBatchJoin(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getConfig(); result = c;
@@ -256,8 +265,9 @@ public class TestMergeJoin extends PopUnitTestBase {
for (int valueIdx = 0; valueIdx < exec.getRecordCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
- for (ValueVector v : exec)
+ for (ValueVector v : exec) {
row.add(v.getField().toExpr() + ":" + v.getAccessor().getObject(valueIdx));
+ }
for (Object cell : row) {
if (cell == null) {
System.out.print("<null> ");
@@ -265,8 +275,9 @@ public class TestMergeJoin extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (10 - len); ++i)
+ for (int i = 0; i < (10 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -274,15 +285,16 @@ public class TestMergeJoin extends PopUnitTestBase {
System.out.println("Total Record Count: " + totalRecordCount);
assertEquals(25, totalRecordCount);
- if (context.getFailureCause() != null)
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
+ }
assertTrue(!context.isFailed());
}
@Test
public void testJoinBatchSize(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable{
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();;
bitContext.getConfig(); result = c;
@@ -295,11 +307,11 @@ public class TestMergeJoin extends PopUnitTestBase {
FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
- while(exec.next()){
+ while (exec.next()) {
assertEquals(100, exec.getRecordCount());
}
- if(context.getFailureCause() != null){
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
}
assertTrue(!context.isFailed());
@@ -310,7 +322,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void testMergeJoinInnerEmptyBatch() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -320,9 +332,10 @@ public class TestMergeJoin extends PopUnitTestBase {
Charsets.UTF_8)
.replace("${JOIN_TYPE}", "INNER"));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(0, count);
@@ -333,7 +346,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void testMergeJoinLeftEmptyBatch() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -343,9 +356,10 @@ public class TestMergeJoin extends PopUnitTestBase {
Charsets.UTF_8)
.replace("${JOIN_TYPE}", "LEFT"));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(50, count);
@@ -356,7 +370,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void testMergeJoinRightEmptyBatch() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -366,9 +380,10 @@ public class TestMergeJoin extends PopUnitTestBase {
Charsets.UTF_8)
.replace("${JOIN_TYPE}", "RIGHT"));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(0, count);
@@ -379,7 +394,7 @@ public class TestMergeJoin extends PopUnitTestBase {
public void testMergeJoinExprInCondition() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -387,9 +402,10 @@ public class TestMergeJoin extends PopUnitTestBase {
List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
Files.toString(FileUtils.getResourceAsFile("/join/mergeJoinExpr.json"), Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(10, count);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
index a3a7aced8..996b6751b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
@@ -50,7 +50,7 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
public void testMergeJoinMultiKeys() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -59,8 +59,8 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
Files.toString(FileUtils.getResourceAsFile("/join/mj_multi_condition.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0){
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
}
b.release();
@@ -76,17 +76,17 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
public void testMergeJoinInnerNullKey() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
-
bit1.run();
client.connect();
List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
Files.toString(FileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "INNER"));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(0, count);
@@ -100,7 +100,7 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
public void testMergeJoinLeftOuterNullKey() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
bit1.run();
@@ -108,12 +108,14 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
Files.toString(FileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "LEFT"));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(110, count);
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
index e8553ca5c..d61c1230a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
@@ -37,7 +37,6 @@ import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
-
public class TestMergingReceiver extends PopUnitTestBase {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergingReceiver.class);
@@ -45,10 +44,9 @@ public class TestMergingReceiver extends PopUnitTestBase {
public void twoBitTwoExchange() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
-
bit1.run();
bit2.run();
client.connect();
@@ -58,13 +56,14 @@ public class TestMergingReceiver extends PopUnitTestBase {
int count = 0;
RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
// print the results
- for(QueryResultBatch b : results) {
+ for (QueryResultBatch b : results) {
count += b.getHeader().getRowCount();
for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
batchLoader.load(b.getHeader().getDef(), b.getData());
- for (VectorWrapper<?> vw : batchLoader)
+ for (VectorWrapper<?> vw : batchLoader) {
row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
+ }
for (Object cell : row) {
if (cell == null) {
System.out.print("<null> ");
@@ -72,8 +71,9 @@ public class TestMergingReceiver extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (30 - len); ++i)
+ for (int i = 0; i < (30 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -88,7 +88,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
public void testMultipleProvidersMixedSizes() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
@@ -102,7 +102,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
// print the results
Long lastBlueValue = null;
- for(QueryResultBatch b : results) {
+ for (QueryResultBatch b : results) {
count += b.getHeader().getRowCount();
for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
@@ -111,17 +111,21 @@ public class TestMergingReceiver extends PopUnitTestBase {
row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
if (vw.getValueVector().getField().getAsSchemaPath().getRootSegment().getPath().equals("blue")) {
// assert order is ascending
- if (((Long)vw.getValueVector().getAccessor().getObject(valueIdx)).longValue() == 0) continue; // ignore initial 0's from sort
- if (lastBlueValue != null)
+ if (((Long)vw.getValueVector().getAccessor().getObject(valueIdx)).longValue() == 0) {
+ continue; // ignore initial 0's from sort
+ }
+ if (lastBlueValue != null) {
assertTrue(((Long)vw.getValueVector().getAccessor().getObject(valueIdx)).longValue() >= ((Long)lastBlueValue).longValue());
+ }
lastBlueValue = (Long)vw.getValueVector().getAccessor().getObject(valueIdx);
}
}
for (Object cell : row) {
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (30 - len); ++i)
+ for (int i = 0; i < (30 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -136,7 +140,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
public void handleEmptyBatch() throws Exception {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+ try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
@@ -149,13 +153,14 @@ public class TestMergingReceiver extends PopUnitTestBase {
int count = 0;
RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
// print the results
- for(QueryResultBatch b : results) {
+ for (QueryResultBatch b : results) {
count += b.getHeader().getRowCount();
for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
List<Object> row = Lists.newArrayList();
batchLoader.load(b.getHeader().getDef(), b.getData());
- for (VectorWrapper vw : batchLoader)
+ for (VectorWrapper vw : batchLoader) {
row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
+ }
for (Object cell : row) {
if (cell == null) {
System.out.print("<null> ");
@@ -163,8 +168,9 @@ public class TestMergingReceiver extends PopUnitTestBase {
}
int len = cell.toString().length();
System.out.print(cell + " ");
- for (int i = 0; i < (30 - len); ++i)
+ for (int i = 0; i < (30 - len); ++i) {
System.out.print(" ");
+ }
}
System.out.println();
}
@@ -174,4 +180,5 @@ public class TestMergingReceiver extends PopUnitTestBase {
assertEquals(100, count);
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
index 28d2897e6..43c430a88 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
@@ -53,12 +53,10 @@ public class TestSimpleProjection extends ExecTest {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSimpleProjection.class);
DrillConfig c = DrillConfig.create();
-
@Test
public void project(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable{
-
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
bitContext.getMetrics(); result = new MetricRegistry();
bitContext.getAllocator(); result = new TopLevelAllocator();
bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
@@ -66,14 +64,13 @@ public class TestSimpleProjection extends ExecTest {
bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
}};
-
PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/project/test1.json"), Charsets.UTF_8));
FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
- while(exec.next()){
+ while (exec.next()) {
VectorUtil.showVectorAccessibleContent(exec.getIncoming(), "\t");
NullableBigIntVector c1 = exec.getValueVectorById(new SchemaPath("col1", ExpressionPosition.UNKNOWN), NullableBigIntVector.class);
NullableBigIntVector c2 = exec.getValueVectorById(new SchemaPath("col2", ExpressionPosition.UNKNOWN), NullableBigIntVector.class);
@@ -82,13 +79,15 @@ public class TestSimpleProjection extends ExecTest {
a1 = c1.getAccessor();
a2 = c2.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
- if (!a1.isNull(i)) assertEquals(a1.get(i)+1, a2.get(i));
+ for (int i =0; i < c1.getAccessor().getValueCount(); i++) {
+ if (!a1.isNull(i)) {
+ assertEquals(a1.get(i)+1, a2.get(i));
+ }
x += a1.isNull(i) ? 0 : a1.get(i);
}
}
- if(context.getFailureCause() != null){
+ if (context.getFailureCause() != null) {
throw context.getFailureCause();
}
assertTrue(!context.isFailed());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index af4afa48c..ac5673db8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -505,7 +505,9 @@ public class TestParquetWriter extends BaseTestQuery {
for (VectorWrapper w : loader) {
String field = w.getField().toExpr();
for (int j = 0; j < loader.getRecordCount(); j++) {
- if (totalRecords - loader.getRecordCount() + j > 5000000) continue;
+ if (totalRecords - loader.getRecordCount() + j > 5000000) {
+ continue;
+ }
Object obj = w.getValueVector().getAccessor().getObject(j);
if (obj != null) {
if (obj instanceof Text) {
@@ -568,7 +570,9 @@ public class TestParquetWriter extends BaseTestQuery {
@Override
public boolean hasNext() {
- if (totalValuesRead == recordLimit) return false;
+ if (totalValuesRead == recordLimit) {
+ return false;
+ }
if (indexInVectorList < hyperVector.getValueVectors().length) {
return true;
} else if ( indexInCurrentVector < currVec.getAccessor().getValueCount()) {
@@ -637,15 +641,17 @@ public class TestParquetWriter extends BaseTestQuery {
public void compareValues(Object expected, Object actual, int counter, String column) throws Exception {
- if ( expected == null ) {
- if (actual == null ) {
- if (VERBOSE_DEBUG) logger.debug("(1) at position " + counter + " column '" + column + "' matched value: " + expected );
+ if (expected == null) {
+ if (actual == null) {
+ if (VERBOSE_DEBUG) {
+ logger.debug("(1) at position " + counter + " column '" + column + "' matched value: " + expected );
+ }
return;
} else {
throw new Exception("at position " + counter + " column '" + column + "' mismatched values, expected: " + expected + " but received " + actual);
}
}
- if ( actual == null) {
+ if (actual == null) {
throw new Exception("unexpected null at position " + counter + " column '" + column + "' should have been: " + expected);
}
if (actual instanceof byte[]) {
@@ -653,14 +659,18 @@ public class TestParquetWriter extends BaseTestQuery {
throw new Exception("at position " + counter + " column '" + column + "' mismatched values, expected: "
+ new String((byte[])expected, "UTF-8") + " but received " + new String((byte[])actual, "UTF-8"));
} else {
- if (VERBOSE_DEBUG) logger.debug("at position " + counter + " column '" + column + "' matched value " + new String((byte[])expected, "UTF-8"));
+ if (VERBOSE_DEBUG) {
+ logger.debug("at position " + counter + " column '" + column + "' matched value " + new String((byte[])expected, "UTF-8"));
+ }
return;
}
}
- if ( ! expected.equals(actual)) {
+ if (!expected.equals(actual)) {
throw new Exception("at position " + counter + " column '" + column + "' mismatched values, expected: " + expected + " but received " + actual);
} else {
- if (VERBOSE_DEBUG) logger.debug("at position " + counter + " column '" + column + "' matched value: " + expected );
+ if (VERBOSE_DEBUG) {
+ logger.debug("at position " + counter + " column '" + column + "' matched value: " + expected );
+ }
}
}
@@ -676,7 +686,7 @@ public class TestParquetWriter extends BaseTestQuery {
for (String column : record.keySet()) {
compareValues(record.get(column), actualRecords.get(i).get(column), counter, column );
}
- if ( ! actualRecords.get(i).equals(record)) {
+ if ( !actualRecords.get(i).equals(record)) {
System.out.println("mismatch at position " + counter );
missing.append(missmatch);
missing.append(",");
@@ -691,4 +701,5 @@ public class TestParquetWriter extends BaseTestQuery {
logger.debug(missing.toString());
System.out.println(missing);
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
index c8790a8d1..530883b34 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
@@ -85,7 +85,7 @@ public class TestWriter extends BaseTestQuery {
FileStatus[] fileStatuses = fs.globStatus(new Path(path.toString(), "*.csv"));
assertTrue(2 == fileStatuses.length);
- for(QueryResultBatch b : results){
+ for (QueryResultBatch b : results) {
b.release();
}
batchLoader.clear();
@@ -137,7 +137,7 @@ public class TestWriter extends BaseTestQuery {
private void ctasHelper(String tableDir, String testQuery, int expectedOutputCount) throws Exception {
Path tableLocation = new Path(tableDir);
- if (fs.exists(tableLocation)){
+ if (fs.exists(tableLocation)) {
fs.delete(tableLocation, true);
}
@@ -146,11 +146,12 @@ public class TestWriter extends BaseTestQuery {
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
int recordsWritten = 0;
- for(QueryResultBatch batch : results) {
+ for (QueryResultBatch batch : results) {
batchLoader.load(batch.getHeader().getDef(), batch.getData());
- if (batchLoader.getRecordCount() <= 0)
+ if (batchLoader.getRecordCount() <= 0) {
continue;
+ }
BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
index 16b1f4055..a96c5076c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
@@ -55,8 +55,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
List<QueryResultBatch> results = testPhysicalFromFileWithResults("xsort/one_key_sort_descending_sv2.json");
int count = 0;
for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
}
assertEquals(500000, count);
@@ -66,7 +67,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
int batchCount = 0;
for (QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() == 0) break;
+ if (b.getHeader().getRowCount() == 0) {
+ break;
+ }
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(allocator);
loader.load(b.getHeader().getDef(),b.getData());
@@ -76,7 +79,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
BigIntVector.Accessor a1 = c1.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+ for (int i =0; i < c1.getAccessor().getValueCount(); i++) {
recordCount++;
assertTrue(String.format("%d > %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
previousBigInt = a1.get(i);
@@ -92,9 +95,10 @@ public class TestSimpleExternalSort extends BaseTestQuery {
public void sortOneKeyDescendingMergeSort() throws Throwable{
List<QueryResultBatch> results = testPhysicalFromFileWithResults("xsort/one_key_sort_descending.json");
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
}
assertEquals(1000000, count);
@@ -104,7 +108,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
int batchCount = 0;
for (QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() == 0) break;
+ if (b.getHeader().getRowCount() == 0) {
+ break;
+ }
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(allocator);
loader.load(b.getHeader().getDef(),b.getData());
@@ -113,7 +119,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
BigIntVector.Accessor a1 = c1.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+ for (int i =0; i < c1.getAccessor().getValueCount(); i++) {
recordCount++;
assertTrue(String.format("%d > %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
previousBigInt = a1.get(i);
@@ -131,7 +137,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
DrillConfig config = DrillConfig.create("drill-external-sort.conf");
- try(Drillbit bit1 = new Drillbit(config, serviceSet);
+ try (Drillbit bit1 = new Drillbit(config, serviceSet);
Drillbit bit2 = new Drillbit(config, serviceSet);
DrillClient client = new DrillClient(config, serviceSet.getCoordinator());) {
@@ -142,9 +148,10 @@ public class TestSimpleExternalSort extends BaseTestQuery {
Files.toString(FileUtils.getResourceAsFile("/xsort/one_key_sort_descending.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
}
assertEquals(1000000, count);
@@ -154,7 +161,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
int batchCount = 0;
for (QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() == 0) break;
+ if (b.getHeader().getRowCount() == 0) {
+ break;
+ }
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
@@ -163,7 +172,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
BigIntVector.Accessor a1 = c1.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+ for (int i =0; i < c1.getAccessor().getValueCount(); i++) {
recordCount++;
assertTrue(String.format("%d < %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
previousBigInt = a1.get(i);
@@ -171,7 +180,6 @@ public class TestSimpleExternalSort extends BaseTestQuery {
loader.clear();
b.release();
}
-
System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
}
@@ -183,7 +191,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
DrillConfig config = DrillConfig.create("drill-oom-xsort.conf");
- try(Drillbit bit1 = new Drillbit(config, serviceSet);
+ try (Drillbit bit1 = new Drillbit(config, serviceSet);
DrillClient client = new DrillClient(config, serviceSet.getCoordinator());) {
bit1.run();
@@ -192,9 +200,10 @@ public class TestSimpleExternalSort extends BaseTestQuery {
Files.toString(FileUtils.getResourceAsFile("/xsort/oom_sort_test.json"),
Charsets.UTF_8));
int count = 0;
- for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ for (QueryResultBatch b : results) {
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
}
assertEquals(10000000, count);
@@ -204,7 +213,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
int batchCount = 0;
for (QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() == 0) break;
+ if (b.getHeader().getRowCount() == 0) {
+ break;
+ }
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
@@ -213,7 +224,7 @@ public class TestSimpleExternalSort extends BaseTestQuery {
BigIntVector.Accessor a1 = c1.getAccessor();
- for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+ for (int i =0; i < c1.getAccessor().getValueCount(); i++) {
recordCount++;
assertTrue(String.format("%d < %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
previousBigInt = a1.get(i);
@@ -222,9 +233,9 @@ public class TestSimpleExternalSort extends BaseTestQuery {
loader.clear();
b.release();
}
-
System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestFragmentChecker.java b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestFragmentChecker.java
index f6972c3ba..58ddd0663 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestFragmentChecker.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestFragmentChecker.java
@@ -36,14 +36,12 @@ import com.google.common.collect.Lists;
public class TestFragmentChecker extends PopUnitTestBase{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestFragmentChecker.class);
-
@Test
public void checkSimpleExchangePlan() throws Exception{
print("/physical_double_exchange.json", 2, 3);
}
-
private void print(String fragmentFile, int bitCount, int exepectedFragmentCount) throws Exception{
System.out.println(String.format("=================Building plan fragments for [%s]. Allowing %d total Drillbits.==================", fragmentFile, bitCount));
@@ -53,9 +51,11 @@ public class TestFragmentChecker extends PopUnitTestBase{
SimpleParallelizer par = new SimpleParallelizer(1000*1000, 5, 10, 1.2);
List<DrillbitEndpoint> endpoints = Lists.newArrayList();
DrillbitEndpoint localBit = null;
- for(int i =0; i < bitCount; i++){
+ for(int i =0; i < bitCount; i++) {
DrillbitEndpoint b1 = DrillbitEndpoint.newBuilder().setAddress("localhost").setControlPort(1234+i).build();
- if(i ==0) localBit = b1;
+ if (i == 0) {
+ localBit = b1;
+ }
endpoints.add(b1);
}
@@ -64,12 +64,10 @@ public class TestFragmentChecker extends PopUnitTestBase{
System.out.print(qwu.getRootFragment().getFragmentJson());
-
- for(PlanFragment f : qwu.getFragments()){
+ for(PlanFragment f : qwu.getFragments()) {
System.out.println(String.format("=========Fragment [%d:%d]=====", f.getHandle().getMajorFragmentId(), f.getHandle().getMinorFragmentId()));
System.out.print(f.getFragmentJson());
}
-
//assertEquals(exepectedFragmentCount, qwu.getFragments().size());
logger.debug("Planning Set {}", planningSet);
@@ -80,4 +78,5 @@ public class TestFragmentChecker extends PopUnitTestBase{
print("/physical_single_exchange.json", 1, 2);
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
index c294aee5e..c01f5d8da 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
@@ -126,8 +126,9 @@ public class ExpressionTreeMaterializerTest extends ExecTest {
//ifCondition = newIfExpr.conditions.get(0);
assertEquals(bigIntType, ifCondition.expression.getMajorType());
assertEquals(true, ((ValueExpressions.BooleanExpression) ((IfExpression)(newIfExpr.elseExpression)).ifCondition.condition).value);
- if (ec.hasErrors())
+ if (ec.hasErrors()) {
System.out.println(ec.toErrorString());
+ }
assertFalse(ec.hasErrors());
}
@@ -204,4 +205,5 @@ public class ExpressionTreeMaterializerTest extends ExecTest {
assertEquals(1, ec.getErrorCount());
System.out.println(ec.toErrorString());
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
index 831bf74dc..f57e76503 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
@@ -105,15 +105,18 @@ public class TestLoad extends ExecTest {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
if (v.getField().getType().getMinorType() == TypeProtos.MinorType.VARCHAR) {
Object obj = accessor.getObject(r);
- if (obj != null)
+ if (obj != null) {
System.out.print(accessor.getObject(r));
- else
+ } else {
System.out.print("NULL");
+ }
} else {
System.out.print(accessor.getObject(r));
}
}
- if (!first) System.out.println();
+ if (!first) {
+ System.out.println();
+ }
}
assertEquals(100, recordCount);
batchLoader.clear();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestBitRpc.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestBitRpc.java
index a5de83269..d408773c1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestBitRpc.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestBitRpc.java
@@ -73,21 +73,18 @@ public class TestBitRpc extends ExecTest {
final BootStrapContext c = new BootStrapContext(DrillConfig.create());
BootStrapContext c2 = new BootStrapContext(DrillConfig.create());
-
- new NonStrictExpectations(){{
+ new NonStrictExpectations() {{
workBus.getOrCreateFragmentManager((FragmentHandle) any); result = fman;
workBus.getFragmentManager( (FragmentHandle) any); result = fman;
fman.getFragmentContext(); result = fcon;
fcon.getAllocator(); result = c.getAllocator();
- }};
-
+ }};
int port = 1234;
DataResponseHandler drp = new BitComTestHandler();
DataServer server = new DataServer(c, workBus, drp);
-
port = server.bind(port, false);
DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
DataConnectionManager manager = new DataConnectionManager(FragmentHandle.getDefaultInstance(), ep, c2);
@@ -139,8 +136,9 @@ public class TestBitRpc extends ExecTest {
while (true) {
long nowMax = max.get();
if (nowMax < micros) {
- if (max.compareAndSet(nowMax, micros))
+ if (max.compareAndSet(nowMax, micros)) {
break;
+ }
} else {
break;
}
@@ -175,4 +173,5 @@ public class TestBitRpc extends ExecTest {
}
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ByteArrayUtil.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ByteArrayUtil.java
index bc678e2e5..cd29f7c9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ByteArrayUtil.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ByteArrayUtil.java
@@ -21,12 +21,19 @@ package org.apache.drill.exec.store;
public class ByteArrayUtil {
public static byte[] toByta(Object data) throws Exception {
- if (data instanceof Integer) return toByta((int) data);
- else if (data instanceof Double) return toByta((double) data);
- else if (data instanceof Float) return toByta((float) data);
- else if (data instanceof Boolean) return toByta((boolean) data);
- else if (data instanceof Long) return toByta((long) data);
- else throw new Exception("Cannot convert that type to a byte array.");
+ if (data instanceof Integer) {
+ return toByta((int) data);
+ } else if (data instanceof Double) {
+ return toByta((double) data);
+ } else if (data instanceof Float) {
+ return toByta((float) data);
+ } else if (data instanceof Boolean) {
+ return toByta((boolean) data);
+ } else if (data instanceof Long) {
+ return toByta((long) data);
+ } else {
+ throw new Exception("Cannot convert that type to a byte array.");
+ }
}
// found at http://www.daniweb.com/software-development/java/code/216874/primitive-types-as-byte-arrays
@@ -51,11 +58,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(short[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 2];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 2, 2);
+ }
return byts;
}
@@ -68,11 +78,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(char[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 2];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 2, 2);
+ }
return byts;
}
@@ -87,11 +100,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(int[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 4];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 4, 4);
+ }
return byts;
}
@@ -110,11 +126,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(long[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 8];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 8, 8);
+ }
return byts;
}
@@ -124,11 +143,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(float[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 4];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 4, 4);
+ }
return byts;
}
@@ -138,11 +160,14 @@ public class ByteArrayUtil {
}
public static byte[] toByta(double[] data) {
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
byte[] byts = new byte[data.length * 8];
- for (int i = 0; i < data.length; i++)
+ for (int i = 0; i < data.length; i++) {
System.arraycopy(toByta(data[i]), 0, byts, i * 8, 8);
+ }
return byts;
}
@@ -156,7 +181,9 @@ public class ByteArrayUtil {
// about how many boolean values are involved, so the exact
// array is returned when later decoded.
// ----------
- if (data == null) return null;
+ if (data == null) {
+ return null;
+ }
// ----------
int len = data.length;
byte[] lena = toByta(len); // int conversion; length array = lena
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
index 6d6baf4e1..01f244a42 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/CachedSingleFileSystem.java
@@ -38,22 +38,24 @@ import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
-public class CachedSingleFileSystem extends FileSystem{
+public class CachedSingleFileSystem extends FileSystem {
private ByteBuf file;
private String path;
- public CachedSingleFileSystem(String path) throws IOException{
+ public CachedSingleFileSystem(String path) throws IOException {
this.path = path;
File f = new File(path);
long length = f.length();
- if(length > Integer.MAX_VALUE) throw new UnsupportedOperationException("Cached file system only supports files of less than 2GB.");
+ if (length > Integer.MAX_VALUE) {
+ throw new UnsupportedOperationException("Cached file system only supports files of less than 2GB.");
+ }
System.out.println(length);
- try(InputStream is = new BufferedInputStream(new FileInputStream(path))){
+ try (InputStream is = new BufferedInputStream(new FileInputStream(path))) {
byte[] buffer = new byte[64*1024];
this.file = UnpooledByteBufAllocator.DEFAULT.directBuffer((int) length);
int read;
- while( (read = is.read(buffer)) > 0){
+ while ( (read = is.read(buffer)) > 0) {
file.writeBytes(buffer, 0, read);
}
}
@@ -113,7 +115,9 @@ public class CachedSingleFileSystem extends FileSystem{
@Override
public FSDataInputStream open(Path path, int arg1) throws IOException {
- if(!path.toString().equals(this.path)) throw new IOException(String.format("You requested file %s but this cached single file system only has the file %s.", path.toString(), this.path));
+ if (!path.toString().equals(this.path)) {
+ throw new IOException(String.format("You requested file %s but this cached single file system only has the file %s.", path.toString(), this.path));
+ }
return new FSDataInputStream(new CachedFSDataInputStream(file.slice()));
}
@@ -165,8 +169,11 @@ public class CachedSingleFileSystem extends FileSystem{
@Override
public void readFully(long pos, byte[] buffer, int offset, int length) throws IOException {
- if(length + pos > buf.capacity()) throw new IOException("Read was too big.");
+ if (length + pos > buf.capacity()) {
+ throw new IOException("Read was too big.");
+ }
read(pos, buffer, offset, length);
}
}
-} \ No newline at end of file
+
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
index b40edd119..15bae6e20 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
@@ -41,14 +41,15 @@ public class TestOutputMutator implements OutputMutator, Iterable<VectorWrapper<
private final Map<MaterializedField, ValueVector> fieldVectorMap = Maps.newHashMap();
private final BufferAllocator allocator;
- public TestOutputMutator(BufferAllocator allocator){
+ public TestOutputMutator(BufferAllocator allocator) {
this.allocator = allocator;
}
public void removeField(MaterializedField field) throws SchemaChangeException {
ValueVector vector = fieldVectorMap.remove(field);
- if (vector == null)
+ if (vector == null) {
throw new SchemaChangeException("Failure attempting to remove an unknown field.");
+ }
container.remove(vector);
vector.close();
}
@@ -66,7 +67,7 @@ public class TestOutputMutator implements OutputMutator, Iterable<VectorWrapper<
return container.iterator();
}
- public void clear(){
+ public void clear() {
}
@@ -83,7 +84,9 @@ public class TestOutputMutator implements OutputMutator, Iterable<VectorWrapper<
@Override
public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
ValueVector v = TypeHelper.getNewVector(field, allocator);
- if(!clazz.isAssignableFrom(v.getClass())) throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
+ if (!clazz.isAssignableFrom(v.getClass())) {
+ throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
+ }
addField(v);
return (T) v;
}
@@ -92,4 +95,5 @@ public class TestOutputMutator implements OutputMutator, Iterable<VectorWrapper<
public DrillBuf getManagedBuffer() {
return allocator.buffer(255);
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
index b2c859662..da8abdd50 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
@@ -97,7 +97,9 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
File f = new File(fileName);
ParquetTestProperties props = new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, new HashMap<String, FieldInfo>());
populateFieldInfoMap(props);
- if(!f.exists()) TestFileGenerator.generateParquetFile(fileName, props);
+ if (!f.exists()) {
+ TestFileGenerator.generateParquetFile(fileName, props);
+ }
}
@@ -118,10 +120,11 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
readEntries = "";
// number of times to read the file
int i = 3;
- for (int j = 0; j < i; j++){
+ for (int j = 0; j < i; j++) {
readEntries += "\""+fileName+"\"";
- if (j < i - 1)
+ if (j < i - 1) {
readEntries += ",";
+ }
}
String planText = Files.toString(FileUtils.getResourceAsFile("/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
testParquetFullEngineLocalText(planText, fileName, i, numberRowGroups, recordsPerRowGroup, true);
@@ -475,10 +478,11 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
String readEntries = "";
// number of times to read the file
int i = 3;
- for (int j = 0; j < i; j++){
+ for (int j = 0; j < i; j++) {
readEntries += "\"/tmp/test.parquet\"";
- if (j < i - 1)
+ if (j < i - 1) {
readEntries += ",";
+ }
}
testParquetFullEngineEventBased(true, "/parquet/parquet_scan_screen_read_entry_replace.json", readEntries,
"/tmp/test.parquet", i, props);
@@ -626,7 +630,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
FileSystem fs = new CachedSingleFileSystem(fileName);
BufferAllocator allocator = new TopLevelAllocator();
- for(int i = 0; i < 25; i++){
+ for(int i = 0; i < 25; i++) {
ParquetRecordReader rr = new ParquetRecordReader(context, 256000, fileName, 0, fs,
new CodecFactoryExposer(dfsConfig), f.getParquetMetadata(), columns);
TestOutputMutator mutator = new TestOutputMutator(allocator);
@@ -663,7 +667,9 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
public void testParquetFullEngineEventBased(boolean testValues, boolean generateNew, String plan, String readEntries, String filename,
int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props,
QueryType queryType) throws Exception{
- if (generateNew) TestFileGenerator.generateParquetFile(filename, props);
+ if (generateNew) {
+ TestFileGenerator.generateParquetFile(filename, props);
+ }
ParquetResultListener resultListener = new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
long C = System.nanoTime();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java
index 0dfb1d8ee..013ea9550 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java
@@ -61,7 +61,7 @@ public class TestFileGenerator {
// TODO - figure out what this should be set at, it should be based on the max nesting level
public static final int MAX_EXPECTED_BIT_WIDTH_FOR_DEFINITION_LEVELS = 16;
- static void populateDrill_418_fields(ParquetTestProperties props){
+ static void populateDrill_418_fields(ParquetTestProperties props) {
props.fields.put("cust_key", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
props.fields.put("nation_key", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
@@ -73,7 +73,7 @@ public class TestFileGenerator {
props.fields.put("comment_col", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
}
- static void populateFieldInfoMap(ParquetTestProperties props){
+ static void populateFieldInfoMap(ParquetTestProperties props) {
props.fields.put("integer", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
props.fields.put("bigInt", new FieldInfo("int64", "bigInt", 64, longVals, TypeProtos.MinorType.BIGINT, props));
props.fields.put("f", new FieldInfo("float", "f", 32, floatVals, TypeProtos.MinorType.FLOAT4, props));
@@ -83,7 +83,7 @@ public class TestFileGenerator {
props.fields.put("bin2", new FieldInfo("binary", "bin2", -1, bin2Vals, TypeProtos.MinorType.VARBINARY, props));
}
- static void populatePigTPCHCustomerFields(ParquetTestProperties props){
+ static void populatePigTPCHCustomerFields(ParquetTestProperties props) {
// all of the data in the fieldInfo constructors doesn't matter because the file is generated outside the test
props.fields.put("C_CUSTKEY", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
props.fields.put("C_NATIONKEY", new FieldInfo("int64", "bigInt", 64, longVals, TypeProtos.MinorType.BIGINT, props));
@@ -95,7 +95,7 @@ public class TestFileGenerator {
props.fields.put("C_COMMENT", new FieldInfo("binary", "bin2", -1, bin2Vals, TypeProtos.MinorType.VARBINARY, props));
}
- static void populatePigTPCHSupplierFields(ParquetTestProperties props){
+ static void populatePigTPCHSupplierFields(ParquetTestProperties props) {
// all of the data in the fieldInfo constructors doesn't matter because the file is generated outside the test
props.fields.put("S_SUPPKEY", new FieldInfo("int32", "integer", 32, intVals, TypeProtos.MinorType.INT, props));
props.fields.put("S_NATIONKEY", new FieldInfo("int64", "bigInt", 64, longVals, TypeProtos.MinorType.BIGINT, props));
@@ -146,7 +146,9 @@ public class TestFileGenerator {
FileSystem fs = FileSystem.get(configuration);
Path path = new Path(filename);
- if (fs.exists(path)) fs.delete(path, false);
+ if (fs.exists(path)) {
+ fs.delete(path, false);
+ }
String messageSchema = "message m {";
@@ -165,14 +167,14 @@ public class TestFileGenerator {
w.start();
HashMap<String, Integer> columnValuesWritten = new HashMap();
int valsWritten;
- for (int k = 0; k < props.numberRowGroups; k++){
+ for (int k = 0; k < props.numberRowGroups; k++) {
w.startBlock(props.recordsPerRowGroup);
currentBooleanByte = 0;
booleanBitCounter.reset();
for (FieldInfo fieldInfo : props.fields.values()) {
- if ( ! columnValuesWritten.containsKey(fieldInfo.name)){
+ if ( ! columnValuesWritten.containsKey(fieldInfo.name)) {
columnValuesWritten.put((String) fieldInfo.name, 0);
valsWritten = 0;
} else {
@@ -202,8 +204,12 @@ public class TestFileGenerator {
int totalValLength = ((byte[]) fieldInfo.values[0]).length + ((byte[]) fieldInfo.values[1]).length + ((byte[]) fieldInfo.values[2]).length + 3 * bytesNeededToEncodeLength;
// used for the case where there is a number of values in this row group that is not divisible by 3
int leftOverBytes = 0;
- if ( valsPerPage % 3 > 0 ) leftOverBytes += ((byte[])fieldInfo.values[1]).length + bytesNeededToEncodeLength;
- if ( valsPerPage % 3 > 1 ) leftOverBytes += ((byte[])fieldInfo.values[2]).length + bytesNeededToEncodeLength;
+ if ( valsPerPage % 3 > 0 ) {
+ leftOverBytes += ((byte[])fieldInfo.values[1]).length + bytesNeededToEncodeLength;
+ }
+ if ( valsPerPage % 3 > 1 ) {
+ leftOverBytes += ((byte[])fieldInfo.values[2]).length + bytesNeededToEncodeLength;
+ }
bytes = new byte[valsPerPage / 3 * totalValLength + leftOverBytes];
}
int bytesPerPage = (int) (valsPerPage * ((int) fieldInfo.bitLength / 8.0));
@@ -222,9 +228,11 @@ public class TestFileGenerator {
currentBooleanByte++;
}
valsWritten++;
- if (currentBooleanByte > bytesPerPage) break;
+ if (currentBooleanByte > bytesPerPage) {
+ break;
+ }
} else {
- if (fieldInfo.values[valsWritten % 3] instanceof byte[]){
+ if (fieldInfo.values[valsWritten % 3] instanceof byte[]) {
System.arraycopy(ByteArrayUtil.toByta(((byte[])fieldInfo.values[valsWritten % 3]).length),
0, bytes, bytesWritten, bytesNeededToEncodeLength);
System.arraycopy(fieldInfo.values[valsWritten % 3],
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
index 84913125b..6cb412c3c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
@@ -53,7 +53,7 @@ public class TestParquetPhysicalPlan extends ExecTest {
RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
DrillConfig config = DrillConfig.create();
- try(Drillbit bit1 = new Drillbit(config, serviceSet); DrillClient client = new DrillClient(config, serviceSet.getCoordinator());){
+ try (Drillbit bit1 = new Drillbit(config, serviceSet); DrillClient client = new DrillClient(config, serviceSet.getCoordinator());) {
bit1.run();
client.connect();
List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, Resources.toString(Resources.getResource(fileName),Charsets.UTF_8));
@@ -88,6 +88,7 @@ public class TestParquetPhysicalPlan extends ExecTest {
private class ParquetResultsListener implements UserResultsListener {
AtomicInteger count = new AtomicInteger();
private CountDownLatch latch = new CountDownLatch(1);
+
@Override
public void submissionFailed(RpcException ex) {
logger.error("submission failed", ex);
@@ -99,7 +100,9 @@ public class TestParquetPhysicalPlan extends ExecTest {
int rows = result.getHeader().getRowCount();
System.out.println(String.format("Result batch arrived. Number of records: %d", rows));
count.addAndGet(rows);
- if (result.getHeader().getIsLastChunk()) latch.countDown();
+ if (result.getHeader().getIsLastChunk()) {
+ latch.countDown();
+ }
result.release();
}
@@ -112,12 +115,13 @@ public class TestParquetPhysicalPlan extends ExecTest {
public void queryIdArrived(QueryId queryId) {
}
}
+
@Test
@Ignore
public void testParseParquetPhysicalPlanRemote() throws Exception {
DrillConfig config = DrillConfig.create();
- try(DrillClient client = new DrillClient(config);){
+ try(DrillClient client = new DrillClient(config);) {
client.connect();
ParquetResultsListener listener = new ParquetResultsListener();
Stopwatch watch = new Stopwatch();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/util/MiniZooKeeperCluster.java b/exec/java-exec/src/test/java/org/apache/drill/exec/util/MiniZooKeeperCluster.java
index 7e9dbbdb3..a502c323e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/util/MiniZooKeeperCluster.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/util/MiniZooKeeperCluster.java
@@ -127,8 +127,9 @@ public class MiniZooKeeperCluster {
*/
public int startup(File baseDir, int numZooKeeperServers) throws IOException,
InterruptedException {
- if (numZooKeeperServers <= 0)
+ if (numZooKeeperServers <= 0) {
return -1;
+ }
setupTestEnv();
shutdown();
@@ -368,4 +369,5 @@ public class MiniZooKeeperCluster {
public int getClientPort() {
return clientPort;
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index c92495f03..9e13ae4cf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -62,12 +62,12 @@ public class TestJsonReader extends BaseTestQuery {
private static final boolean VERBOSE_DEBUG = true;
@BeforeClass
- public static void setupAllocator(){
+ public static void setupAllocator() {
allocator = new TopLevelAllocator();
}
@AfterClass
- public static void destroyAllocator(){
+ public static void destroyAllocator() {
allocator.close();
}
@@ -254,7 +254,9 @@ public class TestJsonReader extends BaseTestQuery {
"}\n }";
String compound = simple;
- for(int i =0; i < repeatSize; i++) compound += simple;
+ for (int i =0; i < repeatSize; i++) {
+ compound += simple;
+ }
// simple = "{ \"integer\" : 2001, \n" +
// " \"float\" : 1.2\n" +
@@ -272,9 +274,9 @@ public class TestJsonReader extends BaseTestQuery {
int i =0;
List<Integer> batchSizes = Lists.newArrayList();
- outside: while(true){
+ outside: while(true) {
writer.setPosition(i);
- switch(jsonReader.write(writer)){
+ switch (jsonReader.write(writer)) {
case WRITE_SUCCEED:
i++;
break;
@@ -291,7 +293,7 @@ public class TestJsonReader extends BaseTestQuery {
writer.allocate();
writer.reset();
- switch(jsonReader.write(writer)){
+ switch(jsonReader.write(writer)) {
case NO_MORE:
System.out.println("no more records - new alloc loop.");
break outside;
@@ -306,7 +308,7 @@ public class TestJsonReader extends BaseTestQuery {
int total = 0;
int lastRecordCount = 0;
- for(Integer records : batchSizes){
+ for (Integer records : batchSizes) {
total += records;
lastRecordCount = records;
}
@@ -340,4 +342,5 @@ public class TestJsonReader extends BaseTestQuery {
writer.clear();
buffer.release();
}
+
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
index b01b3e0b2..b3c653f87 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
@@ -51,8 +51,9 @@ public class TestSpoolingBuffer extends ExecTest {
Charsets.UTF_8));
int count = 0;
for(QueryResultBatch b : results) {
- if (b.getHeader().getRowCount() != 0)
+ if (b.getHeader().getRowCount() != 0) {
count += b.getHeader().getRowCount();
+ }
b.release();
}
assertEquals(500024, count);
diff --git a/exec/java-exec/src/test/resources/agg/hashagg/q6.json b/exec/java-exec/src/test/resources/agg/hashagg/q6.json
index 10f4cba76..c15539188 100644
--- a/exec/java-exec/src/test/resources/agg/hashagg/q6.json
+++ b/exec/java-exec/src/test/resources/agg/hashagg/q6.json
@@ -42,7 +42,7 @@
exprs : [ {
ref : "SUM",
expr : "sum($f1) "
- }, {
+ }, {
ref : "MIN",
expr : "min($f1) "
}, {
diff --git a/exec/java-exec/src/test/resources/agg/hashagg/q7_1.json b/exec/java-exec/src/test/resources/agg/hashagg/q7_1.json
index 256bec644..ef0561367 100644
--- a/exec/java-exec/src/test/resources/agg/hashagg/q7_1.json
+++ b/exec/java-exec/src/test/resources/agg/hashagg/q7_1.json
@@ -42,7 +42,7 @@
ref : "$f0",
expr : "$f0"
}, {
- ref : "$f2",
+ ref : "$f2",
expr : "$f2"
} ],
exprs : [ {
diff --git a/exec/java-exec/src/test/resources/agg/hashagg/q7_2.json b/exec/java-exec/src/test/resources/agg/hashagg/q7_2.json
index d444fc8de..62cf5c358 100644
--- a/exec/java-exec/src/test/resources/agg/hashagg/q7_2.json
+++ b/exec/java-exec/src/test/resources/agg/hashagg/q7_2.json
@@ -42,7 +42,7 @@
ref : "$f0",
expr : "$f0"
}, {
- ref : "$f1",
+ ref : "$f1",
expr : "$f1"
} ],
exprs : [ {
diff --git a/exec/java-exec/src/test/resources/agg/hashagg/q7_3.json b/exec/java-exec/src/test/resources/agg/hashagg/q7_3.json
index 6c5fff0f7..8edc11069 100644
--- a/exec/java-exec/src/test/resources/agg/hashagg/q7_3.json
+++ b/exec/java-exec/src/test/resources/agg/hashagg/q7_3.json
@@ -42,7 +42,7 @@
ref : "$f0",
expr : "$f0"
}, {
- ref : "$f1",
+ ref : "$f1",
expr : "$f1"
}, {
ref : "$f2",
diff --git a/exec/java-exec/src/test/resources/agg/hashagg/q8_1.json b/exec/java-exec/src/test/resources/agg/hashagg/q8_1.json
index adc5e75e8..3461c8ca3 100644
--- a/exec/java-exec/src/test/resources/agg/hashagg/q8_1.json
+++ b/exec/java-exec/src/test/resources/agg/hashagg/q8_1.json
@@ -65,8 +65,8 @@
child: 5,
first: 0,
last: 100
- }, {
- pop : "screen",
+ }, {
+ pop : "screen",
@id : 7,
child : 6
} ]
diff --git a/exec/java-exec/src/test/resources/agg/test1.json b/exec/java-exec/src/test/resources/agg/test1.json
index eb4647d5f..12dab5f13 100644
--- a/exec/java-exec/src/test/resources/agg/test1.json
+++ b/exec/java-exec/src/test/resources/agg/test1.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
entries:[
- {records: 20000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 20000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -30,7 +30,7 @@
@id:3,
child: 2,
pop:"streaming-aggregate",
- keys: [
+ keys: [
{ ref: "blue", expr: "blue" }
],
exprs: [
@@ -43,4 +43,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/agg/twokey.json b/exec/java-exec/src/test/resources/agg/twokey.json
index a237279bb..5e33c5811 100644
--- a/exec/java-exec/src/test/resources/agg/twokey.json
+++ b/exec/java-exec/src/test/resources/agg/twokey.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
entries:[
- {records: 204, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 204, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -40,10 +40,10 @@
@id:4,
child: 3,
pop:"streaming-aggregate",
- keys: [
+ keys: [
{ ref: "key1", expr: "key1" },
{ ref: "key2", expr: "alt" }
-
+
],
exprs: [
{ ref: "cnt", expr:"count(1)" },
@@ -56,4 +56,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/donuts.json b/exec/java-exec/src/test/resources/donuts.json
index 276bcbed2..a5ed4da1d 100644
--- a/exec/java-exec/src/test/resources/donuts.json
+++ b/exec/java-exec/src/test/resources/donuts.json
@@ -1,132 +1,131 @@
{
- "id": "0001",
- "type": "donut",
- "name": "Cake",
- "ppu": 0.55,
- "sales": 35,
-
-
- "batters":
- {
- "batter":
- [
- { "id": "1001", "type": "Regular" },
- { "id": "1002", "type": "Chocolate" },
- { "id": "1003", "type": "Blueberry" },
- { "id": "1004", "type": "Devil's Food" }
- ]
- },
- "topping":
- [
- { "id": "5001", "type": "None" },
- { "id": "5002", "type": "Glazed" },
- { "id": "5005", "type": "Sugar" },
- { "id": "5007", "type": "Powdered Sugar" },
- { "id": "5006", "type": "Chocolate with Sprinkles" },
- { "id": "5003", "type": "Chocolate" },
- { "id": "5004", "type": "Maple" }
- ]
- }
- {
- "id": "0002",
- "type": "donut",
- "name": "Raised",
- "ppu": 0.69,
- "sales": 145,
- "batters":
- {
- "batter":
- [
- { "id": "1001", "type": "Regular" }
- ]
- },
- "topping":
- [
- { "id": "5001", "type": "None" },
- { "id": "5002", "type": "Glazed" },
- { "id": "5005", "type": "Sugar" },
- { "id": "5003", "type": "Chocolate" },
- { "id": "5004", "type": "Maple" }
- ]
- }
- {
- "id": "0003",
- "type": "donut",
- "name": "Old Fashioned",
- "ppu": 0.55,
- "sales": 300,
+ "id": "0001",
+ "type": "donut",
+ "name": "Cake",
+ "ppu": 0.55,
+ "sales": 35,
+ "batters":
+ {
+ "batter":
+ [
+ { "id": "1001", "type": "Regular" },
+ { "id": "1002", "type": "Chocolate" },
+ { "id": "1003", "type": "Blueberry" },
+ { "id": "1004", "type": "Devil's Food" }
+ ]
+ },
+ "topping":
+ [
+ { "id": "5001", "type": "None" },
+ { "id": "5002", "type": "Glazed" },
+ { "id": "5005", "type": "Sugar" },
+ { "id": "5007", "type": "Powdered Sugar" },
+ { "id": "5006", "type": "Chocolate with Sprinkles" },
+ { "id": "5003", "type": "Chocolate" },
+ { "id": "5004", "type": "Maple" }
+ ]
+}
+{
+ "id": "0002",
+ "type": "donut",
+ "name": "Raised",
+ "ppu": 0.69,
+ "sales": 145,
+ "batters":
+ {
+ "batter":
+ [
+ { "id": "1001", "type": "Regular" }
+ ]
+ },
+ "topping":
+ [
+ { "id": "5001", "type": "None" },
+ { "id": "5002", "type": "Glazed" },
+ { "id": "5005", "type": "Sugar" },
+ { "id": "5003", "type": "Chocolate" },
+ { "id": "5004", "type": "Maple" }
+ ]
+}
+{
+ "id": "0003",
+ "type": "donut",
+ "name": "Old Fashioned",
+ "ppu": 0.55,
+ "sales": 300,
- "batters":
- {
- "batter":
- [
- { "id": "1001", "type": "Regular" },
- { "id": "1002", "type": "Chocolate" }
- ]
- },
- "topping":
- [
- { "id": "5001", "type": "None" },
- { "id": "5002", "type": "Glazed" },
- { "id": "5003", "type": "Chocolate" },
- { "id": "5004", "type": "Maple" }
- ]
- }
- {
- "id": "0004",
- "type": "donut",
- "name": "Filled",
- "ppu": 0.69,
- "sales": 14,
+ "batters":
+ {
+ "batter":
+ [
+ { "id": "1001", "type": "Regular" },
+ { "id": "1002", "type": "Chocolate" }
+ ]
+ },
+ "topping":
+ [
+ { "id": "5001", "type": "None" },
+ { "id": "5002", "type": "Glazed" },
+ { "id": "5003", "type": "Chocolate" },
+ { "id": "5004", "type": "Maple" }
+ ]
+}
+{
+ "id": "0004",
+ "type": "donut",
+ "name": "Filled",
+ "ppu": 0.69,
+ "sales": 14,
- "batters":
- {
- "batter":
- [
- { "id": "1001", "type": "Regular" },
- { "id": "1002", "type": "Chocolate" },
- { "id": "1003", "type": "Blueberry" },
- { "id": "1004", "type": "Devil's Food" }
- ]
- },
- "topping":
- [
- { "id": "5001", "type": "None" },
- { "id": "5002", "type": "Glazed" },
- { "id": "5005", "type": "Sugar" },
- { "id": "5007", "type": "Powdered Sugar" },
- { "id": "5006", "type": "Chocolate with Sprinkles" },
- { "id": "5003", "type": "Chocolate" },
- { "id": "5004", "type": "Maple" }
- ],
- "filling":
- [
- { "id": "6001", "type": "None" },
- { "id": "6002", "type": "Raspberry" },
- { "id": "6003", "type": "Lemon" },
- { "id": "6004", "type": "Chocolate" },
- { "id": "6005", "type": "Kreme" }
- ]
- }
- {
- "id": "0005",
- "type": "donut",
- "name": "Apple Fritter",
- "ppu": 1.00,
- "sales": 700,
+ "batters":
+ {
+ "batter":
+ [
+ { "id": "1001", "type": "Regular" },
+ { "id": "1002", "type": "Chocolate" },
+ { "id": "1003", "type": "Blueberry" },
+ { "id": "1004", "type": "Devil's Food" }
+ ]
+ },
+ "topping":
+ [
+ { "id": "5001", "type": "None" },
+ { "id": "5002", "type": "Glazed" },
+ { "id": "5005", "type": "Sugar" },
+ { "id": "5007", "type": "Powdered Sugar" },
+ { "id": "5006", "type": "Chocolate with Sprinkles" },
+ { "id": "5003", "type": "Chocolate" },
+ { "id": "5004", "type": "Maple" }
+ ],
+ "filling":
+ [
+ { "id": "6001", "type": "None" },
+ { "id": "6002", "type": "Raspberry" },
+ { "id": "6003", "type": "Lemon" },
+ { "id": "6004", "type": "Chocolate" },
+ { "id": "6005", "type": "Kreme" }
+ ]
+}
+{
+ "id": "0005",
+ "type": "donut",
+ "name": "Apple Fritter",
+ "ppu": 1.00,
+ "sales": 700,
- "batters":
- {
- "batter":
- [
- { "id": "1001", "type": "Regular" }
- ]
- },
- "topping":
- [
- { "id": "5002", "type": "Glazed" }
- ]
- } \ No newline at end of file
+ "batters":
+ {
+ "batter":
+ [
+ { "id": "1001", "type": "Regular" }
+ ]
+ },
+ "topping":
+ [
+ { "id": "5002", "type": "Glazed" }
+ ]
+}
+ \ No newline at end of file
diff --git a/exec/java-exec/src/test/resources/drill-module.conf b/exec/java-exec/src/test/resources/drill-module.conf
index 65e4d4cb8..7f0587adc 100644
--- a/exec/java-exec/src/test/resources/drill-module.conf
+++ b/exec/java-exec/src/test/resources/drill-module.conf
@@ -26,7 +26,7 @@ drill.exec: {
threads: 1
}
},
- use.ip : false
+ use.ip : false
},
operator: {
packages += "org.apache.drill.exec.physical.config"
@@ -42,14 +42,14 @@ drill.exec: {
context: "drillbit"
},
zk: {
- connect: "localhost:2181",
- root: "drill/happy",
- refresh: 500,
- timeout: 5000,
- retry: {
- count: 7200,
- delay: 500
- }
+ connect: "localhost:2181",
+ root: "drill/happy",
+ refresh: 500,
+ timeout: 5000,
+ retry: {
+ count: 7200,
+ delay: 500
+ }
},
functions: ["org.apache.drill.expr.fn.impl"],
network: {
diff --git a/exec/java-exec/src/test/resources/drill-spool-test-module.conf b/exec/java-exec/src/test/resources/drill-spool-test-module.conf
index c20cc8574..89d248a9b 100644
--- a/exec/java-exec/src/test/resources/drill-spool-test-module.conf
+++ b/exec/java-exec/src/test/resources/drill-spool-test-module.conf
@@ -1,5 +1,5 @@
-// This file tells Drill to consider this module when class path scanning.
-// This file can also include any supplementary configuration information.
+// This file tells Drill to consider this module when class path scanning.
+// This file can also include any supplementary configuration information.
// This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
drill.logical.function.packages += "org.apache.drill.exec.expr.fn.impl"
@@ -26,7 +26,7 @@ drill.exec: {
threads: 1
}
},
- use.ip : false
+ use.ip : false
},
operator: {
packages += "org.apache.drill.exec.physical.config"
@@ -36,9 +36,9 @@ drill.exec: {
},
functions: ["org.apache.drill.expr.fn.impl"],
storage: {
- packages += "org.apache.drill.exec.store"
+ packages += "org.apache.drill.exec.store"
},
- metrics : {
+ metrics : {
context: "drillbit",
jmx: {
enabled : true
@@ -49,14 +49,14 @@ drill.exec: {
}
},
zk: {
- connect: "localhost:2181",
- root: "/drill",
- refresh: 500,
- timeout: 5000,
- retry: {
- count: 7200,
- delay: 500
- }
+ connect: "localhost:2181",
+ root: "/drill",
+ refresh: 500,
+ timeout: 5000,
+ retry: {
+ count: 7200,
+ delay: 500
+ }
},
functions: ["org.apache.drill.expr.fn.impl"],
network: {
@@ -80,4 +80,4 @@ drill.exec: {
delete: false,
size: 0
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/filter/test1.json b/exec/java-exec/src/test/resources/filter/test1.json
index 7d05928c2..43c934fca 100644
--- a/exec/java-exec/src/test/resources/filter/test1.json
+++ b/exec/java-exec/src/test/resources/filter/test1.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -29,7 +29,7 @@
@id:4,
child:2,
pop: "selection-vector-remover"
-
+
},
{
@id: 3,
@@ -37,4 +37,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/filter/test_sv4.json b/exec/java-exec/src/test/resources/filter/test_sv4.json
index 685e31535..69437acd5 100644
--- a/exec/java-exec/src/test/resources/filter/test_sv4.json
+++ b/exec/java-exec/src/test/resources/filter/test_sv4.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -39,4 +39,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastBigInt.json b/exec/java-exec/src/test/resources/functions/cast/testCastBigInt.json
index c0a156546..e07090099 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastBigInt.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastBigInt.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,13 +25,13 @@
child: 1,
pop:"project",
exprs: [
- { ref: "float4col", expr:"float4col" },
- { ref: "float4_cast", expr:"cast(float4col as bigint)" },
- { ref: "float8col", expr:"float8col" },
- { ref: "float8_cast", expr:"cast(float8col as bigint)" },
- { ref: "intcol", expr:"intcol" },
- { ref: "int_cast", expr:"cast(intcol as bigint)" },
- { ref: "varchar_cast", expr:"cast('1256' as bigint)" }
+ { ref: "float4col", expr:"float4col" },
+ { ref: "float4_cast", expr:"cast(float4col as bigint)" },
+ { ref: "float8col", expr:"float8col" },
+ { ref: "float8_cast", expr:"cast(float8col as bigint)" },
+ { ref: "intcol", expr:"intcol" },
+ { ref: "int_cast", expr:"cast(intcol as bigint)" },
+ { ref: "varchar_cast", expr:"cast('1256' as bigint)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastFloat4.json b/exec/java-exec/src/test/resources/functions/cast/testCastFloat4.json
index f5f6e2c80..8fe0131f4 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastFloat4.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastFloat4.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,14 +25,14 @@
child: 1,
pop:"project",
exprs: [
- { ref: "float8col", expr:"float8col" },
- { ref: "float8_cast", expr:"cast(float8col as float4)" },
- { ref: "intcol", expr:"intcol" },
- { ref: "int_cast", expr:"cast(intcol as float4)" },
- { ref: "bigintcol", expr:"bigintcol" },
- { ref: "bigint_cast", expr:"cast(bigintcol as float4)" },
- { ref: "varchar_cast1", expr:"cast('1256' as float4)" },
- { ref: "varchar_cast2", expr:"cast('12.56' as float4)" }
+ { ref: "float8col", expr:"float8col" },
+ { ref: "float8_cast", expr:"cast(float8col as float4)" },
+ { ref: "intcol", expr:"intcol" },
+ { ref: "int_cast", expr:"cast(intcol as float4)" },
+ { ref: "bigintcol", expr:"bigintcol" },
+ { ref: "bigint_cast", expr:"cast(bigintcol as float4)" },
+ { ref: "varchar_cast1", expr:"cast('1256' as float4)" },
+ { ref: "varchar_cast2", expr:"cast('12.56' as float4)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastFloat8.json b/exec/java-exec/src/test/resources/functions/cast/testCastFloat8.json
index c700dd4c2..f16b47b77 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastFloat8.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastFloat8.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,14 +25,14 @@
child: 1,
pop:"project",
exprs: [
- { ref: "float4col", expr:"float4col" },
- { ref: "float4_cast", expr:"cast(float4col as float8)" },
- { ref: "intcol", expr:"intcol" },
- { ref: "int_cast", expr:"cast(intcol as float8)" },
- { ref: "bigintcol", expr:"bigintcol" },
- { ref: "bigint_cast", expr:"cast(bigintcol as float8)" },
- { ref: "varchar_cast1", expr:"cast('1256' as float8)" },
- { ref: "varchar_cast2", expr:"cast('12.56' as float8)" }
+ { ref: "float4col", expr:"float4col" },
+ { ref: "float4_cast", expr:"cast(float4col as float8)" },
+ { ref: "intcol", expr:"intcol" },
+ { ref: "int_cast", expr:"cast(intcol as float8)" },
+ { ref: "bigintcol", expr:"bigintcol" },
+ { ref: "bigint_cast", expr:"cast(bigintcol as float8)" },
+ { ref: "varchar_cast1", expr:"cast('1256' as float8)" },
+ { ref: "varchar_cast2", expr:"cast('12.56' as float8)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastInt.json b/exec/java-exec/src/test/resources/functions/cast/testCastInt.json
index 271e2768e..40b02cd5b 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastInt.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastInt.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,13 +25,13 @@
child: 1,
pop:"project",
exprs: [
- { ref: "float4col", expr:"float4col" },
- { ref: "float4_cast", expr:"cast(float4col as int)" },
- { ref: "float8col", expr:"float8col" },
- { ref: "float8_cast", expr:"cast(float8col as int)" },
- { ref: "bigintcol", expr:"bigintcol" },
- { ref: "bigint_cast", expr:"cast(bigintcol as int)" },
- { ref: "varchar_cast", expr:"cast('1256' as int)" }
+ { ref: "float4col", expr:"float4col" },
+ { ref: "float4_cast", expr:"cast(float4col as int)" },
+ { ref: "float8col", expr:"float8col" },
+ { ref: "float8_cast", expr:"cast(float8col as int)" },
+ { ref: "bigintcol", expr:"bigintcol" },
+ { ref: "bigint_cast", expr:"cast(bigintcol as int)" },
+ { ref: "varchar_cast", expr:"cast('1256' as int)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastNested.json b/exec/java-exec/src/test/resources/functions/cast/testCastNested.json
index 4a2d80a0b..f9698b412 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastNested.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastNested.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,12 +25,12 @@
child: 1,
pop:"project",
exprs: [
- { ref: "float4col", expr:"float4col" },
- { ref: "float4_cast", expr:"cast(cast(float4col as bigint) as varchar(10))" },
- { ref: "intcol", expr:"intcol" },
- { ref: "int_float4_add", expr:"cast(intcol as bigint) + cast(float4col as bigint)" },
- { ref: "bigintcol", expr:"bigintcol" },
- { ref: "bigint2varchar2int", expr:"cast(cast(bigintcol as varchar(8)) as int)" },
+ { ref: "float4col", expr:"float4col" },
+ { ref: "float4_cast", expr:"cast(cast(float4col as bigint) as varchar(10))" },
+ { ref: "intcol", expr:"intcol" },
+ { ref: "int_float4_add", expr:"cast(intcol as bigint) + cast(float4col as bigint)" },
+ { ref: "bigintcol", expr:"bigintcol" },
+ { ref: "bigint2varchar2int", expr:"cast(cast(bigintcol as varchar(8)) as int)" },
{ ref: "add_cast", expr:"cast('100' as int) + cast(cast(200 as varchar(8)) as int)"}
]
},
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastNumException.json b/exec/java-exec/src/test/resources/functions/cast/testCastNumException.json
index 27103af67..2302da001 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastNumException.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastNumException.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
- {name: "intcol", type: "INT", mode: "REQUIRED"} ,
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"} ,
+ {name: "intcol", type: "INT", mode: "REQUIRED"} ,
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -25,7 +25,7 @@
child: 1,
pop:"project",
exprs: [
- { ref: "varchar_cast", expr:"cast('abc' as int)" }
+ { ref: "varchar_cast", expr:"cast('abc' as int)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastVarBinary.json b/exec/java-exec/src/test/resources/functions/cast/testCastVarBinary.json
index b82a6207a..62f9dca61 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastVarBinary.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastVarBinary.json
@@ -12,14 +12,14 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"},
- {name: "intcol", type: "INT", mode: "REQUIRED"},
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"},
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"}
- ]}
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"},
+ {name: "intcol", type: "INT", mode: "REQUIRED"},
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"},
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -27,23 +27,23 @@
child: 1,
pop:"project",
exprs: [
- { ref: "bigintcol", expr:"bigintcol" },
- { ref: "bigintcast1", expr:"cast(bigintcol as varbinary(10))" },
- { ref: "bigintcast2", expr:"cast(bigintcol as varbinary(30))" },
- { ref: "bigintcast3", expr:"cast(bigintcol as varbinary(2))" },
- { ref: "intcol", expr:"intcol" },
- { ref: "intcast1", expr:"cast(intcol as varbinary(10))" },
- { ref: "intcast2", expr:"cast(intcol as varbinary(2))" },
- { ref: "float4col", expr:"float4col" },
- { ref: "float4cast1", expr:"cast(float4col as varbinary(2))" },
- { ref: "float4cast2", expr:"cast(float4col as varbinary(15))" },
- { ref: "float8col", expr:"float8col" },
- { ref: "float8cast1", expr:"cast(float8col as varbinary(2))" },
- { ref: "float8cast2", expr:"cast(float8col as varbinary(15))" },
- { ref: "varcharcol", expr:"varcharcol" },
- { ref: "varchar_cast1", expr:"cast(varcharcol as varbinary(30))" },
- { ref: "varchar_cast2", expr:"cast(varcharcol as varbinary(2))" },
- { ref: "int_lit_cast", expr:"cast(123 as varbinary(5))" }
+ { ref: "bigintcol", expr:"bigintcol" },
+ { ref: "bigintcast1", expr:"cast(bigintcol as varbinary(10))" },
+ { ref: "bigintcast2", expr:"cast(bigintcol as varbinary(30))" },
+ { ref: "bigintcast3", expr:"cast(bigintcol as varbinary(2))" },
+ { ref: "intcol", expr:"intcol" },
+ { ref: "intcast1", expr:"cast(intcol as varbinary(10))" },
+ { ref: "intcast2", expr:"cast(intcol as varbinary(2))" },
+ { ref: "float4col", expr:"float4col" },
+ { ref: "float4cast1", expr:"cast(float4col as varbinary(2))" },
+ { ref: "float4cast2", expr:"cast(float4col as varbinary(15))" },
+ { ref: "float8col", expr:"float8col" },
+ { ref: "float8cast1", expr:"cast(float8col as varbinary(2))" },
+ { ref: "float8cast2", expr:"cast(float8col as varbinary(15))" },
+ { ref: "varcharcol", expr:"varcharcol" },
+ { ref: "varchar_cast1", expr:"cast(varcharcol as varbinary(30))" },
+ { ref: "varchar_cast2", expr:"cast(varcharcol as varbinary(2))" },
+ { ref: "int_lit_cast", expr:"cast(123 as varbinary(5))" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastVarChar.json b/exec/java-exec/src/test/resources/functions/cast/testCastVarChar.json
index 16615d5b6..910d91d02 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastVarChar.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastVarChar.json
@@ -12,14 +12,14 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 5, types: [
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"},
- {name: "intcol", type: "INT", mode: "REQUIRED"},
- {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8col", type: "FLOAT8", mode: "REQUIRED"},
+ {records: 5, types: [
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "bigintcol", type: "BIGINT", mode: "REQUIRED"},
+ {name: "intcol", type: "INT", mode: "REQUIRED"},
+ {name: "float4col", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8col", type: "FLOAT8", mode: "REQUIRED"},
{name: "varbinarycol", type: "VARBINARY", mode: "REQUIRED"}
- ]}
+ ]}
]
},
{
@@ -27,18 +27,18 @@
child: 1,
pop:"project",
exprs: [
- { ref: "bigint1", expr:"cast(bigintcol as varchar(10))" },
- { ref: "bigint2", expr:"cast(bigintcol as varchar(30))" },
- { ref: "bigint3", expr:"cast(bigintcol as varchar(2))" },
- { ref: "int1", expr:"cast(intcol as varchar(10))" },
- { ref: "int2", expr:"cast(intcol as varchar(2))" },
- { ref: "float4_1", expr:"cast(float4col as varchar(2))" },
- { ref: "float4_2", expr:"cast(float4col as varchar(15))" },
- { ref: "float8_1", expr:"cast(float8col as varchar(2))" },
- { ref: "float8_2", expr:"cast(float8col as varchar(15))" },
- { ref: "varbinarycol", expr:"varbinarycol"},
- { ref: "varbinary_cast1", expr:"cast(varbinarycol as varchar(15))" },
- { ref: "varbinary_cast2", expr:"cast(varbinarycol as varchar(2))" },
+ { ref: "bigint1", expr:"cast(bigintcol as varchar(10))" },
+ { ref: "bigint2", expr:"cast(bigintcol as varchar(30))" },
+ { ref: "bigint3", expr:"cast(bigintcol as varchar(2))" },
+ { ref: "int1", expr:"cast(intcol as varchar(10))" },
+ { ref: "int2", expr:"cast(intcol as varchar(2))" },
+ { ref: "float4_1", expr:"cast(float4col as varchar(2))" },
+ { ref: "float4_2", expr:"cast(float4col as varchar(15))" },
+ { ref: "float8_1", expr:"cast(float8col as varchar(2))" },
+ { ref: "float8_2", expr:"cast(float8col as varchar(15))" },
+ { ref: "varbinarycol", expr:"varbinarycol"},
+ { ref: "varbinary_cast1", expr:"cast(varbinarycol as varchar(15))" },
+ { ref: "varbinary_cast2", expr:"cast(varbinarycol as varchar(2))" },
{ ref: "int_lit_cast", expr:"cast(123 as varchar(10))"}
]
},
diff --git a/exec/java-exec/src/test/resources/functions/cast/testCastVarCharNull.json b/exec/java-exec/src/test/resources/functions/cast/testCastVarCharNull.json
index 3ca7ed687..dceecd180 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testCastVarCharNull.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testCastVarCharNull.json
@@ -8,7 +8,7 @@
},
graph:[
{
- @id:1,
+ @id:1,
pop:"fs-scan",
format: {type: "json"},
storage:{type: "file", connection: "classpath:///"},
@@ -18,9 +18,9 @@
child: 1,
pop:"project",
exprs: [
- { ref: "int2varchar", expr:"cast(integer as varchar(20))" },
- { ref: "float2varchar", expr:"cast(float as varchar(20))" }
- ]
+ { ref: "int2varchar", expr:"cast(integer as varchar(20))" },
+ { ref: "float2varchar", expr:"cast(float as varchar(20))" }
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/cast/testICastConstant.json b/exec/java-exec/src/test/resources/functions/cast/testICastConstant.json
index d85dd6c71..69e40584c 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testICastConstant.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testICastConstant.json
@@ -6,44 +6,44 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "intColumn", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 1, types: [
+ {name: "intColumn", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@id:2,
child: 1,
- pop:"project",
+ pop:"project",
exprs: [
- { ref: "BigIntAddFloat8", expr:"10+20.1" },
- { ref: "Float8AddBigInt", expr:"20.1+10" },
- { ref: "Float8AddChar", expr:"20.1 + '10'" },
- { ref: "IntAddFloat8", expr:"cast('10' as int) + 20.1"},
- { ref: "IntAddBigInt", expr:"cast('10' as int) + cast('20' as bigint)"},
- { ref: "BigIntAddInt", expr:"cast('10' as bigint) + cast('20' as int)"},
- { ref: "IntAddFloat8", expr:"cast('10' as int) + cast('20.1' as float8)"},
- { ref: "Float8AddInt", expr:"cast('20.1' as float8) + cast('10' as int) "},
- { ref: "IntAddFloat4", expr:"cast('10' as int) + cast('20.1' as float4)"},
- { ref: "BigIntAddFloat4", expr:"cast('10' as bigint) + cast('20.1' as float4)"},
- { ref: "BigIntAddFloat8", expr:"cast('10' as bigint) + cast('20.1' as float8)"},
- { ref: "Float4AddFloat8", expr:"cast('10' as float4) + cast('20.1' as float8)"},
- { ref: "CharAddFloat4", expr:"'10' + cast('20.1' as float4)"},
- { ref: "CharAddFloat8", expr:"'10' + cast('20.1' as float8)"},
- { ref: "Float4AddFloat8", expr:"cast('10' as float4) + '20.1' "},
- { ref: "Float8AddChar", expr:"cast('10' as float8) + '20.1' "},
- { ref: "CompBigIntFloat8", expr:"10 < 20.1" },
- { ref: "CompCharFloat8", expr:"'10' < 20.1" },
- { ref: "CompFloat8Char", expr:"20.1 > '10' " },
- { ref: "nested1", expr:" 20.1 + 10 > '10' " },
- { ref: "nested2", expr:" 20.1 + 10 > '10' + 15.1" }
- ]
+ { ref: "BigIntAddFloat8", expr:"10+20.1" },
+ { ref: "Float8AddBigInt", expr:"20.1+10" },
+ { ref: "Float8AddChar", expr:"20.1 + '10'" },
+ { ref: "IntAddFloat8", expr:"cast('10' as int) + 20.1"},
+ { ref: "IntAddBigInt", expr:"cast('10' as int) + cast('20' as bigint)"},
+ { ref: "BigIntAddInt", expr:"cast('10' as bigint) + cast('20' as int)"},
+ { ref: "IntAddFloat8", expr:"cast('10' as int) + cast('20.1' as float8)"},
+ { ref: "Float8AddInt", expr:"cast('20.1' as float8) + cast('10' as int) "},
+ { ref: "IntAddFloat4", expr:"cast('10' as int) + cast('20.1' as float4)"},
+ { ref: "BigIntAddFloat4", expr:"cast('10' as bigint) + cast('20.1' as float4)"},
+ { ref: "BigIntAddFloat8", expr:"cast('10' as bigint) + cast('20.1' as float8)"},
+ { ref: "Float4AddFloat8", expr:"cast('10' as float4) + cast('20.1' as float8)"},
+ { ref: "CharAddFloat4", expr:"'10' + cast('20.1' as float4)"},
+ { ref: "CharAddFloat8", expr:"'10' + cast('20.1' as float8)"},
+ { ref: "Float4AddFloat8", expr:"cast('10' as float4) + '20.1' "},
+ { ref: "Float8AddChar", expr:"cast('10' as float8) + '20.1' "},
+ { ref: "CompBigIntFloat8", expr:"10 < 20.1" },
+ { ref: "CompCharFloat8", expr:"'10' < 20.1" },
+ { ref: "CompFloat8Char", expr:"20.1 > '10' " },
+ { ref: "nested1", expr:" 20.1 + 10 > '10' " },
+ { ref: "nested2", expr:" 20.1 + 10 > '10' + 15.1" }
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/cast/testICastMockCol.json b/exec/java-exec/src/test/resources/functions/cast/testICastMockCol.json
index b9fd2428e..27b06fd7d 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testICastMockCol.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testICastMockCol.json
@@ -6,35 +6,35 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id: 1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "intColumn", type: "INT", mode: "REQUIRED"},
- {name: "bigIntColumn", type: "BIGINT", mode: "REQUIRED"},
- {name: "float4Column", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8Column", type: "FLOAT8", mode: "REQUIRED"},
- {name: "intNullableColumn", type: "INT", mode: "OPTIONAL"},
- {name: "bigIntNullableColumn", type: "BIGINT", mode: "OPTIONAL"},
- {name: "float4NullableColumn", type: "FLOAT4", mode: "OPTIONAL"},
- {name: "float8NullableColumn", type: "FLOAT8", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "intColumn", type: "INT", mode: "REQUIRED"},
+ {name: "bigIntColumn", type: "BIGINT", mode: "REQUIRED"},
+ {name: "float4Column", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8Column", type: "FLOAT8", mode: "REQUIRED"},
+ {name: "intNullableColumn", type: "INT", mode: "OPTIONAL"},
+ {name: "bigIntNullableColumn", type: "BIGINT", mode: "OPTIONAL"},
+ {name: "float4NullableColumn", type: "FLOAT4", mode: "OPTIONAL"},
+ {name: "float8NullableColumn", type: "FLOAT8", mode: "OPTIONAL"}
+ ]}
]
},
{
@id:2,
child: 1,
- pop:"project",
+ pop:"project",
exprs: [
- { ref: "NullIntAddInt", expr:"intNullableColumn + intColumn" },
- { ref: "IntAddNullInt", expr:"intColumn + intNullableColumn " },
- { ref: "IntAddNullFloat4", expr:"intColumn + float4NullableColumn " },
- { ref: "Float4AddNullInt", expr:"float4Column + intNullableColumn " },
- { ref: "Float8AddNullBigInt", expr:"float8Column + bigIntNullableColumn " }
- ]
+ { ref: "NullIntAddInt", expr:"intNullableColumn + intColumn" },
+ { ref: "IntAddNullInt", expr:"intColumn + intNullableColumn " },
+ { ref: "IntAddNullFloat4", expr:"intColumn + float4NullableColumn " },
+ { ref: "Float4AddNullInt", expr:"float4Column + intNullableColumn " },
+ { ref: "Float8AddNullBigInt", expr:"float8Column + bigIntNullableColumn " }
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/cast/testICastNullExp.json b/exec/java-exec/src/test/resources/functions/cast/testICastNullExp.json
index 6b8fd446f..8c214ad78 100644
--- a/exec/java-exec/src/test/resources/functions/cast/testICastNullExp.json
+++ b/exec/java-exec/src/test/resources/functions/cast/testICastNullExp.json
@@ -6,33 +6,33 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "float8Column", type: "FLOAT8", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "float8Column", type: "FLOAT8", mode: "OPTIONAL"}
+ ]}
]
},
{
@id:2,
child: 1,
- pop:"project",
- exprs: [
- { ref: "isNullOfNullExp", expr:"isnull(unkonwCol)" },
- { ref: "isNullOfKnowCol", expr:"isnull(float8Column)" },
- { ref: "isNotNullOfNullExp", expr:"isnotnull(unkonwCol)" },
- { ref: "isNotNullOfKnowCol", expr:"isnotnull(float8Column)" },
- { ref: "BigIntAddNullExp", expr:"1 + unknowCol" },
- { ref: "NullExpAddBigInt", expr:"unknowCol + 1" },
- { ref: "Float8AddNullExp", expr:"1.2 + unknowCol" },
- { ref: "NullExpAddFloat8", expr:"unknowCol + 1.2" },
- { ref: "Float4AddNullExp", expr:"cast(1.2 as float4) + unknowCol" },
- { ref: "NullExpAddFloat4", expr:"unknowCol +cast(1.2 as float4) " }
- ]
+ pop:"project",
+ exprs: [
+ { ref: "isNullOfNullExp", expr:"isnull(unkonwCol)" },
+ { ref: "isNullOfKnowCol", expr:"isnull(float8Column)" },
+ { ref: "isNotNullOfNullExp", expr:"isnotnull(unkonwCol)" },
+ { ref: "isNotNullOfKnowCol", expr:"isnotnull(float8Column)" },
+ { ref: "BigIntAddNullExp", expr:"1 + unknowCol" },
+ { ref: "NullExpAddBigInt", expr:"unknowCol + 1" },
+ { ref: "Float8AddNullExp", expr:"1.2 + unknowCol" },
+ { ref: "NullExpAddFloat8", expr:"unknowCol + 1.2" },
+ { ref: "Float4AddNullExp", expr:"cast(1.2 as float4) + unknowCol" },
+ { ref: "NullExpAddFloat4", expr:"unknowCol +cast(1.2 as float4) " }
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/cast/two_way_implicit_cast.json b/exec/java-exec/src/test/resources/functions/cast/two_way_implicit_cast.json
index 31cf5419d..008a59d85 100644
--- a/exec/java-exec/src/test/resources/functions/cast/two_way_implicit_cast.json
+++ b/exec/java-exec/src/test/resources/functions/cast/two_way_implicit_cast.json
@@ -12,10 +12,10 @@
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "col1", type: "FLOAT4", mode: "REQUIRED"},
- {name: "col2", type: "FLOAT8", mode: "REQUIRED"}
- ]}
+ {records: 1, types: [
+ {name: "col1", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "col2", type: "FLOAT8", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -23,8 +23,8 @@
child: 1,
pop:"project",
exprs: [
- {ref: "str_to_int_cast", expr:"8 + '2'" },
- {ref: "int_to_str_cast", expr:"substr(10123, 1, 3)" }
+ {ref: "str_to_int_cast", expr:"8 + '2'" },
+ {ref: "int_to_str_cast", expr:"substr(10123, 1, 3)" }
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/comparisonTest.json b/exec/java-exec/src/test/resources/functions/comparisonTest.json
index eac6e685d..986dbccbb 100644
--- a/exec/java-exec/src/test/resources/functions/comparisonTest.json
+++ b/exec/java-exec/src/test/resources/functions/comparisonTest.json
@@ -6,22 +6,22 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "intColumn", type: "INT", mode: "REQUIRED"},
- {name: "bigIntColumn", type: "BIGINT", mode: "REQUIRED"},
- {name: "float4Column", type: "FLOAT4", mode: "REQUIRED"},
- {name: "float8Column", type: "FLOAT8", mode: "REQUIRED"},
- {name: "intNullableColumn", type: "INT", mode: "OPTIONAL"},
- {name: "bigIntNullableColumn", type: "BIGINT", mode: "OPTIONAL"},
- {name: "float4NullableColumn", type: "FLOAT4", mode: "OPTIONAL"},
- {name: "float8NullableColumn", type: "FLOAT8", mode: "OPTIONAL"}
- ]}
+ {records: 100, types: [
+ {name: "intColumn", type: "INT", mode: "REQUIRED"},
+ {name: "bigIntColumn", type: "BIGINT", mode: "REQUIRED"},
+ {name: "float4Column", type: "FLOAT4", mode: "REQUIRED"},
+ {name: "float8Column", type: "FLOAT8", mode: "REQUIRED"},
+ {name: "intNullableColumn", type: "INT", mode: "OPTIONAL"},
+ {name: "bigIntNullableColumn", type: "BIGINT", mode: "OPTIONAL"},
+ {name: "float4NullableColumn", type: "FLOAT4", mode: "OPTIONAL"},
+ {name: "float8NullableColumn", type: "FLOAT8", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -36,4 +36,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/functions/string/testCharLength.json b/exec/java-exec/src/test/resources/functions/string/testCharLength.json
index ae29f6737..4d45607e7 100644
--- a/exec/java-exec/src/test/resources/functions/string/testCharLength.json
+++ b/exec/java-exec/src/test/resources/functions/string/testCharLength.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,19 +23,19 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "char_length('aababcdf')"},
- { ref: "col2", expr: "char_length('')"},
- { ref: "col3", expr: "char_length(varcharcol)"},
- { ref: "col4", expr: "char_length(nullvarcharcol)"},
- { ref: "col5", expr: "character_length('aababcdf')"},
- { ref: "col6", expr: "character_length('')"},
- { ref: "col7", expr: "character_length(varcharcol)"},
- { ref: "col8", expr: "character_length(nullvarcharcol)"},
- { ref: "col9", expr: "length('aababcdf')"},
- { ref: "col10", expr: "length('')"},
- { ref: "col11", expr: "length(varcharcol)"},
- { ref: "col12", expr: "length(nullvarcharcol)"}
- ]
+ { ref: "col1", expr: "char_length('aababcdf')"},
+ { ref: "col2", expr: "char_length('')"},
+ { ref: "col3", expr: "char_length(varcharcol)"},
+ { ref: "col4", expr: "char_length(nullvarcharcol)"},
+ { ref: "col5", expr: "character_length('aababcdf')"},
+ { ref: "col6", expr: "character_length('')"},
+ { ref: "col7", expr: "character_length(varcharcol)"},
+ { ref: "col8", expr: "character_length(nullvarcharcol)"},
+ { ref: "col9", expr: "length('aababcdf')"},
+ { ref: "col10", expr: "length('')"},
+ { ref: "col11", expr: "length(varcharcol)"},
+ { ref: "col12", expr: "length(nullvarcharcol)"}
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testConcat.json b/exec/java-exec/src/test/resources/functions/string/testConcat.json
index 5b217106d..e8e9a3143 100644
--- a/exec/java-exec/src/test/resources/functions/string/testConcat.json
+++ b/exec/java-exec/src/test/resources/functions/string/testConcat.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,10 +23,10 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "concat('abc', 'ABC')"},
- { ref: "col2", expr: "concat('abc', '')"},
- { ref: "col3", expr: "concat('', 'ABC')"},
- { ref: "col4", expr: "concat('', '')"}
+ { ref: "col1", expr: "concat('abc', 'ABC')"},
+ { ref: "col2", expr: "concat('abc', '')"},
+ { ref: "col3", expr: "concat('', 'ABC')"},
+ { ref: "col4", expr: "concat('', '')"}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testLeft.json b/exec/java-exec/src/test/resources/functions/string/testLeft.json
index e8126a552..b30ddb968 100644
--- a/exec/java-exec/src/test/resources/functions/string/testLeft.json
+++ b/exec/java-exec/src/test/resources/functions/string/testLeft.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,14 +23,14 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "left('abcdef', 2)"},
- { ref: "col2", expr: "left('abcdef', 6)"},
+ { ref: "col1", expr: "left('abcdef', 2)"},
+ { ref: "col2", expr: "left('abcdef', 6)"},
{ ref: "col3", expr: "left('abcdef', 7)"},
{ ref: "col4", expr: "left('abcdef', -2)"},
{ ref: "col5", expr: "left('abcdef', -5)"},
{ ref: "col6", expr: "left('abcdef', -6)"},
{ ref: "col7", expr: "left('abcdef', -7)"}
- ]
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testLike.json b/exec/java-exec/src/test/resources/functions/string/testLike.json
index fc99ef218..59880f632 100644
--- a/exec/java-exec/src/test/resources/functions/string/testLike.json
+++ b/exec/java-exec/src/test/resources/functions/string/testLike.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testLower.json b/exec/java-exec/src/test/resources/functions/string/testLower.json
index d6351d829..d7dd186cb 100644
--- a/exec/java-exec/src/test/resources/functions/string/testLower.json
+++ b/exec/java-exec/src/test/resources/functions/string/testLower.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,8 +23,8 @@
child: 1,
pop:"project",
exprs: [
- { ref: "lower", expr: "lower('ABcEFgh')"},
- { ref: "lower", expr: "lower('aBc')"},
+ { ref: "lower", expr: "lower('ABcEFgh')"},
+ { ref: "lower", expr: "lower('aBc')"},
{ ref: "lower", expr: "lower('')"}
]
},
diff --git a/exec/java-exec/src/test/resources/functions/string/testLpad.json b/exec/java-exec/src/test/resources/functions/string/testLpad.json
index 017df71d5..fc21ee8f5 100644
--- a/exec/java-exec/src/test/resources/functions/string/testLpad.json
+++ b/exec/java-exec/src/test/resources/functions/string/testLpad.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,18 +23,17 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "lpad('abcdef', 0, 'abc')"},
- { ref: "col2", expr: "lpad('abcdef', -3, 'abc')"},
- { ref: "col3", expr: "lpad('abcdef', 6, 'abc')"},
- { ref: "col4", expr: "lpad('abcdef', 2, 'abc')"},
- { ref: "col5", expr: "lpad('abcdef', 2, '')"},
- { ref: "col7", expr: "lpad('abcdef', 10, '')"},
- { ref: "col8", expr: "lpad('abcdef', 10, 'A')"},
- { ref: "col9", expr: "lpad('abcdef', 10, 'AB')"},
- { ref: "col10", expr: "lpad('abcdef', 10, 'ABC')"},
- { ref: "col11", expr: "lpad('abcdef', 10, 'ABCDEFGHIJKLMN')"}
-
- ]
+ { ref: "col1", expr: "lpad('abcdef', 0, 'abc')"},
+ { ref: "col2", expr: "lpad('abcdef', -3, 'abc')"},
+ { ref: "col3", expr: "lpad('abcdef', 6, 'abc')"},
+ { ref: "col4", expr: "lpad('abcdef', 2, 'abc')"},
+ { ref: "col5", expr: "lpad('abcdef', 2, '')"},
+ { ref: "col7", expr: "lpad('abcdef', 10, '')"},
+ { ref: "col8", expr: "lpad('abcdef', 10, 'A')"},
+ { ref: "col9", expr: "lpad('abcdef', 10, 'AB')"},
+ { ref: "col10", expr: "lpad('abcdef', 10, 'ABC')"},
+ { ref: "col11", expr: "lpad('abcdef', 10, 'ABCDEFGHIJKLMN')"}
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testLtrim.json b/exec/java-exec/src/test/resources/functions/string/testLtrim.json
index f80123652..7b3e038ef 100644
--- a/exec/java-exec/src/test/resources/functions/string/testLtrim.json
+++ b/exec/java-exec/src/test/resources/functions/string/testLtrim.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,13 +23,13 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "ltrim('abcdef', 'abc')"},
- { ref: "col2", expr: "ltrim('abcdef', '')"},
- { ref: "col3", expr: "ltrim('abcdabc', 'abc')"},
- { ref: "col4", expr: "ltrim('abc', 'abc')"},
- { ref: "col5", expr: "ltrim('', 'abc')"},
- { ref: "col6", expr: "ltrim('', '')"}
- ]
+ { ref: "col1", expr: "ltrim('abcdef', 'abc')"},
+ { ref: "col2", expr: "ltrim('abcdef', '')"},
+ { ref: "col3", expr: "ltrim('abcdabc', 'abc')"},
+ { ref: "col4", expr: "ltrim('abc', 'abc')"},
+ { ref: "col5", expr: "ltrim('', 'abc')"},
+ { ref: "col6", expr: "ltrim('', '')"}
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testPosition.json b/exec/java-exec/src/test/resources/functions/string/testPosition.json
index d9cccf99c..73dda81df 100644
--- a/exec/java-exec/src/test/resources/functions/string/testPosition.json
+++ b/exec/java-exec/src/test/resources/functions/string/testPosition.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testRegexpReplace.json b/exec/java-exec/src/test/resources/functions/string/testRegexpReplace.json
index 268ec0006..014c09b48 100644
--- a/exec/java-exec/src/test/resources/functions/string/testRegexpReplace.json
+++ b/exec/java-exec/src/test/resources/functions/string/testRegexpReplace.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,11 +23,10 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "regexp_replace('Thomas', '.[mN]a.', 'M')" },
- { ref: "col1", expr: "regexp_replace('Thomas', '.[mN]a.', '')" },
- { ref: "col1", expr: "regexp_replace('Thomas', 'ef', 'AB')" }
-
- ]
+ { ref: "col1", expr: "regexp_replace('Thomas', '.[mN]a.', 'M')" },
+ { ref: "col1", expr: "regexp_replace('Thomas', '.[mN]a.', '')" },
+ { ref: "col1", expr: "regexp_replace('Thomas', 'ef', 'AB')" }
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testReplace.json b/exec/java-exec/src/test/resources/functions/string/testReplace.json
index 8cf8c64d8..fa1e8d861 100644
--- a/exec/java-exec/src/test/resources/functions/string/testReplace.json
+++ b/exec/java-exec/src/test/resources/functions/string/testReplace.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,13 +23,13 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "replace('aababcdf', 'ab', 'AB')"},
- { ref: "col2", expr: "replace('aababcdf', 'a', 'AB')"},
- { ref: "col3", expr: "replace('aababcdf', '', 'AB')"},
- { ref: "col4", expr: "replace('aababcdf', 'ab', '')"},
- { ref: "col5", expr: "replace('abc', 'abc', 'ABCD')"},
- { ref: "col6", expr: "replace('abc', 'abcdefg', 'ABCD')"}
- ]
+ { ref: "col1", expr: "replace('aababcdf', 'ab', 'AB')"},
+ { ref: "col2", expr: "replace('aababcdf', 'a', 'AB')"},
+ { ref: "col3", expr: "replace('aababcdf', '', 'AB')"},
+ { ref: "col4", expr: "replace('aababcdf', 'ab', '')"},
+ { ref: "col5", expr: "replace('abc', 'abc', 'ABCD')"},
+ { ref: "col6", expr: "replace('abc', 'abcdefg', 'ABCD')"}
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testRight.json b/exec/java-exec/src/test/resources/functions/string/testRight.json
index 74abcd465..e9d88b0e0 100644
--- a/exec/java-exec/src/test/resources/functions/string/testRight.json
+++ b/exec/java-exec/src/test/resources/functions/string/testRight.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,14 +23,14 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "right('abcdef', 2)"},
- { ref: "col2", expr: "right('abcdef', 6)"},
+ { ref: "col1", expr: "right('abcdef', 2)"},
+ { ref: "col2", expr: "right('abcdef', 6)"},
{ ref: "col3", expr: "right('abcdef', 7)"},
{ ref: "col4", expr: "right('abcdef', -2)"},
{ ref: "col5", expr: "right('abcdef', -5)"},
{ ref: "col6", expr: "right('abcdef', -6)"},
{ ref: "col7", expr: "right('abcdef', -7)"}
- ]
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testRpad.json b/exec/java-exec/src/test/resources/functions/string/testRpad.json
index e41da4b49..389ea15aa 100644
--- a/exec/java-exec/src/test/resources/functions/string/testRpad.json
+++ b/exec/java-exec/src/test/resources/functions/string/testRpad.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,17 +23,16 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "rpad('abcdef', 0, 'abc')"},
- { ref: "col2", expr: "rpad('abcdef', -3, 'abc')"},
- { ref: "col3", expr: "rpad('abcdef', 6, 'abc')"},
- { ref: "col4", expr: "rpad('abcdef', 2, 'abc')"},
- { ref: "col5", expr: "rpad('abcdef', 2, '')"},
- { ref: "col7", expr: "rpad('abcdef', 10, '')"},
- { ref: "col8", expr: "rpad('abcdef', 10, 'A')"},
- { ref: "col9", expr: "rpad('abcdef', 10, 'AB')"},
- { ref: "col10", expr: "rpad('abcdef', 10, 'ABC')"},
- { ref: "col11", expr: "rpad('abcdef', 10, 'ABCDEFGHIJKLMN')"}
-
+ { ref: "col1", expr: "rpad('abcdef', 0, 'abc')"},
+ { ref: "col2", expr: "rpad('abcdef', -3, 'abc')"},
+ { ref: "col3", expr: "rpad('abcdef', 6, 'abc')"},
+ { ref: "col4", expr: "rpad('abcdef', 2, 'abc')"},
+ { ref: "col5", expr: "rpad('abcdef', 2, '')"},
+ { ref: "col7", expr: "rpad('abcdef', 10, '')"},
+ { ref: "col8", expr: "rpad('abcdef', 10, 'A')"},
+ { ref: "col9", expr: "rpad('abcdef', 10, 'AB')"},
+ { ref: "col10", expr: "rpad('abcdef', 10, 'ABC')"},
+ { ref: "col11", expr: "rpad('abcdef', 10, 'ABCDEFGHIJKLMN')"}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testRtrim.json b/exec/java-exec/src/test/resources/functions/string/testRtrim.json
index d3608d2fd..6d77cc908 100644
--- a/exec/java-exec/src/test/resources/functions/string/testRtrim.json
+++ b/exec/java-exec/src/test/resources/functions/string/testRtrim.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,13 +23,13 @@
child: 1,
pop:"project",
exprs: [
- { ref: "col1", expr: "rtrim('abcdef', 'def')"},
- { ref: "col2", expr: "rtrim('abcdef', '')"},
- { ref: "col3", expr: "rtrim('ABdabc', 'abc')"},
- { ref: "col4", expr: "rtrim('abc', 'abc')"},
- { ref: "col5", expr: "rtrim('', 'abc')"},
- { ref: "col6", expr: "rtrim('', '')"}
- ]
+ { ref: "col1", expr: "rtrim('abcdef', 'def')"},
+ { ref: "col2", expr: "rtrim('abcdef', '')"},
+ { ref: "col3", expr: "rtrim('ABdabc', 'abc')"},
+ { ref: "col4", expr: "rtrim('abc', 'abc')"},
+ { ref: "col5", expr: "rtrim('', 'abc')"},
+ { ref: "col6", expr: "rtrim('', '')"}
+ ]
},
{
@id: 3,
diff --git a/exec/java-exec/src/test/resources/functions/string/testSimilar.json b/exec/java-exec/src/test/resources/functions/string/testSimilar.json
index c0972aeef..dceb4579b 100644
--- a/exec/java-exec/src/test/resources/functions/string/testSimilar.json
+++ b/exec/java-exec/src/test/resources/functions/string/testSimilar.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testStringFuncs.json b/exec/java-exec/src/test/resources/functions/string/testStringFuncs.json
index 4beea3d8d..9fd0110a7 100644
--- a/exec/java-exec/src/test/resources/functions/string/testStringFuncs.json
+++ b/exec/java-exec/src/test/resources/functions/string/testStringFuncs.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testSubstr.json b/exec/java-exec/src/test/resources/functions/string/testSubstr.json
index 02c7a2d2e..0f889c5ca 100644
--- a/exec/java-exec/src/test/resources/functions/string/testSubstr.json
+++ b/exec/java-exec/src/test/resources/functions/string/testSubstr.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testTrim.json b/exec/java-exec/src/test/resources/functions/string/testTrim.json
index 6c81f782a..4b71ea668 100644
--- a/exec/java-exec/src/test/resources/functions/string/testTrim.json
+++ b/exec/java-exec/src/test/resources/functions/string/testTrim.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
diff --git a/exec/java-exec/src/test/resources/functions/string/testUpper.json b/exec/java-exec/src/test/resources/functions/string/testUpper.json
index c155e14e7..841a8f6fa 100644
--- a/exec/java-exec/src/test/resources/functions/string/testUpper.json
+++ b/exec/java-exec/src/test/resources/functions/string/testUpper.json
@@ -12,10 +12,10 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 1, types: [
- {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
- {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 1, types: [
+ {name: "varcharcol", type: "VARCHAR", mode: "REQUIRED"},
+ {name: "nullvarcharcol", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -23,8 +23,8 @@
child: 1,
pop:"project",
exprs: [
- { ref: "lower", expr: "upper('ABcEFgh')"},
- { ref: "lower", expr: "upper('aBc')"},
+ { ref: "lower", expr: "upper('ABcEFgh')"},
+ { ref: "lower", expr: "upper('aBc')"},
{ ref: "lower", expr: "upper('')"}
]
},
diff --git a/exec/java-exec/src/test/resources/functions/testByteSubstring.json b/exec/java-exec/src/test/resources/functions/testByteSubstring.json
index 299bb5110..ff4e967a7 100644
--- a/exec/java-exec/src/test/resources/functions/testByteSubstring.json
+++ b/exec/java-exec/src/test/resources/functions/testByteSubstring.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "yellow", type: "VARBINARY", mode: "OPTIONAL"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "yellow", type: "VARBINARY", mode: "OPTIONAL"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -34,4 +34,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/functions/testSubstring.json b/exec/java-exec/src/test/resources/functions/testSubstring.json
index d0fe2b9f6..536fda1e7 100644
--- a/exec/java-exec/src/test/resources/functions/testSubstring.json
+++ b/exec/java-exec/src/test/resources/functions/testSubstring.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "yellow", type: "VARCHAR", mode: "OPTIONAL"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "yellow", type: "VARCHAR", mode: "OPTIONAL"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -34,4 +34,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/functions/testSubstringNegative.json b/exec/java-exec/src/test/resources/functions/testSubstringNegative.json
index 12ff0f285..6d4659dec 100644
--- a/exec/java-exec/src/test/resources/functions/testSubstringNegative.json
+++ b/exec/java-exec/src/test/resources/functions/testSubstringNegative.json
@@ -12,12 +12,12 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "yellow", type: "VARCHAR", mode: "OPTIONAL"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "yellow", type: "VARCHAR", mode: "OPTIONAL"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -34,4 +34,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/join/hash_join.json b/exec/java-exec/src/test/resources/join/hash_join.json
index 41a983b0d..dbeb4efc1 100644
--- a/exec/java-exec/src/test/resources/join/hash_join.json
+++ b/exec/java-exec/src/test/resources/join/hash_join.json
@@ -9,56 +9,56 @@
"resultMode" : "EXEC"
},
graph:[
- {
- @id:1,
- pop:"fs-scan",
- format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
- files:["#{TEST_FILE_1}"]
- },
- {
- @id:2,
- pop:"fs-scan",
- format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
- files:["#{TEST_FILE_2}"]
- },
- {
- "pop" : "project",
- "@id" : 3,
- "exprs" : [ {
- "ref" : "A",
- "expr" : "A"
- },
- { "ref" : "CCOL", "expr" : "C" }
- ],
+ {
+ @id:1,
+ pop:"fs-scan",
+ format: {type: "json"},
+ storage:{type: "file", connection: "file:///"},
+ files:["#{TEST_FILE_1}"]
+ },
+ {
+ @id:2,
+ pop:"fs-scan",
+ format: {type: "json"},
+ storage:{type: "file", connection: "file:///"},
+ files:["#{TEST_FILE_2}"]
+ },
+ {
+ "pop" : "project",
+ "@id" : 3,
+ "exprs" : [ {
+ "ref" : "A",
+ "expr" : "A"
+ },
+ { "ref" : "CCOL", "expr" : "C" }
+ ],
- "child" : 1
- },
- {
- "pop" : "project",
- "@id" : 4,
- "exprs" : [ {
- "ref" : "B",
- "expr" : "B"
- },
- { "ref" : "DCOL", "expr" : "D" }
- ],
+ "child" : 1
+ },
+ {
+ "pop" : "project",
+ "@id" : 4,
+ "exprs" : [ {
+ "ref" : "B",
+ "expr" : "B"
+ },
+ { "ref" : "DCOL", "expr" : "D" }
+ ],
- "child" : 2
- },
- {
+ "child" : 2
+ },
+ {
@id: 5,
right: 3,
left: 4,
pop: "hash-join",
conditions: [ {relationship: "==", left: "B", right: "A"} ],
- joinType : "INNER"
- },
- {
+ joinType : "INNER"
+ },
+ {
@id: 6,
child: 5,
pop: "screen"
- }
- ]
- }
+ }
+ ]
+}
diff --git a/exec/java-exec/src/test/resources/join/hj_multi_condition_join.json b/exec/java-exec/src/test/resources/join/hj_multi_condition_join.json
index 0f1c32b91..4033ca9c3 100644
--- a/exec/java-exec/src/test/resources/join/hj_multi_condition_join.json
+++ b/exec/java-exec/src/test/resources/join/hj_multi_condition_join.json
@@ -56,7 +56,7 @@
{relationship: "==", left: "B", right: "A"},
{relationship: "==", left: "DCOL", right: "CCOL"}
],
- joinType : "INNER"
+ joinType : "INNER"
},
{
@id: 6,
diff --git a/exec/java-exec/src/test/resources/join/join_batchsize.json b/exec/java-exec/src/test/resources/join/join_batchsize.json
index 969ff0d2a..4817e7c72 100644
--- a/exec/java-exec/src/test/resources/join/join_batchsize.json
+++ b/exec/java-exec/src/test/resources/join/join_batchsize.json
@@ -18,7 +18,7 @@
{name: "green", type: "INT", mode: "REQUIRED"}
]}
]
- },
+ },
{
pop : "sort",
@id : 2,
@@ -28,12 +28,12 @@
expr : "blue"
} ],
reverse : false
- },
+ },
{
pop : "selection-vector-remover",
@id : 3,
child : 2
- },
+ },
{
@id:4,
pop:"mock-sub-scan",
@@ -43,7 +43,7 @@
{name: "blue1", type: "INT", mode: "REQUIRED"},
{name: "red1", type: "INT", mode: "REQUIRED"},
{name: "green1", type: "INT", mode: "REQUIRED"}
- ]}
+ ]}
]
},
{
@@ -55,12 +55,12 @@
expr : "blue1"
} ],
reverse : false
- },
+ },
{
pop : "selection-vector-remover",
@id : 6,
child : 5
- },
+ },
{
@id: 7,
right: 6,
@@ -79,7 +79,7 @@
pop : "selection-vector-remover",
@id : 9,
child : 8
- },
+ },
{
@id: 10,
child: 9,
diff --git a/exec/java-exec/src/test/resources/join/merge_inner_single_batch.json b/exec/java-exec/src/test/resources/join/merge_inner_single_batch.json
index 52b414146..37ba9a49b 100644
--- a/exec/java-exec/src/test/resources/join/merge_inner_single_batch.json
+++ b/exec/java-exec/src/test/resources/join/merge_inner_single_batch.json
@@ -11,7 +11,7 @@
@id:1,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{LEFT_FILE}" }
]
@@ -20,7 +20,7 @@
@id:2,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{RIGHT_FILE}" }
]
@@ -39,4 +39,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/join/merge_multi_batch.json b/exec/java-exec/src/test/resources/join/merge_multi_batch.json
index ef3ef414f..f209a9840 100644
--- a/exec/java-exec/src/test/resources/join/merge_multi_batch.json
+++ b/exec/java-exec/src/test/resources/join/merge_multi_batch.json
@@ -11,7 +11,7 @@
@id:1,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{LEFT_FILE}" }
]
@@ -20,7 +20,7 @@
@id:2,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{RIGHT_FILE}" }
]
@@ -39,4 +39,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/join/merge_single_batch.json b/exec/java-exec/src/test/resources/join/merge_single_batch.json
index ef3ef414f..f209a9840 100644
--- a/exec/java-exec/src/test/resources/join/merge_single_batch.json
+++ b/exec/java-exec/src/test/resources/join/merge_single_batch.json
@@ -11,7 +11,7 @@
@id:1,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{LEFT_FILE}" }
]
@@ -20,7 +20,7 @@
@id:2,
pop:"fs-sub-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:[
{ path: "#{RIGHT_FILE}" }
]
@@ -39,4 +39,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/jsoninput/input1.json b/exec/java-exec/src/test/resources/jsoninput/input1.json
index e9bde7efb..9f4cdb5f9 100644
--- a/exec/java-exec/src/test/resources/jsoninput/input1.json
+++ b/exec/java-exec/src/test/resources/jsoninput/input1.json
@@ -1,6 +1,6 @@
-{ "integer" : 2001,
+{ "integer" : 2001,
"float" : 1.2
}
{ "integer" : -2002,
- "float" : -1.2
+ "float" : -1.2
}
diff --git a/exec/java-exec/src/test/resources/jsoninput/input2.json b/exec/java-exec/src/test/resources/jsoninput/input2.json
index 4e044c20c..d37f686d8 100644
--- a/exec/java-exec/src/test/resources/jsoninput/input2.json
+++ b/exec/java-exec/src/test/resources/jsoninput/input2.json
@@ -1,4 +1,4 @@
-{ "integer" : 2010,
+{ "integer" : 2010,
"float" : 17.4,
"x": {
"y": "kevin",
@@ -12,9 +12,9 @@
"rl": [ [2,1], [4,6] ]
}
{ "integer" : -2002,
- "float" : -1.2
+ "float" : -1.2
}
-{ "integer" : 2001,
+{ "integer" : 2001,
"float" : 1.2,
"x": {
"y": "bill",
@@ -26,7 +26,7 @@
"l": [4,2],
"rl": [ [2,1], [4,6] ]
}
-{ "integer" : 6005,
+{ "integer" : 6005,
"float" : 1.2,
"x": {
"y": "mike",
diff --git a/exec/java-exec/src/test/resources/jsoninput/vvtypes.json b/exec/java-exec/src/test/resources/jsoninput/vvtypes.json
index 60cd1ea1e..6335b4e96 100644
--- a/exec/java-exec/src/test/resources/jsoninput/vvtypes.json
+++ b/exec/java-exec/src/test/resources/jsoninput/vvtypes.json
@@ -74,7 +74,7 @@
major: "Fixed",
width: 16,
javaType: "ByteBuf",
- boxedType: "ByteBuf",
+ boxedType: "ByteBuf",
minor: [
{ class: "Interval", daysOffset: 4, milliSecondsOffset: 8, friendlyType: "Period" }
]
@@ -93,7 +93,7 @@
width: 16,
javaType: "ByteBuf",
boxedType: "ByteBuf",
-
+
minor: [
{ class: "Decimal38Dense", maxPrecisionDigits: 38, nDecimalDigits: 4, friendlyType: "BigDecimal" }
]
diff --git a/exec/java-exec/src/test/resources/limit/limit_exchanges.json b/exec/java-exec/src/test/resources/limit/limit_exchanges.json
index 5ad56beee..c2303c89d 100644
--- a/exec/java-exec/src/test/resources/limit/limit_exchanges.json
+++ b/exec/java-exec/src/test/resources/limit/limit_exchanges.json
@@ -63,11 +63,11 @@
"pop" : "union-exchange",
"@id" : 4,
"child" : 3
- },
+ },
{
"pop" : "project",
"@id" : 5,
- "exprs" : [ {
+ "exprs" : [ {
"ref" : "`N_NATIONKEY`",
"expr" : "`N_NATIONKEY`"
} ],
diff --git a/exec/java-exec/src/test/resources/limit/test1.json b/exec/java-exec/src/test/resources/limit/test1.json
index 79d674825..feca79423 100644
--- a/exec/java-exec/src/test/resources/limit/test1.json
+++ b/exec/java-exec/src/test/resources/limit/test1.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -38,4 +38,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/limit/test2.json b/exec/java-exec/src/test/resources/limit/test2.json
index 5ab1ab5b8..1278559ff 100644
--- a/exec/java-exec/src/test/resources/limit/test2.json
+++ b/exec/java-exec/src/test/resources/limit/test2.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100000000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100000000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -38,4 +38,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/limit/test3.json b/exec/java-exec/src/test/resources/limit/test3.json
index 26b78bba8..26f0e7b65 100644
--- a/exec/java-exec/src/test/resources/limit/test3.json
+++ b/exec/java-exec/src/test/resources/limit/test3.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -37,4 +37,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/limit/test4.json b/exec/java-exec/src/test/resources/limit/test4.json
index b7793b1bf..03427f1d0 100644
--- a/exec/java-exec/src/test/resources/limit/test4.json
+++ b/exec/java-exec/src/test/resources/limit/test4.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100000000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100000000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -46,4 +46,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/mock-scan.json b/exec/java-exec/src/test/resources/mock-scan.json
index 36604801f..494a319fe 100644
--- a/exec/java-exec/src/test/resources/mock-scan.json
+++ b/exec/java-exec/src/test/resources/mock-scan.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 100000000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
+ {records: 100000000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
{records: 100000000, types: [
{name: "blue", type: "INT", mode: "REQUIRED"},
{name: "green", type: "INT", mode: "REQUIRED"}
@@ -28,4 +28,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_double_exchange.json b/exec/java-exec/src/test/resources/physical_double_exchange.json
index 85823cf23..d2d958616 100644
--- a/exec/java-exec/src/test/resources/physical_double_exchange.json
+++ b/exec/java-exec/src/test/resources/physical_double_exchange.json
@@ -12,16 +12,16 @@
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -52,4 +52,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_join.json b/exec/java-exec/src/test/resources/physical_join.json
index 7914ac360..0ad268b78 100644
--- a/exec/java-exec/src/test/resources/physical_join.json
+++ b/exec/java-exec/src/test/resources/physical_join.json
@@ -57,7 +57,7 @@
entries : [ {
path : "/tmp/nation.parquet"
} ],
- storageengine: {type: "parquet", dfsName: "file:///" },
+ storageengine: {type: "parquet", dfsName: "file:///" },
ref : "_MAP",
fragmentPointer : 0
}, {
@@ -114,4 +114,4 @@
@id : 13,
child : 12
} ]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_json_scan_test1.json b/exec/java-exec/src/test/resources/physical_json_scan_test1.json
index 5293d3bcb..5013d6db3 100644
--- a/exec/java-exec/src/test/resources/physical_json_scan_test1.json
+++ b/exec/java-exec/src/test/resources/physical_json_scan_test1.json
@@ -6,12 +6,12 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"fs-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files: [ "#{TEST_FILE}" ]
},
{
@@ -25,4 +25,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_repeated_1.json b/exec/java-exec/src/test/resources/physical_repeated_1.json
index 71eff1d6b..e518cd746 100644
--- a/exec/java-exec/src/test/resources/physical_repeated_1.json
+++ b/exec/java-exec/src/test/resources/physical_repeated_1.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
@@ -26,7 +26,7 @@
exprs: [
{ ref: "cnt", expr:"repeated_count(blue)" },
{ ref: "has_min", expr:"repeated_contains(red, 9223372036854775807)" }
- ]
+ ]
},
{
@id: 3,
@@ -34,4 +34,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_test1.json b/exec/java-exec/src/test/resources/physical_test1.json
index 0ddd48faf..b46f9c4e4 100644
--- a/exec/java-exec/src/test/resources/physical_test1.json
+++ b/exec/java-exec/src/test/resources/physical_test1.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -37,4 +37,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/physical_test2.json b/exec/java-exec/src/test/resources/physical_test2.json
index b00192153..d98aa2fe5 100644
--- a/exec/java-exec/src/test/resources/physical_test2.json
+++ b/exec/java-exec/src/test/resources/physical_test2.json
@@ -12,16 +12,16 @@
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -29,6 +29,5 @@
child: 1,
pop: "screen"
}
- ]
-
-} \ No newline at end of file
+ ]
+}
diff --git a/exec/java-exec/src/test/resources/project/test1.json b/exec/java-exec/src/test/resources/project/test1.json
index 3a84fd029..39d8c1804 100644
--- a/exec/java-exec/src/test/resources/project/test1.json
+++ b/exec/java-exec/src/test/resources/project/test1.json
@@ -6,18 +6,18 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "OPTIONAL"},
- {name: "green", type: "INT", mode: "REQUIRED"},
- {name: "orange", type: "VARCHAR", mode: "OPTIONAL"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "OPTIONAL"},
+ {name: "green", type: "INT", mode: "REQUIRED"},
+ {name: "orange", type: "VARCHAR", mode: "OPTIONAL"}
+ ]}
]
},
{
@@ -37,4 +37,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/queries/tpch.json b/exec/java-exec/src/test/resources/queries/tpch.json
index 59894398e..589ecb674 100644
--- a/exec/java-exec/src/test/resources/queries/tpch.json
+++ b/exec/java-exec/src/test/resources/queries/tpch.json
@@ -1,7 +1,7 @@
[
tpch1: "
-
+
select
l_returnflag,
l_linestatus,
@@ -27,7 +27,7 @@ LIMIT 1;
",
tpch1a: "
-
+
select
l_returnflag,
l_linestatus,
@@ -50,5 +50,4 @@ order by
LIMIT 1;
"
-
-] \ No newline at end of file
+]
diff --git a/exec/java-exec/src/test/resources/queries/tpch/04.sql b/exec/java-exec/src/test/resources/queries/tpch/04.sql
index feeb2e0b0..75926904b 100644
--- a/exec/java-exec/src/test/resources/queries/tpch/04.sql
+++ b/exec/java-exec/src/test/resources/queries/tpch/04.sql
@@ -8,7 +8,7 @@ from
where
o.o_orderdate >= date '1996-10-01'
and o.o_orderdate < date '1996-10-01' + interval '3' month
- and
+ and
exists (
select
*
diff --git a/exec/java-exec/src/test/resources/queries/tpch/13.sql b/exec/java-exec/src/test/resources/queries/tpch/13.sql
index ae3f691c9..7677922e8 100644
--- a/exec/java-exec/src/test/resources/queries/tpch/13.sql
+++ b/exec/java-exec/src/test/resources/queries/tpch/13.sql
@@ -8,8 +8,8 @@ from
c.c_custkey,
count(o.o_orderkey)
from
- cp.`tpch/customer.parquet` c
- left outer join cp.`tpch/orders.parquet` o
+ cp.`tpch/customer.parquet` c
+ left outer join cp.`tpch/orders.parquet` o
on c.c_custkey = o.o_custkey
and o.o_comment not like '%special%requests%'
group by
@@ -19,4 +19,4 @@ group by
c_count
order by
custdist desc,
- c_count desc; \ No newline at end of file
+ c_count desc;
diff --git a/exec/java-exec/src/test/resources/queries/tpch/15.sql b/exec/java-exec/src/test/resources/queries/tpch/15.sql
index 2f0aa8e9c..49927a456 100644
--- a/exec/java-exec/src/test/resources/queries/tpch/15.sql
+++ b/exec/java-exec/src/test/resources/queries/tpch/15.sql
@@ -12,7 +12,7 @@ create view revenue0 (supplier_no, total_revenue) as
and l_shipdate < date '1993-05-01' + interval '3' month
group by
l_suppkey;
-
+
select
s.s_suppkey,
s.s_name,
@@ -32,5 +32,5 @@ where
)
order by
s.s_suppkey;
-
-drop view revenue0; \ No newline at end of file
+
+drop view revenue0;
diff --git a/exec/java-exec/src/test/resources/queries/tpch/19_1.sql b/exec/java-exec/src/test/resources/queries/tpch/19_1.sql
index 6d7f9c05e..bc5a0d2d3 100644
--- a/exec/java-exec/src/test/resources/queries/tpch/19_1.sql
+++ b/exec/java-exec/src/test/resources/queries/tpch/19_1.sql
@@ -5,7 +5,7 @@ from
cp.`tpch/lineitem.parquet` l,
cp.`tpch/part.parquet` p
where
- p.p_partkey = l.l_partkey
+ p.p_partkey = l.l_partkey
and (
(
p.p_brand = 'Brand#41'
@@ -32,4 +32,4 @@ where
and p.p_size between 1 and 15
and l.l_shipmode in ('AIR', 'AIR REG')
and l.l_shipinstruct = 'DELIVER IN PERSON'
- ) ); \ No newline at end of file
+ ) );
diff --git a/exec/java-exec/src/test/resources/remover/test1.json b/exec/java-exec/src/test/resources/remover/test1.json
index 3abe47695..838bf1671 100644
--- a/exec/java-exec/src/test/resources/remover/test1.json
+++ b/exec/java-exec/src/test/resources/remover/test1.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -36,4 +36,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/scan_json_test_3.json b/exec/java-exec/src/test/resources/scan_json_test_3.json
index cb7897e43..d8755f2a9 100644
--- a/exec/java-exec/src/test/resources/scan_json_test_3.json
+++ b/exec/java-exec/src/test/resources/scan_json_test_3.json
@@ -1,18 +1,18 @@
{
"test": 123,
"a": {
- "b": "test",
- "a": {
- "d": true
- }
+ "b": "test",
+ "a": {
+ "d": true
+ }
}
}
{
"test": 1234,
"a": {
- "b": "test2",
- "a": {
- "d": false
- }
+ "b": "test2",
+ "a": {
+ "d": false
+ }
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/scan_json_test_6.json b/exec/java-exec/src/test/resources/scan_json_test_6.json
index 68b53d4ec..52eb0b53c 100644
--- a/exec/java-exec/src/test/resources/scan_json_test_6.json
+++ b/exec/java-exec/src/test/resources/scan_json_test_6.json
@@ -2,13 +2,13 @@
"test": 123,
"test2": [1,2,3],
"a": {
- "b": 1
+ "b": 1
}
}
{
"test": "abc",
"test2": false,
"a": {
- "b": [1,2,3,4]
+ "b": [1,2,3,4]
}
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/scan_screen_logical.json b/exec/java-exec/src/test/resources/scan_screen_logical.json
index 4f44f9e7d..005251bf1 100644
--- a/exec/java-exec/src/test/resources/scan_screen_logical.json
+++ b/exec/java-exec/src/test/resources/scan_screen_logical.json
@@ -12,7 +12,7 @@
"type" : "mock"
}
},
-
+
query:[
{
@id:"1",
@@ -42,8 +42,6 @@
target: {
file: "console:///stdout"
}
-
}
-
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/sender/broadcast_exchange.json b/exec/java-exec/src/test/resources/sender/broadcast_exchange.json
index 950c31895..12887366c 100644
--- a/exec/java-exec/src/test/resources/sender/broadcast_exchange.json
+++ b/exec/java-exec/src/test/resources/sender/broadcast_exchange.json
@@ -11,14 +11,14 @@
@id:1,
pop:"fs-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:["#{LEFT_FILE}"]
},
{
@id:2,
pop:"fs-scan",
format: {type: "json"},
- storage:{type: "file", connection: "file:///"},
+ storage:{type: "file", connection: "file:///"},
files:["#{RIGHT_FILE}"]
},
{
@@ -40,4 +40,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/server/options_session_check.json b/exec/java-exec/src/test/resources/server/options_session_check.json
index 6cb80fd95..ab5dd8ae8 100644
--- a/exec/java-exec/src/test/resources/server/options_session_check.json
+++ b/exec/java-exec/src/test/resources/server/options_session_check.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"options-reader-group-scan"
diff --git a/exec/java-exec/src/test/resources/server/options_set.json b/exec/java-exec/src/test/resources/server/options_set.json
index dda35fc76..ac625e0c1 100644
--- a/exec/java-exec/src/test/resources/server/options_set.json
+++ b/exec/java-exec/src/test/resources/server/options_set.json
@@ -9,7 +9,7 @@
&REPLACED_IN_TEST&
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"options-reader-group-scan"
@@ -21,4 +21,3 @@
}
]
}
-
diff --git a/exec/java-exec/src/test/resources/simple_plan.json b/exec/java-exec/src/test/resources/simple_plan.json
index 7ffb50411..85b3bfa18 100644
--- a/exec/java-exec/src/test/resources/simple_plan.json
+++ b/exec/java-exec/src/test/resources/simple_plan.json
@@ -10,13 +10,13 @@
storage:{
logs: {
type:"text",
- file: "local://logs/*.log",
- compress:"gzip",
- line-delimiter:"\n",
- record-maker:{
- type:"first-row",
- delimiter:","
- }
+ file: "local://logs/*.log",
+ compress:"gzip",
+ line-delimiter:"\n",
+ record-maker:{
+ type:"first-row",
+ delimiter:","
+ }
},
{
type:"mongo",
@@ -125,9 +125,8 @@
target: {
file: "console:///stdout"
}
-
+
}
-
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/sort/one_key_sort.json b/exec/java-exec/src/test/resources/sort/one_key_sort.json
index 6e5d617df..3a3d072c2 100644
--- a/exec/java-exec/src/test/resources/sort/one_key_sort.json
+++ b/exec/java-exec/src/test/resources/sort/one_key_sort.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 10000000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
+ {records: 10000000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
{records: 10000000, types: [
{name: "blue", type: "INT", mode: "REQUIRED"},
{name: "green", type: "INT", mode: "REQUIRED"}
@@ -41,4 +41,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/sort/two_key_sort.json b/exec/java-exec/src/test/resources/sort/two_key_sort.json
index ab140025c..7d6a1a8b8 100644
--- a/exec/java-exec/src/test/resources/sort/two_key_sort.json
+++ b/exec/java-exec/src/test/resources/sort/two_key_sort.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
{records: 100, types: [
{name: "blue", type: "INT", mode: "REQUIRED"},
{name: "green", type: "INT", mode: "REQUIRED"}
@@ -51,4 +51,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/store/text/test.json b/exec/java-exec/src/test/resources/store/text/test.json
index fbf19a40e..4a312d63f 100644
--- a/exec/java-exec/src/test/resources/store/text/test.json
+++ b/exec/java-exec/src/test/resources/store/text/test.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"fs-scan",
@@ -37,4 +37,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/testRepeatedWrite.json b/exec/java-exec/src/test/resources/testRepeatedWrite.json
index fa5da8359..8a3111d60 100644
--- a/exec/java-exec/src/test/resources/testRepeatedWrite.json
+++ b/exec/java-exec/src/test/resources/testRepeatedWrite.json
@@ -1,74 +1,74 @@
{
- "id": "0001",
- "type": "donut",
- "name": "Cake",
- "ppu": 0.55,
- "sales": 35,
- "topping":
- [
- "None",
- "Glazed",
- "Sugar",
- "Powdered Sugar",
- "Chocolate with Sprinkles",
- "Chocolate",
- "Maple"
- ]
- }
- {
- "id": "0002",
- "type": "donut",
- "name": "Raised",
- "ppu": 0.69,
- "sales": 145,
- "topping":
- [
- "None",
- "Glazed",
- "Sugar",
- "Chocolate",
- "Maple"
- ]
- }
- {
- "id": "0003",
- "type": "donut",
- "name": "Old Fashioned",
- "ppu": 0.55,
- "sales": 300,
- "topping":
- [
- "None",
- "Glazed",
- "Chocolate",
- "Maple"
- ]
- }
- {
- "id": "0004",
- "type": "donut",
- "name": "Filled",
- "ppu": 0.69,
- "sales": 14,
- "topping":
- [
- "None",
- "Glazed",
- "Sugar",
- "Powdered Sugar",
- "Chocolate with Sprinkles",
- "Chocolate",
- "Maple"
- ]
- }
- {
- "id": "0005",
- "type": "donut",
- "name": "Apple Fritter",
- "ppu": 1.00,
- "sales": 700,
- "topping":
- [
- "Glazed"
- ]
- }
+ "id": "0001",
+ "type": "donut",
+ "name": "Cake",
+ "ppu": 0.55,
+ "sales": 35,
+ "topping":
+ [
+ "None",
+ "Glazed",
+ "Sugar",
+ "Powdered Sugar",
+ "Chocolate with Sprinkles",
+ "Chocolate",
+ "Maple"
+ ]
+}
+{
+ "id": "0002",
+ "type": "donut",
+ "name": "Raised",
+ "ppu": 0.69,
+ "sales": 145,
+ "topping":
+ [
+ "None",
+ "Glazed",
+ "Sugar",
+ "Chocolate",
+ "Maple"
+ ]
+}
+{
+ "id": "0003",
+ "type": "donut",
+ "name": "Old Fashioned",
+ "ppu": 0.55,
+ "sales": 300,
+ "topping":
+ [
+ "None",
+ "Glazed",
+ "Chocolate",
+ "Maple"
+ ]
+}
+{
+ "id": "0004",
+ "type": "donut",
+ "name": "Filled",
+ "ppu": 0.69,
+ "sales": 14,
+ "topping":
+ [
+ "None",
+ "Glazed",
+ "Sugar",
+ "Powdered Sugar",
+ "Chocolate with Sprinkles",
+ "Chocolate",
+ "Maple"
+ ]
+}
+{
+ "id": "0005",
+ "type": "donut",
+ "name": "Apple Fritter",
+ "ppu": 1.00,
+ "sales": 700,
+ "topping":
+ [
+ "Glazed"
+ ]
+}
diff --git a/exec/java-exec/src/test/resources/topN/one_key_sort.json b/exec/java-exec/src/test/resources/topN/one_key_sort.json
index 3c919b23f..45b3fc33b 100644
--- a/exec/java-exec/src/test/resources/topN/one_key_sort.json
+++ b/exec/java-exec/src/test/resources/topN/one_key_sort.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
url: "http://apache.org",
entries:[
- {records: 10000000, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
+ {records: 10000000, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
{records: 10000000, types: [
{name: "blue", type: "INT", mode: "REQUIRED"},
{name: "green", type: "INT", mode: "REQUIRED"}
@@ -50,4 +50,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/topN/two_key_sort.json b/exec/java-exec/src/test/resources/topN/two_key_sort.json
index 3a05cf6be..500f74029 100644
--- a/exec/java-exec/src/test/resources/topN/two_key_sort.json
+++ b/exec/java-exec/src/test/resources/topN/two_key_sort.json
@@ -6,16 +6,16 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]},
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]},
{records: 100, types: [
{name: "blue", type: "INT", mode: "REQUIRED"},
{name: "green", type: "INT", mode: "REQUIRED"}
@@ -52,4 +52,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/union/test1.json b/exec/java-exec/src/test/resources/union/test1.json
index a4dcc0872..900ac24c7 100644
--- a/exec/java-exec/src/test/resources/union/test1.json
+++ b/exec/java-exec/src/test/resources/union/test1.json
@@ -6,17 +6,17 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
},
{
@@ -24,13 +24,13 @@
pop:"mock-sub-scan",
url: "http://apache.org",
entries:[
- {records: 100, types: [
- {name: "blue", type: "INT", mode: "REQUIRED"},
- {name: "red", type: "BIGINT", mode: "REQUIRED"},
- {name: "green", type: "INT", mode: "REQUIRED"}
- ]}
+ {records: 100, types: [
+ {name: "blue", type: "INT", mode: "REQUIRED"},
+ {name: "red", type: "BIGINT", mode: "REQUIRED"},
+ {name: "green", type: "INT", mode: "REQUIRED"}
+ ]}
]
- },
+ },
{
@id:3,
child: 2,
@@ -46,7 +46,7 @@
@id:5,
child:4,
pop: "selection-vector-remover"
-
+
},
{
@id: 6,
diff --git a/exec/java-exec/src/test/resources/xsort/one_key_sort_descending.json b/exec/java-exec/src/test/resources/xsort/one_key_sort_descending.json
index efb887bbf..f4eab5d2f 100644
--- a/exec/java-exec/src/test/resources/xsort/one_key_sort_descending.json
+++ b/exec/java-exec/src/test/resources/xsort/one_key_sort_descending.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
@@ -45,4 +45,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/xsort/one_key_sort_descending_sv2.json b/exec/java-exec/src/test/resources/xsort/one_key_sort_descending_sv2.json
index d10aa96a9..96d7f1d50 100644
--- a/exec/java-exec/src/test/resources/xsort/one_key_sort_descending_sv2.json
+++ b/exec/java-exec/src/test/resources/xsort/one_key_sort_descending_sv2.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
@@ -51,4 +51,4 @@
pop: "screen"
}
]
-} \ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/xsort/oom_sort_test.json b/exec/java-exec/src/test/resources/xsort/oom_sort_test.json
index af5bc4327..22c3a5c95 100644
--- a/exec/java-exec/src/test/resources/xsort/oom_sort_test.json
+++ b/exec/java-exec/src/test/resources/xsort/oom_sort_test.json
@@ -6,7 +6,7 @@
type:"manual"
}
},
- graph:[
+ graph:[
{
@id:1,
pop:"mock-scan",
@@ -54,4 +54,4 @@
maxAllocation: 1000000
}
]
-} \ No newline at end of file
+}