author     Arina Ielchiieva <arina.yelchiyeva@gmail.com>  2018-05-05 15:32:07 +0300
committer  Aman Sinha <asinha@maprtech.com>  2018-05-12 10:54:14 -0700
commit     ca90229b6deea40282927f8ab5c07715a4e18620 (patch)
tree       963596cdae1651f02efd9062a23ec8342134a2ba /exec
parent     9a47d555b00052115016fbe35cad5a4147d42ad5 (diff)
DRILL-6272: Refactor dynamic UDFs and function initializer tests to generate needed binary and source jars at runtime
close apache/drill#1225
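In short, the tests no longer read prebuilt jars from src/test/resources/jars; they compile the UDF sources under src/test/resources/drill-udf with an embedded Maven build and register the result. A rough sketch of the new flow, assembled from the test diffs below (not verbatim code):

  // build the default UDF jar at runtime (see JarBuilder and TestDynamicUDFSupport below)
  jarBuilder = new JarBuilder("src/test/resources/drill-udf");
  defaultBinaryJar = jarBuilder.build("drill-custom-lower", buildDirectory.getAbsolutePath(),
      "**/CustomLowerFunction.java", null);
  defaultSourceJar = JarUtil.getSourceName(defaultBinaryJar);
  // stage the jars and register them as a dynamic UDF
  copyJarsToStagingArea(buildDirectory.toPath(), defaultBinaryJar, defaultSourceJar);
  test("create function using jar '%s'", defaultBinaryJar);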
Diffstat (limited to 'exec')
-rw-r--r--  exec/java-exec/pom.xml  48
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java  54
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java  96
-rw-r--r--  exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java (renamed from exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java)  523
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/pom.xml  92
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java  63
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java  58
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java  58
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java  64
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java  64
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java  64
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java  64
-rw-r--r--  exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf  1
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar  bin 1892 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar  bin 3146 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar  bin 1891 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar  bin 3142 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar  bin 3473 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar  bin 5779 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar  bin 1892 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar  bin 3185 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar  bin 1888 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar  bin 3201 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar  bin 536 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar  bin 1863 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar  bin 1715 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar  bin 3084 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar  bin 1899 -> 0 bytes
-rw-r--r--  exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar  bin 3215 -> 0 bytes
29 files changed, 948 insertions, 301 deletions
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index cbc3a02aa..345e24014 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -584,6 +584,54 @@
<artifactId>netty-tcnative</artifactId>
<classifier>${netty.tcnative.classifier}</classifier>
</dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-embedder</artifactId>
+ <version>3.5.3</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-compat</artifactId>
+ <version>3.5.3</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.aether</groupId>
+ <artifactId>aether-connector-basic</artifactId>
+ <version>1.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.aether</groupId>
+ <artifactId>aether-transport-wagon</artifactId>
+ <version>1.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-http</artifactId>
+ <version>3.0.0</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-provider-api</artifactId>
+ <version>3.0.0</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
</dependencies>
<profiles>
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
index 7c10bd3ca..2ecb8a0c7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
@@ -19,20 +19,20 @@ package org.apache.drill.exec.expr.fn;
import com.google.common.collect.Lists;
import org.apache.drill.categories.SqlFunctionTest;
-import org.apache.drill.test.TestTools;
+import org.apache.drill.exec.udf.dynamic.JarBuilder;
import org.apache.drill.exec.util.JarUtil;
import org.codehaus.janino.Java.CompilationUnit;
import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
+import org.junit.rules.TemporaryFolder;
+import java.io.File;
+import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
-import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -45,25 +45,27 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.spy;
-@RunWith(MockitoJUnitRunner.class)
@Category(SqlFunctionTest.class)
public class FunctionInitializerTest {
- private static final String CLASS_NAME = "com.drill.udf.CustomLowerFunction";
+ @ClassRule
+ public static final TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+ private static final String CLASS_NAME = "org.apache.drill.udf.dynamic.CustomLowerFunction";
private static URLClassLoader classLoader;
@BeforeClass
public static void init() throws Exception {
- Path jars = TestTools.WORKING_PATH
- .resolve(TestTools.TEST_RESOURCES_REL)
- .resolve("jars");
- String binaryName = "DrillUDF-1.0.jar";
- String sourceName = JarUtil.getSourceName(binaryName);
- URL[] urls = {jars.resolve(binaryName).toUri().toURL(), jars.resolve(sourceName).toUri().toURL()};
+ File buildDirectory = temporaryFolder.getRoot();
+ String binaryName = "drill-custom-lower";
+
+ JarBuilder jarBuilder = new JarBuilder("src/test/resources/drill-udf");
+ String binaryJar = jarBuilder.build(binaryName, buildDirectory.getAbsolutePath(), "**/CustomLowerFunction.java", null);
+
+ URL[] urls = {
+ Paths.get(buildDirectory.getPath(), binaryJar).toUri().toURL(),
+ Paths.get(buildDirectory.getPath(), JarUtil.getSourceName(binaryJar)).toUri().toURL()};
classLoader = new URLClassLoader(urls);
}
@@ -94,27 +96,21 @@ public class FunctionInitializerTest {
@Test
public void testConcurrentFunctionBodyLoad() throws Exception {
- final FunctionInitializer spyFunctionInitializer = spy(new FunctionInitializer(CLASS_NAME, classLoader));
final AtomicInteger counter = new AtomicInteger();
-
- doAnswer(new Answer<CompilationUnit>() {
+ final FunctionInitializer functionInitializer = new FunctionInitializer(CLASS_NAME, classLoader) {
@Override
- public CompilationUnit answer(InvocationOnMock invocation) throws Throwable {
+ CompilationUnit convertToCompilationUnit(Class<?> clazz) throws IOException {
counter.incrementAndGet();
- return (CompilationUnit) invocation.callRealMethod();
+ return super.convertToCompilationUnit(clazz);
}
- }).when(spyFunctionInitializer).convertToCompilationUnit(any(Class.class));
+ };
int threadsNumber = 5;
ExecutorService executor = Executors.newFixedThreadPool(threadsNumber);
try {
- List<Future<String>> results = executor.invokeAll(Collections.nCopies(threadsNumber, new Callable<String>() {
- @Override
- public String call() {
- return spyFunctionInitializer.getMethod("eval");
- }
- }));
+ List<Future<String>> results = executor.invokeAll(Collections.nCopies(threadsNumber,
+ (Callable<String>) () -> functionInitializer.getMethod("eval")));
final Set<String> uniqueResults = new HashSet<>();
for (Future<String> result : results) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java
new file mode 100644
index 000000000..4861c3076
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.udf.dynamic;
+
+import org.apache.maven.cli.MavenCli;
+import org.apache.maven.cli.logging.Slf4jLogger;
+import org.codehaus.plexus.DefaultPlexusContainer;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.logging.BaseLoggerManager;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class JarBuilder {
+
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JarBuilder.class);
+ private static final String MAVEN_MULTI_MODULE_PROJECT_DIRECTORY = "maven.multiModuleProjectDirectory";
+
+ private final MavenCli cli;
+ private final String projectDirectory;
+
+ public JarBuilder(String projectDirectory) {
+ this.cli = new MavenCli() {
+ @Override
+ protected void customizeContainer(PlexusContainer container) {
+ ((DefaultPlexusContainer) container).setLoggerManager(new BaseLoggerManager() {
+ @Override
+ protected org.codehaus.plexus.logging.Logger createLogger(String s) {
+ return new Slf4jLogger(logger);
+ }
+ });
+ }
+ };
+ this.projectDirectory = projectDirectory;
+ }
+
+ /**
+ * Builds jars using embedded Maven in the provided build directory.
+ * Includes files / resources based on the given patterns, otherwise uses the defaults provided in pom.xml.
+ * Checks that the build exit code is 0, i.e. the build was successful.
+ *
+ * @param jarName jar name
+ * @param buildDirectory build directory
+ * @param includeFiles pattern indicating which files should be included
+ * @param includeResources pattern indicating which resources should be included
+ *
+ * @return binary jar name with jar extension (my-jar.jar)
+ */
+ public String build(String jarName, String buildDirectory, String includeFiles, String includeResources) {
+ String originalPropertyValue = System.setProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY, projectDirectory);
+ try {
+ List<String> params = new LinkedList<>();
+ params.add("clean");
+ params.add("package");
+ params.add("-DskipTests");
+ // uncomment to build with current Drill version
+ // params.add("-Ddrill.version=" + DrillVersionInfo.getVersion());
+ params.add("-Djar.finalName=" + jarName);
+ params.add("-Dcustom.buildDirectory=" + buildDirectory);
+ if (includeFiles != null) {
+ params.add("-Dinclude.files=" + includeFiles);
+ }
+ if (includeResources != null) {
+ params.add("-Dinclude.resources=" + includeResources);
+ }
+ int result = cli.doMain(params.toArray(new String[params.size()]), projectDirectory, System.out, System.err);
+ assertEquals("Build should be successful.", 0, result);
+ return jarName + ".jar";
+ } finally {
+ if (originalPropertyValue != null) {
+ System.setProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY, originalPropertyValue);
+ } else {
+ System.clearProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY);
+ }
+ }
+ }
+
+}
+
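A minimal usage sketch of the builder, mirroring FunctionInitializerTest.init() above (buildDir stands for any scratch directory, e.g. a TemporaryFolder root):

  JarBuilder jarBuilder = new JarBuilder("src/test/resources/drill-udf");
  // produces drill-custom-lower.jar plus drill-custom-lower-sources.jar in buildDir
  String binaryJar = jarBuilder.build("drill-custom-lower", buildDir.getAbsolutePath(),
      "**/CustomLowerFunction.java", null);
  String sourceJar = JarUtil.getSourceName(binaryJar);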
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
index 41da12334..047026d65 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.drill;
+package org.apache.drill.exec.udf.dynamic;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
@@ -26,7 +26,6 @@ import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.common.config.CommonConstants;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.test.TestTools;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.VersionMismatchException;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
@@ -40,16 +39,13 @@ import org.apache.drill.exec.util.JarUtil;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.test.TestBuilder;
import org.apache.hadoop.fs.FileSystem;
+import org.junit.After;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.rules.TestWatcher;
-import org.junit.runner.Description;
-import org.junit.runner.RunWith;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
+import org.junit.rules.ExpectedException;
import java.io.File;
import java.io.IOException;
@@ -66,9 +62,10 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.anyString;
+import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.reset;
@@ -76,23 +73,37 @@ import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
-@RunWith(MockitoJUnitRunner.class)
@Category({SlowTest.class, SqlFunctionTest.class})
public class TestDynamicUDFSupport extends BaseTestQuery {
- private static final Path jars = TestTools.WORKING_PATH
- .resolve(TestTools.TEST_RESOURCES_REL)
- .resolve("jars");
- private static final String default_binary_name = "DrillUDF-1.0.jar";
- private static final String UDF_SUB_DIR = "udf";
- private static final String default_source_name = JarUtil.getSourceName(default_binary_name);
+ private static final String DEFAULT_JAR_NAME = "drill-custom-lower";
private static URI fsUri;
private static File udfDir;
+ private static File jarsDir;
+ private static File buildDirectory;
+ private static JarBuilder jarBuilder;
+ private static String defaultBinaryJar;
+ private static String defaultSourceJar;
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
@BeforeClass
- public static void setup() throws IOException {
- udfDir = dirTestWatcher.makeSubDir(Paths.get(UDF_SUB_DIR));
+ public static void buildAndStoreDefaultJars() throws IOException {
+ jarsDir = dirTestWatcher.makeSubDir(Paths.get("jars"));
+ buildDirectory = dirTestWatcher.makeSubDir(Paths.get("drill-udf"));
+
+ jarBuilder = new JarBuilder("src/test/resources/drill-udf");
+ defaultBinaryJar = buildJars(DEFAULT_JAR_NAME, "**/CustomLowerFunction.java", null);
+ defaultSourceJar = JarUtil.getSourceName(defaultBinaryJar);
+
+ FileUtils.copyFileToDirectory(new File(buildDirectory, defaultBinaryJar), jarsDir);
+ FileUtils.copyFileToDirectory(new File(buildDirectory, defaultSourceJar), jarsDir);
+ }
+ @Before
+ public void setupNewDrillbit() throws Exception {
+ udfDir = dirTestWatcher.makeSubDir(Paths.get("udf"));
Properties overrideProps = new Properties();
overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, udfDir.getAbsolutePath());
overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
@@ -101,29 +112,12 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
fsUri = getLocalFileSystem().getUri();
}
- @Rule
- public final TestWatcher clearDirs = new TestWatcher() {
- @Override
- protected void succeeded(Description description) {
- reset();
- }
-
- @Override
- protected void failed(Throwable e, Description description) {
- reset();
- }
-
- private void reset() {
- try {
- closeClient();
- FileUtils.cleanDirectory(udfDir);
- dirTestWatcher.clear();
- setup();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
- };
+ @After
+ public void cleanup() throws Exception {
+ closeClient();
+ FileUtils.cleanDirectory(udfDir);
+ dirTestWatcher.clear();
+ }
@Test
public void testSyntax() throws Exception {
@@ -143,18 +137,26 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
}
@Test
- public void testDisableDynamicSupport() throws Exception {
+ public void testDisableDynamicSupportCreate() throws Exception {
try {
test("alter system set `exec.udf.enable_dynamic_support` = false");
- String[] actions = new String[] {"create", "drop"};
- String query = "%s function using jar 'jar_name.jar'";
- for (String action : actions) {
- try {
- test(query, action);
- } catch (UserRemoteException e) {
- assertThat(e.getMessage(), containsString("Dynamic UDFs support is disabled."));
- }
- }
+ String query = "create function using jar 'jar_name.jar'";
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("Dynamic UDFs support is disabled."));
+ test(query);
+ } finally {
+ test("alter system reset `exec.udf.enable_dynamic_support`");
+ }
+ }
+
+ @Test
+ public void testDisableDynamicSupportDrop() throws Exception {
+ try {
+ test("alter system set `exec.udf.enable_dynamic_support` = false");
+ String query = "drop function using jar 'jar_name.jar'";
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("Dynamic UDFs support is disabled."));
+ test(query);
} finally {
test("alter system reset `exec.udf.enable_dynamic_support`");
}
@@ -162,13 +164,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testAbsentBinaryInStaging() throws Exception {
- final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
+ Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
String summary = String.format("File %s does not exist on file system %s",
- staging.resolve(default_binary_name).toUri().getPath(), fsUri);
+ staging.resolve(defaultBinaryJar).toUri().getPath(), fsUri);
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
@@ -177,15 +179,14 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testAbsentSourceInStaging() throws Exception {
- final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
-
- copyJar(jars, staging, default_binary_name);
+ Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
+ copyJar(jarsDir.toPath(), staging, defaultBinaryJar);
String summary = String.format("File %s does not exist on file system %s",
- staging.resolve(default_source_name).toUri().getPath(), fsUri);
+ staging.resolve(defaultSourceJar).toUri().getPath(), fsUri);
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
@@ -194,32 +195,32 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testJarWithoutMarkerFile() throws Exception {
- String jarWithNoMarkerFile = "DrillUDF_NoMarkerFile-1.0.jar";
- copyJarsToStagingArea(jarWithNoMarkerFile, JarUtil.getSourceName(jarWithNoMarkerFile));
+ String jarName = "drill-no-marker";
+ String jar = buildAndCopyJarsToStagingArea(jarName, null, "**/dummy.conf");
String summary = "Marker file %s is missing in %s";
testBuilder()
- .sqlQuery("create function using jar '%s'", jarWithNoMarkerFile)
+ .sqlQuery("create function using jar '%s'", jar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, String.format(summary,
- CommonConstants.DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME, jarWithNoMarkerFile))
+ CommonConstants.DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME, jar))
.go();
}
@Test
public void testJarWithoutFunctions() throws Exception {
- String jarWithNoFunctions = "DrillUDF_Empty-1.0.jar";
- copyJarsToStagingArea(jarWithNoFunctions, JarUtil.getSourceName(jarWithNoFunctions));
+ String jarName = "drill-no-functions";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLowerDummyFunction.java", null);
String summary = "Jar %s does not contain functions";
testBuilder()
- .sqlQuery("create function using jar '%s'", jarWithNoFunctions)
+ .sqlQuery("create function using jar '%s'", jar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, jarWithNoFunctions))
+ .baselineValues(false, String.format(summary, jar))
.go();
}
@@ -231,10 +232,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
"[custom_lower(VARCHAR-REQUIRED)]";
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(true, String.format(summary, default_binary_name))
+ .baselineValues(true, String.format(summary, defaultBinaryJar))
.go();
RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
@@ -243,79 +244,81 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
- final Path path = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ Path path = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
assertTrue("Binary should be present in registry area",
- path.resolve(default_binary_name).toFile().exists());
+ path.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should be present in registry area",
- path.resolve(default_source_name).toFile().exists());
+ path.resolve(defaultSourceJar).toFile().exists());
Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
- assertEquals(registry.getJar(0).getName(), default_binary_name);
+ assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
}
@Test
public void testDuplicatedJarInRemoteRegistry() throws Exception {
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
copyDefaultJarsToStagingArea();
String summary = "Jar with %s name has been already registered";
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, defaultBinaryJar))
.go();
}
@Test
public void testDuplicatedJarInLocalRegistry() throws Exception {
- copyDefaultJarsToStagingArea();
+ String jarName = "drill-custom-upper";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomUpperFunction.java", null);
- test("create function using jar '%s'", default_binary_name);
- test("select custom_lower('A') from (values(1))");
+ test("create function using jar '%s'", jar);
+ test("select custom_upper('A') from (values(1))");
- copyDefaultJarsToStagingArea();
+ copyJarsToStagingArea(buildDirectory.toPath(), jar, JarUtil.getSourceName(jar));
String summary = "Jar with %s name has been already registered";
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", jar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, jar))
.go();
}
@Test
public void testDuplicatedFunctionsInRemoteRegistry() throws Exception {
- String jarWithDuplicate = "DrillUDF_Copy-1.0.jar";
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
- copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
+ test("create function using jar '%s'", defaultBinaryJar);
+
+ String jarName = "drill-custom-lower-copy";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLowerFunction.java", null);
String summary = "Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)";
testBuilder()
- .sqlQuery("create function using jar '%s'", jarWithDuplicate)
+ .sqlQuery("create function using jar '%s'", jar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, defaultBinaryJar))
.go();
}
@Test
public void testDuplicatedFunctionsInLocalRegistry() throws Exception {
- String jarWithDuplicate = "DrillUDF_DupFunc-1.0.jar";
- copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
+ String jarName = "drill-lower";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/LowerFunction.java", null);
String summary = "Found duplicated function in %s: lower(VARCHAR-REQUIRED)";
testBuilder()
- .sqlQuery("create function using jar '%s'", jarWithDuplicate)
+ .sqlQuery("create function using jar '%s'", jar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, String.format(summary, LocalFunctionRegistry.BUILT_IN))
@@ -324,10 +327,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testSuccessfulRegistrationAfterSeveralRetryAttempts() throws Exception {
- final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
- final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
- final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
- final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+ RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+ Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+ Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
copyDefaultJarsToStagingArea();
@@ -340,10 +343,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
"[custom_lower(VARCHAR-REQUIRED)]";
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(true, String.format(summary, default_binary_name))
+ .baselineValues(true, String.format(summary, defaultBinaryJar))
.go();
verify(remoteFunctionRegistry, times(3))
@@ -353,20 +356,20 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
assertTrue("Binary should be present in registry area",
- registryPath.resolve(default_binary_name).toFile().exists());
+ registryPath.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should be present in registry area",
- registryPath.resolve(default_source_name).toFile().exists());
+ registryPath.resolve(defaultSourceJar).toFile().exists());
Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
- assertEquals(registry.getJar(0).getName(), default_binary_name);
+ assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
}
@Test
public void testSuccessfulUnregistrationAfterSeveralRetryAttempts() throws Exception {
RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
reset(remoteFunctionRegistry);
doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -378,10 +381,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
"[custom_lower(VARCHAR-REQUIRED)]";
testBuilder()
- .sqlQuery("drop function using jar '%s'", default_binary_name)
+ .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(true, String.format(summary, default_binary_name))
+ .baselineValues(true, String.format(summary, defaultBinaryJar))
.go();
verify(remoteFunctionRegistry, times(3))
@@ -396,10 +399,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testExceedRetryAttemptsDuringRegistration() throws Exception {
- final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
- final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
- final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
- final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+ RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+ Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+ Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
copyDefaultJarsToStagingArea();
@@ -409,7 +412,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
@@ -419,9 +422,9 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
.updateRegistry(any(Registry.class), any(DataChangeVersion.class));
assertTrue("Binary should be present in staging area",
- stagingPath.resolve(default_binary_name).toFile().exists());
+ stagingPath.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should be present in staging area",
- stagingPath.resolve(default_source_name).toFile().exists());
+ stagingPath.resolve(defaultSourceJar).toFile().exists());
assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
@@ -432,11 +435,11 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testExceedRetryAttemptsDuringUnregistration() throws Exception {
- final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
- final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+ Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
reset(remoteFunctionRegistry);
doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -445,7 +448,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
testBuilder()
- .sqlQuery("drop function using jar '%s'", default_binary_name)
+ .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
@@ -455,25 +458,23 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
.updateRegistry(any(Registry.class), any(DataChangeVersion.class));
assertTrue("Binary should be present in registry area",
- registryPath.resolve(default_binary_name).toFile().exists());
+ registryPath.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should be present in registry area",
- registryPath.resolve(default_source_name).toFile().exists());
+ registryPath.resolve(defaultSourceJar).toFile().exists());
Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
- assertEquals(registry.getJar(0).getName(), default_binary_name);
+ assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
}
@Test
public void testLazyInit() throws Exception {
- try {
- test("select custom_lower('A') from (values(1))");
- } catch (UserRemoteException e){
- assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
- }
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+ test("select custom_lower('A') from (values(1))");
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
testBuilder()
.sqlQuery("select custom_lower('A') as res from (values(1))")
.unOrdered()
@@ -485,21 +486,19 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir", true));
assertTrue("Binary should exist in local udf directory",
- localUdfDirPath.resolve(default_binary_name).toFile().exists());
+ localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should exist in local udf directory",
- localUdfDirPath.resolve(default_source_name).toFile().exists());
+ localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
}
@Test
public void testLazyInitWhenDynamicUdfSupportIsDisabled() throws Exception {
- try {
- test("select custom_lower('A') from (values(1))");
- } catch (UserRemoteException e){
- assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
- }
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+ test("select custom_lower('A') from (values(1))");
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
try {
testBuilder()
@@ -516,9 +515,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testOverloadedFunctionPlanningStage() throws Exception {
- String jarName = "DrillUDF-overloading-1.0.jar";
- copyJarsToStagingArea(jarName, JarUtil.getSourceName(jarName));
- test("create function using jar '%s'", jarName);
+ String jarName = "drill-custom-abs";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomAbsFunction.java", null);
+
+ test("create function using jar '%s'", jar);
testBuilder()
.sqlQuery("select abs('A', 'A') as res from (values(1))")
@@ -530,9 +530,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testOverloadedFunctionExecutionStage() throws Exception {
- String jarName = "DrillUDF-overloading-1.0.jar";
- copyJarsToStagingArea(jarName, JarUtil.getSourceName(jarName));
- test("create function using jar '%s'", jarName);
+ String jarName = "drill-custom-log";
+ String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLogFunction.java", null);
+
+ test("create function using jar '%s'", jar);
testBuilder()
.sqlQuery("select log('A') as res from (values(1))")
@@ -545,67 +546,65 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
@Test
public void testDropFunction() throws Exception {
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
test("select custom_lower('A') from (values(1))");
Path localUdfDirPath = hadoopToJavaPath((org.apache.hadoop.fs.Path)FieldUtils.readField(
getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir", true));
assertTrue("Binary should exist in local udf directory",
- localUdfDirPath.resolve(default_binary_name).toFile().exists());
+ localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
assertTrue("Source should exist in local udf directory",
- localUdfDirPath.resolve(default_source_name).toFile().exists());
+ localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
String summary = "The following UDFs in jar %s have been unregistered:\n" +
"[custom_lower(VARCHAR-REQUIRED)]";
testBuilder()
- .sqlQuery("drop function using jar '%s'", default_binary_name)
+ .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(true, String.format(summary, default_binary_name))
+ .baselineValues(true, String.format(summary, defaultBinaryJar))
.go();
- try {
- test("select custom_lower('A') from (values(1))");
- } catch (UserRemoteException e){
- assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
- }
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+ test("select custom_lower('A') from (values(1))");
- final RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
- final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
+ Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
assertEquals("Remote registry should be empty",
remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size(), 0);
assertFalse("Binary should not be present in registry area",
- registryPath.resolve(default_binary_name).toFile().exists());
+ registryPath.resolve(defaultBinaryJar).toFile().exists());
assertFalse("Source should not be present in registry area",
- registryPath.resolve(default_source_name).toFile().exists());
+ registryPath.resolve(defaultSourceJar).toFile().exists());
assertFalse("Binary should not be present in local udf directory",
- localUdfDirPath.resolve(default_binary_name).toFile().exists());
+ localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
assertFalse("Source should not be present in local udf directory",
- localUdfDirPath.resolve(default_source_name).toFile().exists());
+ localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
}
@Test
public void testReRegisterTheSameJarWithDifferentContent() throws Exception {
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
testBuilder()
.sqlQuery("select custom_lower('A') as res from (values(1))")
.unOrdered()
.baselineColumns("res")
.baselineValues("a")
.go();
- test("drop function using jar '%s'", default_binary_name);
+ test("drop function using jar '%s'", defaultBinaryJar);
Thread.sleep(1000);
- Path src = jars.resolve("v2");
- copyJarsToStagingArea(src, default_binary_name, default_source_name);
- test("create function using jar '%s'", default_binary_name);
+ buildAndCopyJarsToStagingArea(DEFAULT_JAR_NAME, "**/CustomLowerFunctionV2.java", null);
+
+ test("create function using jar '%s'", defaultBinaryJar);
testBuilder()
.sqlQuery("select custom_lower('A') as res from (values(1))")
.unOrdered()
@@ -619,36 +618,33 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
String summary = "Jar %s is not registered in remote registry";
testBuilder()
- .sqlQuery("drop function using jar '%s'", default_binary_name)
+ .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, defaultBinaryJar))
.go();
}
@Test
public void testRegistrationFailDuringRegistryUpdate() throws Exception {
- final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
- final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
- final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
- final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+ RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+ Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+ Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+ Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
final String errorMessage = "Failure during remote registry update.";
- doAnswer(new Answer<Void>() {
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- assertTrue("Binary should be present in registry area",
- registryPath.resolve(default_binary_name).toFile().exists());
- assertTrue("Source should be present in registry area",
- registryPath.resolve(default_source_name).toFile().exists());
- throw new RuntimeException(errorMessage);
- }
+ doAnswer(invocation -> {
+ assertTrue("Binary should be present in registry area",
+ registryPath.resolve(defaultBinaryJar).toFile().exists());
+ assertTrue("Source should be present in registry area",
+ registryPath.resolve(defaultSourceJar).toFile().exists());
+ throw new RuntimeException(errorMessage);
}).when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
copyDefaultJarsToStagingArea();
testBuilder()
- .sqlQuery("create function using jar '%s'", default_binary_name)
+ .sqlQuery("create function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, errorMessage)
@@ -657,8 +653,8 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
- assertTrue("Binary should be present in staging area", stagingPath.resolve(default_binary_name).toFile().exists());
- assertTrue("Source should be present in staging area", stagingPath.resolve(default_source_name).toFile().exists());
+ assertTrue("Binary should be present in staging area", stagingPath.resolve(defaultBinaryJar).toFile().exists());
+ assertTrue("Source should be present in staging area", stagingPath.resolve(defaultSourceJar).toFile().exists());
}
@Test
@@ -668,21 +664,18 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
- doAnswer(new Answer<String>() {
- @Override
- public String answer(InvocationOnMock invocation) throws Throwable {
- String result = (String) invocation.callRealMethod();
- latch2.countDown();
- latch1.await();
- return result;
- }
+ doAnswer(invocation -> {
+ String result = (String) invocation.callRealMethod();
+ latch2.countDown();
+ latch1.await();
+ return result;
})
.doCallRealMethod()
.doCallRealMethod()
.when(remoteFunctionRegistry).addToJars(anyString(), any(RemoteFunctionRegistry.Action.class));
- final String query = String.format("create function using jar '%s'", default_binary_name);
+ final String query = String.format("create function using jar '%s'", defaultBinaryJar);
Thread thread = new Thread(new SimpleQueryRunner(query));
thread.start();
@@ -695,14 +688,14 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
.sqlQuery(query)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, defaultBinaryJar))
.go();
testBuilder()
- .sqlQuery("drop function using jar '%s'", default_binary_name)
+ .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
.unOrdered()
.baselineColumns("ok", "summary")
- .baselineValues(false, String.format(summary, default_binary_name))
+ .baselineValues(false, String.format(summary, defaultBinaryJar))
.go();
} finally {
@@ -719,51 +712,45 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
final CountDownLatch latch2 = new CountDownLatch(1);
final CountDownLatch latch3 = new CountDownLatch(1);
- doAnswer(new Answer<Void>() {
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- latch3.countDown();
- latch1.await();
- invocation.callRealMethod();
- latch2.countDown();
- return null;
- }
- }).doAnswer(new Answer<Void>() {
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- latch1.countDown();
- latch2.await();
- invocation.callRealMethod();
- return null;
- }
+ doAnswer(invocation -> {
+ latch3.countDown();
+ latch1.await();
+ invocation.callRealMethod();
+ latch2.countDown();
+ return null;
+ }).doAnswer(invocation -> {
+ latch1.countDown();
+ latch2.await();
+ invocation.callRealMethod();
+ return null;
})
.when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
+ final String jar1 = defaultBinaryJar;
+ copyDefaultJarsToStagingArea();
- final String jarName1 = default_binary_name;
- final String jarName2 = "DrillUDF_Copy-1.0.jar";
- final String query = "create function using jar '%s'";
+ final String copyJarName = "drill-custom-lower-copy";
+ final String jar2 = buildAndCopyJarsToStagingArea(copyJarName, "**/CustomLowerFunction.java", null);
- copyDefaultJarsToStagingArea();
- copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
+ final String query = "create function using jar '%s'";
Thread thread1 = new Thread(new TestBuilderRunner(
testBuilder()
- .sqlQuery(query, jarName1)
+ .sqlQuery(query, jar1)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(true,
String.format("The following UDFs in jar %s have been registered:\n" +
- "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
+ "[custom_lower(VARCHAR-REQUIRED)]", jar1))
));
Thread thread2 = new Thread(new TestBuilderRunner(
testBuilder()
- .sqlQuery(query, jarName2)
+ .sqlQuery(query, jar2)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false,
- String.format("Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)", jarName1))
+ String.format("Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)", jar1))
));
thread1.start();
@@ -778,7 +765,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertEquals("Remote registry version should match", 1, version.getVersion());
List<Jar> jarList = registry.getJarList();
assertEquals("Only one jar should be registered", 1, jarList.size());
- assertEquals("Jar name should match", jarName1, jarList.get(0).getName());
+ assertEquals("Jar name should match", jar1, jarList.get(0).getName());
verify(remoteFunctionRegistry, times(2)).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
}
@@ -789,43 +776,40 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(2);
- doAnswer(new Answer<Void>() {
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- latch2.countDown();
- latch1.await();
- invocation.callRealMethod();
- return null;
- }
+ doAnswer(invocation -> {
+ latch2.countDown();
+ latch1.await();
+ invocation.callRealMethod();
+ return null;
})
.when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
- final String jarName1 = default_binary_name;
- final String jarName2 = "DrillUDF-2.0.jar";
- final String query = "create function using jar '%s'";
-
+ final String jar1 = defaultBinaryJar;
copyDefaultJarsToStagingArea();
- copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
+ final String upperJarName = "drill-custom-upper";
+ final String jar2 = buildAndCopyJarsToStagingArea(upperJarName, "**/CustomUpperFunction.java", null);
+
+ final String query = "create function using jar '%s'";
Thread thread1 = new Thread(new TestBuilderRunner(
testBuilder()
- .sqlQuery(query, jarName1)
+ .sqlQuery(query, jar1)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(true,
String.format("The following UDFs in jar %s have been registered:\n" +
- "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
+ "[custom_lower(VARCHAR-REQUIRED)]", jar1))
));
Thread thread2 = new Thread(new TestBuilderRunner(
testBuilder()
- .sqlQuery(query, jarName2)
+ .sqlQuery(query, jar2)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(true, String.format("The following UDFs in jar %s have been registered:\n" +
- "[custom_upper(VARCHAR-REQUIRED)]", jarName2))
+ "[custom_upper(VARCHAR-REQUIRED)]", jar2))
));
thread1.start();
@@ -842,7 +826,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertEquals("Remote registry version should match", 2, version.getVersion());
List<Jar> actualJars = registry.getJarList();
- List<String> expectedJars = Lists.newArrayList(jarName1, jarName2);
+ List<String> expectedJars = Lists.newArrayList(jar1, jar2);
assertEquals("Only one jar should be registered", 2, actualJars.size());
for (Jar jar : actualJars) {
@@ -856,32 +840,26 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
public void testLazyInitConcurrent() throws Exception {
FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
+ test("create function using jar '%s'", defaultBinaryJar);
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
final String query = "select custom_lower('A') from (values(1))";
- doAnswer(new Answer<Boolean>() {
- @Override
- public Boolean answer(InvocationOnMock invocation) throws Throwable {
- latch1.await();
- boolean result = (boolean) invocation.callRealMethod();
- assertTrue("syncWithRemoteRegistry() should return true", result);
- latch2.countDown();
- return true;
- }
+ doAnswer(invocation -> {
+ latch1.await();
+ boolean result = (boolean) invocation.callRealMethod();
+ assertTrue("syncWithRemoteRegistry() should return true", result);
+ latch2.countDown();
+ return true;
})
- .doAnswer(new Answer() {
- @Override
- public Boolean answer(InvocationOnMock invocation) throws Throwable {
- latch1.countDown();
- latch2.await();
- boolean result = (boolean) invocation.callRealMethod();
- assertTrue("syncWithRemoteRegistry() should return true", result);
- return true;
- }
+ .doAnswer(invocation -> {
+ latch1.countDown();
+ latch2.await();
+ boolean result = (boolean) invocation.callRealMethod();
+ assertTrue("syncWithRemoteRegistry() should return true", result);
+ return true;
})
.when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
@@ -905,23 +883,17 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
public void testLazyInitNoReload() throws Exception {
FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
copyDefaultJarsToStagingArea();
- test("create function using jar '%s'", default_binary_name);
-
- doAnswer(new Answer<Boolean>() {
- @Override
- public Boolean answer(InvocationOnMock invocation) throws Throwable {
- boolean result = (boolean) invocation.callRealMethod();
- assertTrue("syncWithRemoteRegistry() should return true", result);
- return true;
- }
+ test("create function using jar '%s'", defaultBinaryJar);
+
+ doAnswer(invocation -> {
+ boolean result = (boolean) invocation.callRealMethod();
+ assertTrue("syncWithRemoteRegistry() should return true", result);
+ return true;
})
- .doAnswer(new Answer() {
- @Override
- public Boolean answer(InvocationOnMock invocation) throws Throwable {
- boolean result = (boolean) invocation.callRealMethod();
- assertFalse("syncWithRemoteRegistry() should return false", result);
- return false;
- }
+ .doAnswer(invocation -> {
+ boolean result = (boolean) invocation.callRealMethod();
+ assertFalse("syncWithRemoteRegistry() should return false", result);
+ return false;
})
.when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
@@ -929,6 +901,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
try {
test("select unknown_lower('A') from (values(1))");
+ fail();
} catch (UserRemoteException e){
assertThat(e.getMessage(), containsString("No match found for function signature unknown_lower(<CHARACTER>)"));
}
@@ -939,12 +912,18 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
assertEquals("Sync function registry version should match", 1L, localFunctionRegistry.getVersion());
}
+ private static String buildJars(String jarName, String includeFiles, String includeResources) {
+ return jarBuilder.build(jarName, buildDirectory.getAbsolutePath(), includeFiles, includeResources);
+ }
+
private void copyDefaultJarsToStagingArea() throws IOException {
- copyJarsToStagingArea(jars, default_binary_name, default_source_name);
+ copyJarsToStagingArea(jarsDir.toPath(), defaultBinaryJar, defaultSourceJar);
}
- private void copyJarsToStagingArea(String binaryName, String sourceName) throws IOException {
- copyJarsToStagingArea(jars, binaryName, sourceName);
+ private String buildAndCopyJarsToStagingArea(String jarName, String includeFiles, String includeResources) throws IOException {
+ String binaryJar = buildJars(jarName, includeFiles, includeResources);
+ copyJarsToStagingArea(buildDirectory.toPath(), binaryJar, JarUtil.getSourceName(binaryJar));
+ return binaryJar;
}
private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException {
diff --git a/exec/java-exec/src/test/resources/drill-udf/pom.xml b/exec/java-exec/src/test/resources/drill-udf/pom.xml
new file mode 100644
index 000000000..73618451b
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/pom.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.apache.drill.udf</groupId>
+ <artifactId>drill-udf</artifactId>
+ <version>1.0</version>
+
+ <properties>
+ <jar.finalName>${project.name}</jar.finalName>
+ <custom.buildDirectory>${project.basedir}/target</custom.buildDirectory>
+ <drill.version>1.13.0</drill.version>
+ <include.files>**/*.java</include.files>
+ <include.resources>**/*.conf</include.resources>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.drill.exec</groupId>
+ <artifactId>drill-java-exec</artifactId>
+ <version>${drill.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <directory>${custom.buildDirectory}</directory>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <includes>
+ <include>${include.resources}</include>
+ </includes>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <includes>
+ <include>${include.files}</include>
+ </includes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ <version>2.4</version>
+ <configuration>
+ <finalName>${jar.finalName}</finalName>
+ <includes>
+ <include>${include.files}</include>
+ </includes>
+ </configuration>
+ <executions>
+ <execution>
+ <id>attach-sources</id>
+ <phase>package</phase>
+ <goals>
+ <goal>jar-no-fork</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+
+</project>
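The properties block is what turns this pom into a build template rather than a module of the main build: jar.finalName, custom.buildDirectory, include.files and include.resources are plain Maven properties, so the runtime JarBuilder can override them per test and turn one source tree into many differently named binary/source jar pairs, each containing only the UDF classes a test needs. A sketch of such a parameterized invocation, assuming the maven-invoker shared library (the committed JarBuilder may wire this up differently):

    // Sketch only: build the template pom with per-test property overrides.
    import java.io.File;
    import java.util.Collections;
    import java.util.Properties;
    import org.apache.maven.shared.invoker.*;

    class UdfJarBuildSketch {
      static void buildUdfJar(File pomDir, File targetDir) throws MavenInvocationException {
        Properties props = new Properties();
        props.setProperty("jar.finalName", "drill-custom-lower");          // illustrative jar name
        props.setProperty("custom.buildDirectory", targetDir.getAbsolutePath());
        props.setProperty("include.files", "**/CustomLowerFunction.java");
        props.setProperty("include.resources", "**/*.conf");

        InvocationRequest request = new DefaultInvocationRequest();
        request.setPomFile(new File(pomDir, "pom.xml"));
        request.setGoals(Collections.singletonList("package"));
        request.setProperties(props);

        InvocationResult result = new DefaultInvoker().execute(request);
        if (result.getExitCode() != 0) {
          throw new IllegalStateException("UDF jar build failed, see Maven output");
        }
      }
    }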
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java
new file mode 100644
index 000000000..9bdcffb65
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="abs",
+ scope= FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomAbsFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input1;
+
+ @Param
+ VarCharHolder input2;
+
+ @Output
+ VarCharHolder out;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+
+ }
+
+ public void eval() {
+ String inputString1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input1.start, input1.end, input1.buffer);
+ String inputString2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input2.start, input2.end, input2.buffer);
+ String outputValue = String.format("ABS was overloaded. Input: %s, %s", inputString1, inputString2);
+
+ out.buffer = buffer;
+ out.start = 0;
+ out.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+ }
+}
+
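CustomAbsFunction intentionally reuses the built-in name abs but with a two-argument VARCHAR signature: the overloading tests use it to verify that a dynamically registered UDF can coexist with (and overload) a built-in function, and the eval() output string makes the overloaded call easy to assert on. A sketch of what such an assertion might look like once the jar is registered (the alias and FROM clause are illustrative):

    // Illustrative: two VARCHAR arguments resolve to the dynamically loaded
    // abs overload rather than the built-in numeric abs.
    testBuilder()
        .sqlQuery("select abs('A', 'B') as res from (values(1))")
        .unOrdered()
        .baselineColumns("res")
        .baselineValues("ABS was overloaded. Input: A, B")
        .go();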
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java
new file mode 100644
index 000000000..fa49a35c7
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="log",
+ scope= FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLogFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder out;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+
+ }
+
+ public void eval() {
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+ String outputValue = "LOG was overloaded. Input: " + inputString;
+
+ out.buffer = buffer;
+ out.start = 0;
+ out.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java
new file mode 100644
index 000000000..1e401d106
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+public class CustomLowerDummyFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder output;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+ }
+
+ public void eval() {
+
+ // get value
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+ // convert to lower case
+ String outputValue = inputString.toLowerCase();
+
+ // put the output value into output buffer
+ output.buffer = buffer;
+ output.start = 0;
+ output.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java
new file mode 100644
index 000000000..f868be310
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="custom_lower",
+ scope = FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLowerFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder output;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+ }
+
+ public void eval() {
+
+ // get value
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+ // convert to lower case
+ String outputValue = inputString.toLowerCase();
+
+ // put the output value into output buffer
+ output.buffer = buffer;
+ output.start = 0;
+ output.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java
new file mode 100644
index 000000000..e564d7ff0
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="custom_lower",
+ scope = FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLowerFunctionV2 implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder output;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+ }
+
+ public void eval() {
+
+ // get value
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+ // convert to lower case
+ String outputValue = inputString.toLowerCase() + "_v2";
+
+ // put the output value into output buffer
+ output.buffer = buffer;
+ output.start = 0;
+ output.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java
new file mode 100644
index 000000000..9ac473b10
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="custom_upper",
+ scope = FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomUpperFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder output;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+ }
+
+ public void eval() {
+
+ // get value
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+ // convert to upper case
+ String outputValue = inputString.toUpperCase();
+
+ // put the output value into output buffer
+ output.buffer = buffer;
+ output.start = 0;
+ output.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java
new file mode 100644
index 000000000..0d5d149a5
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+ name="lower",
+ scope = FunctionTemplate.FunctionScope.SIMPLE,
+ nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class LowerFunction implements DrillSimpleFunc {
+
+ @Param
+ VarCharHolder input;
+
+ @Output
+ VarCharHolder output;
+
+ @Inject
+ DrillBuf buffer;
+
+ public void setup() {
+ }
+
+ public void eval() {
+
+ // get value
+ String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+ // convert to lower case
+ String outputValue = inputString.toLowerCase();
+
+ // put the output value into output buffer
+ output.buffer = buffer;
+ output.start = 0;
+ output.end = outputValue.getBytes().length;
+ buffer.setBytes(0, outputValue.getBytes());
+
+ }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf b/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf
new file mode 100644
index 000000000..0b2948a2c
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf
@@ -0,0 +1 @@
+drill.classpath.scanning.packages += "org.apache.drill.udf.dynamic"
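drill-module.conf is the marker file Drill's classpath scanner looks for inside a UDF jar; the single line above adds org.apache.drill.udf.dynamic to the scanned packages so the @FunctionTemplate classes packaged alongside it are discovered when the jar is registered. Jars built without this resource reproduce the "missing marker file" case previously covered by the checked-in DrillUDF_NoMarkerFile jars removed below; one plausible way to produce such a jar with this patch's helpers (the exclusion pattern is illustrative, and the committed test may arrange this differently):

    // Illustrative: an include.resources pattern that matches nothing keeps
    // drill-module.conf out of the jar, yielding the "no marker file" variant.
    String noMarkerJar = buildJars("drill-no-marker", "**/CustomLowerFunction.java", "**/dummy.conf");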
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar
deleted file mode 100644
index b5965c958..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar
deleted file mode 100644
index 7cd2eeb34..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar
deleted file mode 100644
index 1c8308c31..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar
deleted file mode 100644
index 3522c1e84..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar
deleted file mode 100644
index f6b250ec0..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar
deleted file mode 100644
index 4b5ef8bc4..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar
deleted file mode 100644
index fa449e270..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar
deleted file mode 100644
index 8945fe758..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar
deleted file mode 100644
index b19ade637..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar
deleted file mode 100644
index 56a649c47..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar
deleted file mode 100644
index 2a82dc9e2..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar
deleted file mode 100644
index 11ed28b66..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar
deleted file mode 100644
index dbc97dd6c..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar
deleted file mode 100644
index cba65da47..000000000
--- a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar
deleted file mode 100644
index 583b1c4a8..000000000
--- a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar
+++ /dev/null
Binary files differ
diff --git a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar b/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar
deleted file mode 100644
index 42df4a401..000000000
--- a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar
+++ /dev/null
Binary files differ