author    Arina Ielchiieva <arina.yelchiyeva@gmail.com>    2018-06-22 19:36:41 +0300
committer Arina Ielchiieva <arina.yelchiyeva@gmail.com>    2018-06-27 19:14:20 +0300
commit    c346859735e4eab3ad12e755e5643fceb9536f74 (patch)
tree      cad1b5a8ed81caec5b73ff35c1ce5f18dd9f4456 /exec/java-exec/src/main/java/org/apache
parent    779edf880a1e92608b68108f18e79eff6eb4afa5 (diff)
DRILL-6526: Refactor FileSystemConfig to disallow direct access from the code to its variables
Diffstat (limited to 'exec/java-exec/src/main/java/org/apache')
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java          44
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java          31
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java             11
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java    14
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java    26
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java   10
-rw-r--r--  exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java         46
7 files changed, 117 insertions, 65 deletions
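
The refactoring named in the commit message makes FileSystemConfig immutable: its fields become private and final, Jackson populates them through a @JsonCreator constructor, and other code reads them through getters. Callers that previously mutated pluginConfig.workspaces or pluginConfig.formats directly now copy the existing values and build a new config instance. A minimal caller-side sketch of that pattern (illustrative only, modeled on the StoragePluginTestUtils changes in the diff below; the helper class and method name are hypothetical and not part of the commit):

import java.util.HashMap;
import java.util.Map;

import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;

public class FileSystemConfigUsageExample {

  // Hypothetical helper: returns a copy of the given config with one extra workspace.
  // After DRILL-6526 the config can no longer be changed in place, so callers copy
  // the existing values and construct a new FileSystemConfig.
  public static FileSystemConfig withWorkspace(FileSystemConfig oldConfig,
                                               String name,
                                               WorkspaceConfig workspace) {
    Map<String, WorkspaceConfig> workspaces = new HashMap<>();
    if (oldConfig.getWorkspaces() != null) {
      workspaces.putAll(oldConfig.getWorkspaces());
    }
    workspaces.put(name, workspace);

    return new FileSystemConfig(oldConfig.getConnection(),
                                oldConfig.getConfig(),
                                workspaces,
                                oldConfig.getFormats());
  }
}
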
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
index 3c8f3a7f7..4eda95582 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.store.dfs;
import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -26,12 +28,44 @@ import com.fasterxml.jackson.annotation.JsonTypeName;
@JsonTypeName(FileSystemConfig.NAME)
public class FileSystemConfig extends StoragePluginConfig {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FileSystemConfig.class);
+
public static final String NAME = "file";
- public String connection;
- public Map<String, String> config;
- public Map<String, WorkspaceConfig> workspaces;
- public Map<String, FormatPluginConfig> formats;
+
+ private final String connection;
+ private final Map<String, String> config;
+ private final Map<String, WorkspaceConfig> workspaces;
+ private final Map<String, FormatPluginConfig> formats;
+
+ @JsonCreator
+ public FileSystemConfig(@JsonProperty("connection") String connection,
+ @JsonProperty("config") Map<String, String> config,
+ @JsonProperty("workspaces") Map<String, WorkspaceConfig> workspaces,
+ @JsonProperty("formats") Map<String, FormatPluginConfig> formats) {
+ this.connection = connection;
+ this.config = config;
+ this.workspaces = workspaces;
+ this.formats = formats;
+ }
+
+ @JsonProperty
+ public String getConnection() {
+ return connection;
+ }
+
+ @JsonProperty
+ public Map<String, String> getConfig() {
+ return config;
+ }
+
+ @JsonProperty
+ public Map<String, WorkspaceConfig> getWorkspaces() {
+ return workspaces;
+ }
+
+ @JsonProperty
+ public Map<String, FormatPluginConfig> getFormats() {
+ return formats;
+ }
@Override
public int hashCode() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index 734ab735d..e71e7e145 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -20,8 +20,11 @@ package org.apache.drill.exec.store.dfs;
import static org.apache.drill.exec.store.dfs.FileSystemSchemaFactory.DEFAULT_WS_NAME;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import org.apache.calcite.schema.SchemaPlus;
@@ -44,8 +47,6 @@ import org.apache.hadoop.fs.FileSystem;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
/**
* A Storage engine associated with a Hadoop FileSystem Implementation. Examples include HDFS, MapRFS, QuantacastFileSystem,
@@ -62,42 +63,38 @@ public class FileSystemPlugin extends AbstractStoragePlugin {
private final Configuration fsConf;
private final LogicalPlanPersistence lpPersistance;
- public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name)
- throws ExecutionSetupException{
+ public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name) throws ExecutionSetupException {
super(context, name);
this.config = config;
this.lpPersistance = context.getLpPersistence();
try {
-
fsConf = new Configuration();
- if (config.config != null) {
- for (String s : config.config.keySet()) {
- fsConf.set(s, config.config.get(s));
- }
- }
- fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
+ Optional.ofNullable(config.getConfig())
+ .ifPresent(c -> c.forEach(fsConf::set));
+
+ fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.getConnection());
fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
formatCreator = newFormatCreator(config, context, fsConf);
- List<FormatMatcher> matchers = Lists.newArrayList();
- formatPluginsByConfig = Maps.newHashMap();
+ List<FormatMatcher> matchers = new ArrayList<>();
+ formatPluginsByConfig = new HashMap<>();
for (FormatPlugin p : formatCreator.getConfiguredFormatPlugins()) {
matchers.add(p.getMatcher());
formatPluginsByConfig.put(p.getConfig(), p);
}
- final boolean noWorkspace = config.workspaces == null || config.workspaces.isEmpty();
- List<WorkspaceSchemaFactory> factories = Lists.newArrayList();
+ boolean noWorkspace = config.getWorkspaces() == null || config.getWorkspaces().isEmpty();
+ List<WorkspaceSchemaFactory> factories = new ArrayList<>();
if (!noWorkspace) {
- for (Map.Entry<String, WorkspaceConfig> space : config.workspaces.entrySet()) {
+ for (Map.Entry<String, WorkspaceConfig> space : config.getWorkspaces().entrySet()) {
factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name, space.getValue(), matchers, context.getLpPersistence(), context.getClasspathScan()));
}
}
// if the "default" workspace is not given add one.
- if (noWorkspace || !config.workspaces.containsKey(DEFAULT_WS_NAME)) {
+ if (noWorkspace || !config.getWorkspaces().containsKey(DEFAULT_WS_NAME)) {
factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence(), context.getClasspathScan()));
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index fe9014b54..b981adf82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -21,6 +21,7 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.exceptions.UserException;
@@ -31,8 +32,6 @@ import org.apache.drill.common.util.ConstructorChecker;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.hadoop.conf.Configuration;
-import com.google.common.collect.Maps;
-
/**
* Responsible for instantiating format plugins
*/
@@ -51,7 +50,7 @@ public class FormatCreator {
* @return a map of type to constructor that taks the config
*/
private static Map<Class<?>, Constructor<?>> initConfigConstructors(Collection<Class<? extends FormatPlugin>> pluginClasses) {
- Map<Class<?>, Constructor<?>> constructors = Maps.newHashMap();
+ Map<Class<?>, Constructor<?>> constructors = new HashMap<>();
for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
for (Constructor<?> c : pluginClass.getConstructors()) {
try {
@@ -91,8 +90,8 @@ public class FormatCreator {
this.pluginClasses = classpathScan.getImplementations(FormatPlugin.class);
this.configConstructors = initConfigConstructors(pluginClasses);
- Map<String, FormatPlugin> plugins = Maps.newHashMap();
- if (storageConfig.formats == null || storageConfig.formats.isEmpty()) {
+ Map<String, FormatPlugin> plugins = new HashMap<>();
+ if (storageConfig.getFormats() == null || storageConfig.getFormats().isEmpty()) {
for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
for (Constructor<?> c : pluginClass.getConstructors()) {
try {
@@ -107,7 +106,7 @@ public class FormatCreator {
}
}
} else {
- for (Map.Entry<String, FormatPluginConfig> e : storageConfig.formats.entrySet()) {
+ for (Map.Entry<String, FormatPluginConfig> e : storageConfig.getFormats().entrySet()) {
Constructor<?> c = configConstructors.get(e.getValue().getClass());
if (c == null) {
logger.warn("Unable to find constructor for storage config named '{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 095e09a06..5eec5cc6e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -18,10 +18,10 @@
package org.apache.drill.exec.store.easy.json;
import java.io.IOException;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -44,7 +44,6 @@ import org.apache.hadoop.fs.FileSystem;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
@@ -60,14 +59,17 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
}
@Override
- public RecordReader getRecordReader(FragmentContext context, DrillFileSystem dfs, FileWork fileWork,
- List<SchemaPath> columns, String userName) throws ExecutionSetupException {
+ public RecordReader getRecordReader(FragmentContext context,
+ DrillFileSystem dfs,
+ FileWork fileWork,
+ List<SchemaPath> columns,
+ String userName) {
return new JSONRecordReader(context, fileWork.getPath(), dfs, columns);
}
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
- Map<String, String> options = Maps.newHashMap();
+ Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -76,7 +78,7 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
options.put("prefix", fragmentId);
options.put("separator", " ");
- options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig) writer.getStorageConfig()).getConnection());
options.put("extension", "json");
options.put("extended", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_EXTENDED_TYPES)));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index 8209252d7..b61ce30ba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -19,10 +19,10 @@ package org.apache.drill.exec.store.easy.text;
import java.io.IOException;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -60,14 +60,13 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextFormatConfig> {
private final static String DEFAULT_NAME = "text";
public TextFormatPlugin(String name, DrillbitContext context, Configuration fsConf, StoragePluginConfig storageConfig) {
super(name, context, fsConf, storageConfig, new TextFormatConfig(), true, false, true, true,
- Collections.<String>emptyList(), DEFAULT_NAME);
+ Collections.emptyList(), DEFAULT_NAME);
}
public TextFormatPlugin(String name, DrillbitContext context, Configuration fsConf, StoragePluginConfig config,
@@ -78,17 +77,20 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
@Override
- public RecordReader getRecordReader(FragmentContext context, DrillFileSystem dfs, FileWork fileWork,
- List<SchemaPath> columns, String userName) throws ExecutionSetupException {
+ public RecordReader getRecordReader(FragmentContext context,
+ DrillFileSystem dfs,
+ FileWork fileWork,
+ List<SchemaPath> columns,
+ String userName) {
Path path = dfs.makeQualified(new Path(fileWork.getPath()));
FileSplit split = new FileSplit(path, fileWork.getStart(), fileWork.getLength(), new String[]{""});
- if (context.getOptions().getOption(ExecConstants.ENABLE_NEW_TEXT_READER_KEY).bool_val == true) {
+ if (context.getOptions().getBoolean(ExecConstants.ENABLE_NEW_TEXT_READER_KEY)) {
TextParsingSettings settings = new TextParsingSettings();
- settings.set((TextFormatConfig)formatConfig);
+ settings.set(formatConfig);
return new CompliantTextRecordReader(split, dfs, settings, columns);
} else {
- char delim = ((TextFormatConfig)formatConfig).getFieldDelimiter();
+ char delim = formatConfig.getFieldDelimiter();
return new DrillTextRecordReader(split, dfs.getConf(), context, delim, columns);
}
}
@@ -112,7 +114,7 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
@Override
public RecordWriter getRecordWriter(final FragmentContext context, final EasyWriter writer) throws IOException {
- final Map<String, String> options = Maps.newHashMap();
+ final Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -120,10 +122,10 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
options.put("prefix", fragmentId);
- options.put("separator", ((TextFormatConfig)getConfig()).getFieldDelimiterAsString());
- options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put("separator", getConfig().getFieldDelimiterAsString());
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig) writer.getStorageConfig()).getConnection());
- options.put("extension", ((TextFormatConfig)getConfig()).getExtensions().get(0));
+ options.put("extension", getConfig().getExtensions().get(0));
RecordWriter recordWriter = new DrillTextRecordWriter(context.getAllocator(), writer.getStorageStrategy());
recordWriter.init(options);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
index 94760da6f..7d5959be1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
@@ -18,6 +18,7 @@
package org.apache.drill.exec.store.parquet;
import java.io.IOException;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -65,7 +66,6 @@ import org.apache.parquet.hadoop.ParquetFileWriter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
public class ParquetFormatPlugin implements FormatPlugin{
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MockStorageEngine.class);
@@ -126,12 +126,12 @@ public class ParquetFormatPlugin implements FormatPlugin{
}
@Override
- public AbstractWriter getWriter(PhysicalOperator child, String location, List<String> partitionColumns) throws IOException {
+ public AbstractWriter getWriter(PhysicalOperator child, String location, List<String> partitionColumns) {
return new ParquetWriter(child, location, partitionColumns, this);
}
public RecordWriter getRecordWriter(FragmentContext context, ParquetWriter writer) throws IOException, OutOfMemoryException {
- Map<String, String> options = Maps.newHashMap();
+ Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -139,7 +139,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
options.put("prefix", fragmentId);
- options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig) writer.getStorageConfig()).getConnection());
options.put(ExecConstants.PARQUET_BLOCK_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_BLOCK_SIZE).num_val.toString());
options.put(ExecConstants.PARQUET_WRITER_USE_SINGLE_FS_BLOCK,
@@ -215,7 +215,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
private final ParquetFormatConfig formatConfig;
- public ParquetFormatMatcher(ParquetFormatPlugin plugin, ParquetFormatConfig formatConfig) {
+ ParquetFormatMatcher(ParquetFormatPlugin plugin, ParquetFormatConfig formatConfig) {
super(plugin, PATTERNS, MAGIC_STRINGS);
this.formatConfig = formatConfig;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
index 4a90a4e27..16836c295 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
@@ -18,11 +18,13 @@
package org.apache.drill.exec.util;
import java.io.File;
+import java.util.HashMap;
import java.util.Map;
+import java.util.Optional;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -68,21 +70,27 @@ public class StoragePluginTestUtils {
final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin(pluginName);
final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
- Map<String, WorkspaceConfig> workspaces = Maps.newHashMap();
+ Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
+ Optional.ofNullable(pluginConfig.getWorkspaces())
+ .ifPresent(newWorkspaces::putAll);
if (schemas.length == 0) {
schemas = new String[]{TMP_SCHEMA};
}
- for (String schema: schemas) {
- WorkspaceConfig workspaceConfig = pluginConfig.workspaces.get(schema);
- String inputFormat = workspaceConfig == null ? null: workspaceConfig.getDefaultInputFormat();
+ for (String schema : schemas) {
+ WorkspaceConfig workspaceConfig = newWorkspaces.get(schema);
+ String inputFormat = workspaceConfig == null ? null : workspaceConfig.getDefaultInputFormat();
WorkspaceConfig newWorkspaceConfig = new WorkspaceConfig(tmpDirPath.getAbsolutePath(), true, inputFormat, false);
- workspaces.put(schema, newWorkspaceConfig);
+ newWorkspaces.put(schema, newWorkspaceConfig);
}
- pluginConfig.workspaces.putAll(workspaces);
- pluginRegistry.createOrUpdate(pluginName, pluginConfig, true);
+ FileSystemConfig newPluginConfig = new FileSystemConfig(
+ pluginConfig.getConnection(),
+ pluginConfig.getConfig(),
+ newWorkspaces,
+ pluginConfig.getFormats());
+ pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
}
public static void configureFormatPlugins(StoragePluginRegistry pluginRegistry) throws ExecutionSetupException {
@@ -94,32 +102,42 @@ public class StoragePluginTestUtils {
FileSystemPlugin fileSystemPlugin = (FileSystemPlugin) pluginRegistry.getPlugin(storagePlugin);
FileSystemConfig fileSystemConfig = (FileSystemConfig) fileSystemPlugin.getConfig();
+ Map<String, FormatPluginConfig> newFormats = new HashMap<>();
+ Optional.ofNullable(fileSystemConfig.getFormats())
+ .ifPresent(newFormats::putAll);
+
TextFormatPlugin.TextFormatConfig textConfig = new TextFormatPlugin.TextFormatConfig();
textConfig.extensions = ImmutableList.of("txt");
textConfig.fieldDelimiter = '\u0000';
- fileSystemConfig.formats.put("txt", textConfig);
+ newFormats.put("txt", textConfig);
TextFormatPlugin.TextFormatConfig ssvConfig = new TextFormatPlugin.TextFormatConfig();
ssvConfig.extensions = ImmutableList.of("ssv");
ssvConfig.fieldDelimiter = ' ';
- fileSystemConfig.formats.put("ssv", ssvConfig);
+ newFormats.put("ssv", ssvConfig);
TextFormatPlugin.TextFormatConfig psvConfig = new TextFormatPlugin.TextFormatConfig();
psvConfig.extensions = ImmutableList.of("tbl");
psvConfig.fieldDelimiter = '|';
- fileSystemConfig.formats.put("psv", psvConfig);
+ newFormats.put("psv", psvConfig);
SequenceFileFormatConfig seqConfig = new SequenceFileFormatConfig();
seqConfig.extensions = ImmutableList.of("seq");
- fileSystemConfig.formats.put("sequencefile", seqConfig);
+ newFormats.put("sequencefile", seqConfig);
TextFormatPlugin.TextFormatConfig csvhtestConfig = new TextFormatPlugin.TextFormatConfig();
csvhtestConfig.extensions = ImmutableList.of("csvh-test");
csvhtestConfig.fieldDelimiter = ',';
csvhtestConfig.extractHeader = true;
csvhtestConfig.skipFirstLine = true;
- fileSystemConfig.formats.put("csvh-test", csvhtestConfig);
+ newFormats.put("csvh-test", csvhtestConfig);
+
+ FileSystemConfig newFileSystemConfig = new FileSystemConfig(
+ fileSystemConfig.getConnection(),
+ fileSystemConfig.getConfig(),
+ fileSystemConfig.getWorkspaces(),
+ newFormats);
- pluginRegistry.createOrUpdate(storagePlugin, fileSystemConfig, true);
+ pluginRegistry.createOrUpdate(storagePlugin, newFileSystemConfig, true);
}
}