diff options
author | Sumit Mohanty <smohanty@hortonworks.com> | 2017-08-15 15:59:46 -0700 |
---|---|---|
committer | Sumit Mohanty <smohanty@hortonworks.com> | 2017-08-15 15:59:46 -0700 |
commit | 3d07ec5a091de3735e63c5500c380612e6533baa (patch) | |
tree | 457a3e417af975d8c0569cc285a9b25906cb2f0f | |
parent | fb5567608f173fa4cbc4f1beed6fcf9163c418ed (diff) |
AMBARI-21045. Enable Storm's AutoTGT configs in secure mode (Sriharsha Chintalapani via smohanty)
17 files changed, 240 insertions, 51 deletions
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog251.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog251.java index afda1f6769..1a854f8ccf 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog251.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog251.java @@ -170,4 +170,40 @@ public class UpgradeCatalog251 extends AbstractUpgradeCatalog { } } } + + /** + * Make sure storm-env changes are applied to anyone upgrading to HDP-2.6.1 Storm + * If the base version was before Ambari 2.5.0, this method should wind up doing nothing. + * @throws AmbariException + */ + protected void updateSTORMConfigs() throws AmbariException { + AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class); + Clusters clusters = ambariManagementController.getClusters(); + if (clusters != null) { + Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters); + if (clusterMap != null && !clusterMap.isEmpty()) { + for (final Cluster cluster : clusterMap.values()) { + Set<String> installedServices = cluster.getServices().keySet(); + + if (installedServices.contains("STORM") && cluster.getSecurityType() == SecurityType.KERBEROS) { + Config stormEnv = cluster.getDesiredConfigByType(STORM_ENV_CONFIG); + String content = stormEnv.getProperties().get("content"); + if (content != null && !content.contains("STORM_AUTOCREDS_LIB_DIR")) { + Map<String, String> newProperties = new HashMap<>(); + String stormEnvConfigs = "\n #set storm-auto creds \n" + + "# check if storm_jaas.conf in config , only enable storm_auto_creds in secure mode.\n " + + "STORM_JAAS_CONF=$STORM_HOME/conf/storm_jaas.conf \n" + + "STORM_AUTOCREDS_LIB_DIR=$STORM_HOME/external/storm-autocreds \n" + + "if [ -f $STORM_JAAS_CONF ] && [ -d $STORM_AUTOCREDS_LIB_DIR ]; then \n" + + " export STORM_EXT_CLASSPATH=$STORM_AUTOCREDS_LIB_DIR \n" + + 
"fi\n"; + content += stormEnvConfigs; + newProperties.put("content", content); + updateConfigurationPropertiesForCluster(cluster, "storm-env", newProperties, true, false); + } + } + } + } + } + } } diff --git a/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-env.xml b/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-env.xml new file mode 100644 index 0000000000..2ce560d2ac --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-env.xml @@ -0,0 +1,54 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +--> +<configuration supports_adding_forbidden="true"> + <!-- storm-env.sh --> + <property> + <name>content</name> + <display-name>storm-env template</display-name> + <description>This is the jinja template for storm-env.sh file</description> + <value> +#!/bin/bash + +# Set Storm specific environment variables here. + +# The java implementation to use. 
+export JAVA_HOME={{java64_home}} + +export STORM_CONF_DIR={{conf_dir}} +export STORM_HOME={{storm_component_home_dir}} +export STORM_JAR_JVM_OPTS={{jar_jvm_opts}} + +#set storm-auto creds +# check if storm_jaas.conf in config , only enable storm_auto_creds in secure mode. +STORM_JAAS_CONF=$STORM_HOME/conf/storm_jaas.conf +STORM_AUTOCREDS_LIB_DIR=$STORM_HOME/external/storm-autocreds + +if [ -f $STORM_JAAS_CONF ] && [ -d $STORM_AUTOCREDS_LIB_DIR ]; then + export STORM_EXT_CLASSPATH=$STORM_AUTOCREDS_LIB_DIR +fi + </value> + <value-attributes> + <type>content</type> + </value-attributes> + <on-ambari-upgrade add="true"/> + </property> +</configuration> diff --git a/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-site.xml b/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-site.xml deleted file mode 100644 index b2e9acb473..0000000000 --- a/ambari-server/src/main/resources/common-services/STORM/1.1.0/configuration/storm-site.xml +++ /dev/null @@ -1,48 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ ---> -<configuration supports_final="true"> - <property> - <name>nimbus.impersonation.acl</name> - <description> - The ImpersonationAuthorizer uses nimbus.impersonation.acl as the acl to authorize users. Following is a sample nimbus config for supporting impersonation: - nimbus.impersonation.acl: - impersonating_user1: - hosts: - [comma separated list of hosts from which impersonating_user1 is allowed to impersonate other users] - groups: - [comma separated list of groups whose users impersonating_user1 is allowed to impersonate] - impersonating_user2: - hosts: - [comma separated list of hosts from which impersonating_user2 is allowed to impersonate other users] - groups: - [comma separated list of groups whose users impersonating_user2 is allowed to impersonate] - </description> - <!-- The depends-on section is new in this version, required so Stack Advisor can include streamline-env and streamline-common configs. --> - <depends-on> - <property> - <type>streamline-env</type> - <name>streamline_principal_name</name> - </property> - </depends-on> - <on-ambari-upgrade add="false"/> - </property> -</configuration> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml index d8e2d7d753..0c4106e0f6 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml @@ -664,6 +664,13 @@ replace-with="${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}"/> </definition> + <definition xsi:type="configure" id="storm_nimbus_autocred_config" summary="Update Storm's Nimbus AutoCred config"> + <type>storm-site</type> + <set key="nimbus.autocredential.plugins.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" 
if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.freq.secs" value="82800" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + </definition> + </changes> </component> </service> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml index ca3c19358a..c45c7c5510 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml @@ -638,6 +638,10 @@ <task xsi:type="configure" id="hdp_2_5_0_0_upgrade_storm_1.0"/> </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm AutoCreds"> + <task xsi:type="configure" id="storm_nimbus_autocred_config"/> + </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm"> <!-- Remove Atlas configs that were incorrectly added to storm-site instead of Atlas' application.properties. 
--> <task xsi:type="configure" id="hdp_2_5_0_0_remove_storm_atlas_configs"/> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml index cf2c2574da..69be4dcfbd 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml @@ -1130,6 +1130,7 @@ <task xsi:type="configure" id="storm_worker_log4j_parameterize" /> <task xsi:type="configure" id="storm_cluster_log4j_parameterize" /> <task xsi:type="configure" id="storm_worker_log4j_directory" /> + <task xsi:type="configure" id="storm_nimbus_autocred_config" /> </pre-upgrade> <pre-downgrade> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml index 83a0a1eddf..3e62fb1dec 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml @@ -483,6 +483,13 @@ <replace key="content" find="${sys:storm.log.dir}/${sys:logfile.name}" replace-with="${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}"/> </definition> + <definition xsi:type="configure" id="storm_nimbus_autocred_config" summary="Update Storm's Nimbus AutoCred config"> + <type>storm-site</type> + <set key="nimbus.autocredential.plugins.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" 
if-key-state="present"/> + <set key="nimbus.credential.renewers.freq.secs" value="82800" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + </definition> + </changes> </component> </service> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml index 6e899ee18f..b6f48fcb94 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml @@ -563,6 +563,10 @@ <task xsi:type="configure" id="hdp_2_5_0_0_remove_storm_atlas_configs"/> </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm AutoCreds"> + <task xsi:type="configure" id="storm_nimbus_autocred_config"/> + </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply security changes for Storm"> <!-- Add nimbus.impersonation acls . 
--> <task xsi:type="configure" id="hdp_2_5_0_0_add_storm_security_configs" /> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml index 4e14e83bd6..f1d34e73e5 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml @@ -1092,6 +1092,8 @@ <task xsi:type="configure" id="storm_worker_log4j_parameterize" /> <task xsi:type="configure" id="storm_cluster_log4j_parameterize" /> <task xsi:type="configure" id="storm_worker_log4j_directory" /> + <task xsi:type="configure" id="storm_nimbus_autocred_config" /> + </pre-upgrade> <pre-downgrade> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml index f227bacb4b..8539006853 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml @@ -63,6 +63,13 @@ <regex-replace key="content" find="A1" immediateFlush="false"
 fileName="\$\{sys:storm.log.dir}/\$\{sys:logfile.name}"
 filePattern="\$\{sys:storm.log.dir}/\$\{sys:logfile.name}.%i.gz">
 <PatternLayout>
 <pattern>\$\{pattern}</pattern>
 </PatternLayout>
 <Policies>
 <SizeBasedTriggeringPolicy size="(?:[0-9]+) MB"/> <!-- Or every 100 MB -->
 </Policies>
 <DefaultRolloverStrategy max="([0-9]+)" replace-with="A1" immediateFlush="false"
 fileName="${sys:storm.log.dir}/${sys:logfile.name}"
 filePattern="${sys:storm.log.dir}/${sys:logfile.name}.%i.gz">
 <PatternLayout>
 <pattern>${pattern}</pattern>
 </PatternLayout>
 <Policies>
 <SizeBasedTriggeringPolicy size="{{storm_a1_maxfilesize}} MB"/> <!-- Or every 100 MB -->
 </Policies>
 <DefaultRolloverStrategy max="{{storm_a1_maxbackupindex}}"/> </definition> + <definition xsi:type="configure" id="storm_nimbus_autocred_config" summary="Update Storm's Nimbus AutoCred config"> + <type>storm-site</type> + <set key="nimbus.autocredential.plugins.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.freq.secs" value="82800" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + </definition> + </changes> </component> </service> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml index ce10e8b1f4..ef4c6f4c94 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml @@ -416,6 +416,10 @@ <task xsi:type="configure" id="hdp_2_6_0_0_remove_bind_anonymous"/> </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm AutoCreds"> + <task xsi:type="configure" id="storm_nimbus_autocred_config"/> + </execute-stage> + <!--RANGER--> <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties"> <task xsi:type="configure" id="admin_log4j_parameterize"> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml index 840b17d967..d9675f1f48 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml @@ -1030,6 +1030,7 @@ <pre-upgrade> <task xsi:type="configure" id="storm_worker_log4j_parameterize" /> <task xsi:type="configure" id="storm_cluster_log4j_parameterize" /> + <task xsi:type="configure" id="storm_nimbus_autocred_config" /> </pre-upgrade> <pre-downgrade/> <upgrade> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py index cc5fa92ce6..974b10c149 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py @@ -41,11 +41,92 @@ class HDP26StackAdvisor(HDP25StackAdvisor): "HIVE": self.recommendHIVEConfigurations, "HBASE": self.recommendHBASEConfigurations, "YARN": self.recommendYARNConfigurations, - "KAFKA": self.recommendKAFKAConfigurations + "KAFKA": self.recommendKAFKAConfigurations, + "BEACON": self.recommendBEACONConfigurations, + "STORM": self.recommendSTORMConfigurations } parentRecommendConfDict.update(childRecommendConfDict) return parentRecommendConfDict + def recommendSTORMConfigurations(self, configurations, clusterData, services, hosts): + """ + In HDF-2.6.1 we introduced a new way of doing Auto Credentials with services such as + HDFS, HIVE, HBASE. This method will update the required configs for autocreds if the users installs + STREAMLINE service. 
+ """ + super(HDP26StackAdvisor, self).recommendStormConfigurations(configurations, clusterData, services, hosts) + storm_site = self.getServicesSiteProperties(services, "storm-site") + storm_env = self.getServicesSiteProperties(services, "storm-env") + putStormSiteProperty = self.putProperty(configurations, "storm-site", services) + putStormSiteAttributes = self.putPropertyAttribute(configurations, "storm-site") + security_enabled = self.isSecurityEnabled(services) + servicesList = [service["StackServices"]["service_name"] for service in services["services"]] + + if storm_env and storm_site and security_enabled and 'STREAMLINE' in servicesList: + storm_nimbus_impersonation_acl = storm_site["nimbus.impersonation.acl"] if "nimbus.impersonation.acl" in storm_site else None + streamline_env = self.getServicesSiteProperties(services, "streamline-env") + _streamline_principal_name = streamline_env['streamline_principal_name'] if 'streamline_principal_name' in streamline_env else None + if _streamline_principal_name is not None and storm_nimbus_impersonation_acl is not None: + streamline_bare_principal = get_bare_principal(_streamline_principal_name) + storm_nimbus_impersonation_acl.replace('{{streamline_bare_principal}}', streamline_bare_principal) + putStormSiteProperty('nimbus.impersonation.acl', storm_nimbus_impersonation_acl) + + storm_nimbus_autocred_plugin_classes = storm_site["nimbus.autocredential.plugins.classes"] if "nimbus.autocredential.plugins.classes" in storm_site else None + if storm_nimbus_autocred_plugin_classes is not None: + new_storm_nimbus_autocred_plugin_classes = ['org.apache.storm.hdfs.security.AutoHDFS', + 'org.apache.storm.hbase.security.AutoHBase', + 'org.apache.storm.hive.security.AutoHive'] + new_conf = DefaultStackAdvisor.appendToYamlString(storm_nimbus_autocred_plugin_classes, + new_storm_nimbus_autocred_plugin_classes) + + putStormSiteProperty("nimbus.autocredential.plugins.classes", new_conf) + else: + 
putStormSiteProperty("nimbus.autocredential.plugins.classes", "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']") + + + storm_nimbus_credential_renewer_classes = storm_site["nimbus.credential.renewers.classes"] if "nimbus.credential.renewers.classes" in storm_site else None + if storm_nimbus_credential_renewer_classes is not None: + new_storm_nimbus_credential_renewer_classes_array = ['org.apache.storm.hdfs.security.AutoHDFS', + 'org.apache.storm.hbase.security.AutoHBase', + 'org.apache.storm.hive.security.AutoHive'] + new_conf = DefaultStackAdvisor.appendToYamlString(storm_nimbus_credential_renewer_classes, + new_storm_nimbus_credential_renewer_classes_array) + putStormSiteProperty("nimbus.autocredential.plugins.classes", new_conf) + else: + putStormSiteProperty("nimbus.credential.renewers.classes", "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']") + putStormSiteProperty("nimbus.credential.renewers.freq.secs", "82800") + pass + + def recommendBEACONConfigurations(self, configurations, clusterData, services, hosts): + beaconEnvProperties = self.getSiteProperties(services['configurations'], 'beacon-env') + putbeaconEnvProperty = self.putProperty(configurations, "beacon-env", services) + + # database URL and driver class recommendations + if beaconEnvProperties and self.checkSiteProperties(beaconEnvProperties, 'beacon_store_driver') and self.checkSiteProperties(beaconEnvProperties, 'beacon_database'): + putbeaconEnvProperty('beacon_store_driver', self.getDBDriver(beaconEnvProperties['beacon_database'])) + if beaconEnvProperties and self.checkSiteProperties(beaconEnvProperties, 'beacon_store_db_name', 'beacon_store_url') and self.checkSiteProperties(beaconEnvProperties, 'beacon_database'): + beaconServerHost = self.getHostWithComponent('BEACON', 'BEACON_SERVER', services, hosts) + beaconDBConnectionURL 
= beaconEnvProperties['beacon_store_url'] + protocol = self.getProtocol(beaconEnvProperties['beacon_database']) + oldSchemaName = getOldValue(self, services, "beacon-env", "beacon_store_db_name") + oldDBType = getOldValue(self, services, "beacon-env", "beacon_database") + # under these if constructions we are checking if beacon server hostname available, + # if it's default db connection url with "localhost" or if schema name was changed or if db type was changed (only for db type change from default mysql to existing mysql) + # or if protocol according to current db type differs with protocol in db connection url(other db types changes) + if beaconServerHost is not None: + if (beaconDBConnectionURL and "//localhost" in beaconDBConnectionURL) or oldSchemaName or oldDBType or (protocol and beaconDBConnectionURL and not beaconDBConnectionURL.startswith(protocol)): + dbConnection = self.getDBConnectionStringBeacon(beaconEnvProperties['beacon_database']).format(beaconServerHost['Hosts']['host_name'], beaconEnvProperties['beacon_store_db_name']) + putbeaconEnvProperty('beacon_store_url', dbConnection) + + def getDBConnectionStringBeacon(self, databaseType): + driverDict = { + 'NEW DERBY DATABASE': 'jdbc:derby:${{beacon.data.dir}}/${{beacon.store.db.name}}-db;create=true', + 'EXISTING MYSQL DATABASE': 'jdbc:mysql://{0}/{1}', + 'EXISTING MYSQL / MARIADB DATABASE': 'jdbc:mysql://{0}/{1}', + 'EXISTING ORACLE DATABASE': 'jdbc:oracle:thin:@//{0}:1521/{1}' + } + return driverDict.get(databaseType.upper()) + def recommendAtlasConfigurations(self, configurations, clusterData, services, hosts): super(HDP26StackAdvisor, self).recommendAtlasConfigurations(configurations, clusterData, services, hosts) servicesList = [service["StackServices"]["service_name"] for service in services["services"]] @@ -401,9 +482,9 @@ class HDP26StackAdvisor(HDP25StackAdvisor): propertyValue = "https://"+webapp_address+"/ws/v1/applicationhistory" Logger.info("validateYarnSiteConfigurations: recommended 
value for webservice url"+services["configurations"]["yarn-site"]["properties"]["yarn.log.server.web-service.url"]) if services["configurations"]["yarn-site"]["properties"]["yarn.log.server.web-service.url"] != propertyValue: - validationItems.append( + validationItems = [ {"config-name": "yarn.log.server.web-service.url", - "item": self.getWarnItem("Value should be %s" % propertyValue)}) + "item": self.getWarnItem("Value should be %s" % propertyValue)}] return self.toConfigurationValidationProblems(validationItems, "yarn-site") def validateDruidHistoricalConfigurations(self, properties, recommendedDefaults, configurations, services, hosts): diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml index c2c153285d..38811379ca 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml @@ -111,6 +111,12 @@ <set key="ranger.plugin.storm.ambari.cluster.name" value="{{cluster_name}}" if-type="ranger-storm-plugin-properties" if-key="ranger-storm-plugin-enabled" if-key-state="present"/> </definition> + <definition xsi:type="configure" id="storm_nimbus_autocred_config" summary="Update Storm's Nimbus AutoCred config"> + <type>storm-site</type> + <set key="nimbus.autocredential.plugins.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + <set key="nimbus.credential.renewers.freq.secs" value="82800" 
if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/> + </definition> </changes> </component> </service> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml index df609cddd2..6d8821bc67 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml @@ -328,6 +328,10 @@ <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/> </execute-stage> + <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm AutoCreds"> + <task xsi:type="configure" id="storm_nimbus_autocred_config"/> + </execute-stage> + <!-- YARN --> <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Ranger Yarn plugin"> <task xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name"/> diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml index b376fa7404..07f0960023 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml @@ -965,6 +965,7 @@ <component name="NIMBUS"> <pre-upgrade> <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/> + <task xsi:type="configure" id="storm_nimbus_autocred_config"/> </pre-upgrade> <pre-downgrade/> <!-- no-op to prevent config changes on downgrade --> <upgrade> diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py index ccbb6765e1..8ccbaba9db 100644 --- a/ambari-server/src/main/resources/stacks/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/stack_advisor.py @@ 
-1375,3 +1375,21 @@ class DefaultStackAdvisor(StackAdvisor): if recommendation: put_f(name, ",".join(recommendation)) + + @classmethod + def appendToYamlString(cls, yaml_string, list_classes): + updated_yaml_string = "" + try: + strip_yaml_str = re.sub('[\[\]\']', ' ', yaml_string) + klass_array = [x.strip() for x in strip_yaml_str.split(',')] + if yaml_string: + for klass in list_classes: + klass = klass.strip() + klass_array.append(klass) + klass_set = set(klass_array) + klass_list = [("'" + e + "'") for e in klass_set] + updated_yaml_string = "[" + ",".join(klass_list) + "]" + except Exception: + klass_list = [("'" + e + "'") for e in list_classes] + updated_yaml_string = "[" + ",".join(klass_list) + "]" + return updated_yaml_string |