author     Myroslav Papirkovskyi <mpapirkovskyy@apache.org>    2018-07-09 20:43:51 +0300
committer  GitHub <noreply@github.com>                         2018-07-09 20:43:51 +0300
commit     4b6541b50f0161c6a4b823fb429a4db8aa8911e0 (patch)
tree       d15b0bc63f984c0d422c9419f17f7ed6d969665f
parent     3d935c5b5d5894ace8d97fa2a3965181e71dcc6c (diff)
AMBARI-24263. Restart services just before stack upgrade fails due to AMS package incompatibility errors, causing EU not to be started. (mpapirkovskyy) (#1707) (#1726)
* AMBARI-24263. Restart services just before stack upgrade fails due to AMS package incompatibility errors, causing EU not to be started. (mpapirkovskyy)
* AMBARI-24263. Scriptlets logic update. Nullable metrics template. (mpapirkovskyy)
* AMBARI-24263. Conditional metrics templates. (mpapirkovskyy)
* AMBARI-24263. Templates typo fix. (mpapirkovskyy)
* AMBARI-24263. Fix template in config. (mpapirkovskyy)
* AMBARI-24263. Fix IFS usage side effects. (mpapirkovskyy)
* AMBARI-24263. Restart services just before stack upgrade fails due to AMS package incompatibility errors, causing EU not to be started. (mpapirkovskyy)
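
Note (reviewer illustration, not part of the patch): the change backs up the pre-2.7 hadoop sink jar under /usr/lib/ambari-metrics-sink-legacy, exposes it via the ambari-metrics-hadoop-sink-legacy.jar symlink, and lets the metrics2 templates point at it while the new ams_legacy_hadoop_sink stack feature is active. A quick shell check of which sink a host would load (paths are taken from the scripts and templates below; the commands themselves are only a suggestion):

  # Resolve the regular sink symlink, and the legacy backup link if the preinstall scriptlet created one.
  readlink -f /usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
  if [ -L /usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar ]; then
    readlink -f /usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
  else
    echo "no legacy backup link (fresh install, or upgrade from AMS >= 2.7)"
  fi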
-rw-r--r--  ambari-common/src/main/python/resource_management/libraries/functions/constants.py  1
-rw-r--r--  ambari-metrics/ambari-metrics-assembly/pom.xml  5
-rw-r--r--  ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst  45
-rw-r--r--  ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh  45
-rw-r--r--  ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java  27
-rw-r--r--  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py  1
-rw-r--r--  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py  3
-rw-r--r--  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2  2
-rw-r--r--  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py  3
-rw-r--r--  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py  1
-rw-r--r--  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py  3
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py  3
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py  1
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hivemetastore.properties.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hiveserver2.properties.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llapdaemon.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llaptaskscheduler.j2  4
-rw-r--r--  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py  3
-rw-r--r--  ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py  2
-rw-r--r--  ambari-server/src/main/resources/stack-hooks/before-START/templates/hadoop-metrics2.properties.j2  4
-rw-r--r--  ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json  5
-rw-r--r--  ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml  5
-rw-r--r--  ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java  58
26 files changed, 228 insertions, 17 deletions
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 443d639f8c..f25b2b599a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -123,3 +123,4 @@ class StackFeature:
KAFKA_EXTENDED_SASL_SUPPORT = "kafka_extended_sasl_support"
OOZIE_EXTJS_INCLUDED = "oozie_extjs_included"
MULTIPLE_ENV_SH_FILES_SUPPORT = "multiple_env_sh_files_support"
+ AMS_LEGACY_HADOOP_SINK = "ams_legacy_hadoop_sink"
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml b/ambari-metrics/ambari-metrics-assembly/pom.xml
index 5e6b2d6881..f89b9dfaa2 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -389,7 +389,10 @@
<defaultFilemode>644</defaultFilemode>
<defaultUsername>root</defaultUsername>
<defaultGroupname>root</defaultGroupname>
-
+ <preinstallScriptlet>
+ <scriptFile>${project.build.directory}/resources/rpm/sink/preinstall.sh</scriptFile>
+ <fileEncoding>utf-8</fileEncoding>
+ </preinstallScriptlet>
<postinstallScriptlet>
<scriptFile>${project.build.directory}/resources/rpm/sink/postinstall.sh</scriptFile>
<fileEncoding>utf-8</fileEncoding>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst
new file mode 100644
index 0000000000..7560fb9808
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+JAR_FILES_LEGACY_FOLDER="/usr/lib/ambari-metrics-sink-legacy"
+
+HADOOP_SINK_LINK="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
+
+HADOOP_LEGACY_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar"
+
+if [ -f ${HADOOP_SINK_LINK} ]
+then
+ old_jar=$(readlink -f ${HADOOP_SINK_LINK})
+ version_part=$(basename ${old_jar} | awk -F"-" '{print $7}')
+ IFS=. version=(${version_part})
+ unset IFS
+
+ if [[ ${version[0]} -le 2 && ${version[1]} -lt 7 ]] # backup only required on upgrade from version < 2.7
+ then
+ if [ ! -d "$JAR_FILES_LEGACY_FOLDER" ]
+ then
+ mkdir -p "$JAR_FILES_LEGACY_FOLDER"
+ fi
+ echo "Backing up Ambari metrics hadoop sink jar ${old_jar} -> $JAR_FILES_LEGACY_FOLDER/"
+ cp "${old_jar}" "${JAR_FILES_LEGACY_FOLDER}/"
+
+ HADOOP_SINK_LEGACY_JAR="$JAR_FILES_LEGACY_FOLDER/$(basename ${old_jar})"
+ echo "Creating symlink for backup jar $HADOOP_LEGACY_LINK_NAME -> $HADOOP_SINK_LEGACY_JAR"
+ rm -f "${HADOOP_LEGACY_LINK_NAME}" ; ln -s "${HADOOP_SINK_LEGACY_JAR}" "${HADOOP_LEGACY_LINK_NAME}"
+ fi
+fi
+
+exit 0
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh b/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh
new file mode 100644
index 0000000000..7560fb9808
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+JAR_FILES_LEGACY_FOLDER="/usr/lib/ambari-metrics-sink-legacy"
+
+HADOOP_SINK_LINK="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
+
+HADOOP_LEGACY_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar"
+
+if [ -f ${HADOOP_SINK_LINK} ]
+then
+ old_jar=$(readlink -f ${HADOOP_SINK_LINK})
+ version_part=$(basename ${old_jar} | awk -F"-" '{print $7}')
+ IFS=. version=(${version_part})
+ unset IFS
+
+ if [[ ${version[0]} -le 2 && ${version[1]} -lt 7 ]] # backup only required on upgrade from version < 2.7
+ then
+ if [ ! -d "$JAR_FILES_LEGACY_FOLDER" ]
+ then
+ mkdir -p "$JAR_FILES_LEGACY_FOLDER"
+ fi
+ echo "Backing up Ambari metrics hadoop sink jar ${old_jar} -> $JAR_FILES_LEGACY_FOLDER/"
+ cp "${old_jar}" "${JAR_FILES_LEGACY_FOLDER}/"
+
+ HADOOP_SINK_LEGACY_JAR="$JAR_FILES_LEGACY_FOLDER/$(basename ${old_jar})"
+ echo "Creating symlink for backup jar $HADOOP_LEGACY_LINK_NAME -> $HADOOP_SINK_LEGACY_JAR"
+ rm -f "${HADOOP_LEGACY_LINK_NAME}" ; ln -s "${HADOOP_SINK_LEGACY_JAR}" "${HADOOP_LEGACY_LINK_NAME}"
+ fi
+fi
+
+exit 0
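
Aside (illustrative sketch, not part of the patch): the "Fix IFS usage side effects" item in the commit message refers to the IFS=. version=(...) split in these scriptlets; without the following unset IFS, the dot separator would keep affecting word splitting for the rest of the script. The same parsing in isolation, assuming the resolved jar is named something like ambari-metrics-hadoop-sink-with-common-2.6.2.0.jar (that exact file name is hypothetical):

  #!/bin/bash
  old_jar="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-with-common-2.6.2.0.jar"  # hypothetical readlink -f result
  version_part=$(basename "${old_jar}" | awk -F"-" '{print $7}')  # 7th dash-separated field -> "2.6.2.0.jar"
  IFS=. version=(${version_part})  # split on dots into an array: (2 6 2 0 jar)
  unset IFS                        # restore default word splitting so later commands are unaffected
  if [[ ${version[0]} -le 2 && ${version[1]} -lt 7 ]]; then
    echo "sink is older than 2.7, so the scriptlet backs it up and links it as the legacy jar"
  fi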
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java
index 43bbc87d7c..86590825ec 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java
@@ -1062,6 +1062,7 @@ public class UpgradeCatalog270 extends AbstractUpgradeCatalog {
updateSolrConfigurations();
updateAmsConfigs();
updateStormConfigs();
+ clearHadoopMetrics2Content();
}
protected void renameAmbariInfra() {
@@ -1942,4 +1943,30 @@ public class UpgradeCatalog270 extends AbstractUpgradeCatalog {
}
}
}
+
+ protected void clearHadoopMetrics2Content() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+ Clusters clusters = ambariManagementController.getClusters();
+ if (clusters != null) {
+ Map<String, Cluster> clusterMap = clusters.getClusters();
+
+ if (clusterMap != null && !clusterMap.isEmpty()) {
+ String hadoopMetrics2ContentProperty = "content";
+ String hadoopMetrics2ContentValue = "";
+ String hadoopMetrics2ConfigType = "hadoop-metrics2.properties";
+ for (final Cluster cluster : clusterMap.values()) {
+ Config config = cluster.getDesiredConfigByType(hadoopMetrics2ConfigType);
+ if (config != null) {
+ Map<String, String> hadoopMetrics2Configs = config.getProperties();
+ if (hadoopMetrics2Configs.containsKey(hadoopMetrics2ContentProperty)) {
+ LOG.info("Updating " + hadoopMetrics2ContentProperty);
+ Map<String, String> updateProperty = Collections.singletonMap(hadoopMetrics2ContentProperty, hadoopMetrics2ContentValue);
+ updateConfigurationPropertiesForCluster(cluster, hadoopMetrics2ConfigType, updateProperty, Collections.EMPTY_SET,
+ true, false);
+ }
+ }
+ }
+ }
+ }
+ }
}
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index d87d9c2b30..80cb4c2863 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -150,6 +150,7 @@ if has_metric_collector:
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ metric_legacy_hadoop_sink = check_stack_feature(StackFeature.AMS_LEGACY_HADOOP_SINK, stack_version_formatted)
pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2 b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2
index 282f904c80..13def8ede2 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2
@@ -24,7 +24,11 @@
{% if has_metric_collector %}
+{% if metric_legacy_hadoop_sink %}
+*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+{% else %}
*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+{% endif %}
*.sink.timeline.slave.host.name={{hostname}}
accumulo.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
accumulo.period={{metrics_collection_period}}
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py
index 6105139381..d322f51b35 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py
@@ -20,7 +20,6 @@ limitations under the License.
from resource_management.core.resources.system import Execute, File
from resource_management.libraries.functions.format import format
-from ambari_commons.repo_manager.repo_manager_helper import check_installed_metrics_hadoop_sink_version
def hbase_service(
name,
@@ -34,8 +33,6 @@ def hbase_service(
no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
if action == 'start':
- # Check ambari-metrics-hadoop-sink version is less than 2.7.0.0
- check_installed_metrics_hadoop_sink_version()
daemon_cmd = format("{cmd} start {role}")
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index 4a6cd29e02..a7c41a586e 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -59,4 +59,4 @@ hbase.sink.timeline.truststore.password = {{metric_truststore_password}}
# Switch off metrics generation on a per region basis
*.source.filter.class=org.apache.hadoop.metrics2.filter.RegexFilter
-hbase.*.source.filter.exclude=.*(Regions|Users|Tables).*
\ No newline at end of file
+hbase.*.source.filter.exclude=.*(Regions|Users|Tables).*
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
index 6928182924..474c74c3f2 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
@@ -22,7 +22,6 @@ from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.show_logs import show_logs
from resource_management.core.shell import as_sudo
from resource_management.core.resources.system import Execute, File
-from ambari_commons.repo_manager.repo_manager_helper import check_installed_metrics_hadoop_sink_version
def hbase_service(
name,
@@ -37,8 +36,6 @@ def hbase_service(
no_op_test = as_sudo(["test", "-f", pid_file]) + format(" && ps -p `{pid_expression}` >/dev/null 2>&1")
if action == 'start':
- # Check ambari-metrics-hadoop-sink version is less than 2.7.0.0
- check_installed_metrics_hadoop_sink_version()
daemon_cmd = format("{cmd} start {role}")
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 0552275e29..0f01ea3f82 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -186,6 +186,7 @@ if has_metric_collector:
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ metric_legacy_hadoop_sink = check_stack_feature(StackFeature.AMS_LEGACY_HADOOP_SINK, version_for_stack_feature_checks)
pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
index 66796b429b..2811d72e6d 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
@@ -49,7 +49,11 @@ hbase.extendedperiod = 3600
{% if has_metric_collector %}
+{% if metric_legacy_hadoop_sink %}
+*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+{% else %}
*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+{% endif %}
*.sink.timeline.slave.host.name={{hostname}}
hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
index 4ed68baf12..d15c842a81 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
@@ -48,7 +48,11 @@ hbase.extendedperiod = 3600
{% if has_metric_collector %}
+{% if metric_legacy_hadoop_sink %}
+*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+{% else %}
*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+{% endif %}
*.sink.timeline.slave.host.name={{hostname}}
hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
hbase.period={{metrics_collection_period}}
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index f6cea7ab02..9d146e004a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -37,7 +37,6 @@ from resource_management.libraries.functions.curl_krb_request import curl_krb_re
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.namenode_ha_utils import get_namenode_states
from resource_management.libraries.functions.show_logs import show_logs
-from ambari_commons.repo_manager.repo_manager_helper import check_installed_metrics_hadoop_sink_version
from ambari_commons.inet_utils import ensure_ssl_using_protocol
from zkfc_slave import ZkfcSlaveDefault
@@ -270,8 +269,6 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False,
daemon_cmd = as_user(cmd, user)
if action == "start":
- # Check ambari-metrics-hadoop-sink version is less than 2.7.0.0
- check_installed_metrics_hadoop_sink_version()
# remove pid file from dead process
File(pid_file, action="delete", not_if=process_id_exists_command)
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index 9bc2601073..4a9ecc9751 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -36,7 +36,6 @@ from resource_management.libraries.functions.stack_features import check_stack_f
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
-from ambari_commons.repo_manager.repo_manager_helper import check_installed_metrics_hadoop_sink_version
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
@@ -73,8 +72,6 @@ def hive_service(name, action='start', upgrade_type=None):
process_id_exists_command = format("ls {pid_file} >/dev/null 2>&1 && ps -p {pid} >/dev/null 2>&1")
if action == 'start':
- # Check ambari-metrics-hadoop-sink version is less than 2.7.0.0
- check_installed_metrics_hadoop_sink_version()
if name == 'hiveserver2':
check_fs_root(params.hive_server_conf_dir, params.execute_path)
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index d0a9d3da9c..77ad09a924 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -574,6 +574,7 @@ if has_metric_collector:
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ metric_legacy_hadoop_sink = check_stack_feature(StackFeature.AMS_LEGACY_HADOOP_SINK, version_for_stack_feature_checks)
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hivemetastore.properties.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hivemetastore.properties.j2
index d4573c3d2b..6602ceea6a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hivemetastore.properties.j2
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hivemetastore.properties.j2
@@ -37,7 +37,11 @@
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+ {% if metric_legacy_hadoop_sink %}
+ *.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+ {% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+ {% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hiveserver2.properties.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hiveserver2.properties.j2
index c67d0023b8..a4c9968d1e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hiveserver2.properties.j2
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-hiveserver2.properties.j2
@@ -37,7 +37,11 @@
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+ {% if metric_legacy_hadoop_sink %}
+ *.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+ {% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+ {% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llapdaemon.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llapdaemon.j2
index cd23e8abe5..0ce7d8ed10 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llapdaemon.j2
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llapdaemon.j2
@@ -37,7 +37,11 @@
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+ {% if metric_legacy_hadoop_sink %}
+ *.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+ {% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+ {% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llaptaskscheduler.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llaptaskscheduler.j2
index 674d3ccaba..023e689f58 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llaptaskscheduler.j2
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/hadoop-metrics2-llaptaskscheduler.j2
@@ -37,7 +37,11 @@
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+ {% if metric_legacy_hadoop_sink %}
+ *.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+ {% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+ {% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
index cc790e9dd2..d68408009b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
@@ -21,7 +21,6 @@ Ambari Agent
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
-from ambari_commons.repo_manager.repo_manager_helper import check_installed_metrics_hadoop_sink_version
from resource_management.core.shell import as_user, as_sudo
from resource_management.libraries.functions.show_logs import show_logs
from resource_management.libraries.functions.format import format
@@ -65,8 +64,6 @@ def service(componentName, action='start', serviceName='yarn'):
cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {hadoop_conf_dir}")
if action == 'start':
- # Check ambari-metrics-hadoop-sink version is less than 2.7.0.0
- check_installed_metrics_hadoop_sink_version()
daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")
check_process = as_sudo(["test", "-f", pid_file]) + " && " + as_sudo(["pgrep", "-F", pid_file])
diff --git a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index e3fa4765c2..0f29e242d4 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@ -162,8 +162,10 @@ if has_metric_collector:
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ metric_legacy_hadoop_sink = check_stack_feature(StackFeature.AMS_LEGACY_HADOOP_SINK, version_for_stack_feature_checks)
pass
+
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
diff --git a/ambari-server/src/main/resources/stack-hooks/before-START/templates/hadoop-metrics2.properties.j2 b/ambari-server/src/main/resources/stack-hooks/before-START/templates/hadoop-metrics2.properties.j2
index 281ac27db9..49be9c4aba 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/templates/hadoop-metrics2.properties.j2
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/templates/hadoop-metrics2.properties.j2
@@ -67,7 +67,11 @@ resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+{% if metric_legacy_hadoop_sink %}
+*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+{% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+{% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 6d622ecfe9..417f9bb994 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -458,6 +458,11 @@
"description": "ExtJS is included in the repository and automatically installed by Ambari",
"min_version": "2.2.0.0",
"max_version": "2.6.0.0"
+ },
+ {
+ "name": "ams_legacy_hadoop_sink",
+ "description": "Legacy AMS hadoop sink should be used",
+ "max_version": "2.6.99.99"
}
]
}
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
index 02be7559b7..4fc8bd06b6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
@@ -78,7 +78,11 @@ resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
{% if has_metric_collector %}
*.period={{metrics_collection_period}}
+{% if metric_legacy_hadoop_sink %}
+*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar
+{% else %}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+{% endif %}
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
@@ -124,6 +128,7 @@ namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
</value>
<value-attributes>
<type>content</type>
+ <empty-value-valid>true</empty-value-valid>
</value-attributes>
<on-ambari-upgrade add="false"/>
</property>
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java
index 68c6b1fd9c..477da85688 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java
@@ -330,6 +330,7 @@ public class UpgradeCatalog270Test {
Method updateSolrConfigurations = UpgradeCatalog270.class.getDeclaredMethod("updateSolrConfigurations");
Method updateAmsConfigurations = UpgradeCatalog270.class.getDeclaredMethod("updateAmsConfigs");
Method updateStormConfigurations = UpgradeCatalog270.class.getDeclaredMethod("updateStormConfigs");
+ Method clearHadoopMetrics2Content = UpgradeCatalog270.class.getDeclaredMethod("clearHadoopMetrics2Content");
UpgradeCatalog270 upgradeCatalog270 = createMockBuilder(UpgradeCatalog270.class)
.addMockedMethod(showHcatDeletedUserMessage)
@@ -346,6 +347,7 @@ public class UpgradeCatalog270Test {
.addMockedMethod(updateSolrConfigurations)
.addMockedMethod(updateAmsConfigurations)
.addMockedMethod(updateStormConfigurations)
+ .addMockedMethod(clearHadoopMetrics2Content)
.createMock();
@@ -389,6 +391,9 @@ public class UpgradeCatalog270Test {
upgradeCatalog270.updateStormConfigs();
expectLastCall().once();
+ upgradeCatalog270.clearHadoopMetrics2Content();
+ expectLastCall().once();
+
replay(upgradeCatalog270);
upgradeCatalog270.executeDMLUpdates();
@@ -1664,4 +1669,57 @@ public class UpgradeCatalog270Test {
assertTrue(Maps.difference(newStormProperties, updatedProperties).areEqual());
}
+
+ @Test
+ public void testClearHadoopMetrics2Content() throws Exception {
+
+ Map<String, String> oldContentProperty = new HashMap<String, String>() {
+ {
+ put("content", "# Licensed to the Apache Software Foundation (ASF) under one or more...");
+ }
+ };
+ Map<String, String> newContentProperty = new HashMap<String, String>() {
+ {
+ put("content", "");
+ }
+ };
+
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+ Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+ final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+ Config mockHadoopMetrics2Properties = easyMockSupport.createNiceMock(Config.class);
+
+ expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", cluster);
+ }}).once();
+ expect(cluster.getDesiredConfigByType("hadoop-metrics2.properties")).andReturn(mockHadoopMetrics2Properties).atLeastOnce();
+ expect(mockHadoopMetrics2Properties.getProperties()).andReturn(oldContentProperty).anyTimes();
+
+ Injector injector = easyMockSupport.createNiceMock(Injector.class);
+
+ replay(injector, clusters, mockHadoopMetrics2Properties, cluster);
+
+ AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+ .addMockedMethod("getClusters", new Class[] { })
+ .addMockedMethod("createConfig")
+ .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+ .createNiceMock();
+
+ Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+ Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+ expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+ expect(controller.getClusters()).andReturn(clusters).anyTimes();
+ expect(controller.createConfig(anyObject(Cluster.class), anyObject(StackId.class), anyString(), capture(propertiesCapture), anyString(),
+ anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+ replay(controller, injector2);
+ new UpgradeCatalog270(injector2).clearHadoopMetrics2Content();
+ easyMockSupport.verifyAll();
+
+ Map<String, String> updatedProperties = propertiesCapture.getValue();
+ assertTrue(Maps.difference(newContentProperty, updatedProperties).areEqual());
+
+ }
}