diff options
author | Youngwoo Kim <ywkim@apache.org> | 2017-06-12 13:21:29 +0900 |
---|---|---|
committer | Youngwoo Kim <ywkim@apache.org> | 2017-06-17 16:02:06 +0900 |
commit | 5b8bba96a20d026194a042f46489041d5525d17d (patch) | |
tree | 9fa03f26e89a44fd5171c77b0be73003788e7b8d | |
parent | c6b60a81e4cbb5adf2a6659553dc76ec91feed4e (diff) |
BIGTOP-2807: Upgrade Spark to 2.1.1
-rw-r--r-- | bigtop-packages/src/common/spark/do-component-build | 2 | ||||
-rw-r--r-- | bigtop-packages/src/deb/spark/control | 2 | ||||
-rw-r--r-- | bigtop-packages/src/deb/spark/rules | 21 | ||||
-rw-r--r-- | bigtop-packages/src/rpm/spark/SPECS/spark.spec | 21 | ||||
-rw-r--r-- | bigtop.bom | 2 |
5 files changed, 44 insertions, 4 deletions
diff --git a/bigtop-packages/src/common/spark/do-component-build b/bigtop-packages/src/common/spark/do-component-build
index 21dffbe0..3b79b0d3 100644
--- a/bigtop-packages/src/common/spark/do-component-build
+++ b/bigtop-packages/src/common/spark/do-component-build
@@ -28,7 +28,7 @@ BUILD_OPTS="-Divy.home=${HOME}/.ivy2 -Dsbt.ivy.home=${HOME}/.ivy2 -Duser.home=${
     -Dyarn.version=$HADOOP_VERSION \
     -Dprotobuf.version=2.5.0 \
     -DrecompileMode=all \
-    -Pyarn -Phadoop-2.6 \
+    -Pyarn -Phadoop-2.7 \
     -Phive -Phive-thriftserver \
     $SPARK_BUILD_OPTS"
diff --git a/bigtop-packages/src/deb/spark/control b/bigtop-packages/src/deb/spark/control
index 4b7f2ffe..01a393de 100644
--- a/bigtop-packages/src/deb/spark/control
+++ b/bigtop-packages/src/deb/spark/control
@@ -23,7 +23,7 @@ Homepage: http://spark.apache.org/
 Package: spark-core
 Architecture: all
-Depends: adduser, bigtop-utils (>= 0.7), hadoop-client
+Depends: adduser, bigtop-utils (>= 0.7), hadoop-client, hadoop-yarn
 Description: Lightning-Fast Cluster Computing
  Spark is a MapReduce-like cluster computing framework designed to support
  low-latency iterative jobs and interactive use from an interpreter. It is
diff --git a/bigtop-packages/src/deb/spark/rules b/bigtop-packages/src/deb/spark/rules
index 0bc7fc7f..d5fdb6d9 100644
--- a/bigtop-packages/src/deb/spark/rules
+++ b/bigtop-packages/src/deb/spark/rules
@@ -22,6 +22,10 @@ export DH_VERBOSE=1
 # This has to be exported to make some magic below work.
 export DH_OPTIONS
 
+lib_spark=/usr/lib/spark
+lib_hadoop_client=/usr/lib/hadoop/client
+lib_hadoop_yarn=/usr/lib/hadoop-yarn/
+
 %:
 	dh $@
@@ -45,3 +49,20 @@ override_dh_install:
 	dh_install
 	# BIGTOP-2588 drop datanucleus jars from spark-core
 	rm -Rf debian/spark-core/usr/lib/spark/jars/datanucleus*
+
+	rm -f debian/tmp/${lib_spark}/jars/hadoop-*.jar
+	ln -s ${lib_hadoop_client}/hadoop-annotations.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-auth.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-client.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-common.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-hdfs.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-mapreduce-client-app.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-mapreduce-client-common.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-mapreduce-client-core.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-mapreduce-client-jobclient.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_client}/hadoop-mapreduce-client-shuffle.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_yarn}/hadoop-yarn-api.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_yarn}/hadoop-yarn-client.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_yarn}/hadoop-yarn-common.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_yarn}/hadoop-yarn-server-common.jar debian/tmp/${lib_spark}/jars/
+	ln -s ${lib_hadoop_yarn}/hadoop-yarn-server-web-proxy.jar debian/tmp/${lib_spark}/jars/
diff --git a/bigtop-packages/src/rpm/spark/SPECS/spark.spec b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
index d2b07a68..24961095 100644
--- a/bigtop-packages/src/rpm/spark/SPECS/spark.spec
+++ b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
@@ -24,6 +24,8 @@
 %define bin /usr/bin
 %define man_dir /usr/share/man
 %define spark_services master worker history-server thriftserver
+%define lib_hadoop_client /usr/lib/hadoop/client
+%define lib_hadoop_yarn /usr/lib/hadoop-yarn/
 
 %if %{?suse_version:1}0
 %define doc_spark %{_docdir}/spark
@@ -54,7 +56,7 @@
 Source6: init.d.tmpl
 Source7: spark-history-server.svc
 Source8: spark-thriftserver.svc
 Source9: bigtop.bom
-Requires: bigtop-utils >= 0.7, hadoop-client
+Requires: bigtop-utils >= 0.7, hadoop-client, hadoop-yarn
 Requires(preun): /sbin/service
 
 %global initd_dir %{_sysconfdir}/init.d
@@ -157,6 +159,23 @@
 bash $RPM_SOURCE_DIR/install_spark.sh \
   --prefix=$RPM_BUILD_ROOT \
   --doc-dir=%{doc_spark}
 
+%__rm -f $RPM_BUILD_ROOT/%{lib_spark}/jars/hadoop-*.jar
+%__ln_s %{lib_hadoop_client}/hadoop-annotations.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-auth.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-client.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-common.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-hdfs.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-mapreduce-client-app.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-mapreduce-client-common.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-mapreduce-client-core.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-mapreduce-client-jobclient.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_client}/hadoop-mapreduce-client-shuffle.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_yarn}/hadoop-yarn-api.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_yarn}/hadoop-yarn-client.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_yarn}/hadoop-yarn-common.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_yarn}/hadoop-yarn-server-common.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+%__ln_s %{lib_hadoop_yarn}/hadoop-yarn-server-web-proxy.jar $RPM_BUILD_ROOT/%{lib_spark}/jars/
+
 for service in %{spark_services}
 do
   # Install init script
diff --git a/bigtop.bom b/bigtop.bom
@@ -304,7 +304,7 @@ bigtop {
     name     = 'spark'
     pkg      = 'spark-core'
     relNotes = 'Apache Spark'
-    version { base = '2.1.0'; pkg = base; release = 1 }
+    version { base = '2.1.1'; pkg = base; release = 1 }
    tarball { destination = "$name-${version.base}.tar.gz"
               source      = "$name-${version.base}.tgz" }
    url     { download_path = "/$name/$name-${version.base}"