sys-cluster/spark-bin: Remove ${ED} from dosym
author: Alec Ten Harmsel <alec@alectenharmsel.com>
Mon, 11 Nov 2019 21:02:16 +0000 (16:02 -0500)
committer: Joonas Niilola <juippis@gentoo.org>
Wed, 13 Nov 2019 15:04:55 +0000 (17:04 +0200)
Relative links are preferred over using ${ED} in symlink paths

Signed-off-by: Alec Ten Harmsel <alec@alectenharmsel.com>
Closes: https://bugs.gentoo.org/699504
Package-Manager: Portage-2.3.76, Repoman-2.3.16
Signed-off-by: Joonas Niilola <juippis@gentoo.org>
sys-cluster/spark-bin/spark-bin-2.3.1-r1.ebuild [new file with mode: 0644]
sys-cluster/spark-bin/spark-bin-2.4.4-r1.ebuild [new file with mode: 0644]

diff --git a/sys-cluster/spark-bin/spark-bin-2.3.1-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.3.1-r1.ebuild
new file mode 100644 (file)
index 0000000..b459784
--- /dev/null
@@ -0,0 +1,61 @@
+# Copyright 1999-2019 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=6
+
+inherit java-pkg-2
+
+DESCRIPTION="Lightning-fast unified analytics engine"
+HOMEPAGE="https://spark.apache.org"
+SRC_URI="mirror://apache/spark/spark-${PV}/spark-${PV}-bin-hadoop2.7.tgz -> ${P}.tgz"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+RDEPEND="
+       >=virtual/jre-1.8"
+
+DEPEND="
+       >=virtual/jdk-1.8"
+
+S="${WORKDIR}/spark-${PV}-bin-hadoop2.7"
+
+DOCS=( LICENSE NOTICE README.md RELEASE )
+
+# Nothing to compile here.
+src_compile() { :; }
+
+src_install() {
+       dodir usr/lib/spark
+       into usr/lib/spark
+
+       dobin bin/beeline \
+               bin/find-spark-home \
+               bin/pyspark \
+               bin/spark-class \
+               bin/spark-shell \
+               bin/spark-sql \
+               bin/spark-submit
+
+       insinto usr/lib/spark/bin
+       doins bin/load-spark-env.sh
+
+       insinto usr/lib/spark
+       doins -r conf
+       doins -r jars
+       doins -r python
+       doins -r sbin
+       doins -r yarn
+
+       dosym ../lib/spark/bin/beeline /usr/bin/beeline
+       dosym ../lib/spark/bin/find-spark-home /usr/bin/find-spark-home
+       dosym ../lib/spark/bin/pyspark /usr/bin/pyspark
+       dosym ../lib/spark/bin/spark-class /usr/bin/spark-class
+       dosym ../lib/spark/bin/spark-shell /usr/bin/spark-shell
+       dosym ../lib/spark/bin/spark-sql /usr/bin/spark-sql
+       dosym ../lib/spark/bin/spark-submit /usr/bin/spark-submit
+
+       doenvd "${FILESDIR}"/99spark
+       einstalldocs
+}
diff --git a/sys-cluster/spark-bin/spark-bin-2.4.4-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.4.4-r1.ebuild
new file mode 100644 (file)
index 0000000..63bb7e0
--- /dev/null
@@ -0,0 +1,61 @@
+# Copyright 1999-2019 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+inherit java-pkg-2
+
+DESCRIPTION="Lightning-fast unified analytics engine"
+HOMEPAGE="https://spark.apache.org"
+SRC_URI="mirror://apache/spark/spark-${PV}/spark-${PV}-bin-hadoop2.7.tgz -> ${P}.tgz"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+RDEPEND="
+       >=virtual/jre-1.8"
+
+DEPEND="
+       >=virtual/jdk-1.8"
+
+S="${WORKDIR}/spark-${PV}-bin-hadoop2.7"
+
+DOCS=( LICENSE NOTICE README.md RELEASE )
+
+# Nothing to compile here.
+src_compile() { :; }
+
+src_install() {
+       dodir usr/lib/spark
+       into usr/lib/spark
+
+       dobin bin/beeline \
+               bin/find-spark-home \
+               bin/pyspark \
+               bin/spark-class \
+               bin/spark-shell \
+               bin/spark-sql \
+               bin/spark-submit
+
+       insinto usr/lib/spark/bin
+       doins bin/load-spark-env.sh
+
+       insinto usr/lib/spark
+       doins -r conf
+       doins -r jars
+       doins -r python
+       doins -r sbin
+       doins -r yarn
+
+       dosym ../lib/spark/bin/beeline /usr/bin/beeline
+       dosym ../lib/spark/bin/find-spark-home /usr/bin/find-spark-home
+       dosym ../lib/spark/bin/pyspark /usr/bin/pyspark
+       dosym ../lib/spark/bin/spark-class /usr/bin/spark-class
+       dosym ../lib/spark/bin/spark-shell /usr/bin/spark-shell
+       dosym ../lib/spark/bin/spark-sql /usr/bin/spark-sql
+       dosym ../lib/spark/bin/spark-submit /usr/bin/spark-submit
+
+       doenvd "${FILESDIR}"/99spark
+       einstalldocs
+}