Apache Spark PKGBUILD, modified from the current AUR package
# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
# Contributor: Christian Krause ("wookietreiber") <[email protected]>

pkgname=apache-spark
pkgver=2.4.2
pkgrel=1
pkgdesc="fast and general engine for large-scale data processing"
arch=('any')
url="http://spark.apache.org"
license=('APACHE')
depends=('java-environment>=6' 'java-environment<9')
optdepends=('python2: python2 support for pyspark'
            'ipython2: ipython2 support for pyspark'
            'python: python3 support for pyspark'
            'ipython: ipython3 support for pyspark'
            'r: support for sparkR'
            'rsync: support rsync hadoop binaries from master'
            'hadoop: support for running on YARN')
install=apache-spark.install
source=("https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=spark/spark-${pkgver}/spark-${pkgver}-bin-without-hadoop.tgz"
        'apache-spark-master.service'
        '[email protected]'
        'spark-env.sh'
        'spark-daemon-run.sh'
        'run-master.sh'
        'run-slave.sh')
sha1sums=('a82249973bbff186b7539a80795f5f6f5179e3c9'
          'ac71d12070a9a10323e8ec5aed4346b1dd7f21c6'
          'a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0'
          '3fa39d55075d4728bd447692d648053c9f6b07ec'
          '08557d2d5328d5c99e533e16366fd893fffaad78'
          '323445b8d64aea0534a2213d2600d438f406855b'
          '65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4')
backup=('etc/apache-spark/spark-env.sh')
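
# Default to an xz-compressed package unless PKGEXT is already set in the environment.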
PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
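
# prepare() only changes into the extracted tarball; the upstream binary
# distribution is packaged as-is, so there is nothing to patch here.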
prepare() {
  cd "$srcdir/spark-${pkgver}-bin-without-hadoop"
}

package() {
  cd "$srcdir/spark-${pkgver}-bin-without-hadoop"

  install -d "$pkgdir/usr/bin" "$pkgdir/opt" "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
  chmod 2775 "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"

  cp -r "$srcdir/spark-${pkgver}-bin-without-hadoop" "$pkgdir/opt/apache-spark/"

  cd "$pkgdir/usr/bin"
  for binary in beeline pyspark sparkR spark-class spark-shell find-spark-home spark-sql spark-submit load-spark-env.sh; do
    binpath="/opt/apache-spark/bin/$binary"
    ln -s "$binpath" "$binary"
    sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "$pkgdir/$binpath"
    sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "$pkgdir/$binpath"
  done
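
  # Export SPARK_HOME for all login shells via /etc/profile.d.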
  mkdir -p "$pkgdir/etc/profile.d"
  echo '#!/bin/sh' > "$pkgdir/etc/profile.d/apache-spark.sh"
  echo 'SPARK_HOME=/opt/apache-spark' >> "$pkgdir/etc/profile.d/apache-spark.sh"
  echo 'export SPARK_HOME' >> "$pkgdir/etc/profile.d/apache-spark.sh"
  chmod 755 "$pkgdir/etc/profile.d/apache-spark.sh"

  install -Dm644 "$srcdir/apache-spark-master.service" "$pkgdir/usr/lib/systemd/system/apache-spark-master.service"
  install -Dm644 "$srcdir/[email protected]" "$pkgdir/usr/lib/systemd/system/[email protected]"
  install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
  for script in run-master.sh run-slave.sh spark-daemon-run.sh; do
    install -Dm755 "$srcdir/$script" "$pkgdir/opt/apache-spark/sbin/$script"
  done

  install -Dm644 "$srcdir/spark-${pkgver}-bin-without-hadoop/conf"/* "$pkgdir/etc/apache-spark"
cd "$pkgdir/opt/apache-spark" | |
mv conf conf-templates | |
ln -sf "/etc/apache-spark" conf | |
ln -sf "/var/lib/apache-spark/work" . | |
} |
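
To build and install from this gist, save the PKGBUILD next to the service and
helper scripts listed in source=() and run makepkg. A minimal sketch, assuming
an Arch system with base-devel (if the mirror's tarball has changed, refresh
sha1sums first with updpkgsums from pacman-contrib):

    makepkg -si                                      # download, verify, build, and install with dependencies
    sudo systemctl enable --now apache-spark-master  # optionally start the standalone master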
Works, thanks! Coming from AUR.