{ stdenv, fetchurl, jre, bash, simpleBuildTool, python27Packages }:

stdenv.mkDerivation rec {
  name    = "spark-${version}";
  version = "0.9.1";

  src = fetchurl {
    url    = "http://d3kbcqa49mib13.cloudfront.net/${name}-bin-cdh4.tgz";
    sha256 = "1k3954srx3km3ckmfi6wn8rldrljxc039g0pf5m3azgkmaz0gld5";
  };

  unpackPhase = ''tar zxf $src'';
  untarDir = "${name}-bin-cdh4";

  installPhase = ''
    set -x
    mkdir -p $out/lib $out/bin
    mv ${untarDir} $out/lib

    # Generic launcher: sets up the JVM environment and runs the given Spark class.
    cat > $out/bin/spark-class <<EOF
    #!${bash}/bin/bash
    export JAVA_HOME=${jre}
    export SPARK_HOME=$out/lib/${untarDir}
    if [ -z "\$1" ]; then
      echo "Usage: spark-class <class> [<args>]" >&2
      exit 1
    fi
    export SPARK_MEM=\''${SPARK_MEM:-1024m}
    JAVA_OPTS=""
    JAVA_OPTS="\$JAVA_OPTS -Djava.library.path=\"\$SPARK_LIBRARY_PATH\""
    JAVA_OPTS="\$JAVA_OPTS -Xms\$SPARK_MEM -Xmx\$SPARK_MEM"
    export JAVA_OPTS
    CLASSPATH=\`$out/lib/${untarDir}/bin/compute-classpath.sh\`
    export CLASSPATH
    exec ${jre}/bin/java -cp "\$CLASSPATH" \$JAVA_OPTS "\$@"
    EOF
    chmod +x $out/bin/spark-class

    # Interactive Scala REPL; saves and restores the terminal settings around the session.
    cat > $out/bin/spark-shell <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    function restore_stty() {
      if [[ -n \$saved_stty ]]; then
        stty \$saved_stty
        saved_stty=""
      fi
    }
    function onExit() {
      restore_stty
      exit \$exit_status
    }
    trap onExit INT
    saved_stty=\$(stty -g 2>/dev/null)
    if [[ ! \$? ]]; then
      saved_stty=""
    fi
    # Extra REPL options may be supplied through OPTIONS in the environment.
    $out/bin/spark-class \$OPTIONS org.apache.spark.repl.Main "\$@"
    exit_status=\$?
    onExit
    EOF
    chmod +x $out/bin/spark-shell

    # Interactive Python shell on top of the bundled PySpark API.
    cat > $out/bin/pyspark <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    export PYTHONPATH=$out/lib/${untarDir}/python:\$PYTHONPATH
    export OLD_PYTHONSTARTUP=\$PYTHONSTARTUP
    export PYTHONSTARTUP=$out/lib/${untarDir}/python/pyspark/shell.py
    exec ${python27Packages.python}/bin/python "\$@"
    EOF
    chmod +x $out/bin/pyspark

    cat > $out/bin/spark-upload-scala <<EOF
    #!${bash}/bin/bash
    export JAVA_HOME=${jre}
    export SPARK_HOME=$out/lib/${untarDir}
    EOF
    chmod +x $out/bin/spark-upload-scala

    cat > $out/bin/spark-upload-python <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    EOF
    chmod +x $out/bin/spark-upload-python
  '';
}
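# Usage sketch, assuming the conventional nixpkgs wiring: this expression imported
# with callPackage and exposed as a top-level `spark` attribute (both of these are
# assumptions about the surrounding tree, not something this file defines):
#
#   nix-build -A spark
#   ./result/bin/spark-shell     # Scala REPL
#   ./result/bin/pyspark         # Python 2.7 shell via python27Packages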