Merge pull request #256737 from illustris/spark
spark: init 3.5.0, 3.4.0->3.4.2, 3.3.2->3.3.3
commit d41449afb3
4 changed files with 100 additions and 89 deletions
@@ -69,8 +69,8 @@ with lib;
     confDir = mkOption {
       type = types.path;
       description = lib.mdDoc "Spark configuration directory. Spark will use the configuration files (spark-defaults.conf, spark-env.sh, log4j.properties, etc) from this directory.";
-      default = "${cfg.package}/lib/${cfg.package.untarDir}/conf";
-      defaultText = literalExpression ''"''${package}/lib/''${package.untarDir}/conf"'';
+      default = "${cfg.package}/conf";
+      defaultText = literalExpression ''"''${package}/conf"'';
     };
     logDir = mkOption {
       type = types.path;
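Note for module users: confDir now defaults to the package's top-level conf/ directory instead of the old lib/<untarDir> layout. A minimal sketch (not part of this diff) of pointing Spark at a custom configuration directory; the /etc/spark-conf path is only illustrative:

  { ... }:
  {
    services.spark.master.enable = true;
    # Replace the packaged conf/ directory with a custom one.
    services.spark.confDir = "/etc/spark-conf";
  }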
@@ -111,9 +111,9 @@ with lib;
           Type = "forking";
           User = "spark";
           Group = "spark";
-          WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
-          ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-master.sh";
-          ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-master.sh";
+          WorkingDirectory = "${cfg.package}/";
+          ExecStart = "${cfg.package}/sbin/start-master.sh";
+          ExecStop = "${cfg.package}/sbin/stop-master.sh";
           TimeoutSec = 300;
           StartLimitBurst=10;
           Restart = "always";
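The master unit now runs straight from the package root. Since these are ordinary generated systemd units, deployment-specific tweaks still layer on top; a sketch (an assumed override, not part of this PR) of raising the start timeout:

  { lib, ... }:
  {
    services.spark.master = {
      enable = true;
      bind = "0.0.0.0";
    };
    # The module sets TimeoutSec = 300; force a larger value if
    # start-master.sh needs longer on slow storage.
    systemd.services.spark-master.serviceConfig.TimeoutSec = lib.mkForce 600;
  }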
@@ -134,9 +134,9 @@ with lib;
         serviceConfig = {
           Type = "forking";
           User = "spark";
-          WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
-          ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-worker.sh spark://${cfg.worker.master}";
-          ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-worker.sh";
+          WorkingDirectory = "${cfg.package}/";
+          ExecStart = "${cfg.package}/sbin/start-worker.sh spark://${cfg.worker.master}";
+          ExecStop = "${cfg.package}/sbin/stop-worker.sh";
           TimeoutSec = 300;
           StartLimitBurst=10;
           Restart = "always";
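The worker unit gets the same path simplification. For reference, a single-machine configuration combining both units, mirroring what the test below exercises (a sketch, with the address and port chosen for illustration):

  { pkgs, ... }:
  {
    services.spark = {
      package = pkgs.spark;            # after this PR, the 3.5 line
      master = {
        enable = true;
        bind = "127.0.0.1";
      };
      worker = {
        enable = true;
        master = "127.0.0.1:7077";     # host:port of the master above
      };
    };
  }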
@@ -1,28 +1,48 @@
-import ../make-test-python.nix ({...}: {
-  name = "spark";
+{ pkgs, ... }:
 
-  nodes = {
-    worker = { nodes, pkgs, ... }: {
-      services.spark.worker = {
-        enable = true;
-        master = "master:7077";
-      };
-      virtualisation.memorySize = 2048;
-    };
-    master = { config, pkgs, ... }: {
-      services.spark.master = {
-        enable = true;
-        bind = "0.0.0.0";
-      };
-      networking.firewall.allowedTCPPorts = [ 22 7077 8080 ];
-    };
+let
+  inherit (pkgs) lib;
+  tests = {
+    default = testsForPackage { sparkPackage = pkgs.spark; };
   };
 
-  testScript = ''
-    master.wait_for_unit("spark-master.service")
-    worker.wait_for_unit("spark-worker.service")
-    worker.copy_from_host( "${./spark_sample.py}", "/spark_sample.py" )
-    assert "<title>Spark Master at spark://" in worker.succeed("curl -sSfkL http://master:8080/")
-    worker.succeed("spark-submit --master spark://master:7077 --executor-memory 512m --executor-cores 1 /spark_sample.py")
-  '';
-})
+  testsForPackage = args: lib.recurseIntoAttrs {
+    sparkCluster = testSparkCluster args;
+    passthru.override = args': testsForPackage (args // args');
+  };
+  testSparkCluster = { sparkPackage, ... }: pkgs.nixosTest ({
+    name = "spark";
+
+    nodes = {
+      worker = { nodes, pkgs, ... }: {
+        services.spark = {
+          package = sparkPackage;
+          worker = {
+            enable = true;
+            master = "master:7077";
+          };
+        };
+        virtualisation.memorySize = 2048;
+      };
+      master = { config, pkgs, ... }: {
+        services.spark = {
+          package = sparkPackage;
+          master = {
+            enable = true;
+            bind = "0.0.0.0";
+          };
+        };
+        networking.firewall.allowedTCPPorts = [ 22 7077 8080 ];
+      };
+    };
+
+    testScript = ''
+      master.wait_for_unit("spark-master.service")
+      worker.wait_for_unit("spark-worker.service")
+      worker.copy_from_host( "${./spark_sample.py}", "/spark_sample.py" )
+      assert "<title>Spark Master at spark://" in worker.succeed("curl -sSfkL http://master:8080/")
+      worker.succeed("spark-submit --version | systemd-cat")
+      worker.succeed("spark-submit --master spark://master:7077 --executor-memory 512m --executor-cores 1 /spark_sample.py")
+    '';
+  });
+in tests
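The test file now evaluates to an overridable attribute set, which is what the package's passthru.tests (below) plugs into. A sketch of re-running the cluster test against a different Spark release, assuming a nixpkgs checkout:

  { pkgs ? import <nixpkgs> { } }:
  # If saved as spark-tests.nix: nix-build spark-tests.nix -A sparkCluster
  # (assumed invocation). The override re-instantiates the VM test with the
  # given package.
  pkgs.nixosTests.spark.default.passthru.override {
    sparkPackage = pkgs.spark_3_4;
  }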
@@ -3,63 +3,55 @@
 , fetchzip
 , makeWrapper
 , jdk8
-, python3Packages
-, extraPythonPackages ? [ ]
+, python3
+, python310
 , coreutils
-, hadoopSupport ? true
 , hadoop
 , RSupport ? true
 , R
 , nixosTests
 }:
 
 let
-  spark = { pname, version, hash, extraMeta ? {} }:
-    stdenv.mkDerivation rec {
-      inherit pname version;
-      jdk = if hadoopSupport then hadoop.jdk else jdk8;
+  spark = { pname, version, hash, extraMeta ? {}, pysparkPython ? python3 }:
+    stdenv.mkDerivation (finalAttrs: {
+      inherit pname version hash hadoop R pysparkPython;
+      inherit (finalAttrs.hadoop) jdk;
       src = fetchzip {
-        url = "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
-        inherit hash;
+        url = with finalAttrs; "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
+        inherit (finalAttrs) hash;
       };
       nativeBuildInputs = [ makeWrapper ];
-      buildInputs = [ jdk python3Packages.python ]
-        ++ extraPythonPackages
-        ++ lib.optional RSupport R;
+      buildInputs = with finalAttrs; [ jdk pysparkPython ]
+        ++ lib.optional RSupport finalAttrs.R;
 
-      untarDir = "${pname}-${version}";
       installPhase = ''
-        mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java}
-        mv * $out/lib/${untarDir}
-
-        cp $out/lib/${untarDir}/conf/log4j.properties{.template,} || \
-          cp $out/lib/${untarDir}/conf/log4j2.properties{.template,}
-
-        cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
-        export JAVA_HOME="${jdk}"
-        export SPARK_HOME="$out/lib/${untarDir}"
-      '' + lib.optionalString hadoopSupport ''
-        export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath)
-      '' + ''
-        export PYSPARK_PYTHON="${python3Packages.python}/bin/${python3Packages.python.executable}"
-        export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
-        ${lib.optionalString RSupport ''
-          export SPARKR_R_SHELL="${R}/bin/R"
-          export PATH="\$PATH:${R}/bin"''}
-        EOF
-
-        for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
-          makeWrapper "$n" "$out/bin/$(basename $n)"
-          substituteInPlace "$n" --replace dirname ${coreutils.out}/bin/dirname
+        mkdir -p "$out/opt"
+        mv * $out/
+        for n in $(find $out/bin -type f -executable ! -name "find-spark-home"); do
+          wrapProgram "$n" --set JAVA_HOME "${finalAttrs.jdk}" \
+            --run "[ -z $SPARK_DIST_CLASSPATH ] && export SPARK_DIST_CLASSPATH=$(${finalAttrs.hadoop}/bin/hadoop classpath)" \
+            ${lib.optionalString RSupport ''--set SPARKR_R_SHELL "${finalAttrs.R}/bin/R"''} \
+            --prefix PATH : "${
+              lib.makeBinPath (
+                [ finalAttrs.pysparkPython ] ++
+                (lib.optionals RSupport [ finalAttrs.R ])
+              )}"
         done
-        for n in $(find $out/lib/${untarDir}/sbin -type f); do
-          # Spark deprecated scripts with "slave" in the name.
-          # This line adds forward compatibility with the nixos spark module for
-          # older versions of spark that don't have the new "worker" scripts.
-          ln -s "$n" $(echo "$n" | sed -r 's/slave(s?).sh$/worker\1.sh/g') || true
-        done
-        ln -s $out/lib/${untarDir}/lib/spark-assembly-*.jar $out/share/java
+        ln -s ${finalAttrs.hadoop} "$out/opt/hadoop"
+        ${lib.optionalString RSupport ''ln -s ${finalAttrs.R} "$out/opt/R"''}
       '';
+
+      passthru = {
+        tests = nixosTests.spark.default.passthru.override {
+          sparkPackage = finalAttrs.finalPackage;
+        };
+        # Add python packages to PYSPARK_PYTHON
+        withPythonPackages = f: finalAttrs.finalPackage.overrideAttrs (old: {
+          pysparkPython = old.pysparkPython.withPackages f;
+        });
+      };
+
       meta = {
         description = "Apache Spark is a fast and general engine for large-scale data processing";
         homepage = "https://spark.apache.org/";
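The new passthru.withPythonPackages helper rebuilds the wrappers with a Python environment so extra libraries end up in PYSPARK_PYTHON and on PATH. A usage sketch (numpy and pandas are only illustrative picks):

  { pkgs ? import <nixpkgs> { } }:
  # Produces a Spark package whose spark-submit/pyspark wrappers use a
  # python3 environment that includes numpy and pandas.
  pkgs.spark.withPythonPackages (ps: [ ps.numpy ps.pandas ])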
@@ -68,24 +60,23 @@ let
         platforms = lib.platforms.all;
         maintainers = with lib.maintainers; [ thoughtpolice offline kamilchm illustris ];
       } // extraMeta;
-    };
+    });
 in
 {
+  spark_3_5 = spark rec {
+    pname = "spark";
+    version = "3.5.0";
+    hash = "sha256-f+a4a23aOM0GCDoZlZ7WNXs0Olzyh3yMtO8ZmEoYvZ4=";
+  };
   spark_3_4 = spark rec {
     pname = "spark";
-    version = "3.4.0";
-    hash = "sha256-0y80dRYzb6Ceu6MlGQHtpMdzOob/TBg6kf8dtF6KyCk=";
+    version = "3.4.2";
+    hash = "sha256-qr0tRuzzEcarJznrQYkaQzGqI7tugp/XJpoZxL7tJwk=";
   };
   spark_3_3 = spark rec {
     pname = "spark";
-    version = "3.3.2";
-    hash = "sha256-AeKe2QN+mhUJgZRSIgbi/DttAWlDgwC1kl9p7syEvbo=";
-    extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
-  };
-  spark_3_2 = spark rec {
-    pname = "spark";
-    version = "3.2.4";
-    hash = "sha256-xL4W+dTWbvmmncq3/8iXmhp24rp5SftvoRfkTyxCI8E=";
-    extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
+    version = "3.3.3";
+    hash = "sha256-YtHxRYTwrwSle3UpFjRSwKcnLFj2m9/zLBENH/HVzuM=";
+    pysparkPython = python310;
   };
 }
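Because the derivation now uses the finalAttrs pattern, pysparkPython behaves like any other attribute and can be replaced wholesale with overrideAttrs, which is how withPythonPackages works internally. A sketch, with python311 picked only for illustration:

  { pkgs ? import <nixpkgs> { } }:
  pkgs.spark.overrideAttrs (old: {
    # Swap the interpreter used for PYSPARK_PYTHON and the wrapper PATH.
    pysparkPython = pkgs.python311;
  })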
@@ -18035,8 +18035,8 @@ with pkgs;
   smiley-sans = callPackage ../data/fonts/smiley-sans { };
 
   inherit (callPackages ../applications/networking/cluster/spark { })
-    spark_3_4 spark_3_3 spark_3_2;
-  spark3 = spark_3_4;
+    spark_3_5 spark_3_4 spark_3_3;
+  spark3 = spark_3_5;
   spark = spark3;
 
   sparkleshare = callPackage ../applications/version-management/sparkleshare { };
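With spark3 (and therefore spark) now following the 3.5 line and spark_3_2 removed, configurations that must stay on an older release should pin the module's package explicitly; a sketch:

  { pkgs, ... }:
  {
    services.spark = {
      # pkgs.spark == pkgs.spark3 == pkgs.spark_3_5 after this change.
      package = pkgs.spark_3_4;
      worker = {
        enable = true;
        master = "master:7077";
      };
    };
  }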