localPackages.vasp: fix

陈浩南 2024-03-23 13:27:10 +08:00
parent 6cbe29b4f6
commit c2b816a668
8 changed files with 129 additions and 37 deletions


@@ -52,10 +52,15 @@ let
};
startScript = version: writeScript "vasp-nvidia-${version}"
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}
@@ -63,7 +68,7 @@ let
'';
runEnv = version: buildFHSEnv
{
name = "vasp-amd-${version}";
name = "vasp-amd-${builtins.replaceStrings ["."] [""] version}-env";
targetPkgs = _: [ zlib (vasp version) aocc aocl openmpi gcc.cc.lib hdf5 wannier90 libpsm2 ];
runScript = startScript version;
};
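The practical effect of the new fallback, seen from the caller's side (a hypothetical sketch; the wrapper name assumes version 6.4.0, which replaceStrings turns into "640"):

    OMP_NUM_THREADS=4 vasp-amd-640-env   # an explicit value is now respected (previously SLURM_CPUS_PER_TASK overrode it)
    vasp-amd-640-env                     # in a job step with --cpus-per-task=8: inherits OMP_NUM_THREADS=8 from Slurm
    vasp-amd-640-env                     # outside Slurm with nothing set: now defaults to 1 instead of staying unset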


@@ -35,14 +35,19 @@ let
};
startScript = version: writeShellApplication
{
name = "vasp-gnu-${version}";
runtimeInputs = [ (vasp version) ];
name = "vasp-gnu-${builtins.replaceStrings ["."] [""] version}-env";
runtimeInputs = [(vasp version)];
text =
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}


@@ -34,14 +34,19 @@ let
};
startScript = version: writeShellApplication
{
name = "vasp-gnu-${version}";
runtimeInputs = [ (vasp version) ];
name = "vasp-gnu-${builtins.replaceStrings ["."] [""] version}-env";
runtimeInputs = [(vasp version)];
text =
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}


@@ -1,7 +1,7 @@
{
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll, symlinkJoin,
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll, symlinkJoin, writeTextDir,
config, oneapiArch ? config.oneapiArch or "SSE3", additionalCommands ? "",
oneapi, gcc, glibc, lmod, rsync, which, wannier90, binutils, hdf5
oneapi, gcc, glibc, lmod, rsync, which, wannier90, binutils, hdf5, coreutils, slurm, zlib
}:
let
sources = import ../source.nix { inherit requireFile; };
@@ -9,7 +9,7 @@ let
{
name = "buildEnv";
# make "module load mpi" success
targetPkgs = pkgs: with pkgs; [ zlib (writeTextDir "etc/release" "") gccFull ];
targetPkgs = _: [ zlib (writeTextDir "etc/release" "") gccFull ];
};
buildScript = writeScript "build"
''
@@ -55,21 +55,62 @@ let
module load tbb compiler-rt oclfpga # dependencies
module load mpi mkl compiler
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
# if I_MPI_PIN_PROCESSOR_LIST is not set and SLURM_JOB_ID is not set, set it to allcores
if [ -z "''${I_MPI_PIN_PROCESSOR_LIST-}" ] && [ -z "''${SLURM_JOB_ID-}" ]; then
I_MPI_PIN_PROCESSOR_LIST=allcores
fi
export I_MPI_PIN_PROCESSOR_LIST
# if I_MPI_PMI_LIBRARY is not set and SLURM_JOB_ID is set, set it to libpmi2.so
if [ -z "''${I_MPI_PMI_LIBRARY-}" ] && [ -n "''${SLURM_JOB_ID-}" ]; then
I_MPI_PMI_LIBRARY=${slurm}/lib/libpmi2.so
fi
export I_MPI_PMI_LIBRARY
# set I_MPI_PIN I_MPI_PIN_DOMAIN I_MPI_DEBUG if not set
export I_MPI_PIN=''${I_MPI_PIN-yes}
export I_MPI_PIN_DOMAIN=''${I_MPI_PIN_DOMAIN-omp}
export I_MPI_DEBUG=''${I_MPI_DEBUG-4}
${additionalCommands}
# guess command we want to run
variant=$(${coreutils}/bin/basename $0 | ${coreutils}/bin/cut -d- -f4)
if [ -z "$variant" ]; then
variant=std
fi
if [ "$variant" = "env" ]; then
exec "$@"
else if [ -n "''${SLURM_JOB_ID-}" ]; then
# srun should be in PATH
exec srun --mpi=pmi2 ${vasp version}/bin/vasp-$variant
else
exec mpirun -n 1 ${vasp version}/bin/vasp-$variant
fi
exec "$@"
'';
runEnv = version: buildFHSEnv
runEnv = version: let shortVersion = builtins.replaceStrings ["."] [""] version; in buildFHSEnv
{
name = "vasp-intel-${version}";
targetPkgs = pkgs: with pkgs; [ zlib (vasp version) (writeTextDir "etc/release" "") gccFull ];
name = "vasp-intel-${shortVersion}";
targetPkgs = _: [ zlib (vasp version) (writeTextDir "etc/release" "") gccFull ];
runScript = startScript version;
extraInstallCommands =
"for i in std gam ncl; do ln -s ${vasp version}/bin/vasp-$i $out/bin/vasp-intel-${version}-$i; done";
''
pushd $out/bin
for i in std gam ncl env; do ln -s vasp-intel-${shortVersion} vasp-intel-${shortVersion}-$i; done
popd
'';
};
in builtins.mapAttrs (version: _: runEnv version) sources
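The "guess" block dispatches on the wrapper's own name: field 4 of the "-"-separated basename selects the VASP binary, and the symlinks from extraInstallCommands provide those names. A small sketch of what the cut call yields (names assume version 6.3.1):

    for name in vasp-intel-631 vasp-intel-631-std vasp-intel-631-gam vasp-intel-631-env; do
      printf '%-20s -> variant %s\n' "$name" "'$(echo "$name" | cut -d- -f4)'"
    done
    # vasp-intel-631       -> variant ''     (empty, so the script falls back to std)
    # vasp-intel-631-std   -> variant 'std'
    # vasp-intel-631-gam   -> variant 'gam'
    # vasp-intel-631-env   -> variant 'env'  (exec "$@": run an arbitrary command inside the environment)

Inside a Slurm job (SLURM_JOB_ID set) the non-env variants are launched through srun --mpi=pmi2 with I_MPI_PMI_LIBRARY pointing at Slurm's libpmi2.so, so a batch script can simply call vasp-intel-631-std without its own srun or mpirun line.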


@@ -1,14 +1,14 @@
{
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll,
config, cudaCapabilities ? config.cudaCapabilities, nvhpcArch ? config.nvhpcArch or "px", additionalCommands ? "",
nvhpc, lmod, mkl, gfortran, rsync, which, hdf5, wannier90
nvhpc, lmod, mkl, gfortran, rsync, which, hdf5, wannier90, coreutils, zlib
}:
let
sources = import ../source.nix { inherit requireFile; };
buildEnv = buildFHSEnv
{
name = "buildEnv";
targetPkgs = pkgs: with pkgs; [ zlib ];
targetPkgs = _: [ zlib ];
};
buildScript = writeScript "build"
''
@@ -57,19 +57,43 @@ let
module use ${nvhpc}/share/nvhpc/modulefiles
module load nvhpc
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}
variant=$(${coreutils}/bin/basename $0 | ${coreutils}/bin/cut -d- -f4)
if [ -z "$variant" ]; then
variant=std
fi
if [ "$variant" = "env" ]; then
exec "$@"
else if [ -n "''${SLURM_JOB_ID-}" ]; then
# inside a Slurm allocation, run through mpirun (no explicit task count)
exec mpirun ${vasp version}/bin/vasp-$variant
else
exec mpirun -np 1 ${vasp version}/bin/vasp-$variant
fi
exec "$@"
'';
runEnv = version: buildFHSEnv
runEnv = version: let shortVersion = builtins.replaceStrings ["."] [""] version; in buildFHSEnv
{
name = "vasp-nvidia-${version}";
targetPkgs = pkgs: with pkgs; [ zlib (vasp version) ];
name = "vasp-nvidia-${shortVersion}";
targetPkgs = _: [ zlib (vasp version) ];
runScript = startScript version;
extraInstallCommands =
''
pushd $out/bin
for i in std gam ncl env; do ln -s vasp-nvidia-${shortVersion} vasp-nvidia-${shortVersion}-$i; done
popd
'';
};
in builtins.mapAttrs (version: _: runEnv version) sources
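A hypothetical batch script tying the pieces together for the GPU build (the wrapper name assumes version 6.4.0; the resource numbers are placeholders):

    #!/usr/bin/env bash
    #SBATCH --ntasks=4
    #SBATCH --cpus-per-task=8
    # SLURM_JOB_ID is set, so the wrapper itself execs mpirun inside the allocation,
    # and SLURM_CPUS_PER_TASK=8 becomes OMP_NUM_THREADS unless already exported
    vasp-nvidia-640    # field 4 of the name is empty, so the std binary is run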

modules/packages/vasp.nix (new file)

@@ -0,0 +1,16 @@
inputs:
{
config = inputs.lib.mkIf (builtins.elem "workstation" inputs.config.nixos.packages._packageSets)
{
nixos.packages._packages =
(
(builtins.map
(version: (inputs.pkgs.localPackages.vasp.intel.override
{ slurm = inputs.config.services.slurm.package; }).${version})
[ "6.3.1" "6.4.0" ])
++ (builtins.concatLists (builtins.map
(compiler: builtins.map (version: inputs.pkgs.localPackages.vasp.${compiler}.${version}) [ "6.3.1" "6.4.0" ])
[ "gnu" "gnu-mkl" "nvidia" "amd" ]))
);
};
}


@@ -37,10 +37,7 @@ inputs:
microsoft-edge tor-browser
# news
rssguard newsflash newsboat
]
++ (builtins.concatLists (builtins.map
(compiler: builtins.map (version: localPackages.vasp.${compiler}.${version}) [ "6.3.1" "6.4.0" ])
[ "gnu" "nvidia" "intel" "amd" ]));
];
_pythonPackages = [(pythonPackages: with pythonPackages;
[
phonopy tensorflow keras scipy scikit-learn jupyterlab autograd # localPackages.pix2tex


@@ -67,7 +67,6 @@ inputs:
''
echo export CUDA_DEVICE_ORDER=PCI_BUS_ID
echo export SLURM_THREADS_PER_CPU=${builtins.toString slurm.cpu.threads}
echo export I_MPI_PMI_LIBRARY=${inputs.config.services.slurm.package}/lib/libpmi2.so
'';
in
''