Compare commits

...

6 Commits

Author SHA1 Message Date
chn
52459243ed devices.vps6: fix 2024-03-24 13:59:41 +08:00
chn
398f4de618 services.acme: fix 2024-03-24 13:55:47 +08:00
chn
17051ccd12 devices.pc: fix 2024-03-24 13:55:06 +08:00
chn
4c037193cd localPackages.vasp: fix 2024-03-24 13:53:39 +08:00
chn
2331cdc8d3 localPackages.vasp: fix 2024-03-24 13:53:39 +08:00
chn
234f9116f5 localPackages.vasp: fix 2024-03-24 13:53:39 +08:00
11 changed files with 128 additions and 56 deletions

View File

@@ -98,7 +98,7 @@ inputs:
(name: { inherit name; value = "74.211.99.69"; })
[ "mirism.one" "beta.mirism.one" "ng01.mirism.one" "initrd.vps6.chn.moe" ]);
firewall.trustedInterfaces = [ "virbr0" "waydroid0" ];
acme = { enable = true; cert."debug.mirism.one" = {}; };
acme.cert."debug.mirism.one" = {};
frpClient =
{
enable = true;

View File

@@ -65,7 +65,7 @@ inputs:
};
};
coturn = {};
httpua.enable = true;
httpua = {};
mirism.enable = true;
fail2ban.enable = true;
wireguard =

View File

@@ -52,10 +52,15 @@ let
};
startScript = version: writeScript "vasp-nvidia-${version}"
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}
@@ -63,7 +68,7 @@ let
'';
runEnv = version: buildFHSEnv
{
name = "vasp-amd-${version}";
name = "vasp-amd-${builtins.replaceStrings ["."] [""] version}-env";
targetPkgs = _: [ zlib (vasp version) aocc aocl openmpi gcc.cc.lib hdf5 wannier90 libpsm2 ];
runScript = startScript version;
};

View File

@@ -35,14 +35,19 @@ let
};
startScript = version: writeShellApplication
{
name = "vasp-gnu-${version}";
runtimeInputs = [ (vasp version) ];
name = "vasp-gnu-${builtins.replaceStrings ["."] [""] version}-env";
runtimeInputs = [(vasp version)];
text =
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}

View File

@@ -34,14 +34,19 @@ let
};
startScript = version: writeShellApplication
{
name = "vasp-gnu-${version}";
runtimeInputs = [ (vasp version) ];
name = "vasp-gnu-${builtins.replaceStrings ["."] [""] version}-env";
runtimeInputs = [(vasp version)];
text =
''
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}

View File

@@ -1,7 +1,7 @@
{
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll, symlinkJoin,
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll, symlinkJoin, writeTextDir,
config, oneapiArch ? config.oneapiArch or "SSE3", additionalCommands ? "",
oneapi, gcc, glibc, lmod, rsync, which, wannier90, binutils, hdf5
oneapi, gcc, glibc, lmod, rsync, which, wannier90, binutils, hdf5, zlib
}:
let
sources = import ../source.nix { inherit requireFile; };
@@ -9,7 +9,7 @@ let
{
name = "buildEnv";
# make "module load mpi" success
targetPkgs = pkgs: with pkgs; [ zlib (writeTextDir "etc/release" "") gccFull ];
targetPkgs = _: [ zlib (writeTextDir "etc/release" "") gccFull ];
};
buildScript = writeScript "build"
''
@@ -48,28 +48,64 @@ let
dontFixup = true;
requiredSystemFeatures = [ "gccarch-exact-${stdenvNoCC.hostPlatform.gcc.arch}" "big-parallel" ];
};
startScript = version: writeScript "vasp-intel-${version}"
startScript = { version, variant }: writeScript "vasp-intel-${version}"
''
. ${lmod}/share/lmod/lmod/init/bash
module use ${oneapi}/share/intel/modulefiles
module load tbb compiler-rt oclfpga # dependencies
module load mpi mkl compiler
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
# if I_MPI_PIN_PROCESSOR_LIST is not set, set it to allcores
if [ -z "''${I_MPI_PIN_PROCESSOR_LIST-}" ]; then
I_MPI_PIN_PROCESSOR_LIST=allcores
fi
export I_MPI_PIN_PROCESSOR_LIST
# set I_MPI_PIN I_MPI_PIN_DOMAIN I_MPI_DEBUG if not set
export I_MPI_PIN=''${I_MPI_PIN-yes}
export I_MPI_PIN_DOMAIN=''${I_MPI_PIN_DOMAIN-omp}
export I_MPI_DEBUG=''${I_MPI_DEBUG-4}
# do not respect slurm allocation
export I_MPI_JOB_RESPECT_PROCESS_PLACEMENT=no
# fork to bootstrap, do not use srun, causing it could not find proper ld
I_MPI_HYDRA_BOOTSTRAP=fork
${additionalCommands}
exec "$@"
${
if variant == "env" then ''exec "$@"''
else
''
if [ -n "''${SLURM_JOB_ID-}" ]; then
exec mpirun -n $SLURM_NTASKS ${vasp version}/bin/vasp-${variant}
else
exec mpirun -n 1 ${vasp version}/bin/vasp-${variant}
fi
''
}
'';
runEnv = version: buildFHSEnv
runEnv = { version, variant }: let shortVersion = builtins.replaceStrings ["."] [""] version; in buildFHSEnv
{
name = "vasp-intel-${shortVersion}${if variant == "" then "" else "-${variant}"}";
targetPkgs = _: [ zlib (vasp version) (writeTextDir "etc/release" "") gccFull ];
runScript = startScript { inherit version; variant = if variant == "" then "std" else variant; };
};
in builtins.mapAttrs
(version: _: symlinkJoin
{
name = "vasp-intel-${version}";
targetPkgs = pkgs: with pkgs; [ zlib (vasp version) (writeTextDir "etc/release" "") gccFull ];
runScript = startScript version;
extraInstallCommands =
"for i in std gam ncl; do ln -s ${vasp version}/bin/vasp-$i $out/bin/vasp-intel-${version}-$i; done";
};
in builtins.mapAttrs (version: _: runEnv version) sources
paths = builtins.map (variant: runEnv { inherit version variant; }) [ "" "env" "std" "gam" "ncl" ];
})
sources

View File

@@ -1,14 +1,14 @@
{
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll,
buildFHSEnv, writeScript, stdenvNoCC, requireFile, substituteAll, symlinkJoin,
config, cudaCapabilities ? config.cudaCapabilities, nvhpcArch ? config.nvhpcArch or "px", additionalCommands ? "",
nvhpc, lmod, mkl, gfortran, rsync, which, hdf5, wannier90
nvhpc, lmod, mkl, gfortran, rsync, which, hdf5, wannier90, zlib
}:
let
sources = import ../source.nix { inherit requireFile; };
buildEnv = buildFHSEnv
{
name = "buildEnv";
targetPkgs = pkgs: with pkgs; [ zlib ];
targetPkgs = _: [ zlib ];
};
buildScript = writeScript "build"
''
@@ -51,25 +51,47 @@ let
dontFixup = true;
requiredSystemFeatures = [ "gccarch-exact-${stdenvNoCC.hostPlatform.gcc.arch}" "big-parallel" ];
};
startScript = version: writeScript "vasp-nvidia-${version}"
startScript = { version, variant }: writeScript "vasp-nvidia-${version}"
''
. ${lmod}/share/lmod/lmod/init/bash
module use ${nvhpc}/share/nvhpc/modulefiles
module load nvhpc
# if SLURM_CPUS_PER_TASK is set, use it to set OMP_NUM_THREADS
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
# if OMP_NUM_THREADS is not set, set it according to SLURM_CPUS_PER_TASK or to 1
if [ -z "''${OMP_NUM_THREADS-}" ]; then
if [ -n "''${SLURM_CPUS_PER_TASK-}" ]; then
OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK
else
OMP_NUM_THREADS=1
fi
fi
export OMP_NUM_THREADS
${additionalCommands}
exec "$@"
${
if variant == "env" then ''exec "$@"''
else
''
if [ -n "''${SLURM_JOB_ID-}" ]; then
# srun should be in PATH
exec mpirun ${vasp version}/bin/vasp-${variant}
else
exec mpirun -np 1 ${vasp version}/bin/vasp-${variant}
fi
''
}
'';
runEnv = version: buildFHSEnv
runEnv = { version, variant }: let shortVersion = builtins.replaceStrings ["."] [""] version; in buildFHSEnv
{
name = "vasp-nvidia-${shortVersion}${if variant == "" then "" else "-${variant}"}";
targetPkgs = _: [ zlib (vasp version) ];
runScript = startScript { inherit version; variant = if variant == "" then "std" else variant; };
};
in builtins.mapAttrs
(version: _: symlinkJoin
{
name = "vasp-nvidia-${version}";
targetPkgs = pkgs: with pkgs; [ zlib (vasp version) ];
runScript = startScript version;
};
in builtins.mapAttrs (version: _: runEnv version) sources
paths = builtins.map (variant: runEnv { inherit version variant; }) [ "" "env" "std" "gam" "ncl" ];
})
sources

View File

@@ -0,0 +1,9 @@
inputs:
{
config = inputs.lib.mkIf (builtins.elem "workstation" inputs.config.nixos.packages._packageSets)
{
nixos.packages._packages = builtins.concatLists (builtins.map
(compiler: builtins.map (version: inputs.pkgs.localPackages.vasp.${compiler}.${version}) [ "6.3.1" "6.4.0" ])
[ "amd" "gnu" "gnu-mkl" "intel" "nvidia" ]);
};
}

View File

@@ -37,10 +37,7 @@ inputs:
microsoft-edge tor-browser
# news
rssguard newsflash newsboat
]
++ (builtins.concatLists (builtins.map
(compiler: builtins.map (version: localPackages.vasp.${compiler}.${version}) [ "6.3.1" "6.4.0" ])
[ "gnu" "nvidia" "intel" "amd" ]));
];
_pythonPackages = [(pythonPackages: with pythonPackages;
[
phonopy tensorflow keras scipy scikit-learn jupyterlab autograd # localPackages.pix2tex

View File

@@ -36,7 +36,7 @@ inputs:
group = inputs.lib.mkIf (cert.value.group != null) cert.value.group;
};
})
(inputs.lib.localLib.attrsToList acme.cert));
(inputs.localLib.attrsToList acme.cert));
};
sops.secrets."acme/cloudflare.ini" = {};
};

View File

@@ -25,12 +25,6 @@ inputs:
buildInputs = prev.buildInputs ++ [ cuda_nvml_dev ];
LDFLAGS = [ "-L${cuda_nvml_dev}/lib/stubs" ];
nativeBuildInputs = prev.nativeBuildInputs ++ [ inputs.pkgs.wrapGAppsHook ];
postInstall =
''
pushd contribs/pmi2
make install
popd
'' + prev.postInstall;
});
clusterName = inputs.config.nixos.system.networking.hostname;
dbdserver =
@@ -67,7 +61,6 @@ inputs:
''
echo export CUDA_DEVICE_ORDER=PCI_BUS_ID
echo export SLURM_THREADS_PER_CPU=${builtins.toString slurm.cpu.threads}
echo export I_MPI_PMI_LIBRARY=${inputs.config.services.slurm.package}/lib/libpmi2.so
'';
in
''