changeset 167:3213a8bb2ed1

new style batch jobs; see the cirrus_work repo for the matching _xxx.sh worker scripts
author Henry S. Thompson <ht@inf.ed.ac.uk>
date Mon, 18 Jul 2022 19:16:20 +0100
parents afd7879181c9
children 57cf91a0d97d
files bin/ex1.sh bin/nl1.sh bin/runme.sh bin/test.sh
diffstat 4 files changed, 59 insertions(+), 13 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/ex1.sh	Mon Jul 18 19:16:20 2022 +0100
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Invoke this from the login node as e.g. sbatch --ntasks=10 -c 20 ex1.sh \
+#                     CC-MAIN-2019-35 cdx_counts 0 299
+# The job itself runs on compute nodes
+
+#SBATCH --time=01:00:00
+#SBATCH --partition=standard
+#SBATCH --qos=standard
+#SBATCH --account=ec184-guest
+#SBATCH --job-name ex1
+
+export W=/work/dc007/dc007
+
+echo $(date) Launching $SLURM_JOB_NUM_NODES nodes for ex1 "$@" from $(hostname) to do $SLURM_NTASKS tasks, $SLURM_CPUS_PER_TASK cpus each
+
+srun -c $SLURM_CPUS_PER_TASK $W/shared/bin/sing $W/hst/bin/_ex1.sh "$@"
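
The worker script _ex1.sh invoked above lives in the cirrus_work repo, not in this changeset. A minimal sketch of what such a worker might look like, assuming each of the srun-launched tasks uses SLURM_PROCID to claim its own slice of the first..last segment range (the variable names and loop body are illustrative guesses, not the real script):

#!/bin/bash
# Hypothetical worker sketch; the real _ex1.sh is in the cirrus_work repo.
# Per-task usage (under srun): _ex1.sh CC-MAIN-2019-35 cdx_counts 0 299
crawl=$1 job=$2 first=$3 last=$4
span=$(( last - first + 1 ))
per=$(( (span + SLURM_NTASKS - 1) / SLURM_NTASKS ))  # ceiling division
start=$(( first + SLURM_PROCID * per ))
end=$(( start + per - 1 ))
(( end > last )) && end=$last
for seg in $(seq $start $end); do
  # stand-in for the real per-segment work
  echo "$(hostname) task $SLURM_PROCID: $job for segment $seg of $crawl"
done
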
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/nl1.sh	Mon Jul 18 19:16:20 2022 +0100
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Invoke this from the login node as e.g. sbatch --ntasks=10 -c 20 nl1.sh \
+#                     CC-MAIN-2019-35 nl1_counts langs 0 299
+# The job itself runs on compute nodes
+
+#SBATCH --time=01:00:00
+#SBATCH --partition=standard
+#SBATCH --qos=standard
+#SBATCH --account=ec184-guest
+#SBATCH --job-name nl1
+
+export W=/work/dc007/dc007
+
+echo $(date) Launching $SLURM_JOB_NUM_NODES nodes for nl1 "$@" from $(hostname) to do $SLURM_NTASKS tasks, $SLURM_CPUS_PER_TASK cpus each
+
+srun -c $SLURM_CPUS_PER_TASK $W/shared/bin/sing $W/hst/bin/_nl1.sh "$@"
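
All of these launchers funnel their command through $W/shared/bin/sing, which is also not part of this changeset. Presumably it is a thin wrapper that runs its arguments inside the project's Singularity container; a hypothetical sketch (the container path is invented for illustration):

#!/bin/bash
# Hypothetical sing wrapper: execute the given command line inside the
# project container. The .sif path is a guess, not the real location.
exec singularity exec $W/shared/container.sif "$@"
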
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/runme.sh	Mon Jul 18 19:16:20 2022 +0100
@@ -0,0 +1,15 @@
+#!/usr/bin/bash
+# Invoke this from the login node as e.g. sbatch --ntasks=10 -c 20 runme.sh $PWD CMD ARGS
+# The job itself runs on compute nodes
+
+#SBATCH --time=01:00:00
+#SBATCH --partition=standard
+#SBATCH --qos=standard
+#SBATCH --account=ec184-guest
+#SBATCH --job-name runme
+
+export W=/work/dc007/dc007
+
+echo $(date) Launching $SLURM_JOB_NUM_NODES nodes in $(pwd) for runme "$@" from $(hostname) to do $SLURM_NTASKS tasks, $SLURM_CPUS_PER_TASK cpus each 1>&2
+
+srun -c $SLURM_CPUS_PER_TASK $W/shared/bin/sing $W/hst/bin/_runme.sh "$@"
\ No newline at end of file
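
Unlike ex1.sh and nl1.sh, runme.sh passes $PWD ahead of the command, presumably so the worker can restore the caller's working directory before running it. A plausible sketch of _runme.sh under that assumption (the real script is in the cirrus_work repo):

#!/bin/bash
# Hypothetical _runme.sh: first argument is the submission directory,
# the rest is the command line to run there
cd "$1" || exit 1
shift
exec "$@"
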
--- a/bin/test.sh	Mon Jul 18 19:15:20 2022 +0100
+++ b/bin/test.sh	Mon Jul 18 19:16:20 2022 +0100
@@ -1,18 +1,17 @@
 #!/usr/bin/bash
-# Invoke this as e.g. sbatch --time=00:05:00 [--exclusive] -N 2 --ntasks-per-node 2 -c 2 masterJob.sh test
-# run doTest.sh in parallel on 2 nodes, 2 tasks each, with input from test/n.t.txt
-n=$SLURM_NTASKS
-c=$SLURM_CPUS_PER_TASK
-nodename=$SLURMD_NODENAME
-task=$SLURM_LOCALID
-node=$SLURM_NODEID
-echo $(date) $nodename:$node:$task start
+# Invoke this as e.g. sbatch --time=00:05:00 [--exclusive] --ntasks=6 -c 13 ~/bin/test.sh
+# to run _test.sh in parallel on 3 nodes, 2 tasks each
 
-#type parallel
-#module load gnu-parallel
+#SBATCH --time=00:01:00
+#SBATCH --partition=standard
+#SBATCH --qos=standard
+#SBATCH --account=ec184-guest
+#SBATCH --job-name test
 
-export PYTHONPATH=$PYTHONPATH:$HOME/lib/python
-parallel --will-cite -j $c dotest.sh $node $task '{}' < test.$node.$task.txt
+export W=/work/dc007/dc007
+
+echo $(date) Launching $SLURM_JOB_NUM_NODES nodes for test "$@" from $(hostname) to do $SLURM_NTASKS tasks, $SLURM_CPUS_PER_TASK cpus each
 
-echo $(date) $nodename:$node:$task end
+srun -c $SLURM_CPUS_PER_TASK $W/shared/bin/sing $W/hst/bin/_test.sh "$@"
 
+
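
The deleted inline body above hints at what the per-task _test.sh now does. A sketch reconstructed from those removed lines (the real script is in the cirrus_work repo; the sleep is a stand-in for real work):

#!/bin/bash
# Hypothetical _test.sh, echoing the same identity variables the old
# inline version used
echo $(date) $SLURMD_NODENAME:$SLURM_NODEID:$SLURM_LOCALID start "$@"
sleep 5  # stand-in for real work
echo $(date) $SLURMD_NODENAME:$SLURM_NODEID:$SLURM_LOCALID end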