-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathrun.sh
45 lines (35 loc) · 1.21 KB
/
run.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
#!/bin/bash
# Generate and submit one SLURM job per (database, model) combination.
# Exit on error, on unset variables, and on any failed pipeline stage.
set -euo pipefail

# Parameter sweep: pathway databases and model architectures to evaluate.
readonly DATABASES=("kegg" "hallmark" "wiki_pathways")
readonly MODELS=("ann" "gnn" "pgnn" "megagnn" "kpnn")

# Scratch directory holding the generated job scripts; cleaned up on
# every exit path (normal exit, error, or signal) via the EXIT trap.
TMP_DIR=$(mktemp -d)
readonly TMP_DIR
trap 'rm -rf -- "$TMP_DIR"' EXIT
# Build the file name for a generated job script.
# Arguments:
#   $1 - database name
#   $2 - model name
# Outputs: writes "<database>_<model>.sh" to stdout.
generate_job_name() {
  local db=$1
  local mdl=$2
  printf '%s_%s.sh\n' "$db" "$mdl"
}
# Write one SLURM batch script per (database, model) pair into $TMP_DIR.
# The heredoc delimiter is deliberately unquoted so that $args expands
# NOW, baking the concrete CLI flags into each generated script.
for database in "${DATABASES[@]}"; do
for model in "${MODELS[@]}"; do
# Flags forwarded to main.py inside the generated job script.
args="--database $database --model $model"
job_name=$(generate_job_name "$database" "$model")
# Everything between the EOT markers (except $args) is written verbatim
# into the job script: its own shebang, the #SBATCH directives (1 node,
# 32 cores, 7-day limit, 'general' partition, 'public' QOS, stdout/err
# under artifacts/), and the module/conda environment setup.
cat <<EOT > "$TMP_DIR/$job_name"
#!/bin/bash
#SBATCH -N 1 # number of nodes
#SBATCH -c 32 # number of cores
#SBATCH -t 7-00:00:00 # time in d-hh:mm:ss
#SBATCH -p general # partition
#SBATCH -q public # QOS
#SBATCH -o artifacts/slurm.%j.out # file to save job's STDOUT (%j = JobId)
#SBATCH -e artifacts/slurm.%j.err # file to save job's STDERR (%j = JobId)
#SBATCH --mail-type=ALL # Send an e-mail when a job starts, stops, or fails
#SBATCH --export=NONE # Purge the job-submitting shell environment
module load mamba/latest
source activate bio_eval
python bioinformatics-sota-eval/main.py $args
EOT
done
done
# Submit every generated job script, aborting on the first failed
# submission (a silent sbatch failure previously went unnoticed).
# The 1-second pause avoids hammering the scheduler with rapid-fire
# submissions.
for job_script in "$TMP_DIR"/*.sh; do
  sbatch "$job_script" || { printf 'failed to submit %s\n' "$job_script" >&2; exit 1; }
  sleep 1
done