nonparametric.sh (1232B)
#!/bin/bash
#SBATCH -A lco@gpu
#SBATCH -C v100-32g
#SBATCH --job-name=gbure_nonparametric
#SBATCH --ntasks=1
#SBATCH --gres=gpu:1
#SBATCH --cpus-per-task=20
#SBATCH --distribution=block:block
#SBATCH --hint=nomultithread
#SBATCH --time=20:00:00
#SBATCH --output=/gpfswork/rech/lco/url46ht/Étienne/runs/%x_%j.stdout
#SBATCH --error=/gpfswork/rech/lco/url46ht/Étienne/runs/%x_%j.stderr
#SBATCH --array=0-2

# %x = job name
# %j = job id

module purge
source ~/.bashrc
conda activate Étienne
export DATA_PATH=$WORK/Étienne/data
export LOG_PATH=$WORK/Étienne/log
export HF_DATASETS_OFFLINE=1
export TRANSFORMERS_OFFLINE=1
cd $WORK/Étienne/code

# echo commands as they are executed
set -x

config=""

# Pick one of three configuration variants based on the array task id (0-2).
if [ $(( $SLURM_ARRAY_TASK_ID % 3 )) -eq 0 ]; then
	config="$config --undefined_poison_whole_meta=True"
elif [ $(( $SLURM_ARRAY_TASK_ID % 3 )) -eq 1 ]; then
	config="$config --undefined_poison_whole_meta=False --neutral_topological_similarity=None"
else
	config="$config --undefined_poison_whole_meta=False --neutral_topological_similarity=(1536*2)**0.5"
fi

# Within each array task, sweep over topological weights sequentially.
for topological_weight in 0.1 0.15 0.2 0.22 0.25 0.3 0.5 1; do
	python -m gbure.train gbure/config/nonparametric.py $config --topological_weight=$topological_weight
done
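
The script is meant to be submitted as a SLURM array job: each of the three array tasks picks one configuration variant and then trains over the full sweep of topological weights. A minimal usage sketch (the account, constraint, and paths in the header are site-specific and would need to be adapted):

	# submit all three variants (array tasks 0-2)
	sbatch nonparametric.sh
	# or run a single variant by overriding the array range on the command line
	sbatch --array=1 nonparametric.sh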