#!/usr/bin/env bash
#
# Slurm job-array script: runs generative NLI evaluation of the
# llama3instruct70 model over 3 Basque XNLI datasets x 2 prompt types
# (6 array tasks total; see the index-decoding logic below).
#SBATCH --qos=regular
#SBATCH --job-name=xnli_llamainstruct70
#SBATCH --cpus-per-task=2
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --mem=64GB
# 4 GPUs on a single A100-SXM4 node for the 70B model.
#SBATCH --gres=gpu:4
#SBATCH --constraint=a100-sxm4
# %a = array task index. NOTE(review): without %A (job id) in the name,
# logs from different submissions overwrite each other — consider
# "%A_%a" if per-submission logs should be kept.
#SBATCH --output=/scratch/jbengoetxea/phd/XNLIvar/scripts/generative/logs/xnli-llamainstruct70_%a.log
#SBATCH --error=/scratch/jbengoetxea/phd/XNLIvar/scripts/generative/logs/xnli-llamainstruct70_%a.err
#SBATCH --time=01:00:00 #ee-hh:mm:ss
# Mail only when the job is requeued.
#SBATCH --mail-type=REQUEUE
#SBATCH --mail-user=jaione.bengoetxea@ehu.eus
# Tasks 0..5, at most 2 running concurrently.
#SBATCH --array=0-5%2
# Fail fast: abort on command errors and failed pipeline stages.
set -eo pipefail

# Activate the project virtualenv; die loudly if it is missing or broken.
source /scratch/jbengoetxea/phd/.phd_venv_new/bin/activate \
  || { echo "ERROR: could not activate virtualenv" >&2; exit 1; }

# Enable nounset only after sourcing: some activate scripts reference
# unset variables (e.g. PS1) and would trip 'set -u'.
set -u

# Hugging Face model/tokenizer cache location on scratch.
# NOTE(review): TRANSFORMERS_CACHE is deprecated in recent transformers
# releases in favour of HF_HOME — confirm against the installed version.
export TRANSFORMERS_CACHE="/scratch/jbengoetxea/.cache"
# Experiment grid covered by the job array:
# 3 datasets x 2 prompt types = 6 tasks (matches --array=0-5).
DATASET_VALUES=(xnli-eu-nat-biz xnli-eu-nat-gip xnli-eu-nat-naf)
PROMPT_TYPE_VALUES=(nli-few nli-zero)
N=${#PROMPT_TYPE_VALUES[@]}  # size of the inner (prompt-type) dimension

# Decode the flat Slurm array index into (dataset, prompt_type) coordinates.
# Defaults to task 0 when run outside a Slurm array job (e.g. local testing);
# this matches the original behaviour, where the unset variable evaluated to 0
# in arithmetic context.
TASK_ID=${SLURM_ARRAY_TASK_ID:-0}
IDX1=$((TASK_ID / N))  # outer index: which dataset
IDX2=$((TASK_ID % N))  # inner index: which prompt type

# Sanity check: refuse task ids outside the configured grid instead of
# silently expanding to an empty dataset name.
if (( IDX1 >= ${#DATASET_VALUES[@]} )); then
  echo "ERROR: SLURM_ARRAY_TASK_ID=${TASK_ID} exceeds grid size" >&2
  exit 1
fi

DATASET="${DATASET_VALUES[${IDX1}]}"
PROMPT_TYPE="${PROMPT_TYPE_VALUES[${IDX2}]}"
# Fixed settings for this submission.
TASK=trilabel
MODEL=llama3instruct70
OUTPUT=/scratch/jbengoetxea/phd/XNLIvar/scripts/generative/results/$DATASET/$MODEL

# Make sure the results directory exists before the run starts writing.
mkdir -p "$OUTPUT"

# Run one (dataset, prompt_type) cell of the evaluation grid.
# All expansions quoted to survive any future values containing spaces.
python3 /scratch/jbengoetxea/phd/XNLIvar/scripts/generative/scripts/zero_shot.py \
  --dataset "${DATASET}" \
  --model "${MODEL}" \
  --output_dir "${OUTPUT}" \
  --task "${TASK}" \
  --prompt_type "${PROMPT_TYPE}"