#!/bin/bash
#SBATCH --time=2:00:00
#SBATCH --mem=4G
#SBATCH --cpus-per-task=1
source activate myenv  # If using conda (newer conda versions use "conda activate myenv" instead)
python my_analysis.py
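# Submit from a login node with: sbatch analysis_job.sh  (replace with whatever filename you saved this script under)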
#!/bin/bash
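# GPU job: one A100 GPU, four CPU cores, 16 GB of memory, and an eight-hour time limit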
#SBATCH --time=8:00:00
#SBATCH --mem=16G
#SBATCH --gres=gpu:a100:1
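# Requests one GPU of type "a100"; if your cluster does not define GPU types, "--gres=gpu:1" asks for any available GPU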
#SBATCH --cpus-per-task=4
source activate pytorch_env
python train_model.py
#!/bin/bash
#SBATCH --time=4:00:00
#SBATCH --mem=4G
#SBATCH --array=0-17  # 18 tasks, indexed 0 through 17
#SBATCH --output=paramSweep_%A_%a.log
# Define parameter arrays
LEARNING_RATES=(0.001 0.01 0.1)
BATCH_SIZES=(32 64 128)
OPTIMIZERS=("adam" "sgd")
# Calculate parameters for this task
lr_idx=$(( $SLURM_ARRAY_TASK_ID % 3 ))
bs_idx=$(( ($SLURM_ARRAY_TASK_ID / 3) % 3 ))
opt_idx=$(( $SLURM_ARRAY_TASK_ID / 9 ))
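# Example mapping: task 7 -> lr_idx=1 (0.01), bs_idx=2 (128), opt_idx=0 (adam);
# the last task, 17 -> lr_idx=2 (0.1), bs_idx=2 (128), opt_idx=1 (sgd)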
# Run with specific parameters
python train.py \
    --lr "${LEARNING_RATES[$lr_idx]}" \
    --batch-size "${BATCH_SIZES[$bs_idx]}" \
    --optimizer "${OPTIMIZERS[$opt_idx]}"
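# Each task writes its own log file (paramSweep_<jobID>_<taskID>.log, from the --output pattern above),
# and SLURM schedules all 18 tasks independently from a single sbatch submission.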