single_gpu_training.pbs
#PBS -S /bin/bash
#PBS -N squad_train_single
#PBS -j oe
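# Resource request: 2 h walltime, one chunk with 12 CPU cores, 1 GPU and 20 GB of memory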
#PBS -l walltime=02:00:00
#PBS -l select=1:ncpus=12:ngpus=1:mem=20gb
#PBS -q gpuq
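# Alternative queue, currently disabled (remove one '#' below to use isiaq instead of gpuq)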
##PBS -q isiaq
#PBS -P isia
# Change to the directory the job was submitted from
cd $PBS_O_WORKDIR
# Load Anaconda and activate the conda environment
module load anaconda3/5.3.1
source activate transformers_env
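# (Assumes the transformers_env conda environment already exists and provides
#  PyTorch and the Hugging Face transformers library.)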
# Path to dataset
export SQUAD_DIR=/gpfs/opt/data/squad/1.1
#export SQUAD_DIR=/gpfs/opt/data/squad/2.0
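# Note: switching to SQuAD 2.0 also requires pointing --train_file/--predict_file
# at the v2.0 JSON files and passing --version_2_with_negative to run_squad.py.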
# Run the SQuAD fine-tuning example on a single GPU
python ./transformers/examples/run_squad.py \
--model_type bert \
--model_name_or_path bert-base-cased \
--do_train \
--do_eval \
--train_file $SQUAD_DIR/train-v1.1.json \
--predict_file $SQUAD_DIR/dev-v1.1.json \
--per_gpu_train_batch_size 12 \
--learning_rate 3e-5 \
--num_train_epochs 2.0 \
--max_seq_length 384 \
--doc_stride 128 \
--output_dir ./debug_squad_$PBS_JOBID/
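# To run this script, submit it from a login node with the standard PBS client
# tools (a sketch; queue names and module versions are site-specific):
#   qsub single_gpu_training.pbs     # submit the job
#   qstat -u $USER                   # check its status in the queue
# With -j oe, stdout and stderr are merged into a single file (typically
# squad_train_single.o<jobid>) written back to the submission directory.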