Commit ee6a202

Add hyak prefix for dropouts
1 parent 2745ead

1 file changed: +18 −6 lines changed

run_dropout.sh

Lines changed: 18 additions & 6 deletions
@@ -1,13 +1,25 @@
 #!/bin/bash
+#SBATCH --job-name="dropout_0"
+#SBATCH --account=dynamicsai
+#SBATCH --partition=gpu-a40
+#SBATCH --nodes=1
+#SBATCH --cpus-per-task=4
+#SBATCH --gpus=1
+#SBATCH --mem=32G
+#SBATCH --time=23:00:00
+#SBATCH --output=logs/slurm-%A_%a.out
 
-# In this example, we show how to train SimCSE on unsupervised Wikipedia data.
-# If you want to train it with multiple GPU cards, see "run_sup_example.sh"
-# about how to use PyTorch's distributed data parallel.
+source ~/.bashrc
+
+module load gcc/11.2.0
+module load cuda/11.8.0
+
+conda activate compute
 
 python train.py \
     --model_name_or_path bert-base-uncased \
     --train_file data/wiki1m_for_simcse.txt \
-    --output_dir result/dropout_"$1" \
+    --output_dir result/dropout_0 \
     --num_train_epochs 1 \
     --per_device_train_batch_size 64 \
     --learning_rate 3e-5 \
@@ -22,7 +34,7 @@ python train.py \
     --temp 0.05 \
     --do_train \
     --do_eval \
-    --attention_probs_dropout_prob "$1" \
-    --hidden_dropout_prob "$1" \
+    --attention_probs_dropout_prob 0.0 \
+    --hidden_dropout_prob 0.0 \
     --fp16 \
     "$@"
