Gaëtan Caillaut / minibert-deft2018 / Commits / 118379de

Commit 118379de authored Mar 30, 2021 by Gaëtan Caillaut
refactoring in slurm_scripts
parent 11b063c3
Changes: 39 files
slurm_scripts/h1d2/job.sh
@@ -37,14 +37,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                TB_DIR="${LOGDIR}/${RUN_NAME}"
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR} --height 1 --depth 2
                else
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
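All of the hunks in this commit touch the same nested loop, so the remaining files differ mainly in the run names, the train.py sub-command, and the --height/--depth values. For orientation, here is a minimal, self-contained sketch of the loop one of these job scripts runs. It is an illustrative reconstruction, not the exact file contents: the data paths, tokenizer, batch size, device and output directories are placeholders (they are set earlier in each SLURM script and are not part of the hunks), and only one variant of each changed line pair shown in the diff is kept.

#!/bin/bash
# Illustrative sketch only: every value below marked "placeholder" is an
# assumption, not taken from the repository.
set -x
set -e

TRAIN=data/train.txt        # placeholder training corpus
TEST=data/test.txt          # placeholder test corpus
DEV=data/dev.txt            # placeholder dev corpus
TOKENIZER=tokenizer.json    # placeholder tokenizer file
OUT_DIR=out                 # placeholder output directory
LOGDIR=logs                 # placeholder TensorBoard root
BS=32                       # placeholder batch size
DEVICE=cuda                 # placeholder device

# E walks over zero-padded epoch counters 00000, 00010, ..., 00040;
# 10#$E forces base-10 interpretation despite the leading zeros.
for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                TB_DIR="${LOGDIR}/${RUN_NAME}"    # per-run TensorBoard directory
                if (( 10#$E > 0 )); then
                    # Resume training from the checkpoint written at epoch E
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR} --height 1 --depth 2
                else
                    # First training chunk: no checkpoint to resume from
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                fi
            done
        done
    done
done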
slurm_scripts/h1d2/job_lemmatized.sh
@@ -37,14 +37,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                TB_DIR="${LOGDIR}/${RUN_NAME}"
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR} --height 1 --depth 2
                else
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
slurm_scripts/h1d2/job_t1_fs.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_h1d2"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
slurm_scripts/h1d2/job_t1_fs_lemmatized.sh
@@ -39,15 @@ -39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_h1d2"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
slurm_scripts/h1d2/job_t2_fs.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_norm_h1d2"
                TB_DIR="${LOGDIR}/${T2_RUN_NAME}"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T2_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                else
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
slurm_scripts/h1d2/job_t2_fs_lemmatized.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_norm_h1d2"
                TB_DIR="${LOGDIR}/${T2_RUN_NAME}"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d2_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T2_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 1 --depth 2
                else
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 1 --depth 2
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 1 --depth 2
                fi
            done
        done
slurm_scripts/h2d1/job.sh
@@ -38,13 +38,12 @@ for E in $(seq -f "%05g" 0 10 40); do
    for ATT in "self-attention" "non-transforming" "semi-transforming"; do
        for POS in "none" "fixed"; do
            RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
            TB_DIR="${LOGDIR}/${RUN_NAME}"
            if (( 10#$E > 0 )); then
                CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR} --height 2 --depth 1
                python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR} --height 2 --depth 1
            else
                python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
            fi
        done
    done
slurm_scripts/h2d1/job_lemmatized.sh
@@ -37,14 +37,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
                TB_DIR="${LOGDIR}/${RUN_NAME}"
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR} --height 2 --depth 1
                else
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
                fi
            done
        done
slurm_scripts/h2d1/job_t1_fs.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_h2d1"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
                fi
            done
        done
slurm_scripts/h2d1/job_t1_fs_lemmatized.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_h2d1"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
                fi
            done
        done
slurm_scripts/h2d1/job_t2_fs.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_norm_h2d1"
                TB_DIR="${LOGDIR}/${T2_RUN_NAME}"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T2_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                else
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
                fi
            done
        done
slurm_scripts/h2d1/job_t2_fs_lemmatized.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_norm_h2d1"
                TB_DIR="${LOGDIR}/${T2_RUN_NAME}"
                T2_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h2d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T2_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --height 2 --depth 1
                else
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --height 2 --depth 1
                    python train.py t2-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --height 2 --depth 1
                fi
            done
        done
slurm_scripts/job.sh
@@ -34,17 +34,16 @@ set -x
set -e
for E in $(seq -f "%05g" 0 10 40); do
    for D in 16 32 64 96 128; do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed" "trained"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                TB_DIR="${LOGDIR}/${RUN_NAME}"
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR}
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR}
                else
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR}
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
slurm_scripts/job_lemmatized.sh
@@ -37,14 +37,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                TB_DIR="${LOGDIR}/${RUN_NAME}"
                RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${RUN_NAME}/checkpoint-${E}.tar"
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${TB_DIR}
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --checkpoint ${CHECKPOINT} --logdir ${LOGDIR}
                else
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${TB_DIR}
                    python train.py mlm ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --bs ${BS} --epochs 10 --attention ${ATT} --position ${POS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
slurm_scripts/job_t1.sh
@@ -38,15 +38,14 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT}
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT}
                else
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR}
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
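The job_t1* scripts above and below differ from the mlm ones mainly in that they pass a pretrained MiniBERT checkpoint as an extra positional argument after the tokenizer. A hedged sketch of a single such invocation is shown here; every path and hyper-parameter value is a placeholder chosen for illustration, not a value visible in the hunks.

# Illustrative single invocation only; all concrete values below are placeholders.
PRETRAINED_DIR=pretrained                           # placeholder: directory holding the MLM runs
MLM_RUN_NAME="d32_self-attention_fixed_gelu_norm"   # one run name of the form built by the loop
TB_DIR="logs/${MLM_RUN_NAME}"                       # per-run TensorBoard directory, as in the scripts

python train.py t1 data/train.txt data/test.txt data/dev.txt tokenizer.json \
    "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" \
    -o out -d 32 --attention "self-attention" --position "fixed" \
    --epochs 10 --bs 32 --device cuda --logdir "${TB_DIR}"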
slurm_scripts/job_t1_frozen.sh
@@ -38,15 +38,14 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_frozen"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax_frozen"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --freeze-attention
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --freeze-attention
                else
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --freeze-attention
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --freeze-attention
                fi
            done
        done
slurm_scripts/job_t1_fs.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT}
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT}
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR}
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
slurm_scripts/job_t1_fs_lemmatized.sh
@@ -39,15 +39,13 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT}
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT}
                else
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR}
                    python train.py t1-fs ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
slurm_scripts/job_t1_lemmatized.sh
@@ -38,18 +38,15 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/mi\
                    nibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoi\
                    nt ${CHECKPOINT}
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT}
                else
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/mi\
                    nibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR}
                    nibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR}
                fi
            done
        done
slurm_scripts/job_t1_lemmatized_frozen.sh
@@ -38,15 +38,14 @@ for E in $(seq -f "%05g" 0 10 40); do
    for D in 32; do
        for ATT in "self-attention" "non-transforming" "semi-transforming"; do
            for POS in "none" "fixed"; do
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_norm_frozen"
                TB_DIR="${LOGDIR}/${T1_RUN_NAME}"
                MLM_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax"
                T1_RUN_NAME="d${D}_${ATT}_${POS}_gelu_norm_h1d1_softmax_frozen"
                if (( 10#$E > 0 )); then
                    CHECKPOINT="${OUT_DIR}/${T1_RUN_NAME}/checkpoint-${E}.tar"
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --checkpoint ${CHECKPOINT} --freeze-attention
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --checkpoint ${CHECKPOINT} --freeze-attention
                else
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${TB_DIR} --freeze-attention
                    python train.py t1 ${TRAIN} ${TEST} ${DEV} ${TOKENIZER} "${PRETRAINED_DIR}/${MLM_RUN_NAME}/minibert-model.pt" -o ${OUT_DIR} -d ${D} --attention ${ATT} --position ${POS} --epochs 10 --bs ${BS} --device ${DEVICE} --logdir ${LOGDIR} --freeze-attention
                fi
            done
        done