Gaëtan Caillaut / minibert-deft2018 / Commits
Commit f876e6c2, authored Mar 30, 2021 by Gaëtan Caillaut

fix typos

parent 215506ec
Changes 1
train.py
@@ -566,7 +566,7 @@ def t1_from_scratch(args):
         test_dataset, collate_fn=deft_collater, batch_size=args.bs, pin_memory=pin_memory)
     model, optimizer, prev_epoch, config_dict = t1_model_from_params(
-        None, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth,, attention_scaling=attention_scaling)
+        None, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, attention_scaling=attention_scaling)
     run_name = t1_run_name_from_params(args)
     if args.logdir is None:
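The only change in each hunk is the removal of a doubled comma after depth=args.depth. A stray ",," inside a call's argument list is rejected by the Python parser, so train.py would fail with a SyntaxError on import before this fix. A minimal sketch of that behaviour, using a hypothetical call string rather than the repository's code:

import ast

# The pre-fix form is rejected at parse time, before any training code runs.
try:
    ast.parse("f(depth=3,, attention_scaling=1.0)")
except SyntaxError as err:
    print("rejected:", err.msg)

# The post-fix form parses fine.
ast.parse("f(depth=3, attention_scaling=1.0)")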
@@ -713,7 +713,7 @@ def finetune_t2(args):
         test_dataset, collate_fn=deft_collater, batch_size=args.bs, pin_memory=pin_memory)
     model, optimizer, prev_epoch, config_dict = t2_model_from_params(
-        args.model, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth,, attention_scaling=attention_scaling)
+        args.model, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, attention_scaling=attention_scaling)
     run_name = t2_run_name_from_params(args)
     try:
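Syntax-level typos like this one can be caught without launching a full training run by byte-compiling the script first. A quick check, assuming train.py sits in the current working directory:

import py_compile

# Compiles train.py and raises py_compile.PyCompileError on syntax errors
# such as the doubled comma removed by this commit.
try:
    py_compile.compile("train.py", doraise=True)
    print("train.py parses cleanly")
except py_compile.PyCompileError as err:
    print(err)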