Commit f876e6c2 authored by Gaëtan Caillaut

fix typos: remove stray commas from the t1_model_from_params and t2_model_from_params calls

parent 215506ec
@@ -566,7 +566,7 @@ def t1_from_scratch(args):
         test_dataset, collate_fn=deft_collater, batch_size=args.bs, pin_memory=pin_memory)
     model, optimizer, prev_epoch, config_dict = t1_model_from_params(
-        None, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, , attention_scaling=attention_scaling)
+        None, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, attention_scaling=attention_scaling)
     run_name = t1_run_name_from_params(args)
     if args.logdir is None:
@@ -713,7 +713,7 @@ def finetune_t2(args):
         test_dataset, collate_fn=deft_collater, batch_size=args.bs, pin_memory=pin_memory)
     model, optimizer, prev_epoch, config_dict = t2_model_from_params(
-        args.model, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, , attention_scaling=attention_scaling)
+        args.model, args.d, attention_type, position_type, tokenizer, max_seq_size, mask_token, pad_token, device, checkpoint_path=args.checkpoint, height=args.height, depth=args.depth, attention_scaling=attention_scaling)
     run_name = t2_run_name_from_params(args)
     try:
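Worth noting: the stray comma removed in both hunks is not merely cosmetic. A doubled comma inside a Python argument list is invalid syntax, so the affected module would fail to import before this fix. A minimal sketch of the failure mode (the function `f` is hypothetical, standing in for `t1_model_from_params` / `t2_model_from_params`):

```python
# Minimal reproduction of the bug fixed in this commit (hypothetical function):
def f(*args, **kwargs):
    return args, kwargs

# Before the fix: a doubled comma in the keyword-argument list.
# Python rejects this at parse time, so the whole module fails to import:
#     f(depth=6, , attention_scaling=True)
#     SyntaxError: invalid syntax

# After the fix: the stray comma is gone and the call parses and runs.
print(f(depth=6, attention_scaling=True))
# -> ((), {'depth': 6, 'attention_scaling': True})
```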