@@ -560,7 +560,7 @@ def main():
     best_metric = None
     best_epoch = None
     saver = None
-    output_dir = ''
+    output_dir = None
     if args.local_rank == 0:
         if args.experiment:
             exp_name = args.experiment
@@ -606,9 +606,10 @@ def main():
                 # step LR for next epoch
                 lr_scheduler.step(epoch + 1, eval_metrics[eval_metric])

-            update_summary(
-                epoch, train_metrics, eval_metrics, os.path.join(output_dir, 'summary.csv'),
-                write_header=best_metric is None, log_wandb=args.log_wandb and has_wandb)
+            if output_dir is not None:
+                update_summary(
+                    epoch, train_metrics, eval_metrics, os.path.join(output_dir, 'summary.csv'),
+                    write_header=best_metric is None, log_wandb=args.log_wandb and has_wandb)

             if saver is not None:
                 # save proper checkpoint with eval metric
@@ -623,7 +624,7 @@ def main():

 def train_one_epoch(
         epoch, model, loader, optimizer, loss_fn, args,
-        lr_scheduler=None, saver=None, output_dir='', amp_autocast=suppress,
+        lr_scheduler=None, saver=None, output_dir=None, amp_autocast=suppress,
         loss_scaler=None, model_ema=None, mixup_fn=None):

     if args.mixup_off_epoch and epoch >= args.mixup_off_epoch:
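For context (not stated in the patch itself): switching the `output_dir` sentinel from `''` to `None` matters because `os.path.join` treats an empty string as the current working directory, so the old unconditional `update_summary` call could drop a stray `summary.csv` into CWD on ranks that never created an output directory. A minimal, hypothetical sketch of the behavior the new `if output_dir is not None:` guard avoids:

```python
import os

# '' as a sentinel silently resolves to the current working directory:
assert os.path.join('', 'summary.csv') == 'summary.csv'  # file lands in CWD

# None as a sentinel forces an explicit skip instead of a stray file:
output_dir = None  # e.g. a non-zero rank that never created an output dir
if output_dir is not None:
    summary_path = os.path.join(output_dir, 'summary.csv')
else:
    summary_path = None  # nothing is written on this rank
```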