From c8e95b0ddc9eb2417c88a51b83e869d606c6fe0b Mon Sep 17 00:00:00 2001
From: D-X-Y <280835372@qq.com>
Date: Sat, 22 May 2021 09:43:48 +0000
Subject: [PATCH] Update LFNA

---
 exps/LFNA/lfna.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/exps/LFNA/lfna.py b/exps/LFNA/lfna.py
index d916cc4..3264c3e 100644
--- a/exps/LFNA/lfna.py
+++ b/exps/LFNA/lfna.py
@@ -100,9 +100,15 @@ def pretrain(base_model, meta_model, criterion, xenv, args, logger):
         weight_decay=args.weight_decay,
         amsgrad=True,
     )
+    logger.log("Pre-train the meta-model")
+    logger.log("Using the optimizer: {:}".format(optimizer))
 
     meta_model.set_best_dir(logger.path(None) / "checkpoint-pretrain")
+    per_epoch_time, start_time = AverageMeter(), time.time()
     for iepoch in range(args.epochs):
+        left_time = "Time Left: {:}".format(
+            convert_secs2time(per_epoch_time.avg * (args.epochs - iepoch), True)
+        )
         total_meta_losses, total_match_losses = [], []
         for ibatch in range(args.meta_batch):
             rand_index = random.randint(0, meta_model.meta_length - xenv.seq_length - 1)
@@ -151,7 +157,11 @@ def pretrain(base_model, meta_model, criterion, xenv, args, logger):
                 final_match_loss.item(),
             )
             + ", batch={:}".format(len(total_meta_losses))
+            + ", success={:}, best_score={:.4f}".format(success, -best_score)
+            + " {:}".format(left_time)
         )
+        per_epoch_time.update(time.time() - start_time)
+        start_time = time.time()
 
 
 def main(args):
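
The additions above track how long each pretraining epoch takes and log an estimated time remaining, using the project's `AverageMeter` and `convert_secs2time` helpers (imported elsewhere in `lfna.py`, not shown in this patch). The snippet below is a minimal, self-contained sketch of that same pattern; `AverageMeter`, `secs2str`, and `train_with_eta` here are stand-ins written for illustration, not the project's actual utilities.

```python
import time
import random


class AverageMeter:
    """Running average of observed values (minimal stand-in for the repo's AverageMeter)."""

    def __init__(self):
        self.sum, self.count, self.avg = 0.0, 0, 0.0

    def update(self, value, n=1):
        self.sum += value * n
        self.count += n
        self.avg = self.sum / self.count


def secs2str(seconds):
    """Format seconds as H:MM:SS (rough analogue of convert_secs2time(..., True))."""
    seconds = int(seconds)
    hours, rem = divmod(seconds, 3600)
    minutes, secs = divmod(rem, 60)
    return "{:d}:{:02d}:{:02d}".format(hours, minutes, secs)


def train_with_eta(epochs=5):
    per_epoch_time, start_time = AverageMeter(), time.time()
    for iepoch in range(epochs):
        # Estimate remaining time from the average cost of the epochs seen so far
        # (the estimate is 0:00:00 on the first epoch, before any timing data exists).
        left_time = "Time Left: {:}".format(
            secs2str(per_epoch_time.avg * (epochs - iepoch))
        )
        # ... the actual per-epoch training work would go here ...
        time.sleep(random.uniform(0.1, 0.3))  # placeholder workload
        print("epoch {:03d}/{:03d} {:}".format(iepoch, epochs, left_time))
        # Record this epoch's duration and reset the timer for the next epoch.
        per_epoch_time.update(time.time() - start_time)
        start_time = time.time()


if __name__ == "__main__":
    train_with_eta()
```

Computing the estimate at the top of the loop, as the patch does, means the logged "Time Left" reflects only completed epochs; updating the meter at the bottom of the loop keeps the average current for the next iteration.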