From c5788ba19c3df403ddc433260ebc2cb9ef622710 Mon Sep 17 00:00:00 2001 From: D-X-Y <280835372@qq.com> Date: Mon, 24 May 2021 02:35:32 +0000 Subject: [PATCH] Initialize best_new_param with a detached clone so a valid tensor is always appended --- exps/LFNA/lfna_meta_model.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/exps/LFNA/lfna_meta_model.py b/exps/LFNA/lfna_meta_model.py index 3ec1acc..823040e 100644 --- a/exps/LFNA/lfna_meta_model.py +++ b/exps/LFNA/lfna_meta_model.py @@ -1,7 +1,6 @@ ##################################################### # Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.04 # ##################################################### -import copy import torch import torch.nn.functional as F @@ -294,7 +293,9 @@ class LFNA_Meta(super_core.SuperModule): best_loss = init_info["loss"] new_param.data.copy_(init_info["param"].data) else: - best_new_param, best_loss = None, 1e9 + best_loss = 1e9 + with torch.no_grad(): + best_new_param = new_param.detach().clone() for iepoch in range(epochs): optimizer.zero_grad() _, [_], time_embed = self(timestamp.view(1, 1), None, True) @@ -310,7 +311,7 @@ class LFNA_Meta(super_core.SuperModule): if meta_loss.item() < best_loss: with torch.no_grad(): best_loss = meta_loss.item() - best_new_param = new_param.detach() + best_new_param = new_param.detach().clone() with torch.no_grad(): self.replace_append_learnt(None, None) self.append_fixed(timestamp, best_new_param)