update codes
@@ -54,15 +54,15 @@ def main():
   if not os.path.isdir(args.save_path):
     os.makedirs(args.save_path)
   log = open(os.path.join(args.save_path, 'log-seed-{:}.txt'.format(args.manualSeed)), 'w')
-  print_log('save path : {}'.format(args.save_path), log)
+  print_log('Save Path      : {:}'.format(args.save_path), log)
   state = {k: v for k, v in args._get_kwargs()}
   print_log(state, log)
-  print_log("Random Seed: {}".format(args.manualSeed), log)
-  print_log("Python version : {}".format(sys.version.replace('\n', ' ')), log)
-  print_log("Torch  version : {}".format(torch.__version__), log)
-  print_log("CUDA   version : {}".format(torch.version.cuda), log)
-  print_log("cuDNN  version : {}".format(cudnn.version()), log)
-  print_log("Num of GPUs    : {}".format(torch.cuda.device_count()), log)
+  print_log("Random Seed    : {:}".format(args.manualSeed), log)
+  print_log("Python version : {:}".format(sys.version.replace('\n', ' ')), log)
+  print_log("Torch  version : {:}".format(torch.__version__), log)
+  print_log("CUDA   version : {:}".format(torch.version.cuda), log)
+  print_log("cuDNN  version : {:}".format(cudnn.version()), log)
+  print_log("Num of GPUs    : {:}".format(torch.cuda.device_count()), log)
   args.dataset = args.dataset.lower()

   config = load_config(args.model_config)
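The print_log helper used throughout these hunks is defined elsewhere in the repository; a minimal sketch of what such a helper typically looks like (an assumption for illustration, not the project's actual implementation):

def print_log(message, log):
  # Hypothetical sketch: echo the message to stdout and mirror it into the
  # already-opened log file so each run leaves a persistent record.
  print(message)
  if log is not None:
    log.write('{:}\n'.format(message))
    log.flush()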
@@ -21,7 +21,7 @@ def obtain_best(accuracies):

 def main_procedure(config, dataset, data_path, args, genotype, init_channels, layers, log):

-  train_data, test_data, class_num = get_datasets(dataset, data_path, args.cutout)
+  train_data, test_data, class_num = get_datasets(dataset, data_path, config.cutout)

   print_log('-------------------------------------- main-procedure', log)
   print_log('config        : {:}'.format(config), log)
@@ -39,9 +39,9 @@ def main_procedure(config, dataset, data_path, args, genotype, init_channels, la
   print_log('genotype      : {:}'.format(genotype), log)
   print_log('args          : {:}'.format(args), log)
   print_log('Train-Dataset : {:}'.format(train_data), log)
-  print_log('Train-Trans   : {:}'.format(train_transform), log)
+  print_log('Train-Trans   : {:}'.format(train_data.transform), log)
   print_log('Test--Dataset : {:}'.format(test_data ), log)
-  print_log('Test--Trans   : {:}'.format(test_transform ), log)
+  print_log('Test--Trans   : {:}'.format(test_data.transform ), log)


   train_loader = torch.utils.data.DataLoader(train_data, batch_size=config.batch_size, shuffle=True,
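This hunk logs the transforms straight off the dataset objects instead of through separate train_transform / test_transform variables, which works because torchvision datasets keep a reference to the transform they were constructed with. A minimal standalone illustration (CIFAR-10 and the path here are assumptions for the example, not the repository's get_datasets helper):

import torchvision.datasets as dset
import torchvision.transforms as transforms

train_transform = transforms.Compose([
  transforms.RandomCrop(32, padding=4),   # typical CIFAR-style augmentation
  transforms.RandomHorizontalFlip(),
  transforms.ToTensor(),
])
train_data = dset.CIFAR10('./data', train=True, transform=train_transform, download=True)
# The dataset stores the transform it was given, so it can be logged directly:
assert train_data.transform is train_transform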
@@ -62,7 +62,7 @@ def main_procedure_imagenet(config, data_path, args, genotype, init_channels, la

   total_param, aux_param = count_parameters_in_MB(basemodel), count_parameters_in_MB(basemodel.auxiliary_param())
   print_log('Network =>\n{:}'.format(basemodel), log)
-  #print_FLOPs(basemodel, (1,3,224,224), [print_log, log])
+  print_FLOPs(basemodel, (1,3,224,224), [print_log, log])
   print_log('Parameters : {:} - {:} = {:.3f} MB'.format(total_param, aux_param, total_param - aux_param), log)
   print_log('config        : {:}'.format(config), log)
   print_log('genotype      : {:}'.format(genotype), log)
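count_parameters_in_MB and print_FLOPs are utilities defined elsewhere in this repository. As a rough point of reference only, a parameter counter of this kind is usually little more than a sum over numel(); the sketch below is an assumption about its behavior, not the project's code:

def count_parameters_in_MB(model_or_params):
  # Total number of parameter elements, reported in millions ("MB" in the
  # log line above reads as mega-parameters rather than megabytes).
  if hasattr(model_or_params, 'parameters'):
    params = model_or_params.parameters()
  else:
    params = model_or_params            # e.g. a list such as basemodel.auxiliary_param()
  return sum(p.numel() for p in params) / 1e6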
@@ -75,7 +75,7 @@ def main_procedure_imagenet(config, data_path, args, genotype, init_channels, la
   criterion_smooth = CrossEntropyLabelSmooth(class_num, config.label_smooth).cuda()


-  optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay, nestero=True)
+  optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay, nesterov=True)
   if config.type == 'cosine':
     scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, float(config.epochs))
   elif config.type == 'steplr':
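The misspelled keyword nestero=True would raise a TypeError the moment the optimizer is constructed, since torch.optim.SGD only accepts nesterov. A self-contained sketch of the corrected optimizer plus the cosine schedule this hunk selects (the model and hyper-parameter values are placeholders, not the config used by the repository):

import torch
import torch.nn as nn

model = nn.Linear(10, 2)                 # placeholder network for illustration
optimizer = torch.optim.SGD(model.parameters(), lr=0.025, momentum=0.9,
                            weight_decay=3e-4, nesterov=True)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=250.0)

for epoch in range(3):                   # training loop body elided
  optimizer.step()                       # would follow loss.backward() in real training
  scheduler.step()                       # anneal the learning rate once per epoch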