update configs
This commit is contained in:
		| @@ -5,6 +5,7 @@ | |||||||
|   "momentum"  : ["float", 0.9], |   "momentum"  : ["float", 0.9], | ||||||
|   "decay"     : ["float", 0.0003], |   "decay"     : ["float", 0.0003], | ||||||
|   "LR"        : ["float", 0.025], |   "LR"        : ["float", 0.025], | ||||||
|  |   "LR_MIN"    : ["float", 0.0001], | ||||||
|   "auxiliary" : ["bool", 1], |   "auxiliary" : ["bool", 1], | ||||||
|   "auxiliary_weight" : ["float", 0.4], |   "auxiliary_weight" : ["float", 0.4], | ||||||
|   "grad_clip" : ["float", 5], |   "grad_clip" : ["float", 5], | ||||||
|   | |||||||
							
								
								
									
										14
									
								
								configs/nas-cifar-cos-cutB128.config
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								configs/nas-cifar-cos-cutB128.config
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,14 @@ | |||||||
|  | { | ||||||
|  |   "type"      : ["str",   "cosine"], | ||||||
|  |   "batch_size": ["int",   128], | ||||||
|  |   "epochs"    : ["int",   600], | ||||||
|  |   "momentum"  : ["float", 0.9], | ||||||
|  |   "decay"     : ["float", 0.0003], | ||||||
|  |   "LR"        : ["float", 0.025], | ||||||
|  |   "LR_MIN"    : ["float", 0.0001], | ||||||
|  |   "auxiliary" : ["bool", 1], | ||||||
|  |   "auxiliary_weight" : ["float", 0.4], | ||||||
|  |   "grad_clip" : ["float", 5], | ||||||
|  |   "cutout"    : ["int", 16], | ||||||
|  |   "drop_path_prob" : ["float", 0.2] | ||||||
|  | } | ||||||
							
								
								
									
										14
									
								
								configs/nas-cifar-cos-cutB64.config
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								configs/nas-cifar-cos-cutB64.config
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,14 @@ | |||||||
|  | { | ||||||
|  |   "type"      : ["str",   "cosine"], | ||||||
|  |   "batch_size": ["int",   64], | ||||||
|  |   "epochs"    : ["int",   600], | ||||||
|  |   "momentum"  : ["float", 0.9], | ||||||
|  |   "decay"     : ["float", 0.0003], | ||||||
|  |   "LR"        : ["float", 0.025], | ||||||
|  |   "LR_MIN"    : ["float", 0.0001], | ||||||
|  |   "auxiliary" : ["bool", 1], | ||||||
|  |   "auxiliary_weight" : ["float", 0.4], | ||||||
|  |   "grad_clip" : ["float", 5], | ||||||
|  |   "cutout"    : ["int", 16], | ||||||
|  |   "drop_path_prob" : ["float", 0.2] | ||||||
|  | } | ||||||
| @@ -5,6 +5,7 @@ | |||||||
|   "momentum"  : ["float", 0.9], |   "momentum"  : ["float", 0.9], | ||||||
|   "decay"     : ["float", 0.0003], |   "decay"     : ["float", 0.0003], | ||||||
|   "LR"        : ["float", 0.025], |   "LR"        : ["float", 0.025], | ||||||
|  |   "LR_MIN"    : ["float", 0.0001], | ||||||
|   "auxiliary" : ["bool", 1], |   "auxiliary" : ["bool", 1], | ||||||
|   "auxiliary_weight" : ["float", 0.4], |   "auxiliary_weight" : ["float", 0.4], | ||||||
|   "grad_clip" : ["float", 5], |   "grad_clip" : ["float", 5], | ||||||
|   | |||||||
| @@ -54,7 +54,7 @@ def main_procedure(config, dataset, data_path, args, genotype, init_channels, la | |||||||
|   optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay) |   optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay) | ||||||
|   #optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay, nesterov=True) |   #optimizer = torch.optim.SGD(model.parameters(), config.LR, momentum=config.momentum, weight_decay=config.decay, nesterov=True) | ||||||
|   if config.type == 'cosine': |   if config.type == 'cosine': | ||||||
|     scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, float(config.epochs)) |     scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, float(config.epochs), eta_min=float(config.LR_MIN)) | ||||||
|   else: |   else: | ||||||
|     raise ValueError('Can not find the scheduler type : {:}'.format(config.type)) |     raise ValueError('Can not find the scheduler type : {:}'.format(config.type)) | ||||||
|  |  | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user