class_or_func: Adam
module_path: torch.optim
args: []
kwargs:
  betas: [0.9, 0.999]
  weight_decay: 0.1
  amsgrad: False
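# A minimal sketch (not part of this config) of how an entry like this could be
# resolved into a torch.optim.Adam instance. The helper name `build_from_config`
# and the dict layout are illustrative assumptions, not a defined API:
#
#   import importlib
#
#   def build_from_config(cfg, params):
#       module = importlib.import_module(cfg["module_path"])   # e.g. torch.optim
#       factory = getattr(module, cfg["class_or_func"])         # e.g. Adam
#       return factory(params, *cfg["args"], **cfg["kwargs"])   # Adam(params, betas=[0.9, 0.999], weight_decay=0.1, amsgrad=False)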