Optimizer setup from the language-model training script: SGD or AdamW is selected by flag, the commented-out Lamb trust-ratio experiments are left in place, and the active optimizer can be wrapped in Lookahead at the end.

    # make dirs
    if not os.path.exists(args.out_path):
        os.makedirs(args.out_path)

    optimizer = None
    # Ensure the optimizer is optimizing params, which includes both the model's
    # weights as well as the criterion's weight (i.e. Adaptive Softmax)
    if args.optimizer == 'sgd':
        optimizer = torch.optim.SGD(params, lr=args.lr, weight_decay=args.wdecay)
    if args.optimizer == 'adamw':
        optimizer = torch.optim.AdamW(params, lr=args.lr, weight_decay=args.wdecay, betas=(0, 0.999))
        #optimizer = Lamb(params, lr=args.lr, weight_decay=args.wdecay, min_trust=0.1)
        #optimizer = Lamb(params, lr=args.lr, weight_decay=args.wdecay, min_trust=0, random_min_trust=0.2, random_trust_dice=10)
        #optimizer = Lamb(params, lr=args.lr, weight_decay=args.wdecay, min_trust=0.2, random_min_trust=0.5, random_trust_dice=4)
        optimizer = Lamb(params, lr=args.lr, weight_decay=args.wdecay, min_trust=0.25)
        from lookahead import Lookahead
        print('Lookahead - k {} and alpha {}'.format(5, 0.8))
        optimizer = Lookahead(base_optimizer=optimizer, k=5, alpha=0.8)

Start-of-line (SOL) training for the handwriting pipeline: load the SOL model and its optimizer state, run a benchmark validation pass, and snapshot the best weights.

    sol, lf, hw = init_model(config, sol_dir='current', only_load='sol')

    optimizer = torch.optim.Adam(sol.parameters(), lr=train_config['sol']['learning_rate'])
    optim_path = os.path.join(train_config['snapshot_path'], "sol_optim.pt")
    if os.path.exists(optim_path):
        optimizer.load_state_dict(safe_load.torch_state(optim_path))
    else:
        print("Failed to load Optim Settings")

    # Validation: predictions are the SOL network's output for a batch x.
    predictions = transformation_utils.pt_xyrs_2_xyxy(predictions)
    loss = alignment_loss(predictions, sol_gt, x, alpha_alignment, alpha_backprop)
    # ... accumulate sum_loss and steps over the validation set ...
    print("First Validation Step Complete")
    print("Benchmark Validation CER:", sum_loss/steps)

    # At any point you can hit Ctrl + C to break out of training early.
    lowest_loss = 100000000  # initial stored best loss
    if lowest_loss > sum_loss/steps:
        lowest_loss = sum_loss/steps
        dirname = train_config['snapshot_path']
        if len(dirname) != 0 and not os.path.exists(dirname):
            os.makedirs(dirname)
        save_path = os.path.join(dirname, "sol.pt")
        torch.save(sol.state_dict(), save_path)
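A note on `safe_load.torch_state`: it is a helper from this project, not part of PyTorch. The same resume pattern in stock PyTorch takes only a few lines. This is a sketch with hypothetical helper names (`save_optimizer`, `load_optimizer`), assuming the state file was written with `torch.save`:

    import os
    import torch

    def save_optimizer(optimizer, optim_path):
        # state_dict() carries momentum buffers, step counts, etc.,
        # so training can resume exactly where it stopped.
        torch.save(optimizer.state_dict(), optim_path)

    def load_optimizer(optimizer, optim_path):
        # Restore optimizer state if a snapshot exists; report failure otherwise.
        if os.path.exists(optim_path):
            optimizer.load_state_dict(torch.load(optim_path))
            return True
        print("Failed to load Optim Settings")
        return False

Saving the optimizer alongside the model weights (sol_optim.pt next to sol.pt) is what lets a resumed run pick up Adam's moment estimates instead of re-warming them from scratch.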
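The `lookahead` module imported in the first snippet is local to the repo and its source is not shown, so the sketch below only illustrates the underlying Lookahead rule (Zhang et al., 2019): an inner optimizer takes k fast steps, then shadow "slow" weights are pulled toward the fast weights by a factor alpha and the fast weights restart from there. The class name `SimpleLookahead` and its interface are assumptions for illustration, not the module's actual API.

    import torch

    class SimpleLookahead:
        # Minimal Lookahead: wrap any torch.optim optimizer ("fast" weights),
        # keep a shadow copy ("slow" weights), and every k steps move the slow
        # weights toward the fast ones, then restart the fast weights from them.
        def __init__(self, base_optimizer, k=5, alpha=0.8):
            self.base_optimizer = base_optimizer
            self.k = k
            self.alpha = alpha
            self.step_count = 0
            self.slow_weights = [[p.detach().clone() for p in group['params']]
                                 for group in base_optimizer.param_groups]

        def zero_grad(self):
            self.base_optimizer.zero_grad()

        def step(self):
            self.base_optimizer.step()  # one fast step
            self.step_count += 1
            if self.step_count % self.k == 0:
                for group, slows in zip(self.base_optimizer.param_groups, self.slow_weights):
                    for p, slow in zip(group['params'], slows):
                        slow.add_(p.detach() - slow, alpha=self.alpha)  # slow += alpha * (fast - slow)
                        p.data.copy_(slow)                              # fast restarts from slow

Usage mirrors a plain optimizer: wrap it once, e.g. opt = SimpleLookahead(torch.optim.AdamW(model.parameters(), lr=1e-3), k=5, alpha=0.8), then keep calling opt.zero_grad() and opt.step() in the training loop.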