{"id":24266,"library":"prodigy-plus-schedule-free","title":"ProdigyPlusScheduleFree","description":"Automatic learning rate optimizer combining Prodigy's adaptive LR with Schedule-Free's constant-parameter interpolation. Version 2.0.1 improved weight decay handling. Active development.","status":"active","version":"2.0.1","language":"python","source_language":"en","source_url":"https://github.com/LoganBooker/prodigy-plus-schedule-free","tags":["optimizer","learning-rate","schedule-free","prodigy","pytorch"],"install":[{"cmd":"pip install prodigy-plus-schedule-free","lang":"bash","label":"Default install"}],"dependencies":[{"reason":"PyTorch is required for optimizers and tensors.","package":"torch","optional":false}],"imports":[{"note":"Common mistake: using 'prodigyplus' instead of 'prodigy_plus_schedule_free'.","wrong":"from prodigyplus import ProdigyPlusScheduleFree","symbol":"ProdigyPlusScheduleFree","correct":"from prodigy_plus_schedule_free import ProdigyPlusScheduleFree"}],"quickstart":{"code":"import torch\nfrom prodigy_plus_schedule_free import ProdigyPlusScheduleFree\n\nmodel = torch.nn.Linear(10, 2)\noptimizer = ProdigyPlusScheduleFree(model.parameters(), lr=1.0)\noptimizer.train()\nfor data, target in [(torch.randn(10), torch.tensor(1))]:\n    optimizer.zero_grad()\n    loss = torch.nn.functional.cross_entropy(model(data), target.unsqueeze(0))\n    loss.backward()\n    optimizer.step()","lang":"python","description":"Basic usage: instantiate optimizer, call .train() before training loop, step normally."},"warnings":[{"fix":"Update import: 'from prodigy_plus_schedule_free import ProdigyPlusScheduleFree'.","message":"In v2.0.0, the import path changed from 'prodigyplus_schedulefree' to 'prodigy_plus_schedule_free'. Old imports will break.","severity":"breaking","affected_versions":"<2.0.0"},{"fix":"Always switch modes: optimizer.train() before training, optimizer.eval() before inference.","message":"You must call .train() at the start of each training loop and .eval() for evaluation to ensure correct parameter interpolation.","severity":"gotcha","affected_versions":"all"},{"fix":"Upgrade to >=2.0.1. If you cannot upgrade, avoid using weight_decay or implement manually.","message":"Parameter 'weight_decay' had a bug in v1.x where it was applied incorrectly. Use v2.0.1+ for correct weight decay.","severity":"deprecated","affected_versions":"<2.0.1"}],"env_vars":null,"last_verified":"2026-05-01T00:00:00.000Z","next_check":"2026-07-30T00:00:00.000Z","problems":[{"fix":"Use: from prodigy_plus_schedule_free import ProdigyPlusScheduleFree","cause":"Old import pattern (prodigyplus vs prodigy_plus_schedule_free).","error":"ImportError: cannot import name 'ProdigyPlusScheduleFree' from 'prodigyplus'"},{"fix":"Create optimizer after moving model to device: model.to(device); optimizer = ProdigyPlusScheduleFree(model.parameters(), lr=1.0)","cause":"Using default behavior without specifying device; optimizer might not handle device placement automatically.","error":"RuntimeError: Expected all tensors to be on the same device, but found at least two devices"},{"fix":"Ensure optimizer.train() is called before the loop and zero_grad() before backward.","cause":"Calling optimizer.step() without optimizer.zero_grad() or without setting model to train mode.","error":"TypeError: 'NoneType' object is not callable"}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}