from abc import ABCMeta

from megengine.optimizer import Optimizer


class LRScheduler(metaclass=ABCMeta):
    r"""Base class for all learning rate based schedulers.

    Args:
        optimizer (Optimizer): wrapped optimizer.
        current_epoch (int): the index of current epoch. Default: -1.

    Returns:
        An instance of the LRScheduler.
    """

    def __init__(  # pylint: disable=too-many-branches
        self, optimizer: Optimizer, current_epoch: int = -1
    ):
        if not isinstance(optimizer, Optimizer):
            raise TypeError(
                "optimizer argument given to the lr_scheduler should be Optimizer"
            )
        self.optimizer = optimizer
        self.current_epoch = current_epoch
        if current_epoch == -1:
            # Fresh run: record each group's starting lr so schedulers can
            # derive later values from it.
            for group in self.optimizer.param_groups:
                group.setdefault("initial_lr", group["lr"])
        else:
            # Resuming: every param group must already carry its initial lr.
            for i, group in enumerate(optimizer.param_groups):
                if "initial_lr" not in group:
                    raise KeyError(
                        "param 'initial_lr' is not specified in "
                        "param_groups[{}] when resuming an optimizer".format(i)
                    )

        self.base_lrs = list(
            map(lambda group: group["initial_lr"], self.optimizer.param_groups)
        )

        self.step()

    def state_dict(self):
        r"""Returns the state of the scheduler as a :class:`dict`.

        It contains an entry for every variable in self.__dict__ which
        is not the optimizer.
        """
        raise NotImplementedError

    def load_state_dict(self, state_dict):
        r"""Loads the scheduler's state.

        Args:
            state_dict: scheduler state.
        """
        raise NotImplementedError

    def get_lr(self):
        r"""Compute current learning rate for the scheduler."""
        raise NotImplementedError
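
The three methods above are abstract hooks: a subclass supplies the decay rule (``get_lr``) and the (de)serialization of its own attributes. Below is a minimal sketch of a concrete scheduler written against the base class as shown here; the ``ExponentialLR`` name and its ``gamma`` parameter are illustrative, not taken from this page. Note that the base constructor ends by calling ``self.step()`` (not shown in this excerpt), so any attribute that ``get_lr`` reads must be set before delegating to ``super().__init__``.

class ExponentialLR(LRScheduler):
    r"""Decays each parameter group's learning rate by ``gamma`` every epoch.

    Hypothetical example, not part of the documented API.
    """

    def __init__(self, optimizer: Optimizer, gamma: float, current_epoch: int = -1):
        self.gamma = gamma  # must exist before super().__init__ triggers step()
        super().__init__(optimizer, current_epoch)

    def state_dict(self):
        # Per the base contract: every attribute except the wrapped optimizer.
        return {k: v for k, v in self.__dict__.items() if k != "optimizer"}

    def load_state_dict(self, state_dict):
        self.__dict__.update(state_dict)

    def get_lr(self):
        # One value per param group, derived from the recorded initial rates.
        return [
            base_lr * self.gamma ** self.current_epoch for base_lr in self.base_lrs
        ]

Excluding the optimizer from ``state_dict`` keeps scheduler checkpoints small and independent: the optimizer carries its own state and is reattached via the constructor when the scheduler is rebuilt.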
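A hypothetical usage sketch, assuming the MegEngine optimizer API (``megengine.optimizer.SGD``, ``megengine.module.Linear``); the model and hyperparameter values are placeholders:

import megengine.module as M
from megengine.optimizer import SGD

model = M.Linear(4, 2)
opt = SGD(model.parameters(), lr=0.1)

# current_epoch=-1 (the default): the constructor records each group's
# current lr under "initial_lr" and takes the first step immediately.
sched = ExponentialLR(opt, gamma=0.9)

for epoch in range(3):
    # ... one epoch of forward/backward/opt.step() ...
    sched.step()  # assumed to advance current_epoch and apply get_lr()

When resuming from a checkpoint, pass the saved epoch index instead (for example ``ExponentialLR(opt, gamma=0.9, current_epoch=5)``); the base constructor then requires ``initial_lr`` to be present in every param group and raises a ``KeyError`` otherwise.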