Source code for torch.optim
""
`torch.optim`是一个实现各种优化算法的包。
最常用的方法已经得到支持,并且接口足够通用
,以便更复杂的方法也可以轻松集成。
未来。
""
from torch.optim import lr_scheduler as lr_scheduler, swa_utils as swa_utils
from torch.optim._adafactor import Adafactor as Adafactor
from torch.optim.adadelta import Adadelta as Adadelta
from torch.optim.adagrad import Adagrad as Adagrad
from torch.optim.adam import Adam as Adam
from torch.optim.adamax import Adamax as Adamax
from torch.optim.adamw import AdamW as AdamW
from torch.optim.asgd import ASGD as ASGD
from torch.optim.lbfgs import LBFGS as LBFGS
from torch.optim.nadam import NAdam as NAdam
from torch.optim.optimizer import Optimizer as Optimizer
from torch.optim.radam import RAdam as RAdam
from torch.optim.rmsprop import RMSprop as RMSprop
from torch.optim.rprop import Rprop as Rprop
from torch.optim.sgd import SGD as SGD
from torch.optim.sparse_adam import SparseAdam as SparseAdam
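# Adafactor is defined in the private _adafactor submodule; pointing its
# __module__ at the public package makes it report as torch.optim.Adafactor.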
Adafactor.__module__ = "torch.optim"
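# Importing the submodules above also binds their names (adam, sgd, ...) in
# this package namespace; delete them so only the public classes stay exposed.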
del adadelta  # type: ignore[name-defined] # noqa: F821
del adagrad  # type: ignore[name-defined] # noqa: F821
del adam  # type: ignore[name-defined] # noqa: F821
del adamw  # type: ignore[name-defined] # noqa: F821
del sparse_adam  # type: ignore[name-defined] # noqa: F821
del adamax  # type: ignore[name-defined] # noqa: F821
del asgd  # type: ignore[name-defined] # noqa: F821
del sgd  # type: ignore[name-defined] # noqa: F821
del radam  # type: ignore[name-defined] # noqa: F821
del rprop  # type: ignore[name-defined] # noqa: F821
del rmsprop  # type: ignore[name-defined] # noqa: F821
del optimizer  # type: ignore[name-defined] # noqa: F821
del nadam  # type: ignore[name-defined] # noqa: F821
del lbfgs  # type: ignore[name-defined] # noqa: F821
__all__ = [
    "Adafactor",
    "Adadelta",
    "Adagrad",
    "Adam",
    "Adamax",
    "AdamW",
    "ASGD",
    "LBFGS",
    "lr_scheduler",
    "NAdam",
    "Optimizer",
    "RAdam",
    "RMSprop",
    "Rprop",
    "SGD",
    "SparseAdam",
    "swa_utils",
]
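For illustration only (not part of the module source): a minimal sketch of how an optimizer and a learning-rate scheduler from this package are typically driven in a training loop. The `Linear` model, random tensors, and hyperparameters below are made-up stand-ins.

import torch

# Hypothetical toy setup: a linear model fit to random data.
model = torch.nn.Linear(10, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)
loss_fn = torch.nn.MSELoss()

inputs = torch.randn(32, 10)
targets = torch.randn(32, 1)

for epoch in range(2):
    optimizer.zero_grad()                   # clear gradients from the previous step
    loss = loss_fn(model(inputs), targets)
    loss.backward()                         # populate .grad on each parameter
    optimizer.step()                        # apply the SGD update
    scheduler.step()                        # advance the learning-rate schedule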