# torch/optim/adagrad.pyi
from .optimizer import Optimizer, ParamsT

class Adagrad(Optimizer):
    def __init__(
        self,
        params: ParamsT,
        lr: float = ...,
        lr_decay: float = ...,
        weight_decay: float = ...,
        initial_accumulator_value: float = ...,
        eps: float = ...,
    ) -> None: ...
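
# A minimal usage sketch, assuming the constructor signature typed above.
# The model, input tensor, and hyperparameter values are illustrative
# placeholders only; the defaults elided as `...` in this stub are not
# reproduced here. Kept as a comment so this file remains a valid stub.
#
#     import torch
#     from torch.optim import Adagrad
#
#     model = torch.nn.Linear(10, 1)
#     optimizer = Adagrad(
#         model.parameters(),
#         lr=0.01,                        # illustrative learning rate
#         weight_decay=0.0,               # illustrative regularization setting
#         initial_accumulator_value=0.0,  # starting value for the per-parameter accumulators
#     )
#
#     loss = model(torch.randn(4, 10)).sum()
#     loss.backward()        # populate parameter gradients
#     optimizer.step()       # apply the Adagrad update
#     optimizer.zero_grad()  # clear gradients before the next iteration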