# torch/optim/adagrad.pyi
from .optimizer import Optimizer, ParamsT

# Type stub for the Adagrad optimizer. The `...` defaults are stub placeholders
# for the concrete values defined in the runtime implementation.
class Adagrad(Optimizer):
    def __init__(
        self,
        params: ParamsT,
        lr: float = ...,
        lr_decay: float = ...,
        weight_decay: float = ...,
        initial_accumulator_value: float = ...,
        eps: float = ...,
    ) -> None: ...
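
# A minimal usage sketch (illustrative only, not part of the stub). It assumes
# the runtime torch.optim.Adagrad class and a small hypothetical model:
#
#     import torch
#
#     model = torch.nn.Linear(4, 2)                      # hypothetical module
#     opt = torch.optim.Adagrad(model.parameters(), lr=0.01)
#
#     loss = model(torch.randn(8, 4)).sum()              # dummy forward pass
#     loss.backward()                                     # accumulate gradients
#     opt.step()                                          # apply the Adagrad update
#     opt.zero_grad()                                     # clear gradients for the next step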