blob: 5dc2be2f54c54a86b28837aa270bae50a358281f [file] [log] [blame]
from .optimizer import Optimizer, params_t
class Adagrad(Optimizer):
    """Type-stub signature for the Adagrad optimizer.

    Declares the constructor interface only; all parameter defaults are
    elided with ``...`` per stub convention (the actual default values
    live in the runtime implementation module).
    """

    def __init__(
        self,
        params: params_t,  # iterable of parameters (or param groups) to optimize
        lr: float = ...,  # learning rate
        lr_decay: float = ...,  # learning-rate decay factor
        weight_decay: float = ...,  # presumably an L2-penalty coefficient — confirm against runtime docs
        initial_accumulator_value: float = ...,  # starting value for the squared-gradient accumulator — TODO confirm
        eps: float = ...,  # NOTE(review): likely a numerical-stability term added to the denominator — verify
    ) -> None: ...