from typing import Any, Optional, Tuple

from ... import Tensor
from .. import Parameter
from .module import Module

class Threshold(Module):
    threshold: float = ...
    value: float = ...
    inplace: bool = ...
    def __init__(self, threshold: float, value: float, inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
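
# Illustrative note (not part of the stub API): per the documented semantics,
# Threshold computes y = x if x > threshold else value, e.g.
#   m = Threshold(0.1, 20.0); y = m(x)  # entries <= 0.1 are replaced by 20.0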

class ReLU(Threshold):
    def __init__(self, inplace: bool = ...) -> None: ...

class RReLU(Module):
    lower: float = ...
    upper: float = ...
    inplace: bool = ...
    def __init__(self, lower: float = ..., upper: float = ..., inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
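
# Illustrative note: during training the negative-input slope is sampled
# uniformly from [lower, upper]; in eval mode the fixed slope
# (lower + upper) / 2 is used, e.g.
#   m = RReLU(0.1, 0.3); y = m(x)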

class Hardtanh(Module):
    min_val: float = ...
    max_val: float = ...
    inplace: bool = ...
    def __init__(self, min_val: float = ..., max_val: float = ..., inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class ReLU6(Hardtanh):
    def __init__(self, inplace: bool = ...) -> None: ...
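
# Illustrative note: Hardtanh clamps the input to [min_val, max_val];
# ReLU6 is the special case Hardtanh(0.0, 6.0), i.e. min(max(x, 0), 6).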

class Sigmoid(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class Tanh(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class ELU(Module):
    alpha: float = ...
    inplace: bool = ...
    def __init__(self, alpha: float = ..., inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class CELU(Module):
    alpha: float = ...
    inplace: bool = ...
    def __init__(self, alpha: float = ..., inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class SELU(Module):
    inplace: bool = ...
    def __init__(self, inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class GLU(Module):
    dim: int = ...
    def __init__(self, dim: int = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
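
# Illustrative note: GLU splits the input into halves a, b along `dim` and
# returns a * sigmoid(b), so that dimension is halved in the output, e.g.
#   m = GLU(dim=-1); y = m(x)  # x: (N, 2*C) -> y: (N, C)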

class GELU(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class Hardshrink(Module):
    lambd: float = ...
    def __init__(self, lambd: float = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class LeakyReLU(Module):
    negative_slope: float = ...
    inplace: bool = ...
    def __init__(self, negative_slope: float = ..., inplace: bool = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class LogSigmoid(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class Softplus(Module):
    beta: float = ...
    threshold: float = ...
    def __init__(self, beta: float = ..., threshold: float = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
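
# Illustrative note: Softplus computes (1 / beta) * log(1 + exp(beta * x));
# for numerical stability it reverts to the identity where
# beta * x > threshold.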

class Softshrink(Module):
    lambd: float = ...
    def __init__(self, lambd: float = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class PReLU(Module):
    num_parameters: int = ...
    weight: Parameter = ...
    def __init__(self, num_parameters: int = ..., init: float = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
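
# Illustrative note: PReLU's negative slope is the learnable `weight`
# Parameter, initialized to `init`; num_parameters is 1 (a single shared
# slope) or the input's channel count (one slope per channel), e.g.
#   m = PReLU(num_parameters=64)  # one learnable slope per channel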

class Softsign(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class Tanhshrink(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class Softmin(Module):
    dim: Optional[int] = ...
    def __init__(self, dim: Optional[int] = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class Softmax(Module):
    dim: Optional[int] = ...
    def __init__(self, dim: Optional[int] = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...
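
# Illustrative note: Softmax rescales along `dim` so the output is
# non-negative and sums to 1 over that dimension, e.g.
#   m = Softmax(dim=1); p = m(logits)  # p.sum(dim=1) == 1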

class Softmax2d(Module):
    def forward(self, input: Tensor) -> Tensor: ...

class LogSoftmax(Module):
    dim: Optional[int] = ...
    def __init__(self, dim: Optional[int] = ...) -> None: ...
    def forward(self, input: Tensor) -> Tensor: ...

class MultiheadAttention(Module):
    embed_dim: Any = ...
    kdim: Any = ...
    vdim: Any = ...
    num_heads: Any = ...
    dropout: Any = ...
    head_dim: Any = ...
    in_proj_weight: Any = ...
    q_proj_weight: Any = ...
    k_proj_weight: Any = ...
    v_proj_weight: Any = ...
    in_proj_bias: Any = ...
    out_proj: Any = ...
    bias_k: Any = ...
    bias_v: Any = ...
    add_zero_attn: Any = ...
    def __init__(self, embed_dim: Any, num_heads: Any, dropout: float = ..., bias: bool = ..., add_bias_kv: bool = ..., add_zero_attn: bool = ..., kdim: Optional[Any] = ..., vdim: Optional[Any] = ...) -> None: ...
    def forward(self, query: Tensor, key: Tensor, value: Tensor, key_padding_mask: Optional[Tensor] = ..., need_weights: bool = ..., attn_mask: Optional[Tensor] = ...) -> Tuple[Tensor, Optional[Tensor]]: ...
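
# Usage sketch (illustrative, not part of the stub; assumes the documented
# (L, N, E) query and (S, N, E) key/value layout of this torch version):
#   mha = MultiheadAttention(embed_dim=256, num_heads=8)
#   attn_out, attn_weights = mha(query, key, value)
#   # attn_out: (L, N, 256); attn_weights: (N, L, S) when need_weights=True,
#   # otherwise None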