# Generated content DO NOT EDIT
from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
from os import PathLike
from candle.typing import _ArrayLike, Device, Scalar, Index, Shape
from candle import Tensor, DType, QTensor

@staticmethod
def avg_pool2d(tensor: Tensor, ksize: int, stride: int = 1) -> Tensor:
    """
    Applies the 2d avg-pool function to a given tensor.
    """
    pass

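# Usage sketch (editor's note, not part of the generated stub): assumes this
# stub maps to the candle.functional module and that candle.randn accepts a
# shape tuple, as in the candle-pyo3 examples. avg_pool2d expects an NCHW
# tensor:
#
#     import candle
#     from candle import functional as F
#
#     t = candle.randn((1, 3, 32, 32))    # (batch, channels, height, width)
#     out = F.avg_pool2d(t, 2, stride=2)  # 2x2 mean windows -> (1, 3, 16, 16)
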
@staticmethod
def gelu(tensor: Tensor) -> Tensor:
    """
    Applies the Gaussian Error Linear Unit (GELU) function to a given tensor.
    """
    pass

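# Usage sketch (editor's note): an elementwise activation, so the output
# shape matches the input; candle and F as in the avg_pool2d sketch above.
#
#     y = F.gelu(candle.randn((2, 8)))  # smooth, non-monotonic activation
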
@staticmethod
def max_pool2d(tensor: Tensor, ksize: int, stride: int = 1) -> Tensor:
    """
    Applies the 2d max-pool function to a given tensor.
    """
    pass

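# Usage sketch (editor's note): same NCHW layout as avg_pool2d, but each
# window keeps its maximum rather than its mean.
#
#     out = F.max_pool2d(candle.randn((1, 3, 32, 32)), 2, stride=2)  # -> (1, 3, 16, 16)
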
@staticmethod
def relu(tensor: Tensor) -> Tensor:
    """
    Applies the Rectified Linear Unit (ReLU) function to a given tensor.
    """
    pass

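# Usage sketch (editor's note): clamps negative entries to zero elementwise.
#
#     y = F.relu(candle.randn((2, 8)))  # max(x, 0), shape preserved
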
@staticmethod
def silu(tensor: Tensor) -> Tensor:
    """
    Applies the Sigmoid Linear Unit (SiLU) function to a given tensor.
    """
    pass

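# Usage sketch (editor's note): SiLU computes x * sigmoid(x) elementwise
# (also known as "swish").
#
#     y = F.silu(candle.randn((2, 8)))  # shape preserved
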
@staticmethod
def softmax(tensor: Tensor, dim: int) -> Tensor:
    """
    Applies the Softmax function to a given tensor.
    """
    pass

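# Usage sketch (editor's note): normalizes along `dim` so that slices along
# that dimension sum to 1.
#
#     probs = F.softmax(candle.randn((4, 10)), 1)  # each row of probs sums to 1
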
@staticmethod
def tanh(tensor: Tensor) -> Tensor:
    """
    Applies the tanh function to a given tensor.
    """
    pass
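# Usage sketch (editor's note): elementwise hyperbolic tangent, squashing
# values into (-1, 1).
#
#     y = F.tanh(candle.randn((2, 8)))  # shape preserved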