Generate *.pyi stubs for PyO3 wrapper (#870)

* Begin to generate type hints.

* generate correct stubs

* Correctly include stubs

* Add comments and type hints to static functions

* ensure candle-pyo3 directory

* Make `llama.rope.freq_base` optional

* `fmt`
Lukas Kreussel, 2023-09-16 18:23:38 +02:00 (committed by GitHub)
parent 7cafca835a
commit 8658df3485
15 changed files with 857 additions and 40 deletions

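With these stubs in place, type checkers and IDEs can resolve the API of the native PyO3 extension. As a minimal, hypothetical sketch (not part of this commit), a user script like the following would type-check against the generated stubs, assuming the `candle` package built from candle-pyo3 is installed:

# Hypothetical user script; only illustrates the stubbed API, not part of the commit.
import candle

def normalize(t: candle.Tensor) -> candle.Tensor:
    # `sum_keepdim` and `broadcast_div` are declared in the Tensor stub below.
    total = t.sum_keepdim(0)
    return t.broadcast_div(total)

x = candle.Tensor([1.0, 2.0, 3.0])
print(normalize(x).values())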
@@ -0,0 +1 @@
from .candle import *

@@ -0,0 +1,248 @@
# Generated content DO NOT EDIT
from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
from os import PathLike
from candle.typing import _ArrayLike, Device
class bf16(DType):
    pass

@staticmethod
def cat(tensors: List[Tensor], dim: int):
    """
    Concatenate the tensors across one axis.
    """
    pass

class f16(DType):
    pass

class f32(DType):
    pass

class f64(DType):
    pass

class i64(DType):
    pass

@staticmethod
def ones(shape: Sequence[int], dtype: Optional[DType] = None, device: Optional[Device] = None):
    """ """
    pass

@staticmethod
def rand(shape: Sequence[int], device: Optional[Device] = None):
    """
    Creates a new tensor with random values.
    """
    pass

@staticmethod
def randn(shape: Sequence[int], device: Optional[Device] = None):
    """ """
    pass

@staticmethod
def stack(tensors: List[Tensor], dim: int):
    """
    Stack the tensors along a new axis.
    """
    pass

@staticmethod
def tensor(data: _ArrayLike):
    """
    Creates a new tensor from a Python value. The value can be a scalar or array-like object.
    """
    pass

class u32(DType):
    pass

class u8(DType):
    pass

@staticmethod
def zeros(shape: Sequence[int], dtype: Optional[DType] = None, device: Optional[Device] = None):
    """ """
    pass

class DType:
    pass

class QTensor:
    def dequantize(self):
        """ """
        pass
    @property
    def ggml_dtype(self):
        """ """
        pass
    def matmul_t(self, lhs):
        """ """
        pass
    @property
    def rank(self):
        """ """
        pass
    @property
    def shape(self):
        """ """
        pass

class Tensor:
    def __init__(data: _ArrayLike):
        pass
    def argmax_keepdim(self, dim):
        """ """
        pass
    def argmin_keepdim(self, dim):
        """ """
        pass
    def broadcast_add(self, rhs):
        """ """
        pass
    def broadcast_as(self, shape):
        """ """
        pass
    def broadcast_div(self, rhs):
        """ """
        pass
    def broadcast_left(self, shape):
        """ """
        pass
    def broadcast_mul(self, rhs):
        """ """
        pass
    def broadcast_sub(self, rhs):
        """ """
        pass
    def contiguous(self):
        """ """
        pass
    def copy(self):
        """ """
        pass
    def cos(self):
        """ """
        pass
    def detach(self):
        """ """
        pass
    @property
    def device(self):
        """ """
        pass
    @property
    def dtype(self):
        """ """
        pass
    def exp(self):
        """ """
        pass
    def flatten_all(self):
        """ """
        pass
    def flatten_from(self, dim):
        """ """
        pass
    def flatten_to(self, dim):
        """ """
        pass
    def get(self, index):
        """ """
        pass
    def index_select(self, rhs, dim):
        """ """
        pass
    def is_contiguous(self):
        """ """
        pass
    def is_fortran_contiguous(self):
        """ """
        pass
    def log(self):
        """ """
        pass
    def matmul(self, rhs):
        """ """
        pass
    def max_keepdim(self, dim):
        """ """
        pass
    def mean_all(self):
        """ """
        pass
    def min_keepdim(self, dim):
        """ """
        pass
    def narrow(self, dim, start, len):
        """ """
        pass
    def powf(self, p):
        """ """
        pass
    def quantize(self, quantized_dtype):
        """ """
        pass
    @property
    def rank(self):
        """ """
        pass
    def recip(self):
        """ """
        pass
    def reshape(self, shape):
        """ """
        pass
    @property
    def shape(self):
        """
        Gets the tensor shape as a Python tuple.
        """
        pass
    def sin(self):
        """ """
        pass
    def sqr(self):
        """ """
        pass
    def sqrt(self):
        """ """
        pass
    def squeeze(self, dim):
        """ """
        pass
    @property
    def stride(self):
        """ """
        pass
    def sum_all(self):
        """ """
        pass
    def sum_keepdim(self, dims):
        """ """
        pass
    def t(self):
        """ """
        pass
    def to_device(self, device):
        """ """
        pass
    def to_dtype(self, dtype):
        """ """
        pass
    def transpose(self, dim1, dim2):
        """ """
        pass
    def unsqueeze(self, dim):
        """ """
        pass
    def values(self):
        """
        Gets the tensor's data as a Python scalar or array-like object.
        """
        pass
    def where_cond(self, on_true, on_false):
        """ """
        pass

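A hedged usage sketch of the Tensor/QTensor surface declared above (not part of the commit); exact runtime behavior depends on the compiled extension, and the "q8_0" quantized dtype name and its block-size constraint are assumptions:

# Sketch only: assumes the compiled `candle` extension is installed.
import candle

a = candle.randn((4, 32))
b = candle.rand((32, 8))
c = a.matmul(b).to_dtype(candle.f16)   # dtype objects are exposed at module level
print(c.shape, c.dtype, c.device)

# Quantization round-trip; "q8_0" is an assumed GGML dtype name and the last
# dimension is kept a multiple of 32 to satisfy its assumed block size.
q = a.quantize("q8_0")
print(q.ggml_dtype, q.rank, q.shape)
back = q.dequantize()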
@@ -0,0 +1,5 @@
# Generated content DO NOT EDIT
from .. import nn
silu = nn.silu
softmax = nn.softmax

@@ -0,0 +1,19 @@
# Generated content DO NOT EDIT
from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
from os import PathLike
from candle.typing import _ArrayLike, Device
from candle import Tensor, DType
@staticmethod
def silu(tensor: Tensor):
    """
    Applies the Sigmoid Linear Unit (SiLU) function to a given tensor.
    """
    pass

@staticmethod
def softmax(tensor: Tensor, dim: int):
    """
    Applies the Softmax function to a given tensor.
    """
    pass

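A short, hedged sketch exercising the silu/softmax signatures declared above. The import path of the re-export package is not shown in this diff; `candle.nn` is assumed here based on the `from .. import nn` re-export:

# Sketch only: `candle.nn` as the import path is an assumption.
import candle
from candle import nn

t = candle.randn((2, 8))
probs = nn.softmax(t, dim=1)          # softmax over the second dimension
act = nn.silu(t)
print(probs.sum_keepdim(1).values())  # each row of `probs` sums to ~1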
@@ -0,0 +1,16 @@
from typing import TypeVar, Union, Sequence

_T = TypeVar("_T")

_ArrayLike = Union[
    _T,
    Sequence[_T],
    Sequence[Sequence[_T]],
    Sequence[Sequence[Sequence[_T]]],
    Sequence[Sequence[Sequence[Sequence[_T]]]],
]

CPU: str = "cpu"
CUDA: str = "cuda"

Device = TypeVar("Device", CPU, CUDA)

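These aliases are meant for annotating user-facing code; a minimal sketch of how they might be used (not part of the commit):

# Sketch only: assumes the `candle` package and its `candle.typing` module are importable.
from candle.typing import _ArrayLike, Device
import candle

def to_tensor(data: _ArrayLike, device: Device = "cpu") -> candle.Tensor:
    # `_ArrayLike` covers scalars and nested sequences up to four levels deep.
    return candle.Tensor(data).to_device(device)

t = to_tensor([[1.0, 2.0], [3.0, 4.0]])
print(t.shape)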
@@ -0,0 +1,11 @@
# Generated content DO NOT EDIT
from .. import utils
cuda_is_available = utils.cuda_is_available
get_num_threads = utils.get_num_threads
has_accelerate = utils.has_accelerate
has_mkl = utils.has_mkl
load_ggml = utils.load_ggml
load_gguf = utils.load_gguf
load_safetensors = utils.load_safetensors
save_safetensors = utils.save_safetensors

@@ -0,0 +1,63 @@
# Generated content DO NOT EDIT
from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
from os import PathLike
from candle.typing import _ArrayLike, Device
from candle import Tensor, DType
@staticmethod
def cuda_is_available():
    """
    Returns true if the 'cuda' backend is available.
    """
    pass

@staticmethod
def get_num_threads():
    """
    Returns the number of threads used by candle.
    """
    pass

@staticmethod
def has_accelerate():
    """
    Returns true if candle was compiled with 'accelerate' support.
    """
    pass

@staticmethod
def has_mkl():
    """
    Returns true if candle was compiled with MKL support.
    """
    pass

@staticmethod
def load_ggml(path: Union[str, PathLike]):
    """
    Load a GGML file. Returns a tuple of three objects: a dictionary mapping tensor names to tensors,
    a dictionary mapping hyperparameter names to hyperparameter values, and a vocabulary.
    """
    pass

@staticmethod
def load_gguf(path: Union[str, PathLike]):
    """
    Loads a GGUF file. Returns a tuple of two dictionaries: the first maps tensor names to tensors,
    and the second maps metadata keys to metadata values.
    """
    pass

@staticmethod
def load_safetensors(path: Union[str, PathLike]):
    """
    Loads a safetensors file. Returns a dictionary mapping tensor names to tensors.
    """
    pass

@staticmethod
def save_safetensors(path: Union[str, PathLike], tensors: Dict[str, Tensor]):
    """
    Saves a dictionary of tensors to a safetensors file.
    """
    pass
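A rough usage sketch for the declarations above (not part of the commit), assuming the compiled extension is installed:

# Sketch only: round-trips a tensor through a safetensors file.
import candle
from candle import utils

tensors = {"weight": candle.randn((4, 4))}
utils.save_safetensors("model.safetensors", tensors)

loaded = utils.load_safetensors("model.safetensors")
print(list(loaded.keys()), loaded["weight"].shape)

if utils.cuda_is_available():
    w = loaded["weight"].to_device("cuda")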