ai-content-maker/.venv/Lib/site-packages/thinc/layers/__init__.py

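"""Built-in layers, combinators, and framework wrappers for Thinc.

This package collects the layer constructors that ship with Thinc so they can
all be imported from one place.
"""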
# Combinators
from .add import add
# Array manipulation
from .array_getitem import array_getitem
from .bidirectional import bidirectional
from .cauchysimilarity import CauchySimilarity
from .chain import chain
from .clipped_linear import ClippedLinear, HardSigmoid, HardTanh, ReluK
from .clone import clone
from .concatenate import concatenate
from .dish import Dish
from .dropout import Dropout
from .embed import Embed
from .expand_window import expand_window
from .gelu import Gelu
from .hard_swish import HardSwish
from .hard_swish_mobilenet import HardSwishMobilenet
from .hashembed import HashEmbed
from .layernorm import LayerNorm
from .linear import Linear
# Data-type transfers
from .list2array import list2array
from .list2padded import list2padded
from .list2ragged import list2ragged
from .logistic import Logistic
from .lstm import LSTM, PyTorchLSTM
from .map_list import map_list
from .maxout import Maxout
from .mish import Mish
from .multisoftmax import MultiSoftmax
from .mxnetwrapper import MXNetWrapper
from .noop import noop
from .padded2list import padded2list
from .parametricattention import ParametricAttention
from .parametricattention_v2 import ParametricAttention_v2
from .premap_ids import premap_ids
from .pytorchwrapper import (
    PyTorchRNNWrapper,
    PyTorchWrapper,
    PyTorchWrapper_v2,
    PyTorchWrapper_v3,
)
from .ragged2list import ragged2list
# Pooling
from .reduce_first import reduce_first
from .reduce_last import reduce_last
from .reduce_max import reduce_max
from .reduce_mean import reduce_mean
from .reduce_sum import reduce_sum
from .relu import Relu
from .remap_ids import remap_ids, remap_ids_v2
from .residual import residual
from .resizable import resizable
from .siamese import siamese
from .sigmoid import Sigmoid
from .sigmoid_activation import sigmoid_activation
from .softmax import Softmax, Softmax_v2
from .softmax_activation import softmax_activation
from .sparselinear import SparseLinear, SparseLinear_v2
from .strings2arrays import strings2arrays
from .swish import Swish
from .tensorflowwrapper import TensorFlowWrapper, keras_subclass
from .torchscriptwrapper import TorchScriptWrapper_v1, pytorch_to_torchscript_wrapper
from .tuplify import tuplify
from .uniqued import uniqued
from .with_array import with_array
from .with_array2d import with_array2d
from .with_cpu import with_cpu
from .with_debug import with_debug
from .with_flatten import with_flatten
from .with_flatten_v2 import with_flatten_v2
from .with_getitem import with_getitem
from .with_list import with_list
from .with_nvtx_range import with_nvtx_range
from .with_padded import with_padded
from .with_ragged import with_ragged
from .with_reshape import with_reshape
from .with_signpost_interval import with_signpost_interval
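
# Example (a minimal sketch): the combinators exported above compose individual
# layers into a single Model, e.g.
#
#     from thinc.api import Relu, Softmax, chain
#
#     model = chain(Relu(nO=64), Relu(nO=64), Softmax())
#     # train_X / train_Y below are placeholder training arrays; initialize()
#     # uses them to infer any missing input/output dimensions.
#     model.initialize(X=train_X[:5], Y=train_Y[:5])
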
# fmt: off
__all__ = [
    "CauchySimilarity",
    "Linear",
    "Dropout",
    "Embed",
    "expand_window",
    "HashEmbed",
    "LayerNorm",
    "LSTM",
    "Maxout",
    "Mish",
    "MultiSoftmax",
    "ParametricAttention",
    "ParametricAttention_v2",
    "PyTorchLSTM",
    "PyTorchWrapper",
    "PyTorchWrapper_v2",
    "PyTorchWrapper_v3",
    "PyTorchRNNWrapper",
    "Relu",
    "sigmoid_activation",
    "Sigmoid",
    "softmax_activation",
    "Softmax",
    "Softmax_v2",
    "SparseLinear",
    "SparseLinear_v2",
    "TensorFlowWrapper",
    "TorchScriptWrapper_v1",
    "add",
    "bidirectional",
    "chain",
    "clone",
    "concatenate",
    "noop",
    "residual",
    "uniqued",
    "siamese",
    "reduce_first",
    "reduce_last",
    "reduce_max",
    "reduce_mean",
    "reduce_sum",
    "resizable",
    "list2array",
    "list2ragged",
    "list2padded",
    "ragged2list",
    "padded2list",
    "with_reshape",
    "with_getitem",
    "with_array",
    "with_array2d",
    "with_cpu",
    "with_list",
    "with_ragged",
    "with_padded",
    "with_flatten",
    "with_flatten_v2",
    "with_debug",
    "with_nvtx_range",
    "with_signpost_interval",
    "remap_ids",
    "remap_ids_v2",
    "premap_ids",
    "Logistic",
    "ClippedLinear",
    "ReluK",
    "HardTanh",
    "HardSigmoid",
    "Dish",
    "HardSwish",
    "HardSwishMobilenet",
    "Swish",
    "Gelu",
    "keras_subclass",
    "MXNetWrapper",
    "map_list",
    "strings2arrays",
    "array_getitem",
    "tuplify",
    "pytorch_to_torchscript_wrapper",
]
# fmt: on
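
# Example (a minimal sketch; dimensions are arbitrary): the framework wrappers
# exported above turn external models into Thinc layers, e.g. wrapping a
# torch.nn.Module so it can be chained with native layers:
#
#     import torch.nn
#     from thinc.api import Linear, PyTorchWrapper, chain
#
#     wrapped = PyTorchWrapper(torch.nn.Linear(16, 8))
#     model = chain(wrapped, Linear(nO=2, nI=8))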