"""Python wrappers around the ``_activation`` fused activation kernels.

Each function dispatches to a custom op registered under
``torch.ops._activation`` and writes its result into a caller-allocated
``out`` tensor instead of returning a new one.
"""

import torch

# Importing the compiled extension registers the custom ops under
# ``torch.ops._activation``.
import activation._activation  # noqa: F401


def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
    """SwiGLU-style gated activation: ``out = silu(x[..., :d]) * x[..., d:]``.

    ``d = x.shape[-1] // 2``; ``out`` must have shape ``x.shape[:-1] + (d,)``.
    """
    torch.ops._activation.silu_and_mul(out, x)


def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
    """GeGLU-style gated activation: ``out = gelu(x[..., :d]) * x[..., d:]``.

    Uses the erf-based ("exact") GELU; ``d = x.shape[-1] // 2`` and ``out``
    must have shape ``x.shape[:-1] + (d,)``.
    """
    torch.ops._activation.gelu_and_mul(out, x)


def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
    """Gated activation using the tanh approximation of GELU.

    Computes ``out = gelu_tanh(x[..., :d]) * x[..., d:]`` with
    ``d = x.shape[-1] // 2``; ``out`` must have shape ``x.shape[:-1] + (d,)``.
    """
    torch.ops._activation.gelu_tanh_and_mul(out, x)


def fatrelu_and_mul(out: torch.Tensor,
                    x: torch.Tensor,
                    threshold: float = 0.0) -> None:
    """FATReLU gated activation: ``out = fatrelu(x[..., :d]) * x[..., d:]``.

    FATReLU zeroes values at or below ``threshold`` and passes larger values
    through unchanged; ``d = x.shape[-1] // 2`` and ``out`` must have shape
    ``x.shape[:-1] + (d,)``.
    """
    torch.ops._activation.fatrelu_and_mul(out, x, threshold)


def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
    """Element-wise FastGELU (tanh-based approximation), written into ``out``.

    ``out`` must have the same shape as ``x``.
    """
    torch.ops._activation.gelu_fast(out, x)


def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
    """Element-wise NewGELU (the tanh approximation used by GPT-2-style
    models), written into ``out``; ``out`` must have the same shape as ``x``.
    """
    torch.ops._activation.gelu_new(out, x)


def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
    """Element-wise QuickGELU, ``x * sigmoid(1.702 * x)``, written into ``out``.

    ``out`` must have the same shape as ``x``.
    """
    torch.ops._activation.gelu_quick(out, x)
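

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of the library API).
    # Assumes the extension was built with CUDA support and that ``x`` packs
    # the two gated halves along its last dimension.
    if torch.cuda.is_available():
        d = 128
        x = torch.randn(4, 2 * d, device="cuda", dtype=torch.float16)
        out = torch.empty(4, d, device="cuda", dtype=torch.float16)

        # Gated ops halve the last dimension:
        # out = silu(x[..., :d]) * x[..., d:]
        silu_and_mul(out, x)
        ref = torch.nn.functional.silu(x[..., :d]) * x[..., d:]
        torch.testing.assert_close(out, ref, rtol=1e-2, atol=1e-2)

        # Element-wise ops preserve the shape of ``x``.
        y = torch.empty_like(x)
        gelu_quick(y, x)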