Commit 7c89add by danieldk (HF staff)
1 parent: 5600c5f
Files changed (24):
  1. build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py +35 -29
  2. build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
  3. build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py +35 -29
  4. build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  5. build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py +35 -29
  6. build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  7. build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py +35 -29
  8. build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
  9. build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py +35 -29
  10. build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  11. build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py +35 -29
  12. build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  13. build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py +35 -29
  14. build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
  15. build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py +35 -29
  16. build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  17. build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py +35 -29
  18. build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  19. build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py +35 -29
  20. build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
  21. build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py +35 -29
  22. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
  23. build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py +35 -29
  24. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -6,36 +6,42 @@ except ImportError as e:
     # Fallback for local development.
     try:
         import _activation
+
         ops = torch.ops._activition
     except ImportError:
         raise e
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-
-
-def fatrelu_and_mul(out: torch.Tensor,
-                    x: torch.Tensor,
-                    threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_quick(out, x)
+    return out
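Functional effect of the __init__.py change, shown below as an illustrative sketch (not part of the commit): each wrapper still fills the caller-supplied out tensor in place, but now also returns it, so the call can be used as an expression. The import path and tensor shapes here are assumptions: it presumes the build matching the local torch/CUDA toolchain is importable as the activation package, and that silu_and_mul follows the usual convention where x packs two halves along the last dimension and out receives one half.

import torch
import activation  # assumption: the wheel for this torch/CUDA combo is on sys.path

# Shapes are illustrative: x packs two halves of width 512, out receives one half.
x = torch.randn(4, 2 * 512, device="cuda", dtype=torch.float16)
out = torch.empty(4, 512, device="cuda", dtype=torch.float16)

# Before this commit the wrapper returned None; after it, the same call also
# returns `out`, so the result can be captured directly.
y = activation.silu_and_mul(out, x)
assert y is out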
build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8ca53c70614cd1f97c97abfce947104caae5b9074ecadb4a5d2b84cc40524942
-size 2393240
+oid sha256:d6dcb1ada7977e1a47ed9d2947c41daf7db9074a9947f36487de13a99e0840ba
+size 2393088
build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above: every wrapper gains a trailing return out, a blank line is added after import _activation, and the fatrelu_and_mul signature is collapsed onto one line.)
build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1c70e284ac1ae381df2aaf3b98539d64228b85c16a88e001813f81a2aa4e3df2
+oid sha256:f7383c424c3dadb9cf2c32d7b19f28bddb6e3dd31052b3cb2e324d05f5592c84
 size 2393472
build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:904772268421f53ec8f6e76866a4c7abc97aecf4938eeb230a847472cc6786f2
+oid sha256:1ae1b1f6a69835b66aef515a31ef7cc35c976090e6932b0dd747911e4453c23c
 size 2277656
build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b9691e9352ebbe508313084a0690674adb0af8c0e0918d82d768ba0912769dd
-size 2373608
+oid sha256:f7d7a781448907bebe4fa8c73e45d751a224a8849de099338904b32e70779039
+size 2377704
build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:394c18b71dd74931d728dc8600baa396818fe6c1824982482666f01595853030
+oid sha256:87f4213948d1b1083a39b9136f7ff08814482cf83ed100b132652963702e22b3
 size 2373608
build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f4aa9557e057878e1ba9f9edae7a264e6fb084f3c431a282b7ce98ab41df9178
+oid sha256:9aebc81d7832f6125bb64a3cc16ceef16fcb02dd5c129d45d16c65526e297dd7
 size 2253864
build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:426d63a1e7c672d83c8e724b9027e571faa023c68dd46204bfbf6c006a0cf116
-size 2393240
+oid sha256:3c8dec0f48ddf3b692b0f2d5d44615cd28c03d71240b96d30632932baef967d9
+size 2393088
build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a6392a7591ec92fb69d4a6f9fd2fa06f8afebc38180ef7b2d2d07d91f1dfb9f3
+oid sha256:a0415c28343a1e9eb0dfa459ee62825444b9c87378dd6d682d92d38e7445b3b7
 size 2393472
build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9848d0284c261a3efe998166403457f7c0f45223bed616e4c7f811a34139cbc3
+oid sha256:79dccf3c5f06ab07eb34b91c5f3dce260ee42514b4c01a8f09b3e4f7707ec026
 size 2277656
build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3ea398accce77248a1456627b368c2b7aa06965f52bb49e80d522b88535a81cf
-size 2373608
+oid sha256:12be474aa43aa5ba609fae1e3c0c7695178f9addfd931096fda6080a16e4f810
+size 2377704
build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d1344074b7cc330c0456aff6c51aa7362be27effbe6d686076cd757cd383206e
+oid sha256:2686f1c659110f1f6ad6ef32d73c0bb4fc9563b7975f320742b597c4938fe6f3
 size 2373608
build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
    (Identical change to build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above.)
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:44f297423f172b633df2e0eaf2e8a1895135c7f27cf113def3532a1a9a7745dd
+oid sha256:c4b5dd766087d45ed5c570af1c2fdfcf8aebbba88a7b8d77d13e1aa1875d9e90
 size 2253864