diff --git a/build/torch25-cxx11-cu118-x86_64-linux/attention/_attention_6yvgebnqctora.abi3.so b/build/torch25-cxx11-cu118-x86_64-linux/attention/_attention_6yvgebnqctora.abi3.so
deleted file mode 100755
index 29733cfb726d11a1d278fb0f3679c010cf5210e2..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu118-x86_64-linux/attention/_attention_6yvgebnqctora.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:aee255dc2618e23d4e2076ff3d16c4fbd12d63742fde84252cfb6bfe55c5376e
-size 78886392
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/attention/_ops.py b/build/torch25-cxx11-cu118-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 1379d7cc10c5fafa877e3ea73be33d3eed57b449..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu118-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_6yvgebnqctora
-ops = torch.ops._attention_6yvgebnqctora
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_6yvgebnqctora::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/attention/__init__.py b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx11-cu118-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx11-cu118-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx11-cu118-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..609570440c63122010e6254ac2f92d4e4e52ec02
--- /dev/null
+++ b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_fao6f4gjjrpl6
+ops = torch.ops._paged_attention_fao6f4gjjrpl6
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_fao6f4gjjrpl6::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_fao6f4gjjrpl6.abi3.so b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_fao6f4gjjrpl6.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..a4e60f2c567eb63c84430e9b80acaa0aa6974b1e
--- /dev/null
+++ b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_fao6f4gjjrpl6.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd3f60d12b061ef8cc4adb9bd2d60539bc233075c007af99b995e4c12bb5df98
+size 78886416
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/attention/platforms.py b/build/torch25-cxx11-cu118-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx11-cu118-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx11-cu118-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/attention/_attention_4jg2igd54wzge.abi3.so b/build/torch25-cxx11-cu121-x86_64-linux/attention/_attention_4jg2igd54wzge.abi3.so
deleted file mode 100755
index a58d380aa758b8e6842e89013229bee3711286ef..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu121-x86_64-linux/attention/_attention_4jg2igd54wzge.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:22599ebe9d209fcc82068054caf39f93e6828bb3889b344e655fee50e7a98864
-size 75398808
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/attention/_ops.py b/build/torch25-cxx11-cu121-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 9dee16955e9d988953733fae4e743d92886c92b1..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu121-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_4jg2igd54wzge
-ops = torch.ops._attention_4jg2igd54wzge
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_4jg2igd54wzge::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/attention/__init__.py b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx11-cu121-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx11-cu121-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx11-cu121-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e52382b912b4e2d07f84982f762345debdbbfc8
--- /dev/null
+++ b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_eo7ts45r6k64y
+ops = torch.ops._paged_attention_eo7ts45r6k64y
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_eo7ts45r6k64y::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_paged_attention_eo7ts45r6k64y.abi3.so b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_paged_attention_eo7ts45r6k64y.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..c20f9501a41daa820dfda27434674d032931b51e
--- /dev/null
+++ b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/_paged_attention_eo7ts45r6k64y.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c4f9be8ea04e306483bd77ee84a70be4b1e2bbe1e942766f398cb41afe8cb3f3
+size 75398840
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/attention/platforms.py b/build/torch25-cxx11-cu121-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx11-cu121-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx11-cu121-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/attention/_attention_syg6kbhkhc4xk.abi3.so b/build/torch25-cxx11-cu124-x86_64-linux/attention/_attention_syg6kbhkhc4xk.abi3.so
deleted file mode 100755
index 369150e0964eaca52c0c7906addf9f18d8ec7270..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu124-x86_64-linux/attention/_attention_syg6kbhkhc4xk.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:42a3b2b450b7e284694e8e6d7398627b977d1e5da12bb79d93c6009c192922f9
-size 75568320
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/attention/_ops.py b/build/torch25-cxx11-cu124-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 0bac0403831e313bcf9cbab1a35c2cbe4d5ef08f..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu124-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_syg6kbhkhc4xk
-ops = torch.ops._attention_syg6kbhkhc4xk
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_syg6kbhkhc4xk::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/attention/__init__.py b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx11-cu124-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx11-cu124-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx11-cu124-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f01e3f8c4ae3a031f109f78e010014d34347647
--- /dev/null
+++ b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_5odgyxqhwqtv2
+ops = torch.ops._paged_attention_5odgyxqhwqtv2
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_5odgyxqhwqtv2::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_5odgyxqhwqtv2.abi3.so b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_5odgyxqhwqtv2.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..74f9714690337f49661c641a4f60f6e1e1f56cfa
--- /dev/null
+++ b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_5odgyxqhwqtv2.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fbf3b6cf357130fb9bb6e68cab38cbcbf485147d9dcce1a9a7d7c02fb2e1af8
+size 75568352
diff --git a/build/torch25-cxx11-cu124-x86_64-linux/attention/platforms.py b/build/torch25-cxx11-cu124-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx11-cu124-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx11-cu124-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/attention/_attention_hhzgzhvc7zviy.abi3.so b/build/torch25-cxx98-cu118-x86_64-linux/attention/_attention_hhzgzhvc7zviy.abi3.so
deleted file mode 100755
index 05529e8bcee239db92984acb3e19926697c64a3f..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu118-x86_64-linux/attention/_attention_hhzgzhvc7zviy.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ffad04fc3e82be818bafed25c1be1e9e6145f99eb0ef89ab87ef5ab8c8366f9b
-size 78850608
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/attention/_ops.py b/build/torch25-cxx98-cu118-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 270fd3d0005a3e44dc6625c3ab4948a7fa7892bb..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu118-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_hhzgzhvc7zviy
-ops = torch.ops._attention_hhzgzhvc7zviy
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_hhzgzhvc7zviy::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/attention/__init__.py b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx98-cu118-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx98-cu118-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx98-cu118-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3016a6b1cd7ae051012084bbd39d6f2e0913ace
--- /dev/null
+++ b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_uy2moinaww2jc
+ops = torch.ops._paged_attention_uy2moinaww2jc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_uy2moinaww2jc::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_uy2moinaww2jc.abi3.so b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_uy2moinaww2jc.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..445652acd4719542710cda86a2d08c70a56c8094
--- /dev/null
+++ b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_uy2moinaww2jc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:546d3cb8850575fdd3dd198614d7c64894517dfc124ecd0b384f1918940e9db0
+size 78850640
diff --git a/build/torch25-cxx98-cu118-x86_64-linux/attention/platforms.py b/build/torch25-cxx98-cu118-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx98-cu118-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx98-cu118-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/attention/_attention_gbi5gm244waic.abi3.so b/build/torch25-cxx98-cu121-x86_64-linux/attention/_attention_gbi5gm244waic.abi3.so
deleted file mode 100755
index cb6cccabe445cbf7bfd797b4645300e5a2a4ec38..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu121-x86_64-linux/attention/_attention_gbi5gm244waic.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2ed1c9c4c080a10f7d7f8c18e8e96613020851f769a1bf5e2b92bf19b4e01fb6
-size 75359216
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/attention/_ops.py b/build/torch25-cxx98-cu121-x86_64-linux/attention/_ops.py
deleted file mode 100644
index a517876400c08f9800107c61d6ca3f57e0bdc2e6..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu121-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_gbi5gm244waic
-ops = torch.ops._attention_gbi5gm244waic
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_gbi5gm244waic::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/attention/__init__.py b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx98-cu121-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx98-cu121-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx98-cu121-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2cd992a80d4b938f243f0e6060e863278aca7f6
--- /dev/null
+++ b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_35dt23tewn2p2
+ops = torch.ops._paged_attention_35dt23tewn2p2
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_35dt23tewn2p2::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_paged_attention_35dt23tewn2p2.abi3.so b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_paged_attention_35dt23tewn2p2.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..1f6414c382a753edb7512927ac5f3e31b196531d
--- /dev/null
+++ b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/_paged_attention_35dt23tewn2p2.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45aacf490514529bb7c46955239c5e2f8c07b827184ebdb1bc78ffff6c304b51
+size 75359240
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/attention/platforms.py b/build/torch25-cxx98-cu121-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx98-cu121-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx98-cu121-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/attention/_attention_ill75rmpj7yds.abi3.so b/build/torch25-cxx98-cu124-x86_64-linux/attention/_attention_ill75rmpj7yds.abi3.so
deleted file mode 100755
index bf93abf5555357ad397844421fcfc66ae0743166..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu124-x86_64-linux/attention/_attention_ill75rmpj7yds.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f263e022ef503e7fffcbc15ef59e515b84889d4c473b9113f3fea292725b9e37
-size 75532912
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/attention/_ops.py b/build/torch25-cxx98-cu124-x86_64-linux/attention/_ops.py
deleted file mode 100644
index f49b90de8bda122b2049bf57f5012b60e05364fe..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu124-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_ill75rmpj7yds
-ops = torch.ops._attention_ill75rmpj7yds
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_ill75rmpj7yds::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/attention/__init__.py b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch25-cxx98-cu124-x86_64-linux/attention/__init__.py
rename to build/torch25-cxx98-cu124-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/attention/_custom_ops.py b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch25-cxx98-cu124-x86_64-linux/attention/_custom_ops.py
rename to build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_ops.py b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..150412d67365be8ae5668f83d1939148bb576050
--- /dev/null
+++ b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_fhq57q56w3m5o
+ops = torch.ops._paged_attention_fhq57q56w3m5o
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_fhq57q56w3m5o::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_fhq57q56w3m5o.abi3.so b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_fhq57q56w3m5o.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..ee97eee26a4de8d14d7ccdadaf406eed8405de39
--- /dev/null
+++ b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_fhq57q56w3m5o.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e614a9c07805944a58249f9fbcf8ba6368c1e06e104382eba51ffa168c2d1667
+size 75532944
diff --git a/build/torch25-cxx98-cu124-x86_64-linux/attention/platforms.py b/build/torch25-cxx98-cu124-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch25-cxx98-cu124-x86_64-linux/attention/platforms.py
rename to build/torch25-cxx98-cu124-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/attention/_attention_6qe5ft3kiteru.abi3.so b/build/torch26-cxx11-cu118-x86_64-linux/attention/_attention_6qe5ft3kiteru.abi3.so
deleted file mode 100755
index 0bbd1dc682174c9d7fba2ee7426e1183e668ab79..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu118-x86_64-linux/attention/_attention_6qe5ft3kiteru.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e66eca8e825e5cee2dc18c1235319a4e5b1372d843cab74660e8d94792e02f7c
-size 78857896
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/attention/_ops.py b/build/torch26-cxx11-cu118-x86_64-linux/attention/_ops.py
deleted file mode 100644
index f9b2a39308433746718b31f0d9830b27f72f5242..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu118-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_6qe5ft3kiteru
-ops = torch.ops._attention_6qe5ft3kiteru
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_6qe5ft3kiteru::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/attention/__init__.py b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx11-cu118-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx11-cu118-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx11-cu118-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..2bfef111c96308e595eb628bc88ab660a443089c
--- /dev/null
+++ b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_xvepb4loq5mm2
+ops = torch.ops._paged_attention_xvepb4loq5mm2
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_xvepb4loq5mm2::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_xvepb4loq5mm2.abi3.so b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_xvepb4loq5mm2.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..1ea51bd49f8ec76bbe306a261021da52fe6a980f
--- /dev/null
+++ b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/_paged_attention_xvepb4loq5mm2.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9ea33ac4e0a3fb3aeac93525946f52326dd1a84a40e82ed86ff896f3a887b74
+size 78857928
diff --git a/build/torch26-cxx11-cu118-x86_64-linux/attention/platforms.py b/build/torch26-cxx11-cu118-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx11-cu118-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx11-cu118-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/attention/_attention_ftq3cjdxqfw4m.abi3.so b/build/torch26-cxx11-cu124-x86_64-linux/attention/_attention_ftq3cjdxqfw4m.abi3.so
deleted file mode 100755
index d7fa42c3682924a46e9c5b4a7e847a6b4415c5c8..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu124-x86_64-linux/attention/_attention_ftq3cjdxqfw4m.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:913ba8f5166dc4e84ed8a2da4b1dc44c178a93eeb16aae9782176fb089a459a7
-size 75552112
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/attention/_ops.py b/build/torch26-cxx11-cu124-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 27b44593d2252bfe5399c8dcd883aa497223f158..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu124-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_ftq3cjdxqfw4m
-ops = torch.ops._attention_ftq3cjdxqfw4m
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_ftq3cjdxqfw4m::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/attention/__init__.py b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx11-cu124-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx11-cu124-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx11-cu124-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..8928daeec47128544cef187bf18f214fc2238019
--- /dev/null
+++ b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_uyfdujhnc2xoe
+ops = torch.ops._paged_attention_uyfdujhnc2xoe
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_uyfdujhnc2xoe::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_uyfdujhnc2xoe.abi3.so b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_uyfdujhnc2xoe.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..cf8ebe40f27db0fa87c46d7b4066494e65843820
--- /dev/null
+++ b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/_paged_attention_uyfdujhnc2xoe.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f43b82896ccd22f8860fce338ab01c3eb93fba305c27d1b9a73252151ebcd534
+size 75552144
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/attention/platforms.py b/build/torch26-cxx11-cu124-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx11-cu124-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx11-cu124-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/attention/_attention_lkibbjh726iwm.abi3.so b/build/torch26-cxx11-cu126-x86_64-linux/attention/_attention_lkibbjh726iwm.abi3.so
deleted file mode 100755
index 4a4cccfd49090ac213bbf562a9c4bb2ff2920eb0..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu126-x86_64-linux/attention/_attention_lkibbjh726iwm.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:91380eebc7db2ff85f92e687d388055f210123bac602a6bc273172834bf49012
-size 75376640
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/attention/_ops.py b/build/torch26-cxx11-cu126-x86_64-linux/attention/_ops.py
deleted file mode 100644
index ac89377661ed1c5f2eca40cf199a15209af0c05c..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu126-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_lkibbjh726iwm
-ops = torch.ops._attention_lkibbjh726iwm
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_lkibbjh726iwm::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/attention/__init__.py b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx11-cu126-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx11-cu126-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx11-cu126-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..dff8537df63e1ef37769a6b7ba6b8c58192d7faa
--- /dev/null
+++ b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_pervvqmod6pi4
+ops = torch.ops._paged_attention_pervvqmod6pi4
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_pervvqmod6pi4::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_paged_attention_pervvqmod6pi4.abi3.so b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_paged_attention_pervvqmod6pi4.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..77eb42e3471e9aa84d1f5d9854995c9737ed6bf3
--- /dev/null
+++ b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/_paged_attention_pervvqmod6pi4.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94bc66e346dbe4af37083e914c6835eebb1c43f6a2ab16b45977fd017ab079fa
+size 75376672
diff --git a/build/torch26-cxx11-cu126-x86_64-linux/attention/platforms.py b/build/torch26-cxx11-cu126-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx11-cu126-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx11-cu126-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/attention/_attention_vbhagz24hyij6.abi3.so b/build/torch26-cxx98-cu118-x86_64-linux/attention/_attention_vbhagz24hyij6.abi3.so
deleted file mode 100755
index 4d87629674e87a746aaec4ccadb26bb2a72f2d43..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu118-x86_64-linux/attention/_attention_vbhagz24hyij6.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3746697abeeb7f829661c0912ccb36a7f7bb16c1f9eb7f14b1ee5e52c93ec055
-size 78830632
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/attention/_ops.py b/build/torch26-cxx98-cu118-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 2f05f1ffd05c49971dfc9b45971efb5a055c7e52..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu118-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_vbhagz24hyij6
-ops = torch.ops._attention_vbhagz24hyij6
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_vbhagz24hyij6::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/attention/__init__.py b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx98-cu118-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx98-cu118-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx98-cu118-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..543c64d1589cb1747d7dc1ac29bd8f2cbeb61ab7
--- /dev/null
+++ b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_24rowhxd5ebcc
+ops = torch.ops._paged_attention_24rowhxd5ebcc
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_24rowhxd5ebcc::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_24rowhxd5ebcc.abi3.so b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_24rowhxd5ebcc.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..43ec3529d8eac816c31cc1eaad4cc2baa3cbd3d6
--- /dev/null
+++ b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/_paged_attention_24rowhxd5ebcc.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df8846b6673d825f2b2a0ac4951bb599d62a086fa1e47bd9642fdb9425d42d04
+size 78830656
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/attention/platforms.py b/build/torch26-cxx98-cu118-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx98-cu118-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx98-cu118-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/attention/_attention_sfjvhlixssyce.abi3.so b/build/torch26-cxx98-cu124-x86_64-linux/attention/_attention_sfjvhlixssyce.abi3.so
deleted file mode 100755
index ee6153972f28bd997e1fc4a7eaaf425fd5adc918..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu124-x86_64-linux/attention/_attention_sfjvhlixssyce.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:68ecca9bc82b5fb7bf290f0c91ff86b65d25f7c5534f607b98bec8557922cf84
-size 75521080
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/attention/_ops.py b/build/torch26-cxx98-cu124-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 530d483cdf8243f6c863ca49c0e87018634e69d0..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu124-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_sfjvhlixssyce
-ops = torch.ops._attention_sfjvhlixssyce
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_sfjvhlixssyce::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/attention/__init__.py b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx98-cu124-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx98-cu124-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx98-cu124-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d62b9bb1cfb040d7f68cd108ac9067100b4cf2d
--- /dev/null
+++ b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_5yleoqr3zje4w
+ops = torch.ops._paged_attention_5yleoqr3zje4w
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_5yleoqr3zje4w::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_5yleoqr3zje4w.abi3.so b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_5yleoqr3zje4w.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..ffed60cc0a3948bdea6aa7fb4d486d9b943215ec
--- /dev/null
+++ b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/_paged_attention_5yleoqr3zje4w.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:927aefd1ac0c4ad2b46927dcd4a333439b47a426143661b8d4fa2e4ccb88c54c
+size 75521112
diff --git a/build/torch26-cxx98-cu124-x86_64-linux/attention/platforms.py b/build/torch26-cxx98-cu124-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx98-cu124-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx98-cu124-x86_64-linux/paged_attention/platforms.py
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/attention/_attention_g7oqtcveiuapk.abi3.so b/build/torch26-cxx98-cu126-x86_64-linux/attention/_attention_g7oqtcveiuapk.abi3.so
deleted file mode 100755
index fe58b4ce4158bf5ee55371329396ac8e573cfc85..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu126-x86_64-linux/attention/_attention_g7oqtcveiuapk.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:adb35fabc23d8caa55f061d32ee48688c32e3efa0b4bf9aaed58cc59620e422c
-size 75341504
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/attention/_ops.py b/build/torch26-cxx98-cu126-x86_64-linux/attention/_ops.py
deleted file mode 100644
index 1e504e67dd25c4aa79bcc509316f3f23e6e3e6ef..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu126-x86_64-linux/attention/_ops.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import torch
-from . import _attention_g7oqtcveiuapk
-ops = torch.ops._attention_g7oqtcveiuapk
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_attention_g7oqtcveiuapk::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/attention/__init__.py b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/__init__.py
similarity index 100%
rename from build/torch26-cxx98-cu126-x86_64-linux/attention/__init__.py
rename to build/torch26-cxx98-cu126-x86_64-linux/paged_attention/__init__.py
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/attention/_custom_ops.py b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_custom_ops.py
similarity index 100%
rename from build/torch26-cxx98-cu126-x86_64-linux/attention/_custom_ops.py
rename to build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_custom_ops.py
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_ops.py b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee817d13be64b46e3cb44ad192af4a5f3817bbf7
--- /dev/null
+++ b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _paged_attention_3rbp7xipfucgo
+ops = torch.ops._paged_attention_3rbp7xipfucgo
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_paged_attention_3rbp7xipfucgo::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_paged_attention_3rbp7xipfucgo.abi3.so b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_paged_attention_3rbp7xipfucgo.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..5d5b3ffda2fd6a830d12341bab26dc5ec03f4a86
--- /dev/null
+++ b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/_paged_attention_3rbp7xipfucgo.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a8967fe312a19d3ffd5fc80ec5e554869f92589f441d986ee85cc943ae93ef5
+size 75341536
diff --git a/build/torch26-cxx98-cu126-x86_64-linux/attention/platforms.py b/build/torch26-cxx98-cu126-x86_64-linux/paged_attention/platforms.py
similarity index 100%
rename from build/torch26-cxx98-cu126-x86_64-linux/attention/platforms.py
rename to build/torch26-cxx98-cu126-x86_64-linux/paged_attention/platforms.py