Build
- build/torch25-cxx11-cu118-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so} +2 -2
- build/torch25-cxx11-cu118-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx11-cu118-x86_64-linux/moe/layers.py +2 -0
- build/torch25-cxx11-cu121-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so} +2 -2
- build/torch25-cxx11-cu121-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx11-cu121-x86_64-linux/moe/layers.py +2 -0
- build/torch25-cxx11-cu124-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so} +2 -2
- build/torch25-cxx11-cu124-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx11-cu124-x86_64-linux/moe/layers.py +2 -0
- build/torch25-cxx98-cu118-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so} +2 -2
- build/torch25-cxx98-cu118-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx98-cu118-x86_64-linux/moe/layers.py +2 -0
- build/torch25-cxx98-cu121-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch25-cxx98-cu121-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch25-cxx98-cu121-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx98-cu121-x86_64-linux/moe/layers.py +2 -0
- build/torch25-cxx98-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch25-cxx98-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch25-cxx98-cu124-x86_64-linux/moe/_ops.py +3 -3
- build/torch25-cxx98-cu124-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx11-cu118-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx11-cu118-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx11-cu118-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx11-cu118-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx11-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx11-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx11-cu124-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx11-cu124-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx11-cu126-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx11-cu126-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx11-cu126-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx11-cu126-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx98-cu118-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx98-cu118-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx98-cu118-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx98-cu118-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx98-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx98-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx98-cu124-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx98-cu124-x86_64-linux/moe/layers.py +2 -0
- build/torch26-cxx98-cu126-x86_64-linux/moe/_moe_2218ad7.abi3.so +0 -3
- build/torch26-cxx98-cu126-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so +3 -0
- build/torch26-cxx98-cu126-x86_64-linux/moe/_ops.py +3 -3
- build/torch26-cxx98-cu126-x86_64-linux/moe/layers.py +2 -0
build/torch25-cxx11-cu118-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:17cf96e50dd9d5632a7b5959037c5d11e79fae301989ee6315bc8e3d4d545bde
+size 87056376
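Note that the shared objects are tracked as Git LFS pointers: the repository stores only a three-line stub (spec version, SHA-256 object id, byte size) while the binary lives in LFS storage. A minimal sketch of verifying a fetched binary against such a pointer, assuming only the stub format shown above (the helper name is ours, not part of this repo):

import hashlib

def check_lfs_object(pointer_text: str, blob: bytes) -> bool:
    # Parse "oid sha256:<hex>" and "size <bytes>" from the pointer stub,
    # skipping the leading "version ..." line.
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines()[1:])
    oid = fields["oid"].removeprefix("sha256:")
    size = int(fields["size"])
    return hashlib.sha256(blob).hexdigest() == oid and len(blob) == size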
build/torch25-cxx11-cu118-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
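For context: each build registers its kernels under a torch.ops namespace suffixed with the build's git revision (the _dirty suffix signals a build from a working tree with uncommitted changes), so two builds can be loaded side by side without op-name collisions. A minimal self-contained sketch of the pattern; the op name moe_sum is hypothetical:

_NAMESPACE = "_moe_c558d47_dirty"  # regenerated from the git revision on each build

def add_op_namespace_prefix(op_name: str) -> str:
    # Fully qualified schema name, usable with torch.library utilities.
    return f"{_NAMESPACE}::{op_name}"

print(add_op_namespace_prefix("moe_sum"))  # -> "_moe_c558d47_dirty::moe_sum"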
build/torch25-cxx11-cu118-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
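The only source change in layers.py is the new has_backward = False class attribute, which marks this kernelized Llama4TextMoe as forward-only. A hedged sketch of how a consumer might gate on that flag (the checking helper is ours, not part of this repo):

import torch.nn as nn

def supports_training(module: nn.Module) -> bool:
    # Modules that do not declare has_backward are treated as forward-only.
    return getattr(module, "has_backward", False)

class Llama4TextMoe(nn.Module):  # trimmed stand-in for the layer in this diff
    has_backward = False

assert not supports_training(Llama4TextMoe())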
build/torch25-cxx11-cu121-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:95186380b2815d16e6b8d5cc2d87a93d45dadf31ac197028e37b7d6f65680cf1
+size 87250920
build/torch25-cxx11-cu121-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch25-cxx11-cu121-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch25-cxx11-cu124-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:53bbb3f5ee8f5cbc0d640e924797f71f2c5b2767ec044470d6200fd154c7887b
+size 86965656
build/torch25-cxx11-cu124-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch25-cxx11-cu124-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch25-cxx98-cu118-x86_64-linux/moe/{_moe_2218ad7.abi3.so → _moe_c558d47_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d0eedac7fe7e2bcdace617e68444affa54523ae20cf620ad92959476f93f9e9c
+size 87052576
build/torch25-cxx98-cu118-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch25-cxx98-cu118-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch25-cxx98-cu121-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d2d4157287a3e7979780f23a709eba01e787186bc32a5e56c0620b5429e9cfd3
-size 87243240
build/torch25-cxx98-cu121-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:004cbfe65028ad7b4dd0c2bcffe16bbcb8fe94e3d3771b466808dbd1996fde12
+size 87247384
build/torch25-cxx98-cu121-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch25-cxx98-cu121-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch25-cxx98-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:122544181246b179a772eb07c9e01c8df6b3025c20b333c566d0e84bfd7bea2d
-size 86953880
build/torch25-cxx98-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3785d7743d33c834d0a3ac99dcb8add3a951314290dd9f7d4eb87045c7424f90
+size 86958024
build/torch25-cxx98-cu124-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch25-cxx98-cu124-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx11-cu118-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:49e17eb28438bddf98e314893cf262b807d64ee03850b46abe4d0bf6151f62b6
-size 87060352
build/torch26-cxx11-cu118-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fc2df0cc4b652b7c1a2a34191bedc3a3190cac9e74b44403ce3f0c763bdf072
+size 87060400
build/torch26-cxx11-cu118-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx11-cu118-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx11-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f804164f561c9b46f3b997a6d13552ca4d704c43484b5cd8d14682b4450ed472
-size 86965664
build/torch26-cxx11-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:511359ec7e5a3c4ea2beda0a31b005a0210aadf3d2247b87e990eefa0f8ff8ba
+size 86965712
build/torch26-cxx11-cu124-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx11-cu124-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx11-cu126-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1102bf615b2d2f7c320ac73eed63b982e969683ac72c958080dddb87166fa595
-size 87432960
build/torch26-cxx11-cu126-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b546237970e07f6125675971da472a7cacfaa81384f6ddfa4eb36f4da644c0fc
+size 87424816
build/torch26-cxx11-cu126-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx11-cu126-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx98-cu118-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e739bb546d3d1730fa7696fbd767fd588286dec369f1b7551edd1ec481df96f
-size 87044288
build/torch26-cxx98-cu118-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:889767dfddb5c30f49394395f34b6e25c2f097b32826e02bc73e7381a8ef8b24
+size 87048456
build/torch26-cxx98-cu118-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx98-cu118-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx98-cu124-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bcb950d2e7196ad22cad926749b7e0e06e5454f0a732755b72f0b8dd456529c6
-size 86953856
build/torch26-cxx98-cu124-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8b46694ef9faa8dc3bc01413602d4c5a7ce10fdd7e34c6773e778b7917a9066
+size 86949808
build/torch26-cxx98-cu124-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
build/torch26-cxx98-cu126-x86_64-linux/moe/_moe_2218ad7.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:fe5c605f1da902aebc1d7ce0355b649fcfcc44aed0023fdc87974f3d56273897
-size 87417064
build/torch26-cxx98-cu126-x86_64-linux/moe/_moe_c558d47_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6023b925f8f17a3e8dca1f673bdb6348c661e94423f86114f14fef1bf5f69947
+size 87417112
build/torch26-cxx98-cu126-x86_64-linux/moe/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _moe_2218ad7
-ops = torch.ops._moe_2218ad7
+from . import _moe_c558d47_dirty
+ops = torch.ops._moe_c558d47_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_moe_2218ad7::{op_name}"
+    return f"_moe_c558d47_dirty::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/moe/layers.py
CHANGED
@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module