Uploaded using `kernel-builder`.
Browse files- build/torch210-metal-aarch64-darwin/__init__.py +3 -0
- build/torch210-metal-aarch64-darwin/_mlx_rmsnorm_metal_b150ff3.abi3.so +3 -0
- build/torch210-metal-aarch64-darwin/_ops.py +9 -0
- build/torch210-metal-aarch64-darwin/functions.py +23 -0
- build/torch210-metal-aarch64-darwin/layers.py +11 -0
- build/torch210-metal-aarch64-darwin/metadata.json +8 -0
- build/torch210-metal-aarch64-darwin/mlx_rmsnorm/__init__.py +26 -0
- build/torch211-metal-aarch64-darwin/__init__.py +3 -0
- build/torch211-metal-aarch64-darwin/_mlx_rmsnorm_metal_b150ff3.abi3.so +3 -0
- build/torch211-metal-aarch64-darwin/_ops.py +9 -0
- build/torch211-metal-aarch64-darwin/functions.py +23 -0
- build/torch211-metal-aarch64-darwin/layers.py +11 -0
- build/torch211-metal-aarch64-darwin/metadata.json +8 -0
- build/torch211-metal-aarch64-darwin/mlx_rmsnorm/__init__.py +26 -0
build/torch210-metal-aarch64-darwin/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .functions import rmsnorm_forward, rmsnorm_backward
|
| 2 |
+
from . import layers
|
| 3 |
+
__all__ = ["layers", "rmsnorm_forward", "rmsnorm_backward"]
|
build/torch210-metal-aarch64-darwin/_mlx_rmsnorm_metal_b150ff3.abi3.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:081e37329b9871e3547287660c1e3fa1d300f94cf92b1a86e9d4919645d7578d
|
| 3 |
+
size 221840
|
build/torch210-metal-aarch64-darwin/_ops.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from . import _mlx_rmsnorm_metal_b150ff3
|
| 3 |
+
ops = torch.ops._mlx_rmsnorm_metal_b150ff3
|
| 4 |
+
|
| 5 |
+
def add_op_namespace_prefix(op_name: str):
    """Qualify *op_name* with this extension's torch op namespace."""
    return "_mlx_rmsnorm_metal_b150ff3::" + op_name
|
build/torch210-metal-aarch64-darwin/functions.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from ._ops import ops
|
| 3 |
+
|
| 4 |
+
def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
    """Apply RMS normalization to *x* with scale *weight* via the Metal kernel.

    The input is viewed as 2-D ``(rows, hidden)`` for the kernel launch and
    the result is viewed back to the caller's original shape.
    """
    input_shape = x.shape
    hidden = x.shape[-1]
    rows = x.view(-1, hidden)
    gain = weight.view(-1)
    # The kernel writes into a preallocated, zero-initialized output buffer.
    result = torch.zeros_like(rows)
    ops.launch_forward_kernel(rows, gain, result, epsilon)
    return result.view(input_shape)
|
| 12 |
+
|
| 13 |
+
def rmsnorm_backward(x: torch.Tensor, weight: torch.Tensor, grad_output: torch.Tensor, epsilon: float = 1e-5) -> "tuple[torch.Tensor, torch.Tensor]":
    """Backward pass of RMSNorm via the Metal kernel.

    Args:
        x: Input of the forward pass; any shape with feature dim last.
        weight: Per-feature scale of size ``(hidden,)``.
        grad_output: Gradient w.r.t. the forward output, same numel as ``x``.
        epsilon: Numerical-stability constant used by the kernel.

    Returns:
        ``(grad_input, grad_weight)`` — ``grad_input`` has ``x``'s original
        shape; ``grad_weight`` keeps the weight's (flattened) shape.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    # NOTE(review): grad_output is flattened to 1-D while x stays 2-D; the
    # kernel presumably treats the buffers as flat — confirm against the
    # Metal kernel's expected layout.
    grad_output = grad_output.view(-1)
    grad_input = torch.zeros_like(x)
    grad_weight = torch.zeros_like(weight)
    ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
    grad_input = grad_input.view(original_shape)
    # BUG FIX: the original did `grad_weight.view(original_shape)`, but
    # grad_weight has weight.numel() elements while original_shape has
    # x.numel() — that view raised a RuntimeError for any multi-row input.
    # The weight gradient must keep the weight's shape.
    return grad_input, grad_weight
|
build/torch210-metal-aarch64-darwin/layers.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .functions import rmsnorm_forward
|
| 2 |
+
import torch
|
| 3 |
+
|
| 4 |
+
class RMSNorm(torch.nn.Module):
    """RMSNorm layer backed by the custom Metal kernel.

    This class declares no ``__init__``; it assumes ``weight`` and
    ``variance_epsilon`` are assigned externally (e.g. by the loading
    machinery copying them from a pretrained module).
    """

    # Per-feature scale tensor — presumably of size (hidden,); verify
    # against the kernel's expectations.
    weight: torch.Tensor
    # Numerical-stability constant forwarded to the kernel as epsilon.
    variance_epsilon: float

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Delegates entirely to the Metal-kernel wrapper function.
        return rmsnorm_forward(x, self.weight, self.variance_epsilon)
|
| 10 |
+
|
| 11 |
+
|
build/torch210-metal-aarch64-darwin/metadata.json
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"version": 1,
|
| 3 |
+
"license": "MIT",
|
| 4 |
+
"python-depends": [],
|
| 5 |
+
"backend": {
|
| 6 |
+
"type": "metal"
|
| 7 |
+
}
|
| 8 |
+
}
|
build/torch210-metal-aarch64-darwin/mlx_rmsnorm/__init__.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ctypes
|
| 2 |
+
import importlib.util
|
| 3 |
+
import sys
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from types import ModuleType
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _import_from_path(file_path: Path) -> ModuleType:
|
| 9 |
+
# We cannot use the module name as-is, after adding it to `sys.modules`,
|
| 10 |
+
# it would also be used for other imports. So, we make a module name that
|
| 11 |
+
# depends on the path for it to be unique using the hex-encoded hash of
|
| 12 |
+
# the path.
|
| 13 |
+
path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
|
| 14 |
+
module_name = path_hash
|
| 15 |
+
spec = importlib.util.spec_from_file_location(module_name, file_path)
|
| 16 |
+
if spec is None:
|
| 17 |
+
raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
|
| 18 |
+
module = importlib.util.module_from_spec(spec)
|
| 19 |
+
if module is None:
|
| 20 |
+
raise ImportError(f"Cannot load module {module_name} from spec")
|
| 21 |
+
sys.modules[module_name] = module
|
| 22 |
+
spec.loader.exec_module(module) # type: ignore
|
| 23 |
+
return module
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# Re-export the variant-level package: load the sibling top-level
# ``__init__.py`` (one directory up) under a unique module name and splice
# its public names into this package's namespace.
globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
|
build/torch211-metal-aarch64-darwin/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .functions import rmsnorm_forward, rmsnorm_backward
|
| 2 |
+
from . import layers
|
| 3 |
+
__all__ = ["layers", "rmsnorm_forward", "rmsnorm_backward"]
|
build/torch211-metal-aarch64-darwin/_mlx_rmsnorm_metal_b150ff3.abi3.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e58acba77e25fca83fd4792ab7418780850d7c35354f74ef2cc1f0bcd16af13e
|
| 3 |
+
size 221840
|
build/torch211-metal-aarch64-darwin/_ops.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from . import _mlx_rmsnorm_metal_b150ff3
|
| 3 |
+
ops = torch.ops._mlx_rmsnorm_metal_b150ff3
|
| 4 |
+
|
| 5 |
+
def add_op_namespace_prefix(op_name: str):
    """Qualify *op_name* with this extension's torch op namespace."""
    return "_mlx_rmsnorm_metal_b150ff3::" + op_name
|
build/torch211-metal-aarch64-darwin/functions.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from ._ops import ops
|
| 3 |
+
|
| 4 |
+
def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
    """Apply RMS normalization to *x* with scale *weight* via the Metal kernel.

    The input is viewed as 2-D ``(rows, hidden)`` for the kernel launch and
    the result is viewed back to the caller's original shape.
    """
    input_shape = x.shape
    hidden = x.shape[-1]
    rows = x.view(-1, hidden)
    gain = weight.view(-1)
    # The kernel writes into a preallocated, zero-initialized output buffer.
    result = torch.zeros_like(rows)
    ops.launch_forward_kernel(rows, gain, result, epsilon)
    return result.view(input_shape)
|
| 12 |
+
|
| 13 |
+
def rmsnorm_backward(x: torch.Tensor, weight: torch.Tensor, grad_output: torch.Tensor, epsilon: float = 1e-5) -> "tuple[torch.Tensor, torch.Tensor]":
    """Backward pass of RMSNorm via the Metal kernel.

    Args:
        x: Input of the forward pass; any shape with feature dim last.
        weight: Per-feature scale of size ``(hidden,)``.
        grad_output: Gradient w.r.t. the forward output, same numel as ``x``.
        epsilon: Numerical-stability constant used by the kernel.

    Returns:
        ``(grad_input, grad_weight)`` — ``grad_input`` has ``x``'s original
        shape; ``grad_weight`` keeps the weight's (flattened) shape.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    # NOTE(review): grad_output is flattened to 1-D while x stays 2-D; the
    # kernel presumably treats the buffers as flat — confirm against the
    # Metal kernel's expected layout.
    grad_output = grad_output.view(-1)
    grad_input = torch.zeros_like(x)
    grad_weight = torch.zeros_like(weight)
    ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
    grad_input = grad_input.view(original_shape)
    # BUG FIX: the original did `grad_weight.view(original_shape)`, but
    # grad_weight has weight.numel() elements while original_shape has
    # x.numel() — that view raised a RuntimeError for any multi-row input.
    # The weight gradient must keep the weight's shape.
    return grad_input, grad_weight
|
build/torch211-metal-aarch64-darwin/layers.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .functions import rmsnorm_forward
|
| 2 |
+
import torch
|
| 3 |
+
|
| 4 |
+
class RMSNorm(torch.nn.Module):
    """RMSNorm layer backed by the custom Metal kernel.

    This class declares no ``__init__``; it assumes ``weight`` and
    ``variance_epsilon`` are assigned externally (e.g. by the loading
    machinery copying them from a pretrained module).
    """

    # Per-feature scale tensor — presumably of size (hidden,); verify
    # against the kernel's expectations.
    weight: torch.Tensor
    # Numerical-stability constant forwarded to the kernel as epsilon.
    variance_epsilon: float

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Delegates entirely to the Metal-kernel wrapper function.
        return rmsnorm_forward(x, self.weight, self.variance_epsilon)
|
| 10 |
+
|
| 11 |
+
|
build/torch211-metal-aarch64-darwin/metadata.json
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"version": 1,
|
| 3 |
+
"license": "MIT",
|
| 4 |
+
"python-depends": [],
|
| 5 |
+
"backend": {
|
| 6 |
+
"type": "metal"
|
| 7 |
+
}
|
| 8 |
+
}
|
build/torch211-metal-aarch64-darwin/mlx_rmsnorm/__init__.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ctypes
|
| 2 |
+
import importlib.util
|
| 3 |
+
import sys
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from types import ModuleType
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _import_from_path(file_path: Path) -> ModuleType:
|
| 9 |
+
# We cannot use the module name as-is, after adding it to `sys.modules`,
|
| 10 |
+
# it would also be used for other imports. So, we make a module name that
|
| 11 |
+
# depends on the path for it to be unique using the hex-encoded hash of
|
| 12 |
+
# the path.
|
| 13 |
+
path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
|
| 14 |
+
module_name = path_hash
|
| 15 |
+
spec = importlib.util.spec_from_file_location(module_name, file_path)
|
| 16 |
+
if spec is None:
|
| 17 |
+
raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
|
| 18 |
+
module = importlib.util.module_from_spec(spec)
|
| 19 |
+
if module is None:
|
| 20 |
+
raise ImportError(f"Cannot load module {module_name} from spec")
|
| 21 |
+
sys.modules[module_name] = module
|
| 22 |
+
spec.loader.exec_module(module) # type: ignore
|
| 23 |
+
return module
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# Re-export the variant-level package: load the sibling top-level
# ``__init__.py`` (one directory up) under a unique module name and splice
# its public names into this package's namespace.
globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
|