danieldk (HF Staff) committed
Commit 5553592 · verified · 1 parent: c984bd7

Build uploaded using `kernels`.
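Builds like these are fetched at run time rather than pip-installed. As a hedged sketch, this is how such a kernel repository is typically consumed with the `kernels` library (the repository id below is an assumption for illustration):

```python
# Sketch only: fetch and call a compiled Hub kernel via the `kernels` library.
# "kernels-community/relu" is a hypothetical repo id; substitute the real one.
import torch
from kernels import get_kernel

relu_kernel = get_kernel("kernels-community/relu")  # resolves a build matching the local torch/CUDA
x = torch.randn(16, device="cuda")
y = relu_kernel.relu(x)  # dispatches to the compiled extension
```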

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. build/torch210-cxx11-cu126-x86_64-linux/__init__.py +0 -12
  2. build/torch210-cxx11-cu126-x86_64-linux/_ops.py +0 -9
  3. build/torch210-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  4. build/torch210-cxx11-cu126-x86_64-linux/metadata.json +0 -1
  5. build/torch210-cxx11-cu126-x86_64-linux/relu/__init__.py +0 -26
  6. build/torch210-cxx11-cu128-x86_64-linux/__init__.py +0 -12
  7. build/torch210-cxx11-cu128-x86_64-linux/_ops.py +0 -9
  8. build/torch210-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  9. build/torch210-cxx11-cu128-x86_64-linux/metadata.json +0 -1
  10. build/torch210-cxx11-cu128-x86_64-linux/relu/__init__.py +0 -26
  11. build/torch210-cxx11-cu130-x86_64-linux/__init__.py +0 -12
  12. build/torch210-cxx11-cu130-x86_64-linux/_ops.py +0 -9
  13. build/torch210-cxx11-cu130-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  14. build/torch210-cxx11-cu130-x86_64-linux/metadata.json +0 -1
  15. build/torch210-cxx11-cu130-x86_64-linux/relu/__init__.py +0 -26
  16. build/torch210-cxx11-rocm70-x86_64-linux/__init__.py +0 -12
  17. build/torch210-cxx11-rocm70-x86_64-linux/_ops.py +0 -9
  18. build/torch210-cxx11-rocm70-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  19. build/torch210-cxx11-rocm70-x86_64-linux/metadata.json +0 -1
  20. build/torch210-cxx11-rocm70-x86_64-linux/relu/__init__.py +0 -26
  21. build/torch210-cxx11-rocm71-x86_64-linux/__init__.py +0 -12
  22. build/torch210-cxx11-rocm71-x86_64-linux/_ops.py +0 -9
  23. build/torch210-cxx11-rocm71-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  24. build/torch210-cxx11-rocm71-x86_64-linux/metadata.json +0 -1
  25. build/torch210-cxx11-rocm71-x86_64-linux/relu/__init__.py +0 -26
  26. build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py +0 -12
  27. build/torch210-cxx11-xpu20253-x86_64-linux/_ops.py +0 -9
  28. build/torch210-cxx11-xpu20253-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  29. build/torch210-cxx11-xpu20253-x86_64-linux/metadata.json +0 -1
  30. build/torch210-cxx11-xpu20253-x86_64-linux/relu/__init__.py +0 -26
  31. build/torch28-cxx11-cu126-x86_64-linux/__init__.py +0 -12
  32. build/torch28-cxx11-cu126-x86_64-linux/_ops.py +0 -9
  33. build/torch28-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  34. build/torch28-cxx11-cu126-x86_64-linux/metadata.json +0 -1
  35. build/torch28-cxx11-cu126-x86_64-linux/relu/__init__.py +0 -26
  36. build/torch28-cxx11-cu128-x86_64-linux/__init__.py +0 -12
  37. build/torch28-cxx11-cu128-x86_64-linux/_ops.py +0 -9
  38. build/torch28-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  39. build/torch28-cxx11-cu128-x86_64-linux/metadata.json +0 -1
  40. build/torch28-cxx11-cu128-x86_64-linux/relu/__init__.py +0 -26
  41. build/torch28-cxx11-cu129-x86_64-linux/__init__.py +0 -12
  42. build/torch28-cxx11-cu129-x86_64-linux/_ops.py +0 -9
  43. build/torch28-cxx11-cu129-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  44. build/torch28-cxx11-cu129-x86_64-linux/metadata.json +0 -1
  45. build/torch28-cxx11-cu129-x86_64-linux/relu/__init__.py +0 -26
  46. build/torch28-cxx11-rocm63-x86_64-linux/__init__.py +0 -12
  47. build/torch28-cxx11-rocm63-x86_64-linux/_ops.py +0 -9
  48. build/torch28-cxx11-rocm63-x86_64-linux/_relu_9f1f069.abi3.so +0 -3
  49. build/torch28-cxx11-rocm63-x86_64-linux/metadata.json +0 -1
  50. build/torch28-cxx11-rocm63-x86_64-linux/relu/__init__.py +0 -26
build/torch210-cxx11-cu126-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out
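For context, the deleted wrapper allocates an output unless the caller passes one; a hedged usage sketch (assumes a compatible build is importable as `relu`):

```python
# Hypothetical usage of the wrapper above; needs a matching torch/CUDA build.
import torch
import relu

x = torch.randn(8, device="cuda")
y = relu.relu(x)               # allocates the output via torch.empty_like
buf = torch.empty_like(x)
relu.relu(x, out=buf)          # writes into a caller-provided buffer instead
```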
 
build/torch210-cxx11-cu126-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"
 
build/torch210-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4331a70e73fe4493c4a77e411ceb88fbdf91fe22988cf387ba888457bc95428a
- size 2019872
 
build/torch210-cxx11-cu126-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}
 
 
build/torch210-cxx11-cu126-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
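The deleted loader re-exports the parent directory's `__init__.py` under a unique, path-derived module name so that several copies of the same build never collide in `sys.modules`. A standalone sketch of the same technique (the target path is a placeholder):

```python
# Standalone sketch: import a module from an explicit file path under a
# collision-proof, path-derived name. The path below is a placeholder.
import importlib.util
import sys
from pathlib import Path

path = Path("/tmp/example_pkg/__init__.py")
name = "_dyn_{:x}".format(abs(hash(path.resolve())))
spec = importlib.util.spec_from_file_location(name, path)
if spec is None or spec.loader is None:
    raise ImportError(f"cannot build an import spec for {path}")
module = importlib.util.module_from_spec(spec)
sys.modules[name] = module        # register first, so the module can import itself
spec.loader.exec_module(module)   # run the file's top-level code
```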
 
build/torch210-cxx11-cu128-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch210-cxx11-cu128-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch210-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fb9d4b04679a723ed97e12592f1c9977d3e858d896c527b0e99302de5dcbf00d
- size 2111824

build/torch210-cxx11-cu128-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch210-cxx11-cu128-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch210-cxx11-cu130-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch210-cxx11-cu130-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch210-cxx11-cu130-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2983b028177a3807d423750a37ca685c7ebfc8100b5f6598df69667afe8f35ec
- size 2129640

build/torch210-cxx11-cu130-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch210-cxx11-cu130-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch210-cxx11-rocm70-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch210-cxx11-rocm70-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch210-cxx11-rocm70-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d0ee62fbd93bbf70a5239bd94669ec10c5466bd871d225dd42d89f68055335c1
- size 1990320

build/torch210-cxx11-rocm70-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch210-cxx11-rocm70-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch210-cxx11-rocm71-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch210-cxx11-rocm71-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch210-cxx11-rocm71-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:87f0d86aeedffc637b070645822e6688be5744263c5fda3d0f92c4a827d8a37b
- size 1990224

build/torch210-cxx11-rocm71-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch210-cxx11-rocm71-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch210-cxx11-xpu20253-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch210-cxx11-xpu20253-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch210-cxx11-xpu20253-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3b8050743b1530ad911c74eef35ec5ffb76c2dda91790c56e083635d49aca10e
- size 2050440

build/torch210-cxx11-xpu20253-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch210-cxx11-xpu20253-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch28-cxx11-cu126-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch28-cxx11-cu126-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch28-cxx11-cu126-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7ce61ae869ce8392e5ffcbf234b073368eed20808fedcf7236fa10690133d5a2
- size 2013928

build/torch28-cxx11-cu126-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch28-cxx11-cu126-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch28-cxx11-cu128-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch28-cxx11-cu128-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch28-cxx11-cu128-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f53b59d3863275f40114ae9d415e8dfbd1af68ea11bd924ed60088e76467a645
- size 2101656

build/torch28-cxx11-cu128-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch28-cxx11-cu128-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch28-cxx11-cu129-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch28-cxx11-cu129-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch28-cxx11-cu129-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c4cb02e5b63c1236dbbfc491dc79ac88da1172389f8402f23cbe76e76f1dd0a0
- size 2135440

build/torch28-cxx11-cu129-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch28-cxx11-cu129-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))
 
build/torch28-cxx11-rocm63-x86_64-linux/__init__.py DELETED
@@ -1,12 +0,0 @@
- from typing import Optional
-
- import torch
-
- from ._ops import ops
-
-
- def relu(x: torch.Tensor, out: Optional[torch.Tensor] = None) -> torch.Tensor:
-     if out is None:
-         out = torch.empty_like(x)
-     ops.relu(out, x)
-     return out

build/torch28-cxx11-rocm63-x86_64-linux/_ops.py DELETED
@@ -1,9 +0,0 @@
- import torch
- from . import _relu_9f1f069
- ops = torch.ops._relu_9f1f069
-
- def add_op_namespace_prefix(op_name: str):
-     """
-     Prefix op by namespace.
-     """
-     return f"_relu_9f1f069::{op_name}"

build/torch28-cxx11-rocm63-x86_64-linux/_relu_9f1f069.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fc1207b128f1d74a74c0dc454c74c20894a2b80568b6abec71b12effc660f95f
- size 1970264

build/torch28-cxx11-rocm63-x86_64-linux/metadata.json DELETED
@@ -1 +0,0 @@
- {"python-depends":[]}

build/torch28-cxx11-rocm63-x86_64-linux/relu/__init__.py DELETED
@@ -1,26 +0,0 @@
- import ctypes
- import sys
-
- import importlib
- from pathlib import Path
- from types import ModuleType
-
- def _import_from_path(file_path: Path) -> ModuleType:
-     # We cannot use the module name as-is, after adding it to `sys.modules`,
-     # it would also be used for other imports. So, we make a module name that
-     # depends on the path for it to be unique using the hex-encoded hash of
-     # the path.
-     path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path.absolute())).value)
-     module_name = path_hash
-     spec = importlib.util.spec_from_file_location(module_name, file_path)
-     if spec is None:
-         raise ImportError(f"Cannot load spec for {module_name} from {file_path}")
-     module = importlib.util.module_from_spec(spec)
-     if module is None:
-         raise ImportError(f"Cannot load module {module_name} from spec")
-     sys.modules[module_name] = module
-     spec.loader.exec_module(module)  # type: ignore
-     return module
-
-
- globals().update(vars(_import_from_path(Path(__file__).parent.parent / "__init__.py")))