-
Notifications
You must be signed in to change notification settings - Fork 0
/
tensor.py
150 lines (100 loc) · 5.11 KB
/
tensor.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
from enum import Enum
from typing import Callable, Self
from functools import partial
import torch
from utils import TensorInstructions, TensorRef
class Strategy(Enum):
    """Placement strategy for a distributed tensor.

    For the moment only dimension 0 can be sharded or replicated, so two
    members are enough.
    """

    Shard = 0
    Replicate = 1
class STensor:
    """A ``torch.Tensor`` wrapper that mimics PyTorch's DTensor, with one key
    difference: every operation it performs is also reported, via the
    ``callback`` supplied at construction, to the distribution center.

    The callback protocol (as used by ``_single_inp_op``): calling
    ``callback(op_name, self_ref, (other_ref,))`` returns a save-callback,
    which is then invoked with ``id(result)`` so the center can associate the
    freshly created result tensor with the recorded operation.
    """

    # Defining ``__eq__`` makes Python set ``__hash__ = None`` implicitly, but
    # ``__eq__`` here returns an element-wise STensor (not a bool), so value
    # hashing was never meaningful anyway.  Restore identity hashing, which is
    # consistent with the ``id()``-based bookkeeping in ``_single_inp_op``.
    __hash__ = object.__hash__

    def __init__(self, tensor: torch.Tensor, callback: Callable) -> None:
        """Wrap ``tensor`` and remember the reporting ``callback``."""
        self.tensor = tensor
        self.callback = callback

    # --- arithmetic -----------------------------------------------------
    def __add__(self, other: Self | int | float):
        return self._single_inp_op(other, "__add__")

    def __iadd__(self, other: Self | int | float):
        return self._single_inp_op(other, "__iadd__")

    def __radd__(self, other: Self | int | float):
        return self._single_inp_op(other, "__radd__")

    def __sub__(self, other: Self | int | float):
        return self._single_inp_op(other, "__sub__")

    def __isub__(self, other: Self | int | float):
        return self._single_inp_op(other, "__isub__")

    def __rsub__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rsub__")

    def __mul__(self, other: Self | int | float):
        return self._single_inp_op(other, "__mul__")

    def __imul__(self, other: Self | int | float):
        return self._single_inp_op(other, "__imul__")

    def __rmul__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rmul__")

    def __truediv__(self, other: Self | int | float):
        return self._single_inp_op(other, "__truediv__")

    def __idiv__(self, other: Self | int | float):
        return self._single_inp_op(other, "__idiv__")

    def __rtruediv__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rtruediv__")

    # Floor division is not supported yet.
    def __floordiv__(self, other: Self | int | float):
        raise NotImplementedError("Floordiv is not supported yet!")

    def __ifloordiv__(self, other: Self | int | float):
        raise NotImplementedError("Floordiv is not supported yet!")

    def __rfloordiv__(self, other: Self | int | float):
        raise NotImplementedError("Floordiv is not supported yet!")

    def __mod__(self, other: Self | int | float):
        return self._single_inp_op(other, "__mod__")

    def __imod__(self, other: Self | int | float):
        return self._single_inp_op(other, "__imod__")

    def __rmod__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rmod__")

    def __pow__(self, other: Self | int | float):
        return self._single_inp_op(other, "__pow__")

    def __ipow__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ipow__")

    def __rpow__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rpow__")

    # --- bit shifts and bitwise logic ------------------------------------
    def __rshift__(self, other: Self | int | float):
        return self._single_inp_op(other, "__rshift__")

    def __irshift__(self, other: Self | int | float):
        return self._single_inp_op(other, "__irshift__")

    def __lshift__(self, other: Self | int | float):
        return self._single_inp_op(other, "__lshift__")

    def __ilshift__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ilshift__")

    def __and__(self, other: Self | int | float):
        return self._single_inp_op(other, "__and__")

    def __iand__(self, other: Self | int | float):
        return self._single_inp_op(other, "__iand__")

    def __or__(self, other: Self | int | float):
        return self._single_inp_op(other, "__or__")

    def __ior__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ior__")

    def __xor__(self, other: Self | int | float):
        return self._single_inp_op(other, "__xor__")

    def __ixor__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ixor__")

    # --- comparisons (element-wise; they return STensor, not bool) -------
    def __lt__(self, other: Self | int | float):
        return self._single_inp_op(other, "__lt__")

    def __le__(self, other: Self | int | float):
        return self._single_inp_op(other, "__le__")

    def __gt__(self, other: Self | int | float):
        return self._single_inp_op(other, "__gt__")

    def __ge__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ge__")

    def __eq__(self, other: Self | int | float):
        return self._single_inp_op(other, "__eq__")

    def __ne__(self, other: Self | int | float):
        return self._single_inp_op(other, "__ne__")

    def _single_inp_op(self, other: Self | int | float, op_name: str):
        """Report ``op_name`` to the distribution center, perform it on the
        underlying tensor, and return the wrapped result.

        ``other`` may be another STensor (unwrapped before the op) or a plain
        scalar (passed through both to the callback and to torch).
        """
        self_ref = TensorRef(id(self))
        # BUG FIX: the reference for the second operand must be built from
        # ``other``, not ``self`` — otherwise every STensor-STensor op is
        # recorded as ``self op self``.
        other_ref = TensorRef(id(other)) if isinstance(other, STensor) else other
        other = other.tensor if isinstance(other, STensor) else other
        # The callback returns a second callback used to register the result.
        save_callback = self.callback(op_name, self_ref, (other_ref,))
        result = getattr(self.tensor, op_name)(other)
        result = STensor(result, self.callback)
        save_callback(id(result))
        return result