Merge pull request #4 from dantp-ai/solution/task-0-1
Solution/task 0 1
dantp-ai authored Feb 29, 2024
2 parents cb9298a + 904d1c4 commit 3619ca0
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions minitorch/operators.py
@@ -74,7 +74,7 @@ def sigmoid(x: float) -> float:
     for stability.
     """
     # TODO: Implement for Task 0.1.
-    raise NotImplementedError("Need to implement for Task 0.1")
+    return (1.0 / (1.0 + math.exp(-x))) if x >= 0 else math.exp(x) / (1 + math.exp(x))


 def relu(x: float) -> float:
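
Why the new sigmoid line branches on the sign of x: for large negative x, math.exp(-x) overflows a Python float, so the naive 1.0 / (1.0 + math.exp(-x)) raises OverflowError, while the algebraically equivalent math.exp(x) / (1 + math.exp(x)) only ever passes a non-positive argument to exp. A minimal sketch (not part of this commit) contrasting the two forms:

    import math

    def sigmoid_naive(x: float) -> float:
        # Overflows for large negative x: math.exp(-x) exceeds the float range.
        return 1.0 / (1.0 + math.exp(-x))

    def sigmoid_stable(x: float) -> float:
        # Mirrors the patched line: exp() only ever sees a non-positive argument.
        return (1.0 / (1.0 + math.exp(-x))) if x >= 0 else math.exp(x) / (1 + math.exp(x))

    print(sigmoid_stable(-1000.0))   # 0.0, no overflow
    try:
        sigmoid_naive(-1000.0)
    except OverflowError as err:
        print("naive form overflows:", err)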
@@ -103,7 +103,7 @@ def exp(x: float) -> float:
 def log_back(x: float, d: float) -> float:
     r"If $f = log$ as above, compute $d \times f'(x)$"
     # TODO: Implement for Task 0.1.
-    raise NotImplementedError("Need to implement for Task 0.1")
+    return d / x


 def inv(x: float) -> float:
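
Since $f(x) = \log x$ has $f'(x) = 1/x$, the chain-rule term $d \times f'(x)$ reduces to d / x, which is exactly what the patch returns. A quick central-difference sanity check (my own sketch, not repository code):

    import math

    def log_back(x: float, d: float) -> float:
        # d * f'(x) with f(x) = log(x), so f'(x) = 1/x.
        return d / x

    # Central-difference approximation of d * f'(x) at x = 2.0, d = 3.0.
    x, d, eps = 2.0, 3.0, 1e-6
    numeric = d * (math.log(x + eps) - math.log(x - eps)) / (2 * eps)
    print(log_back(x, d), numeric)  # both approximately 1.5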
@@ -116,7 +116,7 @@ def inv(x: float) -> float:
 def inv_back(x: float, d: float) -> float:
     r"If $f(x) = 1/x$ compute $d \times f'(x)$"
     # TODO: Implement for Task 0.1.
-    raise NotImplementedError("Need to implement for Task 0.1")
+    return -(x ** (-2)) * d


 def relu_back(x: float, d: float) -> float:
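
Likewise, $f(x) = 1/x$ has $f'(x) = -x^{-2}$, so inv_back returns -(x ** (-2)) * d. The same numerical check applies (again a sketch, not part of the commit); relu_back's body is collapsed in this diff, so nothing is claimed about it here:

    def inv_back(x: float, d: float) -> float:
        # d * f'(x) with f(x) = 1/x, so f'(x) = -x**(-2).
        return -(x ** (-2)) * d

    x, d, eps = 2.0, 3.0, 1e-6
    numeric = d * ((1.0 / (x + eps)) - (1.0 / (x - eps))) / (2 * eps)
    print(inv_back(x, d), numeric)  # both approximately -0.75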
