From 4b90492149a536665645c18e01c8d2eea79c053d Mon Sep 17 00:00:00 2001
From: "github-classroom[bot]"
<66690702+github-classroom[bot]@users.noreply.github.com>
Date: Thu, 11 Sep 2025 12:38:06 +0000
Subject: [PATCH 1/6] GitHub Classroom Autograding Workflow
---
.github/workflows/classroom.yml | 67 +++++++++++++++++++++++++++++++++
1 file changed, 67 insertions(+)
create mode 100644 .github/workflows/classroom.yml
diff --git a/.github/workflows/classroom.yml b/.github/workflows/classroom.yml
new file mode 100644
index 00000000..694e0c44
--- /dev/null
+++ b/.github/workflows/classroom.yml
@@ -0,0 +1,67 @@
+name: Autograding Tests
+'on':
+- workflow_dispatch
+- repository_dispatch
+permissions:
+ checks: write
+ actions: read
+ contents: read
+jobs:
+ run-autograding-tests:
+ runs-on: ubuntu-latest
+ if: github.actor != 'github-classroom[bot]'
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ - name: Setup
+ id: setup
+ uses: classroom-resources/autograding-command-grader@v1
+ with:
+ test-name: Setup
+ setup-command: sudo -H pip3 install -qr requirements.txt; sudo -H pip3 install
+ flake8==5.0.4
+ command: flake8 --ignore "N801, E203, E266, E501, W503, F812, E741, N803,
+ N802, N806" minitorch/ tests/ project/; mypy minitorch/*
+ timeout: 10
+ - name: Task 0.1
+ id: task-0-1
+ uses: classroom-resources/autograding-command-grader@v1
+ with:
+ test-name: Task 0.1
+ setup-command: sudo -H pip3 install -qr requirements.txt
+ command: pytest -m task0_1
+ timeout: 10
+ - name: Task 0.2
+ id: task-0-2
+ uses: classroom-resources/autograding-command-grader@v1
+ with:
+ test-name: Task 0.2
+ setup-command: sudo -H pip3 install -qr requirements.txt
+ command: pytest -m task0_2
+ timeout: 10
+ - name: Task 0.3
+ id: task-0-3
+ uses: classroom-resources/autograding-command-grader@v1
+ with:
+ test-name: Task 0.3
+ setup-command: sudo -H pip3 install -qr requirements.txt
+ command: pytest -m task0_3
+ timeout: 10
+ - name: Task 0.4
+ id: task-0-4
+ uses: classroom-resources/autograding-command-grader@v1
+ with:
+ test-name: Task 0.4
+ setup-command: sudo -H pip3 install -qr requirements.txt
+ command: pytest -m task0_4
+ timeout: 10
+ - name: Autograding Reporter
+ uses: classroom-resources/autograding-grading-reporter@v1
+ env:
+ SETUP_RESULTS: "${{steps.setup.outputs.result}}"
+ TASK-0-1_RESULTS: "${{steps.task-0-1.outputs.result}}"
+ TASK-0-2_RESULTS: "${{steps.task-0-2.outputs.result}}"
+ TASK-0-3_RESULTS: "${{steps.task-0-3.outputs.result}}"
+ TASK-0-4_RESULTS: "${{steps.task-0-4.outputs.result}}"
+ with:
+ runners: setup,task-0-1,task-0-2,task-0-3,task-0-4
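+# Note: each name in `runners` must match a step id above; the reporter reads
+# that runner's result from the corresponding *_RESULTS variable set in `env`.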
From f60774393a0439844cbb708c71492d5b213f6629 Mon Sep 17 00:00:00 2001
From: "github-classroom[bot]"
<66690702+github-classroom[bot]@users.noreply.github.com>
Date: Thu, 11 Sep 2025 12:38:07 +0000
Subject: [PATCH 2/6] GitHub Classroom Feedback
---
.github/.keep | 0
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 .github/.keep
diff --git a/.github/.keep b/.github/.keep
new file mode 100644
index 00000000..e69de29b
From 4f01e5edf8cdc2be2e4e144c94e432b9cb6db1a6 Mon Sep 17 00:00:00 2001
From: "github-classroom[bot]"
<66690702+github-classroom[bot]@users.noreply.github.com>
Date: Thu, 11 Sep 2025 12:38:07 +0000
Subject: [PATCH 3/6] Setting up GitHub Classroom Feedback
From 9038b4dee271cb1fb9cd3e09c506cdc7bc149773 Mon Sep 17 00:00:00 2001
From: "github-classroom[bot]"
<66690702+github-classroom[bot]@users.noreply.github.com>
Date: Thu, 11 Sep 2025 12:38:10 +0000
Subject: [PATCH 4/6] add online IDE url
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 62e4d6ba..cec64585 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,4 @@
+[](https://classroom.github.com/online_ide?assignment_repo_id=20449663&assignment_repo_type=AssignmentRepo)
# MiniTorch Module 0
From f4f60ddd5c0bc73aa0774a2ab58dbbe6555e3d89 Mon Sep 17 00:00:00 2001
From: Krish
Date: Fri, 12 Sep 2025 13:52:51 +0100
Subject: [PATCH 5/6] get operators and tests to pass
---
minitorch/datasets.py | 24 ++++--
minitorch/operators.py | 170 ++++++++++++++++++++++++++++------------
tests/test_operators.py | 40 +++++-----
3 files changed, 157 insertions(+), 77 deletions(-)
diff --git a/minitorch/datasets.py b/minitorch/datasets.py
index b3bd9faa..699cad04 100644
--- a/minitorch/datasets.py
+++ b/minitorch/datasets.py
@@ -67,19 +67,29 @@ def circle(N):
def spiral(N):
-
def x(t):
return t * math.cos(t) / 20.0
def y(t):
return t * math.sin(t) / 20.0
- X = [(x(10.0 * (float(i) / (N // 2))) + 0.5, y(10.0 * (float(i) / (N //
- 2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
- X = X + [(y(-10.0 * (float(i) / (N // 2))) + 0.5, x(-10.0 * (float(i) /
- (N // 2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
+
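+    # Two interleaved spiral arms centered at (0.5, 0.5): the first samples
+    # (t * cos(t), t * sin(t)) / 20, the second negates t and swaps the
+    # coordinates; the second half of the points is labelled class 1.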
+ X = [
+ (x(10.0 * (float(i) / (N // 2))) + 0.5, y(10.0 * (float(i) / (N // 2))) + 0.5)
+ for i in range(5 + 0, 5 + N // 2)
+ ]
+ X = X + [
+ (y(-10.0 * (float(i) / (N // 2))) + 0.5, x(-10.0 * (float(i) / (N // 2))) + 0.5)
+ for i in range(5 + 0, 5 + N // 2)
+ ]
y2 = [0] * (N // 2) + [1] * (N // 2)
return Graph(N, X, y2)
-datasets = {'Simple': simple, 'Diag': diag, 'Split': split, 'Xor': xor,
- 'Circle': circle, 'Spiral': spiral}
+datasets = {
+ "Simple": simple,
+ "Diag": diag,
+ "Split": split,
+ "Xor": xor,
+ "Circle": circle,
+ "Spiral": spiral,
+}
diff --git a/minitorch/operators.py b/minitorch/operators.py
index 37cc7c09..6d6469c2 100644
--- a/minitorch/operators.py
+++ b/minitorch/operators.py
@@ -1,54 +1,124 @@
"""Collection of the core mathematical operators used throughout the code base."""
import math
+from typing import Callable, Iterable, Iterator
-# ## Task 0.1
-from typing import Callable, Iterable
-
-#
-# Implementation of a prelude of elementary functions.
-
-# Mathematical functions:
-# - mul
-# - id
-# - add
-# - neg
-# - lt
-# - eq
-# - max
-# - is_close
-# - sigmoid
-# - relu
-# - log
-# - exp
-# - log_back
-# - inv
-# - inv_back
-# - relu_back
-#
-# For sigmoid calculate as:
-# $f(x) = \frac{1.0}{(1.0 + e^{-x})}$ if x >=0 else $\frac{e^x}{(1.0 + e^{x})}$
-# For is_close:
-# $f(x) = |x - y| < 1e-2$
-
-
-# TODO: Implement for Task 0.1.
-
-
-# ## Task 0.3
-
-# Small practice library of elementary higher-order functions.
-
-# Implement the following core functions
-# - map
-# - zipWith
-# - reduce
-#
-# Use these to implement
-# - negList : negate a list
-# - addLists : add two lists together
-# - sum: sum lists
-# - prod: take the product of lists
-
-
-# TODO: Implement for Task 0.3.
+
+def mul(x: float, y: float) -> float:
+ return x * y
+
+
+def id(x: float) -> float:
+ return x
+
+
+def add(x: float, y: float) -> float:
+ return x + y
+
+
+def neg(x: float) -> float:
+ return -x
+
+
+def lt(x: float, y: float) -> bool:
+    return x < y
+
+
+def max(x: float, y: float) -> float:
+ if x > y:
+ return x
+ else:
+ return y
+
+
+def eq(x: float, y: float) -> bool:
+ return x == y
+
+
+def is_close(x: float, y: float) -> bool:
+    # Per the spec: |x - y| < 1e-2.
+    return abs(x - y) < 1e-2
+
+
+def sigmoid(x: float) -> float:
+    # Stable form: for x < 0 use exp(x) / (1 + exp(x)) so exp never overflows.
+    return 1.0 / (1.0 + math.exp(-x)) if x >= 0 else math.exp(x) / (1.0 + math.exp(x))
+
+
+def relu(x: float) -> float:
+ return max(0, x)
+
+
+def log(x: float) -> float:
+ return math.log(x)
+
+
+def exp(x: float) -> float:
+ return math.exp(x)
+
+
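+# Each *_back helper returns the local derivative scaled by an upstream value b,
+# e.g. log_back(x, b) = b * d/dx log(x) = b / x.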
+def log_back(x: float, b: float) -> float:
+ return b / x
+
+
+def inv(x: float) -> float:
+ return 1.0 / x
+
+
+def inv_back(x: float, b: float) -> float:
+ return -b / (x**2)
+
+
+def relu_back(x: float, b: float) -> float:
+ if x < 0:
+ return 0
+ else:
+ return b
+
+
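+# Task 0.3: higher-order helpers. Unlike the Python builtins, these take the
+# iterable(s) first and the function last.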
+def map(xs: Iterable[float], fn: Callable[[float], float]) -> Iterator[float]:
+ for x in xs:
+ yield fn(x)
+
+
+def zipWith(
+ xs: Iterable[float], ys: Iterable[float], fn: Callable[[float, float], float]
+) -> Iterable[float]:
+ iter_ys = iter(ys)
+ for x in xs:
+ y = next(iter_ys)
+ yield fn(x, y)
+
+
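+# Left fold: seed with the first element, then combine in the remaining ones;
+# an empty iterable reduces to 0.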
+def reduce(xs: Iterable[float], fn: Callable[[float, float], float]) -> float:
+ iter_xs = iter(xs)
+
+ try:
+ result = next(iter_xs)
+ except StopIteration:
+ return 0
+
+ for x in iter_xs:
+ result = fn(result, x)
+ return result
+
+
+def negList(xs: list[float]) -> list[float]:
+ return list(map(xs, lambda x: -x))
+
+
+def addLists(xs: list[float], ys: list[float]) -> list[float]:
+ return list(zipWith(xs, ys, lambda x, y: x + y))
+
+
+def sum(xs: list[float]) -> float:
+ return reduce(xs, lambda x, y: x + y)
+
+
+def prod(xs: list[float]) -> float:
+ return reduce(xs, lambda x, y: x * y)
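+
+
+# Illustrative checks (not part of the assignment):
+#   negList([1.0, -2.0])             -> [-1.0, 2.0]
+#   addLists([1.0, 2.0], [3.0, 4.0]) -> [4.0, 6.0]
+#   sum([1.0, 2.0, 3.0])             -> 6.0
+#   prod([3.0, 4.0])                 -> 12.0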
diff --git a/tests/test_operators.py b/tests/test_operators.py
index f6e555af..0fa67658 100644
--- a/tests/test_operators.py
+++ b/tests/test_operators.py
@@ -107,41 +107,40 @@ def test_sigmoid(a: float) -> None:
* It crosses 0 at 0.5
* It is strictly increasing.
"""
- # TODO: Implement for Task 0.2.
- raise NotImplementedError("Need to implement for Task 0.2")
+    result = sigmoid(a)
+
+    assert 0.0 <= result <= 1.0
+
+    assert sigmoid(0) == 0.5
+
+    # Strictly increasing: each consecutive value must be larger than the last.
+    increasing_floats = [sigmoid(i) for i in range(-5, 5)]
+    assert all(x < y for x, y in zip(increasing_floats, increasing_floats[1:]))
@pytest.mark.task0_2
@given(small_floats, small_floats, small_floats)
def test_transitive(a: float, b: float, c: float) -> None:
"""Test the transitive property of less-than (a < b and b < c implies a < c)"""
- # TODO: Implement for Task 0.2.
- raise NotImplementedError("Need to implement for Task 0.2")
+    if lt(a, b) and lt(b, c):
+        assert lt(a, c)
@pytest.mark.task0_2
-def test_symmetric() -> None:
+@given(small_floats, small_floats)
+def test_symmetric(a: float, b: float) -> None:
"""Write a test that ensures that :func:`minitorch.operators.mul` is symmetric, i.e.
gives the same value regardless of the order of its input.
"""
- # TODO: Implement for Task 0.2.
- raise NotImplementedError("Need to implement for Task 0.2")
+ assert mul(a, b) == mul(b, a)
@pytest.mark.task0_2
-def test_distribute() -> None:
+@given(small_floats, small_floats, small_floats)
+def test_distribute(a: float, b: float, c: float) -> None:
r"""Write a test that ensures that your operators distribute, i.e.
:math:`z \times (x + y) = z \times x + z \times y`
"""
- # TODO: Implement for Task 0.2.
- raise NotImplementedError("Need to implement for Task 0.2")
-
-
-@pytest.mark.task0_2
-def test_other() -> None:
- """Write a test that ensures some other property holds for your functions."""
- # TODO: Implement for Task 0.2.
- raise NotImplementedError("Need to implement for Task 0.2")
+ assert_close(mul(c, add(a, b)), add(mul(c, a), mul(c, b)))
# ## Task 0.3 - Higher-order functions
@@ -168,9 +167,10 @@ def test_sum_distribute(ls1: List[float], ls2: List[float]) -> None:
"""Write a test that ensures that the sum of `ls1` plus the sum of `ls2`
is the same as the sum of each element of `ls1` plus each element of `ls2`.
"""
- # TODO: Implement for Task 0.3.
- raise NotImplementedError("Need to implement for Task 0.3")
-
+ assert_close(
+ minitorch.operators.sum(ls1) + minitorch.operators.sum(ls2),
+ minitorch.operators.sum(minitorch.operators.addLists(ls1, ls2))
+ )
@pytest.mark.task0_3
@given(lists(small_floats))
From f35e102d84aa37bfb824b3241e61f41970b36a90 Mon Sep 17 00:00:00 2001
From: Krish
Date: Fri, 12 Sep 2025 17:29:24 +0100
Subject: [PATCH 6/6] make everything pass
---
minitorch/module.py | 26 ++++++++++++++++++--------
minitorch/operators.py | 4 ++--
tests/test_operators.py | 3 ++-
3 files changed, 22 insertions(+), 11 deletions(-)
diff --git a/minitorch/module.py b/minitorch/module.py
index 0a66058c..77460cad 100644
--- a/minitorch/module.py
+++ b/minitorch/module.py
@@ -31,13 +31,15 @@ def modules(self) -> Sequence[Module]:
def train(self) -> None:
"""Set the mode of this module and all descendent modules to `train`."""
- # TODO: Implement for Task 0.4.
- raise NotImplementedError("Need to implement for Task 0.4")
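+        # Flip this module's flag, then recurse into the direct children.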
+ self.training = True
+ for m in self.modules():
+ m.train()
def eval(self) -> None:
"""Set the mode of this module and all descendent modules to `eval`."""
- # TODO: Implement for Task 0.4.
- raise NotImplementedError("Need to implement for Task 0.4")
+ self.training = False
+ for m in self.modules():
+ m.eval()
def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
"""Collect all the parameters of this module and its descendents.
@@ -47,13 +49,21 @@ def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
The name and `Parameter` of each ancestor parameter.
"""
- # TODO: Implement for Task 0.4.
- raise NotImplementedError("Need to implement for Task 0.4")
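+        # This module's own parameters first, then each descendant's, with the
+        # child module's name prefixed onto the parameter name ("<child>.<param>").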
+ total = list(self._parameters.items())
+ for mod_name, mod in self._modules.items():
+ params = [
+ (f"{mod_name}.{pname}", pval) for pname, pval in mod.named_parameters()
+ ]
+ total.extend(params)
+ return total
def parameters(self) -> Sequence[Parameter]:
"""Enumerate over all the parameters of this module and its descendents."""
- # TODO: Implement for Task 0.4.
- raise NotImplementedError("Need to implement for Task 0.4")
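+        # Own parameter values first; modules() yields the direct children and
+        # each child's parameters() call recurses further down.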
+ total = list(self._parameters.values())
+ for m in self.modules():
+ params = m.parameters()
+ total.extend(params)
+ return total
def add_parameter(self, k: str, v: Any) -> Parameter:
"""Manually add a parameter. Useful helper for scalar parameters.
diff --git a/minitorch/operators.py b/minitorch/operators.py
index 6d6469c2..1725c7c2 100644
--- a/minitorch/operators.py
+++ b/minitorch/operators.py
@@ -97,12 +97,12 @@ def zipWith(
def reduce(xs: Iterable[float], fn: Callable[[float, float], float]) -> float:
iter_xs = iter(xs)
-
+
try:
result = next(iter_xs)
except StopIteration:
return 0
-
+
for x in iter_xs:
result = fn(result, x)
return result
diff --git a/tests/test_operators.py b/tests/test_operators.py
index 0fa67658..3ef48a25 100644
--- a/tests/test_operators.py
+++ b/tests/test_operators.py
@@ -169,9 +169,10 @@ def test_sum_distribute(ls1: List[float], ls2: List[float]) -> None:
"""
assert_close(
minitorch.operators.sum(ls1) + minitorch.operators.sum(ls2),
- minitorch.operators.sum(minitorch.operators.addLists(ls1, ls2))
+ minitorch.operators.sum(minitorch.operators.addLists(ls1, ls2)),
)
+
@pytest.mark.task0_3
@given(lists(small_floats))
def test_sum(ls: List[float]) -> None: