Empty file added .github/.keep
Empty file.
67 changes: 67 additions & 0 deletions .github/workflows/classroom.yml
@@ -0,0 +1,67 @@
name: Autograding Tests
'on':
- workflow_dispatch
- repository_dispatch
permissions:
checks: write
actions: read
contents: read
jobs:
run-autograding-tests:
runs-on: ubuntu-latest
if: github.actor != 'github-classroom[bot]'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup
id: setup
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Setup
setup-command: sudo -H pip3 install -qr requirements.txt; sudo -H pip3 install
flake8==5.0.4
command: flake8 --ignore "N801, E203, E266, E501, W503, F812, E741, N803,
N802, N806" minitorch/ tests/ project/; mypy minitorch/*
timeout: 10
- name: Task 0.1
id: task-0-1
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.1
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_1
timeout: 10
- name: Task 0.2
id: task-0-2
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.2
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_2
timeout: 10
- name: Task 0.3
id: task-0-3
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.3
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_3
timeout: 10
- name: Task 0.4
id: task-0-4
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.4
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_4
timeout: 10
- name: Autograding Reporter
uses: classroom-resources/autograding-grading-reporter@v1
env:
SETUP_RESULTS: "${{steps.setup.outputs.result}}"
TASK-0-1_RESULTS: "${{steps.task-0-1.outputs.result}}"
TASK-0-2_RESULTS: "${{steps.task-0-2.outputs.result}}"
TASK-0-3_RESULTS: "${{steps.task-0-3.outputs.result}}"
TASK-0-4_RESULTS: "${{steps.task-0-4.outputs.result}}"
with:
runners: setup,task-0-1,task-0-2,task-0-3,task-0-4
1 change: 1 addition & 0 deletions README.md
@@ -1,3 +1,4 @@
[![Open in Visual Studio Code](https://classroom.github.com/assets/open-in-vscode-2e0aaae1b6195c2367325f4f02e2d04e9abb55f0b24a779b69b11b9e10269abc.svg)](https://classroom.github.com/online_ide?assignment_repo_id=20449663&assignment_repo_type=AssignmentRepo)
# MiniTorch Module 0

<img src="https://minitorch.github.io/minitorch.svg" width="50%">
24 changes: 17 additions & 7 deletions minitorch/datasets.py
@@ -67,19 +67,29 @@ def circle(N):


def spiral(N):

def x(t):
return t * math.cos(t) / 20.0

def y(t):
return t * math.sin(t) / 20.0

X = [
(x(10.0 * (float(i) / (N // 2))) + 0.5, y(10.0 * (float(i) / (N // 2))) + 0.5)
for i in range(5 + 0, 5 + N // 2)
]
X = X + [
(y(-10.0 * (float(i) / (N // 2))) + 0.5, x(-10.0 * (float(i) / (N // 2))) + 0.5)
for i in range(5 + 0, 5 + N // 2)
]
y2 = [0] * (N // 2) + [1] * (N // 2)
return Graph(N, X, y2)


datasets = {
"Simple": simple,
"Diag": diag,
"Split": split,
"Xor": xor,
"Circle": circle,
"Spiral": spiral,
}
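
A minimal usage sketch of the datasets registry defined above (the Graph attribute names N, X, and y are assumed from the Graph(N, X, y2) call in spiral):

from minitorch.datasets import datasets

# Build 150 spiral points via the registry; each entry maps a dataset name to a
# generator function that returns a Graph.
graph = datasets["Spiral"](150)
print(graph.N)       # 150
print(graph.X[0])    # first (x_1, x_2) point, shifted to sit around 0.5
print(graph.y[:5])   # labels: 0 for the first half of the points, 1 for the second
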
26 changes: 18 additions & 8 deletions minitorch/module.py
@@ -31,13 +31,15 @@ def modules(self) -> Sequence[Module]:

def train(self) -> None:
"""Set the mode of this module and all descendent modules to `train`."""
self.training = True
for m in self.modules():
m.train()

def eval(self) -> None:
"""Set the mode of this module and all descendent modules to `eval`."""
self.training = False
for m in self.modules():
m.eval()

def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
"""Collect all the parameters of this module and its descendents.
@@ -47,13 +49,21 @@ def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
The name and `Parameter` of each ancestor parameter.

"""
total = list(self._parameters.items())
for mod_name, mod in self._modules.items():
params = [
(f"{mod_name}.{pname}", pval) for pname, pval in mod.named_parameters()
]
total.extend(params)
return total

def parameters(self) -> Sequence[Parameter]:
"""Enumerate over all the parameters of this module and its descendents."""
total = list(self._parameters.values())
for m in self.modules():
params = m.parameters()
total.extend(params)
return total

def add_parameter(self, k: str, v: Any) -> Parameter:
"""Manually add a parameter. Useful helper for scalar parameters.
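
A short usage sketch of the new Module methods (Child and Net below are hypothetical; this assumes the minitorch starter code's behaviour of registering Parameter and Module attributes in self._parameters and self._modules):

from minitorch.module import Module, Parameter

class Child(Module):
    def __init__(self):
        super().__init__()
        self.bias = Parameter(0.0)

class Net(Module):
    def __init__(self):
        super().__init__()
        self.weight = Parameter(1.0)
        self.layer = Child()

net = Net()
net.train()        # sets training=True on net and, recursively, on net.layer
assert net.training and net.layer.training
# named_parameters prefixes child parameters with the attribute name:
assert [name for name, _ in net.named_parameters()] == ["weight", "layer.bias"]
assert len(net.parameters()) == 2
net.eval()         # switches everything back to training=False
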
170 changes: 120 additions & 50 deletions minitorch/operators.py
@@ -1,54 +1,124 @@
"""Collection of the core mathematical operators used throughout the code base."""

import math
from typing import Callable, Iterable, Iterator

# ## Task 0.1

#
# Implementation of a prelude of elementary functions.

# Mathematical functions:
# - mul
# - id
# - add
# - neg
# - lt
# - eq
# - max
# - is_close
# - sigmoid
# - relu
# - log
# - exp
# - log_back
# - inv
# - inv_back
# - relu_back
#
# For sigmoid calculate as:
# $f(x) = \frac{1.0}{(1.0 + e^{-x})}$ if x >=0 else $\frac{e^x}{(1.0 + e^{x})}$
# For is_close:
# $f(x) = |x - y| < 1e-2$




# ## Task 0.3

# Small practice library of elementary higher-order functions.

# Implement the following core functions
# - map
# - zipWith
# - reduce
#
# Use these to implement
# - negList : negate a list
# - addLists : add two lists together
# - sum: sum lists
# - prod: take the product of lists



def mul(x: float, y: float) -> float:
return x * y


def id(x: float) -> float:
return x


def add(x: float, y: float) -> float:
return x + y


def neg(x: float) -> float:
return -x


def lt(x: float, y: float) -> bool:
    return x < y


def max(x: float, y: float) -> float:
if x > y:
return x
else:
return y


def eq(x: float, y: float) -> bool:
return x == y


def is_close(x: float, y: float) -> bool:
    return abs(x - y) < 1e-2


def sigmoid(x: float) -> float:
    # Two-branch form from the note above: never evaluates exp on a large
    # positive argument, so it stays stable for very negative x.
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    return math.exp(x) / (1.0 + math.exp(x))


def relu(x: float) -> float:
return max(0, x)


def log(x: float) -> float:
return math.log(x)


def exp(x: float) -> float:
return math.exp(x)


def log_back(x: float, b: float) -> float:
return b / x


def inv(x: float) -> float:
return 1.0 / x


def inv_back(x: float, b: float) -> float:
return -b / (x**2)


def relu_back(x: float, b: float) -> float:
if x < 0:
return 0
else:
return b


def map(xs: Iterable[float], fn: Callable[[float], float]) -> Iterator[float]:
for x in xs:
yield fn(x)


def zipWith(
    xs: Iterable[float], ys: Iterable[float], fn: Callable[[float, float], float]
) -> Iterator[float]:
    # zip stops at the shorter input; a bare next() inside a generator would
    # turn StopIteration into RuntimeError (PEP 479) when the lengths differ.
    for x, y in zip(xs, ys):
        yield fn(x, y)


def reduce(xs: Iterable[float], fn: Callable[[float, float], float]) -> float:
iter_xs = iter(xs)

try:
result = next(iter_xs)
    except StopIteration:
        # Empty input: fall back to 0.0 (so sum([]) == 0.0).
        return 0.0

for x in iter_xs:
result = fn(result, x)
return result


def negList(xs: list[float]) -> list[float]:
return list(map(xs, lambda x: -x))


def addLists(xs: list[float], ys: list[float]) -> list[float]:
return list(zipWith(xs, ys, lambda x, y: x + y))


def sum(xs: list[float]) -> float:
return reduce(xs, lambda x, y: x + y)


def prod(xs: list[float]) -> float:
return reduce(xs, lambda x, y: x * y)
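
A few hypothetical sanity checks for the operators above, assuming the module is importable as minitorch.operators:

from minitorch.operators import addLists, is_close, negList, prod, sigmoid, sum

assert negList([1.0, -2.0, 3.0]) == [-1.0, 2.0, -3.0]
assert addLists([1.0, 2.0], [3.0, 4.0]) == [4.0, 6.0]
assert sum([1.0, 2.0, 3.0]) == 6.0
assert prod([2.0, 3.0, 4.0]) == 24.0
assert is_close(sigmoid(0.0), 0.5)
assert is_close(sigmoid(-1000.0), 0.0)  # the x < 0 branch keeps exp() from overflowing
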