Empty file added .github/.keep
Empty file.
67 changes: 67 additions & 0 deletions .github/workflows/classroom.yml
@@ -0,0 +1,67 @@
name: Autograding Tests
'on':
- workflow_dispatch
- repository_dispatch
permissions:
checks: write
actions: read
contents: read
jobs:
run-autograding-tests:
runs-on: ubuntu-latest
if: github.actor != 'github-classroom[bot]'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup
id: setup
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Setup
setup-command: sudo -H pip3 install -qr requirements.txt; sudo -H pip3 install
flake8==5.0.4
command: flake8 --ignore "N801, E203, E266, E501, W503, F812, E741, N803,
N802, N806" minitorch/ tests/ project/; mypy minitorch/*
timeout: 10
- name: Task 0.1
id: task-0-1
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.1
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_1
timeout: 10
- name: Task 0.2
id: task-0-2
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.2
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_2
timeout: 10
- name: Task 0.3
id: task-0-3
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.3
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_3
timeout: 10
- name: Task 0.4
id: task-0-4
uses: classroom-resources/autograding-command-grader@v1
with:
test-name: Task 0.4
setup-command: sudo -H pip3 install -qr requirements.txt
command: pytest -m task0_4
timeout: 10
- name: Autograding Reporter
uses: classroom-resources/autograding-grading-reporter@v1
env:
SETUP_RESULTS: "${{steps.setup.outputs.result}}"
TASK-0-1_RESULTS: "${{steps.task-0-1.outputs.result}}"
TASK-0-2_RESULTS: "${{steps.task-0-2.outputs.result}}"
TASK-0-3_RESULTS: "${{steps.task-0-3.outputs.result}}"
TASK-0-4_RESULTS: "${{steps.task-0-4.outputs.result}}"
with:
runners: setup,task-0-1,task-0-2,task-0-3,task-0-4
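Note on the autograding commands above: each `pytest -m task0_X` run selects only the tests tagged with the matching marker. A minimal sketch of such a marked test, assuming the task markers are registered in the project's pytest configuration (the test name and assertion below are hypothetical, not taken from this repository's test suite):

import pytest

from minitorch import operators


@pytest.mark.task0_1
def test_add_matches_python_add() -> None:
    # Only tests carrying this marker are selected by `pytest -m task0_1`.
    assert operators.add(2.0, 3.0) == 5.0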
40 changes: 40 additions & 0 deletions .github/workflows/minitorch.yml
@@ -0,0 +1,40 @@
name: CI (Module 0)

on:
push:
pull_request:

jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"

- name: Install dependencies
run: |
python -m pip install --upgrade pip
# install requirements.txt if present
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
# install the package itself (editable) plus test dependencies
pip install -e .
pip install pytest hypothesis

- name: Lint (optional, can be removed)
run: |
pip install flake8 pep8-naming
flake8 --ignore "N801,E203,E266,E501,W503,F812,F401,F841,E741,N803,N802,N806" minitorch/ tests/ project/

- name: Run tests (Module 0 only)
run: |
echo "Module 0"
pytest -q -m task0_1
pytest -q -m task0_2
pytest -q -m task0_3
pytest -q -m task0_4
1 change: 1 addition & 0 deletions README.md
@@ -1,3 +1,4 @@
[![Open in Visual Studio Code](https://classroom.github.com/assets/open-in-vscode-2e0aaae1b6195c2367325f4f02e2d04e9abb55f0b24a779b69b11b9e10269abc.svg)](https://classroom.github.com/online_ide?assignment_repo_id=20760858&assignment_repo_type=AssignmentRepo)
# MiniTorch Module 0

<img src="https://minitorch.github.io/minitorch.svg" width="50%">
27 changes: 20 additions & 7 deletions minitorch/datasets.py
@@ -73,13 +73,26 @@ def x(t):

def y(t):
return t * math.sin(t) / 20.0
X = [(x(10.0 * (float(i) / (N // 2))) + 0.5, y(10.0 * (float(i) / (N //
2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
X = X + [(y(-10.0 * (float(i) / (N // 2))) + 0.5, x(-10.0 * (float(i) /
(N // 2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
y2 = [0] * (N // 2) + [1] * (N // 2)

half = N // 2
idx_range = range(5, 5 + half)

t_pos = [10.0 * (float(i) / half) for i in idx_range]
t_neg = [-10.0 * (float(i) / half) for i in idx_range]

X1 = [(x(t) + 0.5, y(t) + 0.5) for t in t_pos]
X2 = [(y(t) + 0.5, x(t) + 0.5) for t in t_neg]

X = X1 + X2
y2 = [0] * half + [1] * half
return Graph(N, X, y2)


datasets = {'Simple': simple, 'Diag': diag, 'Split': split, 'Xor': xor,
'Circle': circle, 'Spiral': spiral}
datasets = {
"Simple": simple,
"Diag": diag,
"Split": split,
"Xor": xor,
"Circle": circle,
"Spiral": spiral,
}
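A quick usage sketch of the refactored spiral generator, assuming Graph is the simple container used throughout minitorch/datasets.py with fields N, X, and y:

from minitorch.datasets import spiral

graph = spiral(100)
assert graph.N == 100
assert len(graph.X) == 100              # 50 points per spiral arm
assert graph.y == [0] * 50 + [1] * 50   # first arm labeled 0, second labeled 1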
27 changes: 18 additions & 9 deletions minitorch/module.py
@@ -1,6 +1,6 @@
from __future__ import annotations

from typing import Any, Dict, Optional, Sequence, Tuple
from typing import Any, Dict, Optional, Sequence, Tuple, List


class Module:
@@ -31,13 +31,15 @@ def modules(self) -> Sequence[Module]:

def train(self) -> None:
"""Set the mode of this module and all descendent modules to `train`."""
# TODO: Implement for Task 0.4.
raise NotImplementedError("Need to implement for Task 0.4")
self.training = True
for child in self._modules.values():
child.train()

def eval(self) -> None:
"""Set the mode of this module and all descendent modules to `eval`."""
# TODO: Implement for Task 0.4.
raise NotImplementedError("Need to implement for Task 0.4")
self.training = False
for child in self._modules.values():
child.eval()

def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
"""Collect all the parameters of this module and its descendents.
@@ -47,13 +49,20 @@ def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
The name and `Parameter` of each ancestor parameter.

"""
# TODO: Implement for Task 0.4.
raise NotImplementedError("Need to implement for Task 0.4")
out: List[Tuple[str, Parameter]] = []

for name, p in self._parameters.items():
out.append((name, p))

for child_name, child_mod in self._modules.items():
for sub_name, p in child_mod.named_parameters():
out.append((f"{child_name}.{sub_name}", p))

return out

def parameters(self) -> Sequence[Parameter]:
"""Enumerate over all the parameters of this module and its descendents."""
# TODO: Implement for Task 0.4.
raise NotImplementedError("Need to implement for Task 0.4")
return [p for _, p in self.named_parameters()]

def add_parameter(self, k: str, v: Any) -> Parameter:
"""Manually add a parameter. Useful helper for scalar parameters.
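A small sketch of how the filled-in Module methods compose, assuming the usual minitorch behavior where assigning a Module or Parameter as an attribute registers it in _modules / _parameters (the class and attribute names below are hypothetical):

import minitorch


class Child(minitorch.Module):
    def __init__(self) -> None:
        super().__init__()
        self.weight = minitorch.Parameter(1.0)


class Net(minitorch.Module):
    def __init__(self) -> None:
        super().__init__()
        self.layer = Child()
        self.bias = minitorch.Parameter(0.0)


net = Net()
net.train()                      # propagates training=True to net.layer
assert net.training and net.layer.training

# named_parameters() prefixes descendant parameters with the child's name.
print([name for name, _ in net.named_parameters()])
# expected: ['bias', 'layer.weight']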
117 changes: 114 additions & 3 deletions minitorch/operators.py
Expand Up @@ -3,7 +3,7 @@
import math

# ## Task 0.1
from typing import Callable, Iterable
from typing import Callable, Iterable, List, TypeVar

#
# Implementation of a prelude of elementary functions.
@@ -32,7 +32,73 @@
# $f(x) = |x - y| < 1e-2$


# TODO: Implement for Task 0.1.
def mul(x: float, y: float) -> float:
return x * y


def id(x: float) -> float:
return x


def add(x: float, y: float) -> float:
return x + y


def neg(x: float) -> float:
return -x


def lt(x: float, y: float) -> float:
return 1.0 if x < y else 0.0


def eq(x: float, y: float) -> float:
return 1.0 if x == y else 0.0


def max(x: float, y: float) -> float:
return x if x > y else y


def is_close(x: float, y: float) -> bool:
return abs(x - y) < 1e-2


def sigmoid(x: float) -> float:
    # Numerically stable sigmoid: never exponentiate a large positive value.
    if x >= 0.0:
        z = math.exp(-x)
        return 1.0 / (1.0 + z)
    else:
        z = math.exp(x)
        return z / (1.0 + z)


def relu(x: float) -> float:
return x if x > 0.0 else 0.0


def log(x: float) -> float:
return math.log(x)


def exp(x: float) -> float:
return math.exp(x)


def inv(x: float) -> float:
return 1.0 / x


def log_back(a: float, b: float) -> float:
    # d/da log(a) = 1/a, scaled by the upstream gradient b.
    return b / a


def inv_back(a: float, b: float) -> float:
    # d/da (1/a) = -1/a^2, scaled by the upstream gradient b.
    return -b / (a * a)


def relu_back(a: float, b: float) -> float:
    # d/da relu(a) is 1 for a > 0 and 0 otherwise, scaled by the upstream gradient b.
    return b if a > 0.0 else 0.0


# ## Task 0.3
@@ -51,4 +117,49 @@
# - prod: take the product of lists


# TODO: Implement for Task 0.3.
T = TypeVar("T")
U = TypeVar("U")
V = TypeVar("V")


def map(fn: Callable[[T], U], it: Iterable[T]) -> List[U]:
out: List[U] = []
for v in it:
out.append(fn(v))
return out


def zipWith(fn: Callable[[T, U], V], a: Iterable[T], b: Iterable[U]) -> List[V]:
    # Combine elements pairwise; stops at the shorter of the two iterables.
    out: List[V] = []
    for va, vb in zip(a, b):
        out.append(fn(va, vb))
    return out


def reduce(fn: Callable[[T, T], T], it: Iterable[T], start: T) -> T:
acc: T = start
for v in it:
acc = fn(acc, v)
return acc


def negList(ls: Iterable[float]) -> List[float]:
return map(neg, ls)


def addLists(a: Iterable[float], b: Iterable[float]) -> List[float]:
return zipWith(add, a, b)


def sum(ls: Iterable[float]) -> float:
return reduce(add, ls, 0.0)


def prod(ls: Iterable[float]) -> float:
return reduce(mul, ls, 1.0)
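A short demonstration of the operators added above (values chosen arbitrarily for illustration; `sum` is aliased because it shadows the builtin):

from minitorch.operators import addLists, is_close, negList, prod, sigmoid, sum as op_sum

print(negList([1.0, -2.0, 3.0]))           # [-1.0, 2.0, -3.0]
print(addLists([1.0, 2.0], [10.0, 20.0]))  # [11.0, 22.0]
print(op_sum([1.0, 2.0, 3.0]))             # 6.0
print(prod([1.0, 2.0, 3.0, 4.0]))          # 24.0
print(is_close(sigmoid(0.0), 0.5))         # True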