diff --git a/.github/.keep b/.github/.keep
new file mode 100644
index 00000000..e69de29b
diff --git a/.github/workflows/classroom.yml b/.github/workflows/classroom.yml
new file mode 100644
index 00000000..694e0c44
--- /dev/null
+++ b/.github/workflows/classroom.yml
@@ -0,0 +1,67 @@
+name: Autograding Tests
+'on':
+- workflow_dispatch
+- repository_dispatch
+permissions:
+  checks: write
+  actions: read
+  contents: read
+jobs:
+  run-autograding-tests:
+    runs-on: ubuntu-latest
+    if: github.actor != 'github-classroom[bot]'
+    steps:
+    - name: Checkout code
+      uses: actions/checkout@v4
+    - name: Setup
+      id: setup
+      uses: classroom-resources/autograding-command-grader@v1
+      with:
+        test-name: Setup
+        setup-command: sudo -H pip3 install -qr requirements.txt; sudo -H pip3 install
+          flake8==5.0.4
+        command: flake8 --ignore "N801, E203, E266, E501, W503, F812, E741, N803,
+          N802, N806" minitorch/ tests/ project/; mypy minitorch/*
+        timeout: 10
+    - name: Task 0.1
+      id: task-0-1
+      uses: classroom-resources/autograding-command-grader@v1
+      with:
+        test-name: Task 0.1
+        setup-command: sudo -H pip3 install -qr requirements.txt
+        command: pytest -m task0_1
+        timeout: 10
+    - name: Task 0.2
+      id: task-0-2
+      uses: classroom-resources/autograding-command-grader@v1
+      with:
+        test-name: Task 0.2
+        setup-command: sudo -H pip3 install -qr requirements.txt
+        command: pytest -m task0_2
+        timeout: 10
+    - name: Task 0.3
+      id: task-0-3
+      uses: classroom-resources/autograding-command-grader@v1
+      with:
+        test-name: Task 0.3
+        setup-command: sudo -H pip3 install -qr requirements.txt
+        command: pytest -m task0_3
+        timeout: 10
+    - name: Task 0.4
+      id: task-0-4
+      uses: classroom-resources/autograding-command-grader@v1
+      with:
+        test-name: Task 0.4
+        setup-command: sudo -H pip3 install -qr requirements.txt
+        command: pytest -m task0_4
+        timeout: 10
+    - name: Autograding Reporter
+      uses: classroom-resources/autograding-grading-reporter@v1
+      env:
+        SETUP_RESULTS: "${{steps.setup.outputs.result}}"
+        TASK-0-1_RESULTS: "${{steps.task-0-1.outputs.result}}"
+        TASK-0-2_RESULTS: "${{steps.task-0-2.outputs.result}}"
+        TASK-0-3_RESULTS: "${{steps.task-0-3.outputs.result}}"
+        TASK-0-4_RESULTS: "${{steps.task-0-4.outputs.result}}"
+      with:
+        runners: setup,task-0-1,task-0-2,task-0-3,task-0-4
diff --git a/.github/workflows/minitorch.yml b/.github/workflows/minitorch.yml
new file mode 100644
index 00000000..73ad015e
--- /dev/null
+++ b/.github/workflows/minitorch.yml
@@ -0,0 +1,41 @@
+name: CI (Module 0)
+
+on:
+  push:
+  pull_request:
+
+jobs:
+  tests:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+          cache: "pip"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          # install requirements.txt if present
+          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+          # install the package itself (editable) plus the test dependencies
+          pip install -e .
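+          # NB: the editable install assumes a setup.py/pyproject.toml at the repo root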
+          pip install pytest hypothesis
+
+      - name: Lint (optional)
+        run: |
+          pip install flake8 pep8-naming
+          flake8 --ignore "N801,E203,E266,E501,W503,F812,F401,F841,E741,N803,N802,N806" minitorch/ tests/ project/
+
+      - name: Run tests (Module 0 only)
+        run: |
+          echo "Module 0"
+          pytest -q -m task0_1
+          pytest -q -m task0_2
+          pytest -q -m task0_3
+          pytest -q -m task0_4
diff --git a/README.md b/README.md
index 62e4d6ba..345a3201 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,4 @@
+[![Open in Visual Studio Code](https://classroom.github.com/assets/open-in-vscode-2e0aaae1b6195c2367325f4f02e2d04e9abb55f0b24a779b69b11b9e10269abc.svg)](https://classroom.github.com/online_ide?assignment_repo_id=20760858&assignment_repo_type=AssignmentRepo)
 # MiniTorch Module 0
diff --git a/minitorch/datasets.py b/minitorch/datasets.py
index b3bd9faa..0d45fd44 100644
--- a/minitorch/datasets.py
+++ b/minitorch/datasets.py
@@ -73,13 +73,26 @@ def x(t):
     def y(t):
         return t * math.sin(t) / 20.0
 
-    X = [(x(10.0 * (float(i) / (N // 2))) + 0.5, y(10.0 * (float(i) / (N //
-        2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
-    X = X + [(y(-10.0 * (float(i) / (N // 2))) + 0.5, x(-10.0 * (float(i) /
-        (N // 2))) + 0.5) for i in range(5 + 0, 5 + N // 2)]
-    y2 = [0] * (N // 2) + [1] * (N // 2)
+
+    half = N // 2
+    idx_range = range(5, 5 + half)
+
+    t_pos = [10.0 * (float(i) / half) for i in idx_range]
+    t_neg = [-10.0 * (float(i) / half) for i in idx_range]
+
+    X1 = [(x(t) + 0.5, y(t) + 0.5) for t in t_pos]
+    X2 = [(y(t) + 0.5, x(t) + 0.5) for t in t_neg]
+
+    X = X1 + X2
+    y2 = [0] * half + [1] * half
     return Graph(N, X, y2)
 
 
-datasets = {'Simple': simple, 'Diag': diag, 'Split': split, 'Xor': xor,
-    'Circle': circle, 'Spiral': spiral}
+datasets = {
+    "Simple": simple,
+    "Diag": diag,
+    "Split": split,
+    "Xor": xor,
+    "Circle": circle,
+    "Spiral": spiral,
+}
diff --git a/minitorch/module.py b/minitorch/module.py
index 0a66058c..dcdcffab 100644
--- a/minitorch/module.py
+++ b/minitorch/module.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Dict, Optional, Sequence, Tuple
+from typing import Any, Dict, Optional, Sequence, Tuple, List
 
 
 class Module:
@@ -31,13 +31,15 @@ def modules(self) -> Sequence[Module]:
 
     def train(self) -> None:
         """Set the mode of this module and all descendent modules to `train`."""
-        # TODO: Implement for Task 0.4.
-        raise NotImplementedError("Need to implement for Task 0.4")
+        self.training = True
+        for child in self._modules.values():
+            child.train()
 
     def eval(self) -> None:
         """Set the mode of this module and all descendent modules to `eval`."""
-        # TODO: Implement for Task 0.4.
-        raise NotImplementedError("Need to implement for Task 0.4")
+        self.training = False
+        for child in self._modules.values():
+            child.eval()
 
     def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
         """Collect all the parameters of this module and its descendents.
 
@@ -47,13 +49,21 @@ def named_parameters(self) -> Sequence[Tuple[str, Parameter]]:
 
         Returns:
             The name and `Parameter` of each ancestor parameter.
 
         """
-        # TODO: Implement for Task 0.4.
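+        # Dotted "child.param" naming below matches what the Module 0 tests expect.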
-        raise NotImplementedError("Need to implement for Task 0.4")
+        out: List[Tuple[str, Parameter]] = []
+
+        for name, p in self._parameters.items():
+            out.append((name, p))
+
+        for child_name, child_mod in self._modules.items():
+            for sub_name, p in child_mod.named_parameters():
+                out.append((f"{child_name}.{sub_name}", p))
+
+        return out
 
     def parameters(self) -> Sequence[Parameter]:
         """Enumerate over all the parameters of this module and its descendents."""
-        # TODO: Implement for Task 0.4.
-        raise NotImplementedError("Need to implement for Task 0.4")
+        return [p for _, p in self.named_parameters()]
 
     def add_parameter(self, k: str, v: Any) -> Parameter:
         """Manually add a parameter. Useful helper for scalar parameters.
diff --git a/minitorch/operators.py b/minitorch/operators.py
index 37cc7c09..89bb04ab 100644
--- a/minitorch/operators.py
+++ b/minitorch/operators.py
@@ -3,7 +3,7 @@
 import math
 
 # ## Task 0.1
-from typing import Callable, Iterable
+from typing import Callable, Iterable, List, TypeVar
 
 #
 # Implementation of a prelude of elementary functions.
@@ -32,7 +32,73 @@
 # $f(x) = |x - y| < 1e-2$
 
 
-# TODO: Implement for Task 0.1.
+def mul(x: float, y: float) -> float:
+    return x * y
+
+
+def id(x: float) -> float:
+    return x
+
+
+def add(x: float, y: float) -> float:
+    return x + y
+
+
+def neg(x: float) -> float:
+    return -x
+
+
+def lt(x: float, y: float) -> float:
+    return 1.0 if x < y else 0.0
+
+
+def eq(x: float, y: float) -> float:
+    return 1.0 if x == y else 0.0
+
+
+def max(x: float, y: float) -> float:
+    return x if x > y else y
+
+
+def is_close(x: float, y: float) -> bool:
+    return abs(x - y) < 1e-2
+
+
+def sigmoid(x: float) -> float:
+    if x >= 0.0:  # split on the sign so math.exp() never overflows
+        z = math.exp(-x)
+        return 1.0 / (1.0 + z)
+    else:
+        z = math.exp(x)
+        return z / (1.0 + z)
+
+
+def relu(x: float) -> float:
+    return x if x > 0.0 else 0.0
+
+
+def log(x: float) -> float:
+    return math.log(x)
+
+
+def exp(x: float) -> float:
+    return math.exp(x)
+
+
+def inv(x: float) -> float:
+    return 1.0 / x
+
+
+def log_back(a: float, b: float) -> float:
+    return b / a  # d(log a)/da = 1/a, times the upstream grad b
+
+
+def inv_back(a: float, b: float) -> float:
+    return -b / (a * a)  # d(1/a)/da = -1/a^2, times the upstream grad b
+
+
+def relu_back(a: float, b: float) -> float:
+    return b if a > 0.0 else 0.0  # grad passes through only where a > 0
 
 
 # ## Task 0.3
@@ -51,4 +117,45 @@
 # - prod: take the product of lists
 
 
-# TODO: Implement for Task 0.3.
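+# A rough usage sketch (illustrative only, not part of the assignment spec):
+#   map(neg, [1.0, 2.0])              -> [-1.0, -2.0]
+#   zipWith(add, [1.0, 2.0], [3.0])   -> [4.0]
+#   reduce(mul, [1.0, 2.0, 3.0], 1.0) -> 6.0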
+T = TypeVar("T") +U = TypeVar("U") + + +def map(fn: Callable[[T], U], it: Iterable[T]) -> List[U]: + out: List[U] = [] + for v in it: + out.append(fn(v)) + return out + + +def zipWith(fn: Callable[[T, U], T], a: Iterable[T], b: Iterable[U]) -> List[T]: + out: List[T] = [] + ia = iter(a) + ib = iter(b) + while True: + try: + va = next(ia) + vb = next(ib) + except StopIteration: + break + out.append(fn(va, vb)) + return out + + +def reduce(fn: Callable[[T, T], T], it: Iterable[T], start: T) -> T: + acc: T = start + for v in it: + acc = fn(acc, v) + return acc + + +def negList(ls: Iterable[float]) -> List[float]: + return map(neg, ls) + + +def addLists(a: Iterable[float], b: Iterable[float]) -> List[float]: + return zipWith(add, a, b) + + +def sum(ls: Iterable[float]) -> float: + return reduce(add, ls, 0.0) + + +def prod(ls: Iterable[float]) -> float: + return reduce(mul, ls, 1.0) diff --git a/tests/test_operators.py b/tests/test_operators.py index f6e555af..1f7c6341 100644 --- a/tests/test_operators.py +++ b/tests/test_operators.py @@ -1,7 +1,7 @@ from typing import Callable, List, Tuple import pytest -from hypothesis import given +from hypothesis import given, assume from hypothesis.strategies import lists from minitorch import MathTest @@ -107,41 +107,51 @@ def test_sigmoid(a: float) -> None: * It crosses 0 at 0.5 * It is strictly increasing. """ - # TODO: Implement for Task 0.2. - raise NotImplementedError("Need to implement for Task 0.2") + s = sigmoid(a) + assert 0.0 <= s <= 1.0 + assert_close(1.0 - s, sigmoid(-a)) + assert_close(sigmoid(0.0), 0.5) + eps = 1e-3 + s_left = sigmoid(a - eps) + s_right = sigmoid(a + eps) + assert s_left <= s <= s_right @pytest.mark.task0_2 @given(small_floats, small_floats, small_floats) def test_transitive(a: float, b: float, c: float) -> None: """Test the transitive property of less-than (a < b and b < c implies a < c)""" - # TODO: Implement for Task 0.2. - raise NotImplementedError("Need to implement for Task 0.2") + if lt(a, b) == 1.0 and lt(b, c) == 1.0: + assert lt(a, c) == 1.0 @pytest.mark.task0_2 -def test_symmetric() -> None: +@given(small_floats, small_floats) +def test_symmetric(x: float, y: float) -> None: """Write a test that ensures that :func:`minitorch.operators.mul` is symmetric, i.e. gives the same value regardless of the order of its input. """ - # TODO: Implement for Task 0.2. - raise NotImplementedError("Need to implement for Task 0.2") + assert_close(mul(x, y), mul(y, x)) @pytest.mark.task0_2 -def test_distribute() -> None: +@given(small_floats, small_floats, small_floats) +def test_distribute(z: float, x: float, y: float) -> None: r"""Write a test that ensures that your operators distribute, i.e. :math:`z \times (x + y) = z \times x + z \times y` """ - # TODO: Implement for Task 0.2. - raise NotImplementedError("Need to implement for Task 0.2") + left = mul(z, add(x, y)) + right = add(mul(z, x), mul(z, y)) + assert_close(left, right) @pytest.mark.task0_2 -def test_other() -> None: +@given(small_floats) +def test_other(a: float) -> None: """Write a test that ensures some other property holds for your functions.""" - # TODO: Implement for Task 0.2. 
-    raise NotImplementedError("Need to implement for Task 0.2")
+    assert_close(neg(neg(a)), a)
+    b = a + 1.2345
+    assert eq(a, b) == eq(b, a)
 
 
 # ## Task 0.3 - Higher-order functions
@@ -168,8 +180,9 @@ def test_sum_distribute(ls1: List[float], ls2: List[float]) -> None:
     """Write a test that ensures that the sum of `ls1` plus the sum of `ls2`
     is the same as the sum of each element of `ls1` plus each element of `ls2`.
    """
-    # TODO: Implement for Task 0.3.
-    raise NotImplementedError("Need to implement for Task 0.3")
+    left = minitorch.operators.sum(addLists(ls1, ls2))
+    right = minitorch.operators.sum(ls1) + minitorch.operators.sum(ls2)
+    assert_close(left, right)
 
 
 @pytest.mark.task0_3
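
A minimal end-to-end sketch of how the new `Module` methods compose, assuming
the stock minitorch `Module`/`Parameter` registration via `__setattr__` (as in
this repo's `minitorch/module.py`); illustrative only, not part of the patch:

    import minitorch

    class OtherModule(minitorch.Module):
        def __init__(self):
            super().__init__()
            self.p2 = minitorch.Parameter(10)  # registered in _parameters

    class MyModule(minitorch.Module):
        def __init__(self):
            super().__init__()
            self.p1 = minitorch.Parameter(5)
            self.a = OtherModule()             # registered in _modules

    m = MyModule()
    m.eval()  # recurses: m.training and m.a.training are both False
    assert [n for n, _ in m.named_parameters()] == ["p1", "a.p2"]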