diff --git a/Procfile b/Procfile
new file mode 100644
index 000000000..62e430aca
--- /dev/null
+++ b/Procfile
@@ -0,0 +1 @@
+web: gunicorn 'app:create_app()'
\ No newline at end of file
diff --git a/app/__init__.py b/app/__init__.py
index 2764c4cc8..b16665a6b 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -1,20 +1,26 @@
 from flask import Flask
+# Step 1:
+# Import and initialize SQLAlchemy
 from flask_sqlalchemy import SQLAlchemy
 from flask_migrate import Migrate
 import os
 from dotenv import load_dotenv
-
+# Initialize SQLAlchemy
 db = SQLAlchemy()
 migrate = Migrate()
+DATABASE_CONNECTION_STRING = 'postgresql+psycopg2://postgres:postgres@localhost:5432/task_list_api_development'
 load_dotenv()
 
 
 def create_app(test_config=None):
     app = Flask(__name__)
     app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
+    app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_CONNECTION_STRING
 
-    if test_config is None:
+    # Step 2:
+    # Configure SQLAlchemy
+    if not test_config:
         app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
             "SQLALCHEMY_DATABASE_URI")
     else:
@@ -22,13 +28,19 @@ def create_app(test_config=None):
         app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
             "SQLALCHEMY_TEST_DATABASE_URI")
 
-    # Import models here for Alembic setup
+    # Import models
     from app.models.task import Task
    from app.models.goal import Goal
-
+    # Step 3:
+    # Hook up Flask & SQLAlchemy
     db.init_app(app)
     migrate.init_app(app, db)
 
     # Register Blueprints here
+    from .routes import tasks_bp
+    app.register_blueprint(tasks_bp)
+
+    from .routes import goals_bp
+    app.register_blueprint(goals_bp)
 
     return app
diff --git a/app/models/goal.py b/app/models/goal.py
index 8cad278f8..22a69a30c 100644
--- a/app/models/goal.py
+++ b/app/models/goal.py
@@ -1,6 +1,27 @@
 from flask import current_app
 from app import db
+from sqlalchemy.orm import backref
+
+from app.models.task import Task
 
 
 class Goal(db.Model):
     goal_id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String)
+    # goal.tasks is the list of Task records whose goal_id matches this goal
+    tasks = db.relationship("Task", backref='goal', lazy=True)
+
+    def to_dict(self):
+        return {
+            "id": self.goal_id,
+            "title": self.title
+        }
+
+    def to_dict_with_tasks(self):
+        tasks = Task.query.filter_by(goal_id=self.goal_id)
+        task_list = []
+        for task in tasks:
+            task_list.append(task.to_dict())
+        return {
+            "id": self.goal_id,
+            "title": self.title,
+            "tasks": task_list
+        }
\ No newline at end of file
diff --git a/app/models/task.py b/app/models/task.py
index 39c89cd16..10135584d 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -3,4 +3,29 @@
 
 
 class Task(db.Model):
-    task_id = db.Column(db.Integer, primary_key=True)
+    __tablename__ = "tasks"
+    task_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+    title = db.Column(db.String(200))
+    description = db.Column(db.String(200))
+    completed_at = db.Column(db.DateTime, nullable=True)
+    goal_id = db.Column(db.Integer, db.ForeignKey('goal.goal_id'), nullable=True)
+
+    def to_dict(self):
+        if self.goal_id:
+            return {
+                "id": self.task_id,
+                "title": self.title,
+                "description": self.description,
+                "is_complete": True if self.completed_at else False,
+                "goal_id": self.goal_id
+            }
+
+        else:
+            return {
+                "id": self.task_id,
+                "title": self.title,
+                "description": self.description,
+                "is_complete": True if self.completed_at else False
+            }
+
+
diff --git a/app/routes.py b/app/routes.py
index 8e9dfe684..7edd56e5a 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -1,2 +1,188 @@
-from flask import Blueprint
+from app import db
+from app.models.task import Task
+from flask import Blueprint, jsonify, make_response, request, abort
+from datetime import date
+from pathlib import Path
+import os
+from dotenv import load_dotenv
+import requests
+from app.models.goal import Goal
+
+tasks_bp = Blueprint("tasks", __name__, url_prefix="/tasks")
+goals_bp = Blueprint("goals", __name__, url_prefix="/goals")
+env_path = Path('.') / '.env'
+load_dotenv()
+
+
+@tasks_bp.route("", methods=["POST", "GET"])
+def handle_tasks():
+
+    if request.method == "POST":
+        request_body = request.get_json()
+
+        if "title" not in request_body or "description" not in request_body or "completed_at" not in request_body:
+            return make_response({"details": "Invalid data"}, 400)
+
+        new_task = Task(
+            title=request_body["title"],
+            description=request_body["description"],
+            completed_at=request_body["completed_at"]
+        )
+
+        db.session.add(new_task)
+        db.session.commit()
+
+        return jsonify({"task": new_task.to_dict()}), 201
+
+    elif request.method == "GET":
+        task_response = []
+        if request.args.get('sort') == 'asc':
+            tasks = Task.query.order_by(Task.title.asc()).all()
+        elif request.args.get('sort') == 'desc':
+            tasks = Task.query.order_by(Task.title.desc()).all()
+        else:
+            tasks = Task.query.all()
+
+        for task in tasks:
+            task_response.append(task.to_dict())
+        return jsonify(task_response), 200
+
+
+@tasks_bp.route("/<task_id>", methods=["GET", "PUT", "DELETE"])
+def handle_task(task_id):
+    task = Task.query.get(task_id)
+
+    if task is None:
+        return make_response(f"Task {task_id} not found"), 404
+
+    if request.method == "GET":
+        return jsonify({"task": task.to_dict()}), 200
+
+    elif request.method == "PUT":
+        request_body = request.get_json()
+        if "title" not in request_body or "description" not in request_body:
+            return make_response("invalid request"), 400
+
+        task.title = request_body["title"]
+        task.description = request_body["description"]
+
+        db.session.commit()
+
+        return jsonify({"task": task.to_dict()}), 200
+
+    elif request.method == "DELETE":
+        db.session.delete(task)
+        db.session.commit()
+
+        return ({'details': f'Task {task_id} "{task.title}" successfully deleted'}), 200
+
+
+@tasks_bp.route("/<task_id>/mark_complete", methods=["PATCH"])
+def mark_complete(task_id):
+    task = Task.query.get(task_id)
+    today = date.today()
+    if task is None:
+        return make_response("", 404)
+    else:
+        task.completed_at = today
+        db.session.commit()
+        PATH = 'https://slack.com/api/chat.postMessage'
+        params = {"token": os.environ.get("SLACK_TOKEN"),
+                  "channel": "task-notifications",
+                  "text": f"Someone just completed the task {task.title}"
+                  }
+        requests.post(PATH, data=params)
+        return jsonify({"task": task.to_dict()}), 200
+
+
+@tasks_bp.route("/<task_id>/mark_incomplete", methods=["PATCH"])
+def mark_incomplete(task_id):
+    task = Task.query.get(task_id)
+    today = date.today()
+    if task is None:
+        return make_response("", 404)
+    else:
+        task.completed_at = None
+        db.session.commit()
+
+        return jsonify({"task": task.to_dict()}), 200
+
+
+@goals_bp.route("", methods=["POST"])
+def create_goal():
+    request_body = request.get_json()
+    if "title" not in request_body:
+        return jsonify({"details": "Invalid data"}), 400
+    new_goal = Goal(
+        title=request_body["title"]
+    )
+    db.session.add(new_goal)
+    db.session.commit()
+
+    return jsonify({"goal": {"id": new_goal.goal_id, "title": new_goal.title}}), 201
+
+
+@goals_bp.route("", methods=["GET"])
+def get_goals():
+    goals = Goal.query.all()
+    goal_response = []
+    for goal in goals:
+        goal_response.append(goal.to_dict())
+
+    return jsonify(goal_response), 200
+
+
+@goals_bp.route("/<goal_id>", methods=["GET", "PUT", "DELETE"])
+def handle_goal(goal_id):
+    goal = Goal.query.get(goal_id)
+    if not goal:
+        return make_response("", 404)
+    if request.method == "GET":
+        return jsonify({"goal": goal.to_dict()}), 200
+
+    elif request.method == "PUT":
+        input_data = request.get_json()
+
+        goal.title = input_data["title"]
+        db.session.commit()
+
+        return jsonify({"goal": goal.to_dict()}), 200
+
+    elif request.method == "DELETE":
+        db.session.delete(goal)
+        db.session.commit()
+
+        return ({'details': f'Goal {goal.goal_id} "{goal.title}" successfully deleted'}), 200
+
+
+@goals_bp.route("/<goal_id>/tasks", methods=["GET", "POST"])
+def handles_both(goal_id):
+    goal = Goal.query.get(goal_id)
+    if goal is None:
+        abort(404)
+    if request.method == "GET":
+        answer = {
+            "id": goal.goal_id,
+            "title": goal.title,
+            "tasks": [task.to_dict() for task in goal.tasks]
+        }
+        return jsonify(answer), 200
+    elif request.method == "POST":
+        request_body = request.get_json()
+
+        task_ids = request_body["task_ids"]
+        for task_id in task_ids:
+            task = Task.query.get(task_id)
+            goal.tasks.append(task)
+
+        db.session.commit()
+        return jsonify({"id": goal.goal_id, "task_ids": [task.task_id for task in goal.tasks]}), 200
+
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 000000000..98e4f9c44
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..f8ed4801f
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,45 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 000000000..8b3fb3353
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,96 @@
+from __future__ import with_statement
+
+import logging
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from flask import current_app
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option(
+    'sqlalchemy.url',
+    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
+target_metadata = current_app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix='sqlalchemy.',
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            process_revision_directives=process_revision_directives,
+            **current_app.extensions['migrate'].configure_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 000000000..2c0156303
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/307dd50f9c9b_.py b/migrations/versions/307dd50f9c9b_.py
new file mode 100644
index 000000000..b82d8cda2
--- /dev/null
+++ b/migrations/versions/307dd50f9c9b_.py
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: 307dd50f9c9b
+Revises:
+Create Date: 2021-11-17 21:57:57.563923
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '307dd50f9c9b'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('goal',
+    sa.Column('goal_id', sa.Integer(), nullable=False),
+    sa.Column('title', sa.String(), nullable=True),
+    sa.PrimaryKeyConstraint('goal_id')
+    )
+    op.create_table('tasks',
+    sa.Column('task_id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('title', sa.String(length=200), nullable=True),
+    sa.Column('description', sa.String(length=200), nullable=True),
+    sa.Column('completed_at', sa.DateTime(), nullable=True),
+    sa.Column('goal_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['goal_id'], ['goal.goal_id'], ),
+    sa.PrimaryKeyConstraint('task_id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('tasks')
+    op.drop_table('goal')
+    # ### end Alembic commands ###
diff --git a/tests/test_wave_02.py b/tests/test_wave_02.py
index 399daf4db..3644c8067 100644
--- a/tests/test_wave_02.py
+++ b/tests/test_wave_02.py
@@ -8,7 +8,7 @@ def test_get_tasks_sorted_asc(client, three_tasks):
     assert len(response_body) == 3
     assert response_body == [
         {
-            "id": 2,
+            "id": 2,
             "title": "Answer forgotten email 📧",
             "description": "",
             "is_complete": False},
diff --git a/tests/test_wave_03.py b/tests/test_wave_03.py
index 92017b8c0..f8fe97ba5 100644
--- a/tests/test_wave_03.py
+++ b/tests/test_wave_03.py
@@ -95,7 +95,7 @@ def test_mark_complete_on_completed_task(client, completed_task):
     assert Task.query.get(1).completed_at
 
 
-def test_mark_incomplete_on_incomplete_task(client, one_task):
+def test_mark_incomplete_on_incomplete_task(client, one_task):
     # Act
     response = client.patch("/tasks/1/mark_incomplete")
     response_body = response.get_json()
diff --git a/tests/test_wave_05.py b/tests/test_wave_05.py
index 6ba60c6fa..a089c7266 100644
--- a/tests/test_wave_05.py
+++ b/tests/test_wave_05.py
@@ -1,4 +1,5 @@
 import pytest
+from app.models.goal import Goal
 
 def test_get_goals_no_saved_goals(client):
     # Act
@@ -41,18 +42,17 @@ def test_get_goal(client, one_goal):
         }
     }
 
-@pytest.mark.skip(reason="test to be completed by student")
+# @pytest.mark.skip(reason="test to be completed by student")
 def test_get_goal_not_found(client):
-    pass
+    # Act
     response = client.get("/goals/1")
     response_body = response.get_json()
 
     # Assert
-    # ---- Complete Test ----
-    # assertion 1 goes here
-    # assertion 2 goes here
-    # ---- Complete Test ----
+    assert response.status_code == 404
+    assert response_body == None
+
 
 def test_create_goal(client):
     # Act
@@ -71,30 +71,36 @@ def test_create_goal(client):
         }
     }
 
-@pytest.mark.skip(reason="test to be completed by student")
+# @pytest.mark.skip(reason="test to be completed by student")
 def test_update_goal(client, one_goal):
-    pass
     # Act
-    # ---- Complete Act Here ----
+    response = client.put("/goals/1", json={
+        "title": "Updated Goal Title"
+    })
+    response_body = response.get_json()
 
     # Assert
-    # ---- Complete Assertions Here ----
-    # assertion 1 goes here
-    # assertion 2 goes here
-    # assertion 3 goes here
-    # ---- Complete Assertions Here ----
+    assert response.status_code == 200
+    assert "goal" in response_body
+    assert response_body == {
+        "goal": {
+            "id": 1,
+            "title": "Updated Goal Title"
+        }
+    }
+    goal = Goal.query.get(1)
+    assert goal.title == "Updated Goal Title"
 
-@pytest.mark.skip(reason="test to be completed by student")
+# @pytest.mark.skip(reason="test to be completed by student")
 def test_update_goal_not_found(client):
-    pass
     # Act
-    # ---- Complete Act Here ----
+    response = client.put("/goals/1", json={
+        "title": "Updated Goal Title"
+    })
+    response_body = response.get_json()
 
     # Assert
-    # ---- Complete Assertions Here ----
-    # assertion 1 goes here
-    # assertion 2 goes here
-    # ---- Complete Assertions Here ----
+    assert response.status_code == 404
+    assert response_body == None
 
 
 def test_delete_goal(client, one_goal):
@@ -113,18 +119,16 @@ def test_delete_goal(client, one_goal):
     response = client.get("/goals/1")
     assert response.status_code == 404
 
-@pytest.mark.skip(reason="test to be completed by student")
+# @pytest.mark.skip(reason="test to be completed by student")
 def test_delete_goal_not_found(client):
-    pass
-
     # Act
-    # ---- Complete Act Here ----
+    response = client.delete("/goals/1")
+    response_body = response.get_json()
 
     # Assert
-    # ---- Complete Assertions Here ----
-    # assertion 1 goes here
-    # assertion 2 goes here
-    # ---- Complete Assertions Here ----
+    assert response.status_code == 404
+    assert response_body == None
+    assert Goal.query.all() == []
 
 
 def test_create_goal_missing_title(client):