diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..39205aa28 --- /dev/null +++ b/.env.example @@ -0,0 +1,8 @@ +# Local development database (PostgreSQL) +SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://user:password@localhost:5432/task_list_api_development + +# Test database +SQLALCHEMY_TEST_DATABASE_URI=postgresql+psycopg2://user:password@localhost:5432/task_list_api_test + +# Slack API token (optional for Slack integration features) +slack_token=your-slack-token-here diff --git a/README.md b/README.md index 85e1c0f69..a3744fec8 100644 --- a/README.md +++ b/README.md @@ -1,57 +1,63 @@ # Task List API -## Skills Assessed +> **Note**: This is a course project from [Ada Developer Academy](https://adadeveloperacademy.org/). The project framework and requirements were provided by Ada, and I have implemented all the functionality across waves 1-7 to meet the course specifications. -- Gathering technical requirements from written documentation -- Reading, writing, and using tests -- Demonstrating understanding of the client-server model, request-response cycle and conventional RESTful routes -- Driving development with independent research, experimentation, and collaboration -- Reading and using existing external web APIs -- Using Postman as part of the development workflow -- Using git as part of the development workflow -Working with the Flask package: +## Local Development Setup -- Creating models -- Creating conventional RESTful CRUD routes for a model -- Reading query parameters to create custom behavior -- Create unconventional routes for custom behavior -- Apply knowledge about making requests in Python, to call an API inside of an API -- Apply knowledge about environment variables -- Creating a one-to-many relationship between two models +### Prerequisites +- Python 3.13+ +- PostgreSQL +- pip and virtual environment -## Goals +### Installation Steps -There's so much we want to do in the world! 
When we organize our goals into smaller, bite-sized tasks, we'll be able to track them more easily, and complete them! +1. **Clone the repository** + ```bash + git clone + cd task-list-api + ``` -If we make a web API to organize our tasks, we'll be able to create, read, update, and delete tasks as long as we have access to the Internet and our API is running! +2. **Create and activate virtual environment** + ```bash + python3 -m venv venv + source venv/bin/activate # On Windows: venv\Scripts\activate + ``` -We also want to do some interesting features with our tasks. We want to be able to: +3. **Install dependencies** + ```bash + pip install -r requirements.txt + ``` -- Sort tasks -- Mark them as complete -- Get feedback about our task list through Slack -- Organize tasks with goals +4. **Set up environment variables** + ```bash + cp .env.example .env + ``` + Then edit `.env` with your local database credentials and Slack token (if needed). -... and more! +5. **Create local database** + ```bash + createdb task_list_api_development + createdb task_list_api_test + ``` -## How to Complete and Submit +6. **Run database migrations** + ```bash + flask db upgrade + ``` -Go through the waves one-by-one and build the features of this API. +7. **Run the application** + ```bash + flask run + ``` -At submission time, no matter where you are, submit the project via Learn. +The API will be available at `http://localhost:5000` -## Project Directions +### Running Tests -This project is designed to fulfill the features described in detail in each wave. The tests are meant to only guide your development. +```bash +pytest +# Or use the provided test script +./test.sh +``` -1. [Setup](ada-project-docs/setup.md) -1. [Testing](ada-project-docs/testing.md) -1. [Wave 1: CRUD for one model](ada-project-docs/wave_01.md) -1. [Wave 2: Using query params](ada-project-docs/wave_02.md) -1. [Wave 3: Creating custom endpoints](ada-project-docs/wave_03.md) -1. 
[Wave 4: Using an external web API](ada-project-docs/wave_04.md) -1. [Wave 5: Creating a second model](ada-project-docs/wave_05.md) -1. [Wave 6: Establishing a one-to-many relationship between two models](ada-project-docs/wave_06.md) -1. [Wave 7: Deployment](ada-project-docs/wave_07.md) -1. [Optional Enhancements](ada-project-docs/optional-enhancements.md) diff --git a/app/__init__.py b/app/__init__.py index 3c581ceeb..17ddc5820 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,13 +1,21 @@ from flask import Flask +from flask_cors import CORS from .db import db, migrate from .models import task, goal +from app.routes.task_routes import bp as tasks_bp +from app.routes.goal_routes import bp as goals_bp import os + def create_app(config=None): app = Flask(__name__) + + CORS(app) + app.config['CORS_HEADERS'] = 'Content-Type' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('SQLALCHEMY_DATABASE_URI') + app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get( + 'SQLALCHEMY_TEST_DATABASE_URI') if config: # Merge `config` into the app's configuration @@ -18,5 +26,7 @@ def create_app(config=None): migrate.init_app(app, db) # Register Blueprints here + app.register_blueprint(tasks_bp) + app.register_blueprint(goals_bp) return app diff --git a/app/models/goal.py b/app/models/goal.py index 44282656b..3df160087 100644 --- a/app/models/goal.py +++ b/app/models/goal.py @@ -1,5 +1,25 @@ -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship +from app.models.task import Task from ..db import db + class Goal(db.Model): id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + title: Mapped[str] + tasks: Mapped[list["Task"]] = relationship(back_populates="goal") + + def to_dict(self): + return { + "id": self.id, + "title": self.title + } + + def to_dict_with_tasks(self): + dict_with_tasks = Goal.to_dict(self) + dict_with_tasks["tasks"] = 
[task.to_dict() for task in self.tasks] + + return dict_with_tasks + + @classmethod + def from_dict(cls, goal_data): + return cls(title=goal_data["title"]) diff --git a/app/models/task.py b/app/models/task.py index 5d99666a4..0315a679e 100644 --- a/app/models/task.py +++ b/app/models/task.py @@ -1,5 +1,35 @@ -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy import ForeignKey from ..db import db +from typing import Optional +from datetime import datetime + class Task(db.Model): id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + title: Mapped[str] + description: Mapped[str] + completed_at: Mapped[Optional[datetime]] + goal_id: Mapped[Optional[int]] = mapped_column(ForeignKey("goal.id")) + goal: Mapped[Optional["Goal"]] = relationship(back_populates="tasks") + + def to_dict(self): + task_as_dict = {} + task_as_dict["id"] = self.id + task_as_dict["title"] = self.title + task_as_dict["description"] = self.description + task_as_dict["is_complete"] = self.completed_at is not None + + if self.goal_id: + task_as_dict["goal_id"] = self.goal_id + + return task_as_dict + + + @classmethod + def from_dict(cls, data): + return cls( + title=data["title"], + description=data["description"], + completed_at=data.get("completed_at") + ) diff --git a/app/routes/goal_routes.py b/app/routes/goal_routes.py index 3aae38d49..ef43fff09 100644 --- a/app/routes/goal_routes.py +++ b/app/routes/goal_routes.py @@ -1 +1,74 @@ -from flask import Blueprint \ No newline at end of file +from flask import Blueprint, abort, make_response, request, Response +from app.db import db +from app.models.goal import Goal +from app.models.task import Task +from app.routes.route_utilities import validate_model, create_model +import requests +import os + +bp = Blueprint("goals_bp", __name__, url_prefix="/goals") + + +@bp.post("") +def create_goal(): + request_body = request.get_json() + return create_model(Goal, 
request_body) + + +@bp.get("") +def get_all_goals(): + query = db.select(Goal).order_by(Goal.id) + goals = db.session.scalars(query) + return [goal.to_dict() for goal in goals] + + +@bp.get("/") +def get_one_goal(goal_id): + goal = validate_model(Goal, goal_id) + return goal.to_dict() + + +@bp.put("/") +def update_goal(goal_id): + goal = validate_model(Goal, goal_id) + request_body = request.get_json() + + if not request_body or "title" not in request_body: + response = {"details": "Invalid data"} + abort(make_response(response, 400)) + + goal.title = request_body["title"] + db.session.commit() + return Response(status=204, mimetype="application/json") + + +@bp.delete("/") +def delete_goal_by_id(goal_id): + goal = validate_model(Goal, goal_id) + db.session.delete(goal) + db.session.commit() + return Response(status=204, mimetype="application/json") + + +@bp.post("//tasks") +def update_tasks_by_goal(goal_id): + goal = validate_model(Goal, goal_id) + goal.tasks.clear() + request_body = request.get_json() + task_id_list = request_body["task_ids"] + + for id in task_id_list: + task = validate_model(Task, id) + task.goal_id = goal_id + + db.session.commit() + + return {"id": goal.id, + "task_ids": task_id_list} + + +@bp.get("//tasks") +def get_tasks_by_goal(goal_id): + goal = validate_model(Goal, goal_id) + + return goal.to_dict_with_tasks() diff --git a/app/routes/route_utilities.py b/app/routes/route_utilities.py new file mode 100644 index 000000000..60bcf90f3 --- /dev/null +++ b/app/routes/route_utilities.py @@ -0,0 +1,32 @@ +from flask import abort, make_response +from ..db import db + + +def validate_model(cls, model_id): + try: + model_id = int(model_id) + except ValueError: + response = {"message": f"{cls.__name__} {model_id} invalid"} + abort(make_response(response, 400)) + + query = db.select(cls).where(cls.id == model_id) + model = db.session.scalar(query) + + if not model: + response = {"message": f"{cls.__name__} {model_id} not found"} + 
abort(make_response(response, 404)) + + return model + + +def create_model(cls, model_data): + try: + new_model = cls.from_dict(model_data) + except KeyError as error: + response = {"details": "Invalid data"} + abort(make_response(response, 400)) + + db.session.add(new_model) + db.session.commit() + + return new_model.to_dict(), 201 diff --git a/app/routes/task_routes.py b/app/routes/task_routes.py index 3aae38d49..ee3bd0f36 100644 --- a/app/routes/task_routes.py +++ b/app/routes/task_routes.py @@ -1 +1,92 @@ -from flask import Blueprint \ No newline at end of file +from flask import Blueprint, abort, make_response, request, Response +from ..db import db +from app.models.task import Task +from app.routes.route_utilities import validate_model, create_model +from datetime import datetime +import requests +import os + +bp = Blueprint("tasks_bp", __name__, url_prefix="/tasks") + + +@bp.post("") +def create_task(): + request_body = request.get_json() + return create_model(Task, request_body) + + +@bp.get("") +def get_all_tasks(): + query = db.select(Task) + sort_quaram = request.args.get("sort") + if sort_quaram == "asc": + query = query.order_by(Task.title.asc()) + elif sort_quaram == "desc": + query = query.order_by(Task.title.desc()) + + tasks = db.session.scalars(query) + + if not tasks: + response = {"details": "No tasks found"} + abort(make_response(response, 400)) + + return [task.to_dict() for task in tasks] + + +@bp.get("/") +def get_one_task(task_id): + task = validate_model(Task, task_id) + return task.to_dict() + + +@bp.put("/") +def update_one_task(task_id): + task = validate_model(Task, task_id) + request_body = request.get_json() + + if not request_body: + response = {"details": "No tasks found"} + abort(make_response(response, 400)) + + task.title = request_body.get("title") + task.description = request_body.get("description") + task.completed_at = request_body.get("completed_at") + + db.session.commit() + + return Response(status=204, 
mimetype="application/json") + + +@bp.delete("/") +def delete_one_task(task_id): + task = validate_model(Task, task_id) + + db.session.delete(task) + db.session.commit() + + return Response(status=204, mimetype="application/json") + + +@bp.patch("//mark_complete") +def mark_task_complete(task_id): + task = validate_model(Task, task_id) + task.completed_at = datetime.now() + db.session.commit() + + token = os.environ.get("slack_token") + requests.post( + url="https://slack.com/api/chat.postMessage", + headers={"Authorization": f"Bearer {token}"}, + json={"channel": "task-notifications", + "text": f"Someone just completed the task {task.title}"}) + + return Response(status=204, mimetype="application/json") + + +@bp.patch("//mark_incomplete") +def mark_task_incomplete(task_id): + task = validate_model(Task, task_id) + task.completed_at = None + db.session.commit() + + return Response(status=204, mimetype="application/json") diff --git a/migrations/README b/migrations/README new file mode 100644 index 000000000..0e0484415 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 000000000..ec9d45c26 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 000000000..4c9709271 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions['migrate'].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions['migrate'].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace( + '%', '%%') + except AttributeError: + return str(get_engine().url).replace('%', '%%') + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option('sqlalchemy.url', get_engine_url()) +target_db = current_app.extensions['migrate'].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, 'metadatas'): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + conf_args = current_app.extensions['migrate'].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=get_metadata(), + **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/6d0e7a86d917_.py b/migrations/versions/6d0e7a86d917_.py new file mode 100644 index 000000000..04c7e2e5a --- /dev/null +++ b/migrations/versions/6d0e7a86d917_.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: 6d0e7a86d917 +Revises: +Create Date: 2025-11-01 21:05:49.404081 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6d0e7a86d917' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('goal', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('task') + op.drop_table('goal') + # ### end Alembic commands ### diff --git a/migrations/versions/83ac010da15c_.py b/migrations/versions/83ac010da15c_.py new file mode 100644 index 000000000..a5fea4c7d --- /dev/null +++ b/migrations/versions/83ac010da15c_.py @@ -0,0 +1,34 @@ +"""empty message + +Revision ID: 83ac010da15c +Revises: bcf8242374bd +Create Date: 2025-11-06 20:45:13.595736 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '83ac010da15c' +down_revision = 'bcf8242374bd' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.add_column(sa.Column('goal_id', sa.Integer(), nullable=True)) + batch_op.create_foreign_key(None, 'goal', ['goal_id'], ['id']) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.drop_constraint(None, type_='foreignkey') + batch_op.drop_column('goal_id') + + # ### end Alembic commands ### diff --git a/migrations/versions/8616ef80b327_.py b/migrations/versions/8616ef80b327_.py new file mode 100644 index 000000000..39c0ca186 --- /dev/null +++ b/migrations/versions/8616ef80b327_.py @@ -0,0 +1,42 @@ +"""empty message + +Revision ID: 8616ef80b327 +Revises: 83ac010da15c +Create Date: 2025-12-27 22:49:38.312457 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8616ef80b327' +down_revision = '83ac010da15c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('goal', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.DateTime(), nullable=True), + sa.Column('goal_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['goal_id'], ['goal.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('task') + op.drop_table('goal') + # ### end Alembic commands ### diff --git a/migrations/versions/bcf8242374bd_.py b/migrations/versions/bcf8242374bd_.py new file mode 100644 index 000000000..ef13f8ab5 --- /dev/null +++ b/migrations/versions/bcf8242374bd_.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: bcf8242374bd +Revises: 6d0e7a86d917 +Create Date: 2025-11-06 08:32:55.854628 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'bcf8242374bd' +down_revision = '6d0e7a86d917' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.add_column(sa.Column('title', sa.String(), nullable=False)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.drop_column('title') + + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt index b989cae17..17c971162 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,28 +1,16 @@ -alembic==1.14.1 blinker==1.9.0 certifi==2025.1.31 charset-normalizer==3.4.1 click==8.1.8 -coverage==7.6.12 Flask==3.1.0 -Flask-Migrate==4.1.0 -Flask-SQLAlchemy==3.1.1 +flask-cors==5.0.1 gunicorn==23.0.0 idna==3.10 -iniconfig==2.0.0 itsdangerous==2.2.0 -Jinja2==3.1.5 -Mako==1.3.9 +Jinja2==3.1.6 MarkupSafe==3.0.2 packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.4 -pytest-cov==6.0.0 python-dotenv==1.0.1 requests==2.32.3 -SQLAlchemy==2.0.38 -toml==0.10.2 -typing_extensions==4.12.2 urllib3==2.3.0 Werkzeug==3.1.3 diff --git a/tests/test_wave_01.py b/tests/test_wave_01.py index fac95a0a3..183d75f51 100644 --- a/tests/test_wave_01.py +++ b/tests/test_wave_01.py @@ -2,7 +2,6 @@ from app.db import db import pytest -@pytest.mark.skip(reason="No way to test 
this feature yet") def test_task_to_dict(): #Arrange new_task = Task(id = 1, title="Make My Bed", @@ -19,7 +18,6 @@ def test_task_to_dict(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") def test_task_to_dict_missing_id(): #Arrange new_task = Task(title="Make My Bed", @@ -36,7 +34,6 @@ def test_task_to_dict_missing_id(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") def test_task_to_dict_missing_title(): #Arrange new_task = Task(id = 1, @@ -53,7 +50,6 @@ def test_task_to_dict_missing_title(): assert task_dict["description"] == "Start the day off right!" assert task_dict["is_complete"] == False -@pytest.mark.skip(reason="No way to test this feature yet") def test_task_from_dict(): #Arrange task_dict = { @@ -70,7 +66,6 @@ def test_task_from_dict(): assert task_obj.description == "Start the day off right!" 
assert task_obj.completed_at is None -@pytest.mark.skip(reason="No way to test this feature yet") def test_task_from_dict_no_title(): #Arrange task_dict = { @@ -82,7 +77,6 @@ def test_task_from_dict_no_title(): with pytest.raises(KeyError, match = 'title'): Task.from_dict(task_dict) -@pytest.mark.skip(reason="No way to test this feature yet") def test_task_from_dict_no_description(): #Arrange task_dict = { @@ -94,7 +88,6 @@ def test_task_from_dict_no_description(): with pytest.raises(KeyError, match = 'description'): Task.from_dict(task_dict) -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_no_saved_tasks(client): # Act response = client.get("/tasks") @@ -105,7 +98,6 @@ def test_get_tasks_no_saved_tasks(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_one_saved_tasks(client, one_task): # Act response = client.get("/tasks") @@ -124,7 +116,6 @@ def test_get_tasks_one_saved_tasks(client, one_task): ] -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_task(client, one_task): # Act response = client.get("/tasks/1") @@ -136,11 +127,11 @@ def test_get_task(client, one_task): "id": 1, "title": "Go on my daily walk 🏞", "description": "Notice something new every day", - "is_complete": False + "is_complete": False, + } -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_not_found(client): # Act response = client.get("/tasks/1") @@ -148,14 +139,8 @@ def test_get_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == {"message":"Task 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") def 
test_create_task(client): # Act response = client.post("/tasks", json={ @@ -181,7 +166,6 @@ def test_create_task(client): assert new_task.description == "Test Description" assert new_task.completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") def test_update_task(client, one_task): # Act response = client.put("/tasks/1", json={ @@ -201,7 +185,6 @@ def test_update_task(client, one_task): -@pytest.mark.skip(reason="No way to test this feature yet") def test_update_task_not_found(client): # Act response = client.put("/tasks/1", json={ @@ -212,14 +195,8 @@ def test_update_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == {"message":"Task 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task(client, one_task): # Act response = client.delete("/tasks/1") @@ -230,7 +207,6 @@ def test_delete_task(client, one_task): query = db.select(Task).where(Task.id == 1) assert db.session.scalar(query) == None -@pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task_not_found(client): # Act response = client.delete("/tasks/1") @@ -238,16 +214,10 @@ def test_delete_task_not_found(client): # Assert assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - + assert response_body == {"message":"Task 1 not found"} assert db.session.scalars(db.select(Task)).all() == [] -@pytest.mark.skip(reason="No way to test this feature 
yet") def test_create_task_must_contain_title(client): # Act response = client.post("/tasks", json={ @@ -264,7 +234,6 @@ def test_create_task_must_contain_title(client): assert db.session.scalars(db.select(Task)).all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") def test_create_task_must_contain_description(client): # Act response = client.post("/tasks", json={ diff --git a/tests/test_wave_02.py b/tests/test_wave_02.py index a087e0909..db12fe806 100644 --- a/tests/test_wave_02.py +++ b/tests/test_wave_02.py @@ -1,7 +1,6 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_asc(client, three_tasks): # Act response = client.get("/tasks?sort=asc") @@ -29,7 +28,7 @@ def test_get_tasks_sorted_asc(client, three_tasks): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_desc(client, three_tasks): # Act response = client.get("/tasks?sort=desc") diff --git a/tests/test_wave_03.py b/tests/test_wave_03.py index d7d441695..7031645f8 100644 --- a/tests/test_wave_03.py +++ b/tests/test_wave_03.py @@ -6,21 +6,9 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_incomplete_task(client, one_task): # Arrange - """ - The future Wave 4 adds special functionality to this route, - so for this test, we need to set-up "mocking." - - Mocking will help our tests work in isolation, which is a - good thing! - - We need to mock any POST requests that may occur during this - test (due to Wave 4). - - There is no action needed here, the tests should work as-is. 
- """ + with patch("requests.post") as mock_get: mock_get.return_value.status_code = 200 @@ -34,7 +22,6 @@ def test_mark_complete_on_incomplete_task(client, one_task): assert db.session.scalar(query).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_complete_task(client, completed_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -46,21 +33,9 @@ def test_mark_incomplete_on_complete_task(client, completed_task): assert db.session.scalar(query).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_completed_task(client, completed_task): # Arrange - """ - The future Wave 4 adds special functionality to this route, - so for this test, we need to set-up "mocking." - - Mocking will help our tests work in isolation, which is a - good thing! - - We need to mock any POST requests that may occur during this - test (due to Wave 4). - - There is no action needed here, the tests should work as-is. 
- """ + with patch("requests.post") as mock_get: mock_get.return_value.status_code = 200 @@ -74,7 +49,6 @@ def test_mark_complete_on_completed_task(client, completed_task): query = db.select(Task).where(Task.id == 1) assert db.session.scalar(query).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_incomplete_task(client, one_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -86,7 +60,6 @@ def test_mark_incomplete_on_incomplete_task(client, one_task): assert db.session.scalar(query).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_missing_task(client): # Act response = client.patch("/tasks/1/mark_complete") @@ -94,14 +67,8 @@ def test_mark_complete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == {"message": "Task 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_missing_task(client): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -109,8 +76,4 @@ def test_mark_incomplete_missing_task(client): # Assert assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** + assert response_body == {"message": "Task 1 not found"} \ No newline at end of file diff --git a/tests/test_wave_05.py b/tests/test_wave_05.py index b7cc330ae..451606188 100644 --- a/tests/test_wave_05.py +++ b/tests/test_wave_05.py @@ -1,7 +1,6 @@ 
from app.models.goal import Goal import pytest -@pytest.mark.skip(reason="No way to test this feature yet") def test_goal_to_dict(): #Arrange new_goal = Goal(id=1, title="Seize the Day!") @@ -13,7 +12,6 @@ def test_goal_to_dict(): assert goal_dict["id"] == 1 assert goal_dict["title"] == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") def test_goal_to_dict_no_id(): #Arrange new_goal = Goal(title="Seize the Day!") @@ -25,7 +23,6 @@ def test_goal_to_dict_no_id(): assert goal_dict["id"] is None assert goal_dict["title"] == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") def test_goal_to_dict_no_title(): #Arrange new_goal = Goal(id=1) @@ -39,7 +36,6 @@ def test_goal_to_dict_no_title(): -@pytest.mark.skip(reason="No way to test this feature yet") def test_goal_from_dict(): #Arrange goal_dict = { @@ -52,7 +48,6 @@ def test_goal_from_dict(): #Assert assert goal_obj.title == "Seize the Day!" -@pytest.mark.skip(reason="No way to test this feature yet") def test_goal_from_dict_no_title(): #Arrange goal_dict = { @@ -63,7 +58,6 @@ def test_goal_from_dict_no_title(): Goal.from_dict(goal_dict) -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_no_saved_goals(client): # Act response = client.get("/goals") @@ -74,7 +68,6 @@ def test_get_goals_no_saved_goals(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_one_saved_goal(client, one_goal): # Act response = client.get("/goals") @@ -91,7 +84,6 @@ def test_get_goals_one_saved_goal(client, one_goal): ] -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_goal(client, one_goal): # Act response = client.get("/goals/1") @@ -105,22 +97,25 @@ def test_get_goal(client, one_goal): } -@pytest.mark.skip(reason="test to be completed by student") def test_get_goal_not_found(client): - pass + # Act response = client.get("/goals/1") response_body = response.get_json() - raise 
Exception("Complete test") + # raise Exception("Complete test") # Assert # ---- Complete Test ---- # assertion 1 goes here + assert response.status_code == 404 # assertion 2 goes here + assert response_body == { + "message": "Goal 1 not found" + } # ---- Complete Test ---- -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal(client): # Act response = client.post("/goals", json={ @@ -136,34 +131,37 @@ def test_create_goal(client): } -@pytest.mark.skip(reason="test to be completed by student") def test_update_goal(client, one_goal): - raise Exception("Complete test") + # raise Exception("Complete test") # Act # ---- Complete Act Here ---- - + response = client.put("/goals/1", json={"title": "Updated Goal Title"}) + # Assert # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # assertion 3 goes here + assert response.status_code == 204 + assert response.content_length is None + + response = client.get("/goals/1") + response_body = response.get_json() + assert response_body["title"] == "Updated Goal Title" + # ---- Complete Assertions Here ---- -@pytest.mark.skip(reason="test to be completed by student") def test_update_goal_not_found(client): - raise Exception("Complete test") + # raise Exception("Complete test") # Act # ---- Complete Act Here ---- - + response = client.put("/goals/1", json={"title": "Updated Goal Title"}) + response_body = response.get_json() # Assert # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here + assert response.status_code == 404 + assert response_body == {"message":"Goal 1 not found"} # ---- Complete Assertions Here ---- -@pytest.mark.skip(reason="No way to test this feature yet") def test_delete_goal(client, one_goal): # Act response = client.delete("/goals/1") @@ -178,27 +176,29 @@ def test_delete_goal(client, one_goal): response_body = response.get_json() assert "message" 
in response_body - raise Exception("Complete test with assertion about response body") + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** + assert response_body == {"message":"Goal 1 not found"} -@pytest.mark.skip(reason="test to be completed by student") def test_delete_goal_not_found(client): - raise Exception("Complete test") + # raise Exception("Complete test") # Act # ---- Complete Act Here ---- - + response = client.delete("/goals/1") + response_body = response.get_json() # Assert # ---- Complete Assertions Here ---- # assertion 1 goes here + assert response.status_code == 404 # assertion 2 goes here + assert response_body == {"message":"Goal 1 not found"} # ---- Complete Assertions Here ---- -@pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal_missing_title(client): # Act response = client.post("/goals", json={}) diff --git a/tests/test_wave_06.py b/tests/test_wave_06.py index 727fce93a..1ede142e6 100644 --- a/tests/test_wave_06.py +++ b/tests/test_wave_06.py @@ -3,7 +3,6 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal(client, one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -20,12 +19,10 @@ def test_post_task_ids_to_goal(client, one_goal, three_tasks): "task_ids": [1, 2, 3] } - # Check that Goal was updated in the db query = db.select(Goal).where(Goal.id == 1) assert len(db.session.scalar(query).tasks) == 3 -@pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal_overwrites_existing_tasks(client, one_task_belongs_to_one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -45,7 +42,6 @@ def test_post_task_ids_to_goal_overwrites_existing_tasks(client, 
one_task_belong assert len(db.session.scalar(query).tasks) == 2 -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_goal(client): # Act response = client.get("/goals/1/tasks") @@ -53,14 +49,8 @@ def test_get_tasks_for_specific_goal_no_goal(client): # Assert assert response.status_code == 404 + assert response_body == {"message":"Goal 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): # Act response = client.get("/goals/1/tasks") @@ -77,7 +67,6 @@ def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): # Act response = client.get("/goals/1/tasks") @@ -102,7 +91,6 @@ def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_includes_goal_id(client, one_task_belongs_to_one_goal): response = client.get("/tasks/1") response_body = response.get_json() diff --git a/tests/test_wave_07.py b/tests/test_wave_07.py index 7e7cef55a..deef29819 100644 --- a/tests/test_wave_07.py +++ b/tests/test_wave_07.py @@ -4,14 +4,14 @@ from app.models.task import Task from app.routes.route_utilities import create_model, validate_model -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_task(client, three_tasks): - #Act + # Act task_1 = validate_model(Task, 1) task_2 = validate_model(Task, 2) task_3 = validate_model(Task, 3) - #Assert + # Assert assert task_1.id == 1 assert task_1.title == "Water 
the garden 🌷" assert task_1.description == "" @@ -24,128 +24,118 @@ def test_route_utilities_validate_model_with_task(client, three_tasks): assert task_3.title == "Pay my outstanding tickets 😭" -@pytest.mark.skip(reason="No way to test this feature yet") def test_route_utilities_validate_model_with_task_invalid_id(client, three_tasks): - #Act & Assert + # Act & Assert # Calling `validate_model` without being invoked by a route will - # cause an `HTTPException` when an `abort` statement is reached + # cause an `HTTPException` when an `abort` statement is reached with pytest.raises(HTTPException) as e: result_task = validate_model(Task, "One") - + # Test that the correct status code and response message are returned response = e.value.get_response() assert response.status_code == 400 + assert response.get_json() == {"message": "Task One invalid"} + - raise Exception("Complete test with an assertion about the response body") - # ***************************************************************************** - # ** Complete test with an assertion about the response body **************** - # ***************************************************************************** - -@pytest.mark.skip(reason="No way to test this feature yet") def test_route_utilities_validate_model_with_task_missing_id(client, three_tasks): - #Act & Assert + # Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Task, 4) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") + + response = e.value.get_response() + assert response.status_code == 404 + assert response.get_json() == {"message": "Task 4 not found"} + + def 
test_route_utilities_validate_model_with_goal(client, one_goal): - #Act + # Act goal_1 = validate_model(Goal, 1) - #Assert + # Assert assert goal_1.id == 1 assert goal_1.title == "Build a habit of going outside daily" -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_validate_model_with_goal_invalid_id(client, one_goal): - #Act & Assert + # Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Goal, "One") - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") + response = e.value.get_response() + assert response.status_code == 400 + assert response.get_json() == {"message": "Goal One invalid"} + + def test_route_utilities_validate_model_with_goal_missing_id(client, one_goal): - #Act & Assert + # Act & Assert with pytest.raises(HTTPException) as e: result_task = validate_model(Goal, 4) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") + response = e.value.get_response() + assert response.status_code == 404 + assert response.get_json() == {"message": "Goal 4 not found"} + + def test_route_utilities_create_model_with_task(client): - #Arrange + # Arrange request_body = { "title": "Make the bed", "description": "", "completed_at": None } - #Act + # Act response = create_model(Task, request_body) - #Assert - assert response[0]["id"] == 1 
#create_model returns a tuple + # Assert + assert response[0]["id"] == 1 # create_model returns a tuple assert response[0]["title"] == "Make the bed" assert response[0]["description"] == "" assert response[0]["is_complete"] == False assert response[1] == 201 -@pytest.mark.skip(reason="No way to test this feature yet") + def test_route_utilities_create_model_with_task_missing_title(client): - #Arrange + # Arrange request_body = { "description": "", "completed_at": None } - - #Act + + # Act with pytest.raises(HTTPException) as e: create_model(Task, request_body) - + response = e.value.get_response() assert response.status_code == 400 assert response.get_json() == {"details": "Invalid data"} -@pytest.mark.skip(reason="No way to test this feature yet") def test_route_utilities_create_model_with_goal(client): - #Arrange + # Arrange request_body = { "title": "Seize the Day!" } - #Act + # Act response = create_model(Goal, request_body) - #Assert - assert response[0]["id"] == 1 #create_model returns a tuple + # Assert + assert response[0]["id"] == 1 # create_model returns a tuple assert response[0]["title"] == "Seize the Day!" assert response[1] == 201 -@pytest.mark.skip(reason="No way to test this feature yet") + + def test_route_utilities_create_model_with_goal_missing_title(client): - #Arrange + # Arrange request_body = { } - - #Act + + # Act with pytest.raises(HTTPException) as e: create_model(Goal, request_body) - - raise Exception("Complete test with assertion status code and response body") - # ***************************************************************************** - # **Complete test with assertion about status code response body*************** - # ***************************************************************************** + + response = e.value.get_response() + assert response.status_code == 400 + assert response.get_json() == {"details": "Invalid data"}