Skip to content

zoisite c19 izzy_task_list #117

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions app/__init__.py
Original file line number Diff line number Diff line change
@@ -15,8 +15,13 @@ def create_app(test_config=None):
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

if test_config is None:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
"SQLALCHEMY_DATABASE_URI")
# app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
#"SQLALCHEMY_DATABASE_URI")

app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get("RENDER_DATABASE_URI")



else:
app.config["TESTING"] = True
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
@@ -28,7 +33,12 @@ def create_app(test_config=None):

db.init_app(app)
migrate.init_app(app, db)
from app.models.task import Task
from app.models.goal import Goal

# Register Blueprints here
from .routes import task_list_bp,goals_bp

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Don't be afraid to separate your routes into task_routes and goal_routes!

app.register_blueprint(task_list_bp)
app.register_blueprint(goals_bp)

return app

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remember to make an empty init.py file in any package folder/subfolder. app has one, but we should have one here in the models folder as well.

13 changes: 13 additions & 0 deletions app/models/goal.py
Original file line number Diff line number Diff line change
@@ -3,3 +3,16 @@

class Goal(db.Model):
goal_id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String)
tasks = db.relationship("Task", back_populates= "goal")

def to_dict(self):
return {
"id" : self.goal_id,
"title" : self.title
}

@classmethod
def from_dict(cls,request_data):
return cls(
title=request_data["title"])

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This goal model looks great!

30 changes: 28 additions & 2 deletions app/models/task.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,31 @@
from app import db

from flask import make_response

class Task(db.Model):
task_id = db.Column(db.Integer, primary_key=True)
task_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String)
description = db.Column(db.String)
completed_at = db.Column(db.DateTime, nullable=True)

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The default for a nullable constraint is True, so you can absolutely leave that out here if you would like!

goal_id = db.Column(db.Integer,db.ForeignKey("goal.goal_id"))
goal = db.relationship("Goal", back_populates="tasks")

#returning a dictionary from the database
def task_dict(self):
task_dict = {
"id": self.task_id,
"title": self.title,
"description": self.description,
"is_complete": self.completed_at != None}

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Great job using the inclusion of the .completed_at to define the truthiness of is_complete! Just a slight nitpick, make sure to move the brace at the end to the next line!


if self.goal_id:
task_dict["goal_id"] =self.goal_id
return task_dict


#take data from user to make new task
@classmethod
def from_dict(cls,request_data):
return cls(
title=request_data["title"],
description=request_data["description"])

255 changes: 254 additions & 1 deletion app/routes.py
Original file line number Diff line number Diff line change
@@ -1 +1,254 @@
from flask import Blueprint
from flask import Blueprint,jsonify, abort,make_response,request
from app import db
import requests
from app.models.task import Task
from app.models.goal import Goal
from datetime import datetime
import os

task_list_bp = Blueprint("tasks", __name__, url_prefix ="/tasks")
goals_bp = Blueprint("goals", __name__, url_prefix = "/goals")

@task_list_bp.route("", methods = ["POST"])
def create_tasks():
request_body = request.get_json()
print(request_body)
print("************")

if (not "title" in request_body) or (not "description" in request_body):
return{
"details":"Invalid data"
}, 400
try:
new_task = Task.from_dict(request_body)
db.session.add(new_task)
db.session.commit()

#message = f"Task {new_task.title} successfully created"
return make_response(jsonify({"task":new_task.task_dict()}), 201)

except KeyError as e:
abort(make_response("Invalid request. Missing required value: {e}"), 400)

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice error handling when processing a user-created request body! We should assume that a user's request body may contain errors!


@task_list_bp.route("/<id>", methods = ["GET"])
def get_one_saved_task(id):
task = validate_task(id)
return jsonify({"task":task.task_dict()}), 200

@task_list_bp.route("", methods = ["GET"])
def get_all_saved_tasks():
sort_query=request.args.get("sort")
tasks_query=Task.query

if sort_query =="asc":
tasks_query = Task.query.order_by(Task.title.asc())
elif sort_query =="desc":
tasks_query = Task.query.order_by(Task.title.desc())

tasks = tasks_query.all()

tasks_response =[task.task_dict() for task in tasks]

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Great use of list comprehension here!


return (jsonify(tasks_response)),200



def validate_task(task_id):

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not too big of an issue since you are only ever using validate_task to validate your tasks, but it's never a bad idea to make it a generic function in case you have other models that need to be validated later! Also, since it is a helper function, best practice would either have it placed before your routes or in its own helper file!

try:
task_id = int(task_id)
except:
abort(make_response({"message":f"task {task_id} invalid"}, 400))


task = Task.query.get(task_id)

if not task:
abort(make_response({"message": f"task {task_id} not found"}, 404))

return task


@task_list_bp.route("/<id>", methods = ["PUT"])
def update_task(id):
task = validate_task(id)
request_body = request.get_json()
task.title = request_body["title"]
task.description = request_body["description"]

db.session.commit()
response_body = {"task":task.task_dict()}


return make_response(jsonify(response_body), 200)

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We need to have error handling for a request that uses the request body to create a response (like you have in the POST route). If request_body doesn't have a key "title" then line 75 would throw an unhandled exception.


@task_list_bp.route("/<id>", methods=["DELETE"])
def delete_one_task(id):
task= validate_task(id)

db.session.delete(task)
db.session.commit()

message = {"details":f'Task {task.task_id} "{task.title}" successfully deleted'}
return make_response(jsonify(message), 200)


@task_list_bp.route("/<id>/mark_complete", methods=["PATCH"])
def mark_task_complete(id):
task=validate_task(id)
task.completed_at=datetime.now()
db.session.commit()

return jsonify({"task":task.task_dict()}), 200



@task_list_bp.route("/<id>/mark_incomplete", methods=["PATCH"])
def mark_task_incomplete(id):
task=validate_task(id)
task.completed_at=None
db.session.commit()

return jsonify({"task":task.task_dict()}), 200

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This looks great! 😊


#wave4
def slack_notification():
url = "https://slack.com/api/chat.postMessage"

payload = {
"channel": "api-test-channel",
"text": "Task completed"
}
headers = {
'Authorization':os.environ.get("SLACK_API_TOKEN")}

response = requests.post(url, headers=headers, data=payload)

return response.text

#wave 5
@goals_bp.route("",methods=["POST","GET"])
def handle_goal():
if request.method == "POST":
request_body = request.get_json()
if "title" not in request_body:
return{
"details": "Invalid data"
},400

new_goal = Goal (
title=request_body["title"]
)

db.session.add(new_goal)
db.session.commit()

return {
"goal": {
"id":new_goal.goal_id,
"title":new_goal.title
}
}, 201

elif request.method == "GET":
sorting_goals= request.args.get('sort')
list = None
if sorting_goals== "desc":
list = Goal.query.order_by(Goal.title.desc()) # descending method
elif sorting_goals == "asc":
list = Goal.query.order_by(Goal.title.asc()) # ascending method
else:
list = Goal.query.all()
goals_response = []
for goal in list:
goals_response.append({
"id": goal.goal_id,
"title": goal.title,
})

return jsonify(goals_response), 200

@goals_bp.route("/<goal_id>", methods=["GET","PUT","DELETE"])
def handle_goal_get(goal_id):
goal = Goal.query.get(goal_id)
if goal == None:
return ("", 404)

if request.method == "GET":
return {
"goal": {
"id":goal.goal_id,
"title":goal.title,
}
}
if request.method == "PUT":
form_data = request.get_json()

goal.title = form_data["title"]

db.session.commit()

return jsonify({
"goal":{
"id":goal.goal_id,
"title":goal.title,
}
}),200

elif request.method == "DELETE":
db.session.delete(goal)
db.session.commit()

if not goal in Goal.query:
abort(make_response({"message": f"Goal {goal_id} not found"}, 404))
return jsonify({
"details": f'Goal {goal.goal_id} "{goal.title}" successfully deleted'
}),200


@goals_bp.route("/<goal_id>/tasks", methods=["POST","GET"])
def post_tasked_goal(goal_id):

goal = Goal.query.get(goal_id)

if goal == None:
return (""), 404

if request.method == "POST":
request_body = request.get_json()

tasks_instances= []
for task_id in request_body["task_ids"]:
tasks_instances.append(Task.query.get(task_id))

goal.tasks = tasks_instances

db.session.commit()

task_ids = []
for task in goal.tasks:
task_ids.append(task.task_id)

response_body = {
"id": goal.goal_id,
"task_ids": task_ids
}

return jsonify(response_body), 200

if request.method == "GET":
tasks_response =[]
for task in goal.tasks:
tasks_response.append({
"id": task.task_id,
"goal_id": task.goal_id,
"title": task.title,
"description": task.description,
"is_complete": bool(task.completed_at)
})

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This looks nearly identical to what you have in the instance method to_dict in the Task class. We should use the to_dict method on each task from goal.tasks instead of repeating code. You already have the logic to handle adding goal_id to each task dict too in to_dict!

response_body = {
"id": goal.goal_id,
"title": goal.title,
"tasks" : tasks_response
}
return jsonify(response_body), 200

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While it is possible to attach multiple methods to a single route like you've done here, it starts to get a bit cluttered. It will absolutely depend on how your team wants to handle things, but overall, it's a good idea to separate each method out for readability!


1 change: 1 addition & 0 deletions migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
45 changes: 45 additions & 0 deletions migrations/alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
96 changes: 96 additions & 0 deletions migrations/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True
)

with context.begin_transaction():
context.run_migrations()


def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""

# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')

connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)

with context.begin_transaction():
context.run_migrations()


if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
24 changes: 24 additions & 0 deletions migrations/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
42 changes: 42 additions & 0 deletions migrations/versions/27d6811d3605_.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
"""empty message
Revision ID: 27d6811d3605
Revises:
Create Date: 2023-05-12 13:23:39.392628
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '27d6811d3605'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('goal',
sa.Column('goal_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('goal_id')
)
op.create_table('task',
sa.Column('task_id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('goal_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['goal_id'], ['goal.goal_id'], ),
sa.PrimaryKeyConstraint('task_id')
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('task')
op.drop_table('goal')
# ### end Alembic commands ###
2 changes: 2 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -5,6 +5,7 @@ blinker==1.4
certifi==2020.12.5
chardet==4.0.0
click==7.1.2
coverage==7.2.5
Flask==1.1.2
Flask-Migrate==2.6.0
Flask-SQLAlchemy==2.4.4
@@ -30,5 +31,6 @@ requests==2.25.1
six==1.15.0
SQLAlchemy==1.3.23
toml==0.10.2
tomli==2.0.1
urllib3==1.26.5
Werkzeug==1.0.1
51 changes: 36 additions & 15 deletions tests/test_wave_01.py
Original file line number Diff line number Diff line change
@@ -2,7 +2,7 @@
import pytest


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_no_saved_tasks(client):
# Act
response = client.get("/tasks")
@@ -13,7 +13,7 @@ def test_get_tasks_no_saved_tasks(client):
assert response_body == []


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_one_saved_tasks(client, one_task):
# Act
response = client.get("/tasks")
@@ -31,8 +31,6 @@ def test_get_tasks_one_saved_tasks(client, one_task):
}
]


@pytest.mark.skip(reason="No way to test this feature yet")
def test_get_task(client, one_task):
# Act
response = client.get("/tasks/1")
@@ -49,24 +47,44 @@ def test_get_task(client, one_task):
"is_complete": False
}
}


pytest.mark.skip(reason="No way to test this feature yet")
def test_get_task_not_found(client):
# Act
response = client.get("/tasks/1")
response_body = response.get_json()

# Assert
assert response.status_code == 200
assert "task" in response_body
assert response_body == {
"task": {
"id": 1,
"title": "Go on my daily walk 🏞",
"description": "Notice something new every day",
"is_complete": False
}
}


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_task_not_found(client):
# Act
response = client.get("/tasks/1")
response_body = response.get_json()

# Assert
assert response.status_code == 404
assert response_body["message"]!= ""

raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_create_task(client):
# Act
response = client.post("/tasks", json={
@@ -93,7 +111,7 @@ def test_create_task(client):
assert new_task.completed_at == None


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_update_task(client, one_task):
# Act
response = client.put("/tasks/1", json={
@@ -119,7 +137,7 @@ def test_update_task(client, one_task):
assert task.completed_at == None


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_update_task_not_found(client):
# Act
response = client.put("/tasks/1", json={
@@ -130,14 +148,15 @@ def test_update_task_not_found(client):

# Assert
assert response.status_code == 404
assert response_body == {"message" : "task 1 not found"}

raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_delete_task(client, one_task):
# Act
response = client.delete("/tasks/1")
@@ -152,24 +171,26 @@ def test_delete_task(client, one_task):
assert Task.query.get(1) == None


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_delete_task_not_found(client):
# Act
response = client.delete("/tasks/1")
response_body = response.get_json()

# Assert
assert response.status_code == 404
assert response_body == {"message" : "task 1 not found"}


raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************

assert Task.query.all() == []


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_create_task_must_contain_title(client):
# Act
response = client.post("/tasks", json={
@@ -186,7 +207,7 @@ def test_create_task_must_contain_title(client):
assert Task.query.all() == []


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_create_task_must_contain_description(client):
# Act
response = client.post("/tasks", json={
4 changes: 2 additions & 2 deletions tests/test_wave_02.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pytest


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_sorted_asc(client, three_tasks):
# Act
response = client.get("/tasks?sort=asc")
@@ -29,7 +29,7 @@ def test_get_tasks_sorted_asc(client, three_tasks):
]


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_sorted_desc(client, three_tasks):
# Act
response = client.get("/tasks?sort=desc")
18 changes: 10 additions & 8 deletions tests/test_wave_03.py
Original file line number Diff line number Diff line change
@@ -5,7 +5,7 @@
import pytest


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_complete_on_incomplete_task(client, one_task):
# Arrange
"""
@@ -42,7 +42,7 @@ def test_mark_complete_on_incomplete_task(client, one_task):
assert Task.query.get(1).completed_at


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_incomplete_on_complete_task(client, completed_task):
# Act
response = client.patch("/tasks/1/mark_incomplete")
@@ -62,7 +62,7 @@ def test_mark_incomplete_on_complete_task(client, completed_task):
assert Task.query.get(1).completed_at == None


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_complete_on_completed_task(client, completed_task):
# Arrange
"""
@@ -99,7 +99,7 @@ def test_mark_complete_on_completed_task(client, completed_task):
assert Task.query.get(1).completed_at


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_incomplete_on_incomplete_task(client, one_task):
# Act
response = client.patch("/tasks/1/mark_incomplete")
@@ -119,31 +119,33 @@ def test_mark_incomplete_on_incomplete_task(client, one_task):
assert Task.query.get(1).completed_at == None


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_complete_missing_task(client):
# Act
response = client.patch("/tasks/1/mark_complete")
response_body = response.get_json()

# Assert
assert response.status_code == 404
assert response_body == {"message": "task 1 not found"}

raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_mark_incomplete_missing_task(client):
# Act
response = client.patch("/tasks/1/mark_incomplete")
response_body = response.get_json()

# Assert
assert response.status_code == 404
assert response_body == {"message": "task 1 not found"}

raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************
60 changes: 40 additions & 20 deletions tests/test_wave_05.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import pytest
from app.models.goal import Goal


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_goals_no_saved_goals(client):
# Act
response = client.get("/goals")
@@ -12,7 +13,7 @@ def test_get_goals_no_saved_goals(client):
assert response_body == []


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_goals_one_saved_goal(client, one_goal):
# Act
response = client.get("/goals")
@@ -29,7 +30,7 @@ def test_get_goals_one_saved_goal(client, one_goal):
]


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_goal(client, one_goal):
# Act
response = client.get("/goals/1")
@@ -46,22 +47,24 @@ def test_get_goal(client, one_goal):
}


@pytest.mark.skip(reason="test to be completed by student")
pytest.mark.skip(reason="test to be completed by student")
def test_get_goal_not_found(client):
pass
# Act
response = client.get("/goals/1")
response_body = response.get_json()

raise Exception("Complete test")
#raise Exception("Complete test")
# Assert
# ---- Complete Test ----
assert response.status_code == 404
assert response_body == None
# assertion 1 goes here
# assertion 2 goes here
# ---- Complete Test ----


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_create_goal(client):
# Act
response = client.post("/goals", json={
@@ -80,11 +83,15 @@ def test_create_goal(client):
}


@pytest.mark.skip(reason="test to be completed by student")
pytest.mark.skip(reason="test to be completed by student")
def test_update_goal(client, one_goal):
raise Exception("Complete test")
#raise Exception("Complete test")
# Act
# ---- Complete Act Here ----
response = client.put("/goals/1", json={
"title":"Updated Goal Title",
})
response_body = response.get_json()

# Assert
# ---- Complete Assertions Here ----
@@ -94,57 +101,70 @@ def test_update_goal(client, one_goal):
# ---- Complete Assertions Here ----


@pytest.mark.skip(reason="test to be completed by student")
pytest.mark.skip(reason="test to be completed by student")
def test_update_goal_not_found(client):
raise Exception("Complete test")
#raise Exception("Complete test")
# Act
# ---- Complete Act Here ----
response = client.put("/goals/1", json={
"title":"Updated Goal Title"})

response_body = response.get_json()

# Assert
# ---- Complete Assertions Here ----
# assertion 1 goes here
assert response.status_code == 404
assert response_body == None
# assertion 2 goes here
# ---- Complete Assertions Here ----


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_delete_goal(client, one_goal):
# Act
response = client.delete("/goals/1")
response_body = response.get_json()

# Assert
assert response.status_code == 200
assert "details" in response_body
assert response_body == {
"details": 'Goal 1 "Build a habit of going outside daily" successfully deleted'
}
#assert response.status_code == 200
#assert "details" in response_body
#assert response_body == {
# "details": 'Goal 1 "Build a habit of going outside daily" successfully deleted'
# }

# Check that the goal was deleted
response = client.get("/goals/1")
assert response.status_code == 404
assert response_body == {"message": "Goal 1 not found"}

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Great!



raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************


@pytest.mark.skip(reason="test to be completed by student")
pytest.mark.skip(reason="test to be completed by student")
def test_delete_goal_not_found(client):
raise Exception("Complete test")
#raise Exception("Complete test")

# Act
# ---- Complete Act Here ----

# Assert
response=client.delete("/goals/1")
response_body=response.get_json()
# ---- Complete Assertions Here ----
# assertion 1 goes here
# assertion 2 goes here
assert response.status_code == 404
assert response_body == None
assert Goal.query.all() == []
# ---- Complete Assertions Here ----


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_create_goal_missing_title(client):
# Act
response = client.post("/goals", json={})
15 changes: 8 additions & 7 deletions tests/test_wave_06.py
Original file line number Diff line number Diff line change
@@ -2,7 +2,7 @@
import pytest


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_post_task_ids_to_goal(client, one_goal, three_tasks):
# Act
response = client.post("/goals/1/tasks", json={
@@ -23,7 +23,7 @@ def test_post_task_ids_to_goal(client, one_goal, three_tasks):
assert len(Goal.query.get(1).tasks) == 3


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_one_goal, three_tasks):
# Act
response = client.post("/goals/1/tasks", json={
@@ -42,22 +42,23 @@ def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_on
assert len(Goal.query.get(1).tasks) == 2


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_for_specific_goal_no_goal(client):
# Act
response = client.get("/goals/1/tasks")
response_body = response.get_json()

# Assert
assert response.status_code == 404


raise Exception("Complete test with assertion about response body")
#raise Exception("Complete test with assertion about response body")
# *****************************************************************
# **Complete test with assertion about response body***************
# *****************************************************************


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_for_specific_goal_no_tasks(client, one_goal):
# Act
response = client.get("/goals/1/tasks")
@@ -74,7 +75,7 @@ def test_get_tasks_for_specific_goal_no_tasks(client, one_goal):
}


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal):
# Act
response = client.get("/goals/1/tasks")
@@ -99,7 +100,7 @@ def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal):
}


@pytest.mark.skip(reason="No way to test this feature yet")
pytest.mark.skip(reason="No way to test this feature yet")
def test_get_task_includes_goal_id(client, one_task_belongs_to_one_goal):
response = client.get("/tasks/1")
response_body = response.get_json()