Merge pull request #406 from CityOfNewYork/develop

OpenRecords v2.3

joelbcastillo authored Aug 29, 2018
2 parents 7bee649 + babeab2 commit f593ca9

Showing 129 changed files with 6,872 additions and 4,364 deletions.
1 change: 1 addition & 0 deletions .env.example
@@ -11,6 +11,7 @@ LOGFILE_DIRECTORY=<PATH TO LOG DIRECTORY>

# Data
AGENCY_DATA=<FULL PATH TO "agencies.json">
CUSTOM_REQUEST_FORMS=<FULL PATH TO "custom_request_forms.json">
LETTER_TEMPLATES_DATA=<FULL PATH TO "letter_templates.csv">
REASON_DATA=<FULL PATH TO "reasons.csv">
STAFF_DATA=<FULL PATH TO "staff.csv">
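The new CUSTOM_REQUEST_FORMS entry follows the same pattern as the other data variables above: a full path to a JSON file that the application reads at startup. A minimal sketch of how such a path might be consumed is shown below; the loader and the shape of the JSON are illustrative assumptions, not code taken from the repository.

import json
import os

# Path supplied through the environment, as configured in .env
custom_forms_path = os.environ["CUSTOM_REQUEST_FORMS"]

with open(custom_forms_path) as f:
    # Parsed custom form definitions; the exact schema is defined by the data file itself.
    custom_request_forms = json.load(f)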
6 changes: 2 additions & 4 deletions .eslintrc.yml
@@ -55,7 +55,7 @@ rules:
block-scoped-var: off
class-methods-use-this: off
complexity:
- error
- warn
- 6
consistent-return: off
curly: off
@@ -188,9 +188,7 @@ rules:
max-nested-callbacks: off
max-params: off
max-statements-per-line: off
max-statements:
- error
- 30
max-statements: off
multiline-ternary: off
new-cap: off
new-parens: off
14 changes: 13 additions & 1 deletion .gitignore
@@ -14,6 +14,7 @@ Vagrantfile.aws
*.pem
*.crt
*.key
*.htpasswd

# Byte-compiled / optimized / DLL files
__pycache__/
@@ -51,6 +52,7 @@ nosetests.xml
coverage.xml
*,cover
.hypothesis/
.pytest_cache/

# Flask stuff:
instance/
@@ -84,4 +86,14 @@ pip-selfcheck.json
.idea_modules/

# Visual Studio Code
.vscode/
.vscode/*
.vscode

# Local Configuration
.tmux/

# Test Files
data/FOIL-*
data_test/*
executables/*
openrecords.code-workspace
8 changes: 8 additions & 0 deletions .jshintrc
@@ -0,0 +1,8 @@
{
"globals": {
"$": true,
"document": true,
"window": true,
"jstz": true
}
}
17 changes: 17 additions & 0 deletions .travis.yml
@@ -0,0 +1,17 @@
language: python
python:
- 3.5.3
install: pip install -r requirements/dev.txt
before_script:
- psql -c 'create database openrecords_test;' -U postgres
- psql -c 'create user testuser;' -U postgres
- psql -c 'grant all privileges on all tables in schema public to testuser;' -U postgres
- mkdir -p /home/travis/build/CityOfNewYork/NYCOpenRecords/logs/
- sleep 10
addons:
postgresql: "9.5"
services:
- postgresql
- redis-server
- elasticsearch
script: pytest -v "tests/"
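The new Travis configuration provisions PostgreSQL 9.5, Redis, and Elasticsearch, creates the openrecords_test database, and then runs pytest against tests/. Below is a minimal sketch of a test module that the pytest -v "tests/" step would collect; the 'testing' config name and the fixture layout are assumptions based on common Flask app-factory conventions rather than the repository's actual test suite (the create_app signature itself appears in the app/__init__.py diff further down).

import pytest

from app import create_app


@pytest.fixture
def client():
    # jobs_enabled=False keeps the APScheduler jobs out of the test run.
    app = create_app('testing', jobs_enabled=False)
    with app.test_client() as client:
        yield client


def test_index_responds(client):
    # Any non-5xx response shows that the factory and its extensions wired up.
    assert client.get('/').status_code < 500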
59 changes: 42 additions & 17 deletions app/__init__.py
@@ -1,5 +1,6 @@
import atexit
from datetime import date
import json

import os
import uuid
@@ -10,6 +11,7 @@
from business_calendar import Calendar, MO, TU, WE, TH, FR
from celery import Celery
from flask import (
abort,
Flask,
render_template,
request as flask_request,
@@ -45,22 +47,26 @@
tracy = Tracy()
login_manager = LoginManager()
scheduler = APScheduler()
store = RedisStore(redis.StrictRedis(db=Config.SESSION_REDIS_DB, host=Config.REDIS_HOST, port=Config.REDIS_PORT))
store = RedisStore(redis.StrictRedis(db=Config.SESSION_REDIS_DB,
host=Config.REDIS_HOST, port=Config.REDIS_PORT))
session_redis = PrefixDecorator('session_', store)
celery = Celery(__name__, broker=Config.CELERY_BROKER_URL)
sentry = Sentry()

upload_redis = redis.StrictRedis(db=Config.UPLOAD_REDIS_DB, host=Config.REDIS_HOST, port=Config.REDIS_PORT)
email_redis = redis.StrictRedis(db=Config.EMAIL_REDIS_DB, host=Config.REDIS_HOST, port=Config.REDIS_PORT)
upload_redis = redis.StrictRedis(
db=Config.UPLOAD_REDIS_DB, host=Config.REDIS_HOST, port=Config.REDIS_PORT)
email_redis = redis.StrictRedis(
db=Config.EMAIL_REDIS_DB, host=Config.REDIS_HOST, port=Config.REDIS_PORT)

holidays = NYCHolidays(years=[year for year in range(date.today().year, date.today().year + 5)])
holidays = NYCHolidays(years=[year for year in range(
date.today().year, date.today().year + 5)])
calendar = Calendar(
workdays=[MO, TU, WE, TH, FR],
holidays=[str(key) for key in holidays.keys()]
)


def create_app(config_name, jobs_enabled=True):
def create_app(config_name='default', jobs_enabled=True):
"""
Set up the Flask Application context.
@@ -92,7 +98,6 @@ def create_app(config_name, jobs_enabled=True):

handler_error = TimedRotatingFileHandler(
os.path.join(app.config['LOGFILE_DIRECTORY'],
'error',
'openrecords_{}_error.log'.format(app.config['APP_VERSION_STRING'])),
when='midnight', interval=1, backupCount=60)
handler_error.setLevel(logging.ERROR)
@@ -174,21 +179,35 @@ def internal_server_error(e):
Raw Agent: {agent}
Error ID: {error_id}
""".format(
method=flask_request.method,
path=flask_request.path,
ip=flask_request.remote_addr,
agent_platform=flask_request.user_agent.platform,
agent_browser=flask_request.user_agent.browser,
agent_browser_version=flask_request.user_agent.version,
agent=flask_request.user_agent.string,
user=current_user,
error_id=error_id
), exc_info=e
method=flask_request.method,
path=flask_request.path,
ip=flask_request.remote_addr,
agent_platform=flask_request.user_agent.platform,
agent_browser=flask_request.user_agent.browser,
agent_browser_version=flask_request.user_agent.version,
agent=flask_request.user_agent.string,
user=current_user,
error_id=error_id
), exc_info=e
)
return render_template("error/generic.html",
status_code=500,
error_id=error_id)

@app.errorhandler(503)
def maintenance(e):
with open(os.path.join(app.instance_path, 'maintenance.json')) as f:
maintenance_info = json.load(f)
return render_template('error/maintenance.html',
description=maintenance_info['description'],
outage_time=maintenance_info['outage_time'])

@app.before_request
def check_maintenance_mode():
if os.path.exists(os.path.join(app.instance_path, 'maintenance.json')):
if not flask_request.cookies.get('authorized_maintainer', None):
return abort(503)

@app.context_processor
def add_session_config():
"""Add current_app.permanent_session_lifetime converted to milliseconds
@@ -201,6 +220,11 @@ def add_session_config():
app.permanent_session_lifetime.seconds * 1000),
}

@app.context_processor
def add_debug():
"""Add current_app.debug to context."""
return dict(debug=app.debug)

# Register Blueprints
from .main import main
app.register_blueprint(main)
@@ -212,7 +236,8 @@ def add_session_config():
app.register_blueprint(request, url_prefix="/request")

from .request.api import request_api_blueprint
app.register_blueprint(request_api_blueprint, url_prefix="/request/api/v1.0")
app.register_blueprint(request_api_blueprint,
url_prefix="/request/api/v1.0")

from .report import report
app.register_blueprint(report, url_prefix="/report")
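The maintenance-mode hooks added above key off a maintenance.json file in the Flask instance folder and an authorized_maintainer cookie. The sketch below is an editor's illustration, not repository code: the description and outage_time keys come straight from the 503 handler, while the cookie value is arbitrary because check_maintenance_mode only tests for the cookie's presence.

import json
from pathlib import Path

# Flask's instance folder (app.instance_path) typically resolves to ./instance
# next to the application package.
instance_path = Path("instance")
instance_path.mkdir(exist_ok=True)

maintenance_info = {
    "description": "Scheduled database upgrade.",
    "outage_time": "Saturday 10:00 PM to Sunday 2:00 AM EST",
}
(instance_path / "maintenance.json").write_text(json.dumps(maintenance_info))

# While maintenance.json exists, every request without the cookie is aborted
# with 503; a maintainer can bypass the page by setting, for example,
# a cookie of authorized_maintainer=1 in the browser.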
154 changes: 113 additions & 41 deletions app/agency/api/utils.py
@@ -1,72 +1,144 @@
from itertools import groupby
from typing import Any, Dict, List, Sequence, Tuple

from operator import itemgetter
from sqlalchemy import or_

from app.models import (
Agencies,
LetterTemplates
)
from app.models import Agencies, LetterTemplates, Reasons


def get_active_users_as_choices(agency_ein):
"""
Retrieve a list of users that are active for a given agency
:param agency_ein: Agency EIN (String)
:return: A list of user tuples (id, name)
def get_active_users_as_choices(agency_ein: str) -> List[Tuple[str, str]]:
"""Retrieve a list of tuples representing users that are active for a given agency. Each tuple contains the
following elements:
(Users.guid, Users.name)
Params:
agency_ein (str): Agency EIN (4 Character String)
Returns:
List(Tuple(str, str)): A list of tuples with the following elements:
(Users.guid, Users.name)
"""
active_users = sorted(
[(user.get_id(), user.name)
for user in Agencies.query.filter_by(ein=agency_ein).one().active_users],
key=lambda x: x[1])
active_users.insert(0, ('', 'All'))
[
(user.get_id(), user.name)
for user in Agencies.query.filter_by(ein=agency_ein).one().active_users
],
key=lambda x: x[1],
)
active_users.insert(0, ("", "All"))
return active_users


def get_letter_templates(agency_ein, template_type=None):
def get_reasons(agency_ein: str, reason_type: str = None) -> Dict:
"""Retrieve the determination reasons (used in emails) for the specified agency as a JSON object. If reason_type is
provided, only retrieve determination_reasons of that type.
Args:
agency_ein (str): Agency EIN
reason_type (str): One of ("denial", "closing", "re-opening")
Returns:
Dict:
{
'type_': [(reason_id, reason_title),...]
}
"""
Retrieve letter templates for the specified agency as a JSON object. If template type is provided, only get
templates of that type.
if reason_type is not None:
reasons = (
Reasons.query.with_entities(Reasons.id, Reasons.title, Reasons.type)
.filter(
Reasons.type == reason_type,
or_(Reasons.agency_ein == agency_ein, Reasons.agency_ein == None),
)
.all()
)
else:
reasons = (
Reasons.query.with_entities(Reasons.id, Reasons.title, Reasons.type)
.filter(or_(Reasons.agency_ein == agency_ein, Reasons.agency_ein == None))
.all()
)
grouped_reasons = list(_group_items(reasons, 2))

reasons_dict = {}

:param agency_ein: Agency EIN (String)
:param template_type: One of "acknowledgment", "denial", "closing", "letter", "extension", "re-opening" (String)
:return: Dictionary
for group in grouped_reasons:
determination_type = group[0]
reasons = group[1]

reasons_dict[determination_type] = []

for reason in reasons:
reasons_dict[determination_type].append((reason[0], reason[1]))

return reasons_dict


def get_letter_templates(agency_ein: str, template_type: str = None) -> Dict:
"""Retrieve letter templates for the specified agency as a dictionary. If template type is provided, only get
templates of that type
Params:
Args:
agency_ein (str): Agency EIN
template_type (str): One of "acknowledgment", "denial", "closing", "letter", "extension", "re-opening"
Returns:
Dict:
{
'type_': [(template_id, template_name),...]
}
"""
if template_type is not None:
templates = LetterTemplates.query.with_entities(LetterTemplates.id, LetterTemplates.title,
LetterTemplates.type_).filter(
LetterTemplates.type_ == template_type).all()
templates = (
LetterTemplates.query.with_entities(
LetterTemplates.id, LetterTemplates.title, LetterTemplates.type_
)
.filter(LetterTemplates.type_ == template_type)
.all()
)
else:
templates = LetterTemplates.query.with_entities(LetterTemplates.id, LetterTemplates.title,
LetterTemplates.type_).filter_by(agency_ein=agency_ein).all()
templates = (
LetterTemplates.query.with_entities(
LetterTemplates.id, LetterTemplates.title, LetterTemplates.type_
)
.filter_by(agency_ein=agency_ein)
.all()
)

templates = list(_group_templates(templates))
grouped_templates = list(_group_items(templates, 2))

template_dict = {}

for i in templates:
type_ = i[0]
vals = i[1]
for group in grouped_templates:
template_type = group[0]
templates = group[1]

template_dict[type_] = []
template_dict[template_type] = []

for i in vals:
template_dict[type_].append((i[0], i[1]))
for template in templates:
template_dict[template_type].append((template[0], template[1]))

if template_type is not None:
return template_dict[template_type]
return template_dict


def _group_templates(templates):
"""
Group a list of templates by their type
:param templates: List of templates (template.id, template.title, template.type_)
:return: a generator containing each grouped template type
def _group_items(items: Sequence[Sequence], sort_index: int) -> Tuple[Any, list]:
"""Group a collection of items by a specified key
Args:
items (Sequence): A collection of items to be grouped
sort_index (int): Index of the item to use for grouping
Yields:
tuple:
(
items[sort_index_1], (Sequence[i], Sequence[j], ...),
items[sort_index_2], (Sequence[i], Sequence[j], ...),
...
)
"""
grouped = groupby(templates, itemgetter(2))
grouped = groupby(items, itemgetter(sort_index))

for key, sub_iter in grouped:
yield key, list(sub_iter)
for item_index, sub_iter in grouped:
yield item_index, list(sub_iter)
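The rewritten _group_items helper is a thin wrapper around itertools.groupby, which only merges adjacent rows that share the same key, so query results need to arrive ordered by the grouping column for each type to come out as a single group. The standalone sketch below uses illustrative sample data, not rows from the repository, to mirror how get_reasons and get_letter_templates turn (id, title, type) rows into a type-keyed dictionary.

from itertools import groupby
from operator import itemgetter

# Sample (id, title, type) rows shaped like the Reasons/LetterTemplates queries.
rows = [
    (1, "Denied - records not found", "denial"),
    (2, "Denied - exempt from disclosure", "denial"),
    (3, "Request fulfilled", "closing"),
]

grouped = {
    key: [(row_id, title) for row_id, title, _ in group]
    for key, group in groupby(rows, itemgetter(2))
}

# grouped == {
#     "denial": [(1, "Denied - records not found"),
#                (2, "Denied - exempt from disclosure")],
#     "closing": [(3, "Request fulfilled")],
# }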