diff --git a/.github/workflows/run-tests-v1.yml b/.github/workflows/run-tests-v1.yml index 8b6f128e..02ced16a 100644 --- a/.github/workflows/run-tests-v1.yml +++ b/.github/workflows/run-tests-v1.yml @@ -11,34 +11,23 @@ jobs: defaults: run: shell: bash - working-directory: ./flask-api + working-directory: ./backend steps: - uses: actions/checkout@main with: fetch-depth: 500 fetch-tags: true - - name: Set up Python 3.10 - uses: actions/setup-python@v4 + - name: Install poetry + run: pipx install poetry + - name: Set up Python 3.12 + uses: actions/setup-python@v5 with: - python-version: "3.10" - cache: "pip" - - name: Upgrade pip - run: python -m pip install --upgrade pip - - name: Install API dev Dependencies - run: | - pip install -r requirements-dev.txt - - name: Run development tests with mocking enabled, using tox - run: | - # Use tox because it is configured to test against the same package type being deployed - tox - - name: Run release tests with mocking disabled, using tox - env: - COGNITO_REGION: ${{ secrets.COGNITO_REGION }} - COGNITO_ACCESS_ID: ${{ secrets.COGNITO_ACCESS_ID }} - COGNITO_ACCESS_KEY: ${{ secrets.COGNITO_ACCESS_KEY }} - run: | - echo "COGNITO_REGION set" - tox -e releasetest + python-version: "3.12" + cache: "poetry" + - name: Install API Dependencies + run: poetry install --with test + - name: Run tests + run: poetry run pytest test-app: runs-on: ubuntu-latest defaults: diff --git a/README.md b/README.md index c9ebb93b..04f854cc 100644 --- a/README.md +++ b/README.md @@ -14,28 +14,28 @@ This project is part of a larger initiative at Hack for LA around creating a sha ## Technology Overview -The HomeUniteUs project is structured as a multi-[docker](https://docs.docker.com/) container application (for local development and testing), with secret-protected access to networked resources. The project contains three containers, whose activities are coordinated using the `docker compose` configuration outlined in `docker-compose.yml`. 
The three containers are: - -1. `app`: A frontend [React](https://reactjs.org/docs/getting-started.html) app developed using [TypeScript](https://www.typescriptlang.org/). - * We use [Redux](https://redux.js.org/) to manage client state, with the [Redux Toolkit](https://redux-toolkit.js.org/) to simplify development. - * We use the [Material UI](https://material-ui.com/) component library, for access to high quality UI components designed with accessibility in mind. - * We use the [Vite](https://vitejs.dev/) build tool, for fast dev builds and optimized production builds. -2. `api`: A backend python [connexion](https://connexion.readthedocs.io/en/latest/) REST API, hosted on [AWS](https://docs.aws.amazon.com/). - * We use `connexion` to simplify our API specification and validation. `connexion` uses the [OpenAPI](https://www.openapis.org/) [specification](https://spec.openapis.org/oas/v3.0.1.html) to map API URLs to specific python functions. It will handle routing, validation, security, and parameter passing when an API request is made. `connexion` also generates documentation for our API, and provides a useful user interface (at `{URL}/api/ui`) that can be used to easily interact with the API for development purposes. We use the variant of `connexion` that runs on top of [Flask](https://flask.palletsprojects.com/en/1.1.x/). - * We use the [SQLAlchemy](https://www.sqlalchemy.org/) SQL toolkit and [Object-Relational Mapper (ORM)](https://en.wikipedia.org/wiki/Object%E2%80%93relational_mapping). This serves as a developer-friendly layer between the python app and the database. - * We use [gunicorn](https://gunicorn.org/) as our WSGI server. A WSGI server acts as a communication intermediary between an HTTP proxy and the Hone Unite Us API, handling client requests and passing them to the application, then returning the application's responses to the client. - * We use [nginx](https://nginx.org/en/docs/) as our HTTP server. 
This “reverse proxy” sits in front of the WSGI server, and handles a number of complex web server tasks. It is capable of load balancing across the WSGI server works, managing TLS connections, serving static files, and more. +The HomeUniteUs project is structured as a multi-[docker](https://docs.docker.com/) container application, with secret-protected access to networked resources. The project contains five containers, whose activities are coordinated using the `docker compose` configuration outlined in `docker-compose.yml`. The five containers are: + +1. `frontend`: A frontend [React](https://reactjs.org/docs/getting-started.html) app developed using [TypeScript](https://www.typescriptlang.org/). + * It uses [Redux](https://redux.js.org/) to manage client state, with the [Redux Toolkit](https://redux-toolkit.js.org/) to simplify development. + * It uses the [Material UI](https://material-ui.com/) component library, for access to high quality UI components designed with accessibility in mind. + * It uses the [Vite](https://vitejs.dev/) build tool, for fast dev builds and optimized production builds. +2. `backend`: A backend python API, hosted on [AWS](https://docs.aws.amazon.com/). + * It uses `FastAPI` as its web framework. + * It uses the [SQLAlchemy](https://www.sqlalchemy.org/) SQL toolkit and [Object-Relational Mapper (ORM)](https://en.wikipedia.org/wiki/Object%E2%80%93relational_mapping). This serves as a developer-friendly layer between the Python app and the database. 3. `db`: A [PostgreSQL](https://www.postgresql.org/) database container. * The database is stored as a docker volume, `db-data`. * If the volume is not found during spin-up, then an empty database volume will be created. - -In the production environment, each of these services along with `nginx` are deployed onto an EC2 instance and managed as `systemd` service units instead of with Docker. +4. `motoserver`: A development tool. 
It runs [`moto`](http://docs.getmoto.org/en/latest/docs/server_mode.html) in Server Mode. + * It allows developers to mock AWS so that AWS secrets are not needed for local development. This tool is used because HUU uses AWS Cognito as its identity and access provider. However, most local development will not need to make actual calls to AWS Cognito for HUU feature development. Using this tool will allow developers to login to HUU on their development machine. + * It has a dashboard located at http://127.0.0.1:5000/moto-api/ +5. `pgadmin`: An optional development tool. It is a container running [pgAdmin4](https://www.pgadmin.org/). pgAdmin4 is a database administration and development platform for PostgreSQL. + * This tool can be used to run queries against the PostgreSQL server running in the `db` container. + * It is accessed by going to http://127.0.0.1:5050 ## Build Instructions -Before you can build the project, you will require a `.env` file containing access keys to the application third party services. Please message a team member on the [#home-unite-us slack channel](https://hackforla.slack.com/archives/CRWUG7X0C) once you've completed onboarding. See the [api](./api/README.md) and [app](./app/README.md) READMEs for more information about the required and optional environment variables. - -Since this project is dockerized, you can choose to either build the backend and frontend apps as docker containers or directly onto your local machine. This guide will focus on docker builds, but full local build and deployment instructions can be found in the [api](./api/README.md) and [app](./app/README.md) READMEs. +Since this project is Dockerized, you can choose to either build the backend and frontend apps as Docker containers or directly onto your local machine. This guide will focus on Docker builds, but full local build and deployment instructions can be found in the [api](./backend/README.md) and [app](./frontend/README.md) READMEs. 
Also note that the code in this repo *should* build without issue on Linux, Windows, and MacOS. We do, however, utilize some Linux-only tools during deployment and primarily target the Linux platform. @@ -45,16 +45,28 @@ Building with Docker is the simplest option, and debugging applications within t #### Requirements -* A copy of the `.env` file described above -* An up-to-date installation of [docker](https://docs.docker.com/get-docker/) +* An up-to-date installation of [Docker](https://docs.docker.com/get-docker/) #### Instructions -1. Place a copy of the `.env` file in the `app` directory -2. Place a copy of the `.env` file in the `api` directory -3. Build all three containers by running the `docker compose up` shell command from the root directory: -4. Verify there are no build errors, and open `localhost:4040` in any browser, to see the application +1. Build and run all containers by running the `docker compose up -d --build` shell command from the root directory: +2. Verify there are no build errors. If there are build errors, reach out to the development team. +3. Open `http://localhost:34828` in any browser to use Home Unite Us. + +* `pgAdmin4` is available at http://localhost:5050/browser/ to query the database. +* `moto` server is available at http://localhost:5000/moto-api/ to view mocked AWS data. + +#### Test Users + +For local development, test users already exist when started using Docker. + +The password for all test users is `Test123!`. + +- 1 Admin: admin@email.com +- 26 Guests: guest[a-z]@email.com (e.g. `guesta@email.com`, `guestb@email.com`, ... `guestz@email.com`) +- 26 Coordinators: coordinator[a-z]@email.com (e.g. `coordinatora@email.com`, `coordinatorb@email.com`, ... `coordinatorz@email.com`) +- 26 Hosts: host[a-z]@email.com (e.g. `hosta@email.com`, `hostb@email.com`, ... `hostz@email.com`) ## Testing Instructions -Testing instructions for each application are in the [api](./api/README.md) and [app](./app/README.md) README files. 
+Testing instructions for each application are in the [backend](./backend/README.md) and [frontend](./frontend/README.md) README files. diff --git a/backend/.env.example b/backend/.env.example index 3f43f832..315bd627 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -1,9 +1,10 @@ -COGNITO_CLIENT_ID= -COGNITO_CLIENT_SECRET= -COGNITO_REGION= -COGNITO_REDIRECT_URI=http://localhost:34828/signin -COGNITO_USER_POOL_ID= -COGNITO_ACCESS_ID= -COGNITO_ACCESS_KEY= -ROOT_URL=http://localhost:34828 -DATABASE_URL=sqlite:///./homeuniteus.db \ No newline at end of file +COGNITO_CLIENT_ID=testing +COGNITO_CLIENT_SECRET=testing +COGNITO_REGION=us-east-1 +COGNITO_REDIRECT_URI=http://localhost:4040/signin +COGNITO_USER_POOL_ID=testing +COGNITO_ACCESS_ID=testing +COGNITO_ACCESS_KEY=testing +COGNITO_ENDPOINT_URL=http://127.0.0.1:5000 +ROOT_URL=http://localhost:4040 +DATABASE_URL=postgresql+psycopg2://postgres:postgres@127.0.0.1:5432/huu diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 00000000..86b2e775 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,48 @@ +FROM python:3.12-bookworm AS builder + +# --- Install Poetry --- +ARG POETRY_VERSION=1.8 + +ENV POETRY_HOME=/opt/poetry +ENV POETRY_NO_INTERACTION=1 +ENV POETRY_VIRTUALENVS_IN_PROJECT=1 +ENV POETRY_VIRTUALENVS_CREATE=1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +# Tell Poetry where to place its cache and virtual environment +ENV POETRY_CACHE_DIR=/opt/.cache + +RUN pip install "poetry==${POETRY_VERSION}" + +WORKDIR /app + +# --- Reproduce the environment --- +COPY pyproject.toml . + +# Install the dependencies and clear the cache afterwards. +# This may save some MBs. +RUN poetry install --no-root && rm -rf $POETRY_CACHE_DIR + +# Now let's build the runtime image from the builder. +# We'll just copy the env and the PATH reference. 
+FROM python:3.12-bookworm AS runtime + +ENV VIRTUAL_ENV=/app/.venv +ENV PATH="/app/.venv/bin:$PATH" + +COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +COPY ./alembic /code/alembic +COPY ./alembic.ini /code/alembic.ini +COPY ./app /code/app +COPY ./form_data /code/form_data +COPY ./startup_scripts/entrypoint.sh /code/startup_scripts/entrypoint.sh +COPY ./startup_scripts/setup_moto_server.py /code/startup_scripts/setup_moto_server.py +COPY ./startup_scripts/create_groups_users.py /code/startup_scripts/create_groups_users.py + +RUN chmod +x /code/startup_scripts/entrypoint.sh + +WORKDIR /code +ENTRYPOINT ["/code/startup_scripts/entrypoint.sh"] +CMD [] +EXPOSE 8000 \ No newline at end of file diff --git a/backend/README.md b/backend/README.md index f285d864..68f4ebdc 100644 --- a/backend/README.md +++ b/backend/README.md @@ -13,7 +13,7 @@ This server uses: ## Requirements -You will need Python 3.8+ to install Poetry. +You will need Python 3.12+ to install Poetry. Run `python -V` to check the Python version. @@ -21,34 +21,60 @@ Run `python -V` to check the Python version. [Poetry](https://python-poetry.org/docs/#installation) is used to manage the project dependencies. Follow the [installation instructions](https://python-poetry.org/docs/#installation) to run the CLI globally. +[Docker](https://www.docker.com) is used to run required dependencies for development. + ## Usage - Development ### Getting Started +#### Run Required Docker Containers + +The API uses PostgreSQL and Moto server as its basic required services. Using Docker Compose, run these containers prior to running the API using the following command: + +```shell +docker compose up -d --build pgadmin motoserver # Runs required docker services: PostgreSQL, Moto Server, pgAdmin4 +``` + +The command above will run three containers. `pgAdmin4` is a convenient tool that allows developers to query the PostgreSQL database. 
+ #### Configuration -The API configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `backend` directory. To get started, create a `.env` file within `/backend` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. +The API configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `backend` directory. To get started, create a `.env` file within `/backend` and copy the values from `.env.example` into the new `.env` file. -#### Setup and Run +#### Setup and Run API - non-Docker version Once the `.env` file has been configured and Poetry is installed, run the following commands in the `backend` directory to install the required development dependencies and run the application. ```shell -poetry install # Installs all dependencies +poetry install # Installs all dependencies -poetry shell # Activates the virtual environment +poetry shell # Activates the virtual environment -poetry run fastapi dev app/main.py # Runs this server in developer mode +# If using a shell use this: +startup_scripts/entrypoint.sh # Creates test users and runs the API in developer mode + +# If using Powershell use this: +startup_scripts/entrypoint.ps1 # Creates test users and runs the API in developer mode ``` Your server is now running at: ``` -http://127.0.0.1:8000 +http://localhost:8000 ``` And your API docs at: ``` -http://127.0.0.1:8000/docs +http://localhost:8000/docs +``` + +pgAdmin4 is available at: +``` +http://localhost:5050/browser +``` + +Moto server dashboard is available at: +``` +http://localhost:5000/moto-api ``` To exit the virtual environment, within the shell run: @@ -56,6 +82,17 @@ To exit the virtual environment, within the shell run: exit ``` +## Test Users + +The 
`startup_scripts/entrypoint.sh` (or `startup_scripts/entrypoint.ps1` if using Powershell) script creates the following users. + +The password for all test users is `Test123!`. + +- 1 Admin: admin@example.com +- 26 Guests: guest[a-z]@example.com (e.g. `guesta@example.com`, `guestb@example.com`, ... `guestz@example.com`) +- 26 Coordinators: coordinator[a-z]@example.com (e.g. `coordinatora@example.com`, `coordinatorb@example.com`, ... `coordinatorz@example.com`) +- 26 Hosts: host[a-z]@example.com (e.g. `hosta@example.com`, `hostb@example.com`, ... `hostz@example.com`) + ## Conventions ### API Endpoints diff --git a/backend/alembic.ini b/backend/alembic.ini index ef70e5f2..e4e22ab7 100644 --- a/backend/alembic.ini +++ b/backend/alembic.ini @@ -60,7 +60,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = sqlite:///./homeuniteus.db +# sqlalchemy.url = [post_write_hooks] diff --git a/backend/alembic/env.py b/backend/alembic/env.py index 8901a3f3..7ade5d63 100644 --- a/backend/alembic/env.py +++ b/backend/alembic/env.py @@ -1,17 +1,27 @@ +from app.core.db import Base +from app.core.config import get_settings + +import app.modules.access.models +import app.modules.intake_profile.models +import app.modules.intake_profile.forms.models +import app.modules.onboarding.models +import app.modules.matching.models +import app.modules.relationship_management.models +import app.modules.tenant_housing_orgs.models +import app.modules.workflow.models + from logging.config import fileConfig -from sqlalchemy import engine_from_config -from sqlalchemy import pool +from sqlalchemy import create_engine from alembic import context import sys import os + print(os.getcwd()) sys.path.append(os.getcwd()) -from app import models as db - # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
config = context.config @@ -23,15 +33,15 @@ # add your model's MetaData object here # for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = db.Base.metadata +target_metadata = Base.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. +database_url = get_settings().DATABASE_URL + def run_migrations_offline() -> None: """Run migrations in 'offline' mode. @@ -45,9 +55,8 @@ def run_migrations_offline() -> None: script output. """ - url = config.get_main_option("sqlalchemy.url") context.configure( - url=url, + url=database_url, target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, @@ -70,16 +79,11 @@ def run_migrations_online() -> None: # with the test engine configuration. connectable = context.config.attributes.get("connection", None) if connectable is None: - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) + connectable = create_engine(database_url) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, + target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/backend/alembic/versions/3ceec084158f_.py b/backend/alembic/versions/3ceec084158f_.py deleted file mode 100644 index b5518d1b..00000000 --- a/backend/alembic/versions/3ceec084158f_.py +++ /dev/null @@ -1,367 +0,0 @@ -"""empty message - -Revision ID: 3ceec084158f -Revises: -Create Date: 2023-03-13 16:58:30.782837 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.engine.reflection import Inspector - -# revision identifiers, used by Alembic. 
-revision = '3ceec084158f' -down_revision = None -branch_labels = None -depends_on = None - -def create_missing_table(name: str, *create_args) -> bool: - "Create the table if it is not already present in the database." - conn = op.get_bind() - inspector = Inspector.from_engine(conn) - if name not in inspector.get_table_names(): - op.create_table(name, *create_args) - return True - return False - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - if create_missing_table('applicant_type', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant_type_description', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id')): - op.create_index(op.f('ix_applicant_type_id'), 'applicant_type', ['id'], unique=False) - - if create_missing_table('case_status', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('status_description', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_case_status_id'), 'case_status', ['id'], unique=False) - - if create_missing_table('guest_group', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('group_name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_guest_group_id'), 'guest_group', ['id'], unique=False) - - if create_missing_table('host', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_host_id'), 'host', ['id'], unique=False) - - if create_missing_table('host_household', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('household_name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_host_household_id'), 'host_household', ['id'], unique=False) - - if create_missing_table('housing_program_service_provider', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('provider_name', sa.String(), 
nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_housing_program_service_provider_id'), 'housing_program_service_provider', ['id'], unique=False) - - if create_missing_table('image_tag_type', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('tag_text', sa.String(), nullable=False), - sa.Column('tag_description', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_image_tag_type_id'), 'image_tag_type', ['id'], unique=False) - - if create_missing_table('intake_question_type', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('type_description', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_intake_question_type_id'), 'intake_question_type', ['id'], unique=False) - - if create_missing_table('match_status', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('status_description', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_match_status_id'), 'match_status', ['id'], unique=False) - - if create_missing_table('user', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('email', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('email') - ): - op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False) - - if create_missing_table('applicant_status', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant_type', sa.Integer(), nullable=False), - sa.Column('status_description', sa.String(), nullable=False), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_applicant_status_id'), 'applicant_status', ['id'], unique=False) - - if create_missing_table('group_match_result', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('guest_group', sa.Integer(), nullable=False), - sa.Column('host_household', sa.Integer(), 
nullable=False), - sa.Column('match_status', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_group_match_result_id'), 'group_match_result', ['id'], unique=False) - - if create_missing_table('housing_program', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('program_name', sa.String(), nullable=False), - sa.Column('service_provider', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['service_provider'], ['housing_program_service_provider.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_housing_program_id'), 'housing_program', ['id'], unique=False) - - if create_missing_table('applicant', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant_type', sa.Integer(), nullable=False), - sa.Column('applicant_status', sa.Integer(), nullable=False), - sa.Column('user', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant_status'], ['applicant_status.id'], ), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.ForeignKeyConstraint(['user'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_applicant_id'), 'applicant', ['id'], unique=False) - - if create_missing_table('intake_question_set', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('question_set_name', sa.String(), nullable=False), - sa.Column('housing_program', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_intake_question_set_id'), 'intake_question_set', ['id'], unique=False) - - if create_missing_table('program_coordinator', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('user', sa.Integer(), 
nullable=False), - sa.Column('housing_program', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.ForeignKeyConstraint(['user'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_program_coordinator_id'), 'program_coordinator', ['id'], unique=False) - - if create_missing_table('applicant_status_log', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('log_description', sa.String(), nullable=False), - sa.Column('logtime', sa.DateTime(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.Column('src_status', sa.Integer(), nullable=False), - sa.Column('dest_status', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['dest_status'], ['applicant_status.id'], ), - sa.ForeignKeyConstraint(['src_status'], ['applicant_status.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_applicant_status_log_id'), 'applicant_status_log', ['id'], unique=False) - - if create_missing_table('applicant_uploaded_image', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.Column('image_data', sa.LargeBinary(length=5242880), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_applicant_uploaded_image_id'), 'applicant_uploaded_image', ['id'], unique=False) - - if create_missing_table('guest_group_member', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('guest_group', sa.Integer(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_guest_group_member_id'), 'guest_group_member', ['id'], unique=False) - - if 
create_missing_table('host_household_member', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('host_household', sa.Integer(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_host_household_member_id'), 'host_household_member', ['id'], unique=False) - - if create_missing_table('housing_program_pariticipant', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.Column('housing_program', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_housing_program_pariticipant_id'), 'housing_program_pariticipant', ['id'], unique=False) - - if create_missing_table('intake_question', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant_type', sa.Integer(), nullable=False), - sa.Column('intake_question_type', sa.Integer(), nullable=False), - sa.Column('intake_question_set', sa.Integer(), nullable=False), - sa.Column('question_text', sa.String(), nullable=False), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.ForeignKeyConstraint(['intake_question_set'], ['intake_question_set.id'], ), - sa.ForeignKeyConstraint(['intake_question_type'], ['intake_question_type.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_intake_question_id'), 'intake_question', ['id'], unique=False) - - if create_missing_table('match_result', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant_a', sa.Integer(), nullable=False), - sa.Column('applicant_b', sa.Integer(), nullable=False), - sa.Column('match_status', sa.Integer(), nullable=False), - 
sa.ForeignKeyConstraint(['applicant_a'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['applicant_b'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_match_result_id'), 'match_result', ['id'], unique=False) - - if create_missing_table('program_case', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('coordinator', sa.Integer(), nullable=False), - sa.Column('case_status', sa.Integer(), nullable=False), - sa.Column('host_household', sa.Integer(), nullable=False), - sa.Column('guest_group', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['case_status'], ['case_status.id'], ), - sa.ForeignKeyConstraint(['coordinator'], ['program_coordinator.id'], ), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_program_case_id'), 'program_case', ['id'], unique=False) - - if create_missing_table('image_tag', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('applicant', sa.Integer(), nullable=False), - sa.Column('image_tag_type', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant_uploaded_image.id'], ), - sa.ForeignKeyConstraint(['image_tag_type'], ['image_tag_type.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_image_tag_id'), 'image_tag', ['id'], unique=False) - - if create_missing_table('intake_response_value', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('intake_question', sa.Integer(), nullable=False), - sa.Column('response_text', sa.String(), nullable=False), - sa.ForeignKeyConstraint(['intake_question'], ['intake_question.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_intake_response_value_id'), 'intake_response_value', ['id'], unique=False) - - if create_missing_table('program_case_log', - 
sa.Column('id', sa.Integer(), nullable=False), - sa.Column('log_description', sa.String(), nullable=False), - sa.Column('logtime', sa.DateTime(), nullable=False), - sa.Column('program_case', sa.Integer(), nullable=False), - sa.Column('src_status', sa.Integer(), nullable=False), - sa.Column('dest_status', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['dest_status'], ['case_status.id'], ), - sa.ForeignKeyConstraint(['program_case'], ['program_case.id'], ), - sa.ForeignKeyConstraint(['src_status'], ['case_status.id'], ), - sa.PrimaryKeyConstraint('id') - ): - op.create_index(op.f('ix_program_case_log_id'), 'program_case_log', ['id'], unique=False) - - if create_missing_table('match_fail_condition', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('response_value_a', sa.Integer(), nullable=False), - sa.Column('response_value_b', sa.Integer(), nullable=False), - sa.Column('reason_text', sa.String(), nullable=False), - sa.ForeignKeyConstraint(['response_value_a'], ['intake_response_value.id'], ), - sa.ForeignKeyConstraint(['response_value_b'], ['intake_response_value.id'], ), - sa.PrimaryKeyConstraint('id')): - op.create_index(op.f('ix_match_fail_condition_id'), 'match_fail_condition', ['id'], unique=False) - - if create_missing_table('match_failure', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('match_result', sa.Integer(), nullable=False), - sa.Column('failed_condition', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['failed_condition'], ['match_fail_condition.id'], ), - sa.ForeignKeyConstraint(['match_result'], ['match_result.id'], ), - sa.PrimaryKeyConstraint('id')): - op.create_index(op.f('ix_match_failure_id'), 'match_failure', ['id'], unique=False) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f('ix_match_failure_id'), table_name='match_failure') - op.drop_table('match_failure') - op.drop_index(op.f('ix_match_fail_condition_id'), table_name='match_fail_condition') - op.drop_table('match_fail_condition') - op.drop_index(op.f('ix_program_case_log_id'), table_name='program_case_log') - op.drop_table('program_case_log') - op.drop_index(op.f('ix_intake_response_value_id'), table_name='intake_response_value') - op.drop_table('intake_response_value') - op.drop_index(op.f('ix_image_tag_id'), table_name='image_tag') - op.drop_table('image_tag') - op.drop_index(op.f('ix_program_case_id'), table_name='program_case') - op.drop_table('program_case') - op.drop_index(op.f('ix_match_result_id'), table_name='match_result') - op.drop_table('match_result') - op.drop_index(op.f('ix_intake_question_id'), table_name='intake_question') - op.drop_table('intake_question') - op.drop_index(op.f('ix_housing_program_pariticipant_id'), table_name='housing_program_pariticipant') - op.drop_table('housing_program_pariticipant') - op.drop_index(op.f('ix_host_household_member_id'), table_name='host_household_member') - op.drop_table('host_household_member') - op.drop_index(op.f('ix_guest_group_member_id'), table_name='guest_group_member') - op.drop_table('guest_group_member') - op.drop_index(op.f('ix_applicant_uploaded_image_id'), table_name='applicant_uploaded_image') - op.drop_table('applicant_uploaded_image') - op.drop_index(op.f('ix_applicant_status_log_id'), table_name='applicant_status_log') - op.drop_table('applicant_status_log') - op.drop_index(op.f('ix_program_coordinator_id'), table_name='program_coordinator') - op.drop_table('program_coordinator') - op.drop_index(op.f('ix_intake_question_set_id'), table_name='intake_question_set') - op.drop_table('intake_question_set') - op.drop_index(op.f('ix_applicant_id'), table_name='applicant') - op.drop_table('applicant') - op.drop_index(op.f('ix_housing_program_id'), table_name='housing_program') - 
op.drop_table('housing_program') - op.drop_index(op.f('ix_group_match_result_id'), table_name='group_match_result') - op.drop_table('group_match_result') - op.drop_index(op.f('ix_applicant_status_id'), table_name='applicant_status') - op.drop_table('applicant_status') - op.drop_index(op.f('ix_user_id'), table_name='user') - op.drop_table('user') - op.drop_index(op.f('ix_match_status_id'), table_name='match_status') - op.drop_table('match_status') - op.drop_index(op.f('ix_intake_question_type_id'), table_name='intake_question_type') - op.drop_table('intake_question_type') - op.drop_index(op.f('ix_image_tag_type_id'), table_name='image_tag_type') - op.drop_table('image_tag_type') - op.drop_index(op.f('ix_housing_program_service_provider_id'), table_name='housing_program_service_provider') - op.drop_table('housing_program_service_provider') - op.drop_index(op.f('ix_host_household_id'), table_name='host_household') - op.drop_table('host_household') - op.drop_index(op.f('ix_host_id'), table_name='host') - op.drop_table('host') - op.drop_index(op.f('ix_guest_group_id'), table_name='guest_group') - op.drop_table('guest_group') - op.drop_index(op.f('ix_case_status_id'), table_name='case_status') - op.drop_table('case_status') - op.drop_index(op.f('ix_applicant_type_id'), table_name='applicant_type') - op.drop_table('applicant_type') - # ### end Alembic commands ### diff --git a/backend/alembic/versions/a1a53aaf81d3_initial_migration.py b/backend/alembic/versions/a1a53aaf81d3_initial_migration.py new file mode 100644 index 00000000..8bf0a807 --- /dev/null +++ b/backend/alembic/versions/a1a53aaf81d3_initial_migration.py @@ -0,0 +1,166 @@ +"""initial migration + +Revision ID: a1a53aaf81d3 +Revises: +Create Date: 2024-10-09 20:55:54.123802 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = 'a1a53aaf81d3' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + 'field_properties', + sa.Column('properties_id', sa.Integer(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('field_type', sa.String(), nullable=False), + sa.Column('choices', sa.JSON(), nullable=False), + sa.CheckConstraint( + "field_type IN ('date', 'dropdown', 'multiple_choice', 'email', 'file_upload', 'group', 'long_text', 'number', 'short_text', 'yes_no')", + name='chk_field_type'), sa.PrimaryKeyConstraint('properties_id')) + op.create_table('field_validations', + sa.Column('validations_id', sa.Integer(), nullable=False), + sa.Column('required', sa.Boolean(), nullable=False), + sa.Column('max_length', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('validations_id')) + op.create_table( + 'forms', sa.Column('form_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('created_at', + sa.DateTime(timezone=True), + server_default=sa.sql.func.utcnow(), + nullable=False), sa.PrimaryKeyConstraint('form_id')) + op.create_table('housing_orgs', + sa.Column('housing_org_id', sa.Integer(), nullable=False), + sa.Column('org_name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('housing_org_id'), + sa.UniqueConstraint('org_name')) + op.create_table('role', sa.Column('id', sa.Integer(), nullable=False), + sa.Column('type', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('type')) + op.create_index(op.f('ix_role_id'), 'role', ['id'], unique=False) + op.create_table('unmatched_guest_case_status', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('status_text', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('status_text')) + 
op.create_table('field_groups', + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('form_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.ForeignKeyConstraint( + ['form_id'], + ['forms.form_id'], + ), sa.PrimaryKeyConstraint('group_id')) + op.create_table( + 'housing_programs', + sa.Column('housing_program_id', sa.Integer(), nullable=False), + sa.Column('program_name', sa.String(), nullable=False), + sa.Column('housing_org_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['housing_org_id'], + ['housing_orgs.housing_org_id'], + ), sa.PrimaryKeyConstraint('housing_program_id')) + op.create_table( + 'user', sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.Column('firstName', sa.String(length=255), nullable=False), + sa.Column('middleName', sa.String(length=255), nullable=True), + sa.Column('lastName', sa.String(length=255), nullable=True), + sa.Column('roleId', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['roleId'], + ['role.id'], + ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email')) + op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False) + op.create_table( + 'fields', sa.Column('field_id', sa.Integer(), nullable=False), + sa.Column('ref', sa.String(), nullable=False), + sa.Column('properties_id', sa.Integer(), nullable=False), + sa.Column('validations_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['group_id'], + ['field_groups.group_id'], + ), + sa.ForeignKeyConstraint( + ['properties_id'], + ['field_properties.properties_id'], + ), + sa.ForeignKeyConstraint( + ['validations_id'], + ['field_validations.validations_id'], + ), sa.PrimaryKeyConstraint('field_id')) + op.create_table( + 'unmatched_guest_case', sa.Column('id', sa.Integer(), nullable=False), + 
sa.Column('guest_id', sa.Integer(), nullable=False), + sa.Column('coordinator_id', sa.Integer(), nullable=False), + sa.Column('status_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['coordinator_id'], + ['user.id'], + ), sa.ForeignKeyConstraint( + ['guest_id'], + ['user.id'], + ), + sa.ForeignKeyConstraint( + ['status_id'], + ['unmatched_guest_case_status.id'], + ), sa.PrimaryKeyConstraint('id')) + op.create_table( + 'responses', sa.Column('answer_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('field_id', sa.Integer(), nullable=False), + sa.Column('answer_text', sa.String(), nullable=False), + sa.ForeignKeyConstraint( + ['field_id'], + ['fields.field_id'], + ), sa.ForeignKeyConstraint( + ['user_id'], + ['user.id'], + ), sa.PrimaryKeyConstraint('answer_id')) + + op.execute( + "INSERT INTO role (type) VALUES ('admin') ON CONFLICT DO NOTHING") + op.execute( + "INSERT INTO role (type) VALUES ('guest') ON CONFLICT DO NOTHING") + op.execute( + "INSERT INTO role (type) VALUES ('coordinator') ON CONFLICT DO NOTHING" + ) + op.execute( + "INSERT INTO role (type) VALUES ('host') ON CONFLICT DO NOTHING") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('responses') + op.drop_table('unmatched_guest_case') + op.drop_table('fields') + op.drop_index(op.f('ix_user_id'), table_name='user') + op.drop_table('user') + op.drop_table('housing_programs') + op.drop_table('field_groups') + op.drop_table('unmatched_guest_case_status') + op.drop_index(op.f('ix_role_id'), table_name='role') + op.drop_table('role') + op.drop_table('housing_orgs') + op.drop_table('forms') + op.drop_table('field_validations') + op.drop_table('field_properties') + + op.execute("DELETE FROM role WHERE type='admin'") + op.execute("DELETE FROM role WHERE type='guest'") + op.execute("DELETE FROM role WHERE type='coordinator'") + op.execute("DELETE FROM role WHERE type='host'") + # ### end Alembic commands ### diff --git a/backend/alembic/versions/cfc4e41b69d3_initial_form_api.py b/backend/alembic/versions/cfc4e41b69d3_initial_form_api.py deleted file mode 100644 index 8379e0d9..00000000 --- a/backend/alembic/versions/cfc4e41b69d3_initial_form_api.py +++ /dev/null @@ -1,85 +0,0 @@ -"""initial_form_api - -Revision ID: cfc4e41b69d3 -Revises: e4c8bb426528 -Create Date: 2024-05-05 17:14:51.771328 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'cfc4e41b69d3' -down_revision = 'e4c8bb426528' -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.create_table('field_properties', - sa.Column('properties_id', sa.Integer(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('field_type', sa.String(length=50), nullable=False), - sa.Column('choices', sa.JSON(), nullable=True), - sa.CheckConstraint("field_type IN ('date', 'dropdown', 'multiple_choice', 'email', 'file_upload', 'group', 'long_text', 'number', 'short_text', 'yes_no')", name='chk_field_type'), - sa.PrimaryKeyConstraint('properties_id') - ) - op.create_table('field_validations', - sa.Column('validations_id', sa.Integer(), nullable=False), - sa.Column('required', sa.Boolean(), nullable=False), - sa.Column('max_length', sa.Integer(), nullable=True), - sa.PrimaryKeyConstraint('validations_id') - ) - op.create_table('forms', - sa.Column('form_id', sa.Integer(), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('form_id') - ) - op.create_table('field_groups', - sa.Column('group_id', sa.Integer(), nullable=False), - sa.Column('form_id', sa.Integer(), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['form_id'], ['forms.form_id'], ), - sa.PrimaryKeyConstraint('group_id') - ) - op.create_table('fields', - sa.Column('field_id', sa.Integer(), nullable=False), - sa.Column('ref', sa.String(length=255), nullable=False), - sa.Column('properties_id', sa.Integer(), nullable=False), - sa.Column('validations_id', sa.Integer(), nullable=False), - sa.Column('group_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['group_id'], ['field_groups.group_id'], ), - sa.ForeignKeyConstraint(['properties_id'], 
['field_properties.properties_id'], ), - sa.ForeignKeyConstraint(['validations_id'], ['field_validations.validations_id'], ), - sa.PrimaryKeyConstraint('field_id') - ) - op.create_table('responses', - sa.Column('answer_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('field_id', sa.String(length=255), nullable=False), - sa.Column('answer_text', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['field_id'], ['fields.field_id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('answer_id') - ) - with op.batch_alter_table('role', schema=None) as batch_op: - batch_op.create_unique_constraint('role', ['name']) - with op.batch_alter_table('user', schema=None) as batch_op: - batch_op.alter_column('lastName', - existing_type=sa.VARCHAR(length=255), - nullable=True, - existing_server_default=sa.text("'Unknown'")) - -def downgrade() -> None: - with op.batch_alter_table('role', schema=None) as batch_op: - batch_op.drop_constraint('role', type_='unique') - op.drop_table('responses') - op.drop_table('fields') - op.drop_table('field_groups') - op.drop_table('forms') - op.drop_table('field_validations') - op.drop_table('field_properties') diff --git a/backend/alembic/versions/e4c8bb426528_add_user_types.py b/backend/alembic/versions/e4c8bb426528_add_user_types.py deleted file mode 100644 index 42cce722..00000000 --- a/backend/alembic/versions/e4c8bb426528_add_user_types.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Add user types - -Revision ID: e4c8bb426528 -Revises: ec8b1c17739a -Create Date: 2024-03-10 21:47:13.942845 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy import text -from app.user_roles import UserRole - -# revision identifiers, used by Alembic. -revision = 'e4c8bb426528' -down_revision = 'ec8b1c17739a' -branch_labels = None -depends_on = None - -def upgrade() -> None: - ''' - 1. Add one table: - 1. role - Store available application user roles - 2. 
Prepopulate the role table with four role types: Admin, Host, Guest, Coordinator - 3. Update the user table to add the first, middle, last name, and role_id columns. - * All existing users will be given the first, last name "UNKNOWN" - * Assign all existing users to the Guest role. - 4. Drop the host table. - * There is no way to map host users back to the user table. We would need a user id foreign - key, or at least an email address. - ''' - role_table = op.create_table('role', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('name') - ) - op.bulk_insert(role_table, - [{'name': UserRole.ADMIN.value}, - {'name': UserRole.HOST.value}, - {'name': UserRole.GUEST.value}, - {'name': UserRole.COORDINATOR.value}]) - op.create_index(op.f('ix_role_id'), 'role', ['id']) - - conn = op.get_bind() - guest_role_id = conn.execute(text("SELECT id FROM role WHERE name = 'Guest'")).fetchone()[0] - - with op.batch_alter_table('user', schema=None) as batch_op: - # Each existing user will get the first and last names "Unknown" by default - # and they will be assigned to the "Guest" user role. 
- batch_op.add_column(sa.Column('firstName', sa.String(length=255), nullable=False, server_default='Unknown')) - batch_op.add_column(sa.Column('middleName', sa.String(length=255), nullable=True)) - batch_op.add_column(sa.Column('lastName', sa.String(length=255), nullable=True)) - batch_op.add_column(sa.Column('role_id', sa.Integer, nullable=False, server_default=str(guest_role_id))) - batch_op.create_foreign_key('fk_user_role_id', 'role', ['role_id'], ['id']) - - op.drop_table('host') - -def downgrade() -> None: - with op.batch_alter_table('user', schema=None) as batch_op: - batch_op.drop_constraint('fk_user_role_id', type_='foreignkey') - batch_op.drop_column('lastName') - batch_op.drop_column('middleName') - batch_op.drop_column('firstName') - - op.drop_index(op.f('ix_role_id'), table_name='role') - op.drop_table('role') - op.create_table('host', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_host_id'), 'host', ['id']) diff --git a/backend/alembic/versions/ec8b1c17739a_drop_unused_tables.py b/backend/alembic/versions/ec8b1c17739a_drop_unused_tables.py deleted file mode 100644 index a6713646..00000000 --- a/backend/alembic/versions/ec8b1c17739a_drop_unused_tables.py +++ /dev/null @@ -1,299 +0,0 @@ -"""Drop unused tables - -Revision ID: ec8b1c17739a -Revises: 3ceec084158f -Create Date: 2024-03-10 15:54:55.578328 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'ec8b1c17739a' -down_revision = '3ceec084158f' -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index('ix_housing_program_pariticipant_id', table_name='housing_program_pariticipant') - op.drop_table('housing_program_pariticipant') - op.drop_index('ix_applicant_status_id', table_name='applicant_status') - op.drop_table('applicant_status') - op.drop_index('ix_applicant_type_id', table_name='applicant_type') - op.drop_table('applicant_type') - op.drop_index('ix_match_failure_id', table_name='match_failure') - op.drop_table('match_failure') - op.drop_index('ix_program_case_log_id', table_name='program_case_log') - op.drop_table('program_case_log') - op.drop_index('ix_intake_response_value_id', table_name='intake_response_value') - op.drop_table('intake_response_value') - op.drop_index('ix_case_status_id', table_name='case_status') - op.drop_table('case_status') - op.drop_index('ix_image_tag_type_id', table_name='image_tag_type') - op.drop_table('image_tag_type') - op.drop_index('ix_applicant_uploaded_image_id', table_name='applicant_uploaded_image') - op.drop_table('applicant_uploaded_image') - op.drop_index('ix_match_fail_condition_id', table_name='match_fail_condition') - op.drop_table('match_fail_condition') - op.drop_index('ix_host_household_member_id', table_name='host_household_member') - op.drop_table('host_household_member') - op.drop_index('ix_applicant_id', table_name='applicant') - op.drop_table('applicant') - op.drop_index('ix_intake_question_id', table_name='intake_question') - op.drop_table('intake_question') - op.drop_index('ix_image_tag_id', table_name='image_tag') - op.drop_table('image_tag') - op.drop_index('ix_match_status_id', table_name='match_status') - op.drop_table('match_status') - op.drop_index('ix_guest_group_id', table_name='guest_group') - op.drop_table('guest_group') - op.drop_index('ix_applicant_status_log_id', table_name='applicant_status_log') - op.drop_table('applicant_status_log') - op.drop_index('ix_host_household_id', table_name='host_household') - op.drop_table('host_household') - 
op.drop_index('ix_match_result_id', table_name='match_result') - op.drop_table('match_result') - op.drop_index('ix_intake_question_set_id', table_name='intake_question_set') - op.drop_table('intake_question_set') - op.drop_index('ix_program_case_id', table_name='program_case') - op.drop_table('program_case') - op.drop_index('ix_intake_question_type_id', table_name='intake_question_type') - op.drop_table('intake_question_type') - op.drop_index('ix_guest_group_member_id', table_name='guest_group_member') - op.drop_table('guest_group_member') - op.drop_index('ix_group_match_result_id', table_name='group_match_result') - op.drop_table('group_match_result') - op.drop_index('ix_program_coordinator_id', table_name='program_coordinator') - op.drop_table('program_coordinator') - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('program_coordinator', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('user', sa.INTEGER(), nullable=False), - sa.Column('housing_program', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.ForeignKeyConstraint(['user'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_program_coordinator_id', 'program_coordinator', ['id'], unique=False) - op.create_table('group_match_result', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('guest_group', sa.INTEGER(), nullable=False), - sa.Column('host_household', sa.INTEGER(), nullable=False), - sa.Column('match_status', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_group_match_result_id', 'group_match_result', ['id'], unique=False) - 
op.create_table('guest_group_member', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('guest_group', sa.INTEGER(), nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_guest_group_member_id', 'guest_group_member', ['id'], unique=False) - op.create_table('intake_question_type', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('type_description', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_intake_question_type_id', 'intake_question_type', ['id'], unique=False) - op.create_table('program_case', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('coordinator', sa.INTEGER(), nullable=False), - sa.Column('case_status', sa.INTEGER(), nullable=False), - sa.Column('host_household', sa.INTEGER(), nullable=False), - sa.Column('guest_group', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['case_status'], ['case_status.id'], ), - sa.ForeignKeyConstraint(['coordinator'], ['program_coordinator.id'], ), - sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_program_case_id', 'program_case', ['id'], unique=False) - op.create_table('intake_question_set', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('question_set_name', sa.VARCHAR(), nullable=False), - sa.Column('housing_program', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_intake_question_set_id', 'intake_question_set', ['id'], unique=False) - op.create_table('match_result', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant_a', sa.INTEGER(), 
nullable=False), - sa.Column('applicant_b', sa.INTEGER(), nullable=False), - sa.Column('match_status', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant_a'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['applicant_b'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_match_result_id', 'match_result', ['id'], unique=False) - op.create_table('host_household', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('household_name', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_host_household_id', 'host_household', ['id'], unique=False) - op.create_table('applicant_status_log', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('log_description', sa.VARCHAR(), nullable=False), - sa.Column('logtime', sa.DATETIME(), nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.Column('src_status', sa.INTEGER(), nullable=False), - sa.Column('dest_status', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['dest_status'], ['applicant_status.id'], ), - sa.ForeignKeyConstraint(['src_status'], ['applicant_status.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_applicant_status_log_id', 'applicant_status_log', ['id'], unique=False) - op.create_table('guest_group', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('group_name', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_guest_group_id', 'guest_group', ['id'], unique=False) - op.create_table('match_status', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('status_description', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_match_status_id', 'match_status', ['id'], unique=False) - op.create_table('image_tag', - sa.Column('id', sa.INTEGER(), 
nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.Column('image_tag_type', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant_uploaded_image.id'], ), - sa.ForeignKeyConstraint(['image_tag_type'], ['image_tag_type.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_image_tag_id', 'image_tag', ['id'], unique=False) - op.create_table('intake_question', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant_type', sa.INTEGER(), nullable=False), - sa.Column('intake_question_type', sa.INTEGER(), nullable=False), - sa.Column('intake_question_set', sa.INTEGER(), nullable=False), - sa.Column('question_text', sa.VARCHAR(), nullable=False), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.ForeignKeyConstraint(['intake_question_set'], ['intake_question_set.id'], ), - sa.ForeignKeyConstraint(['intake_question_type'], ['intake_question_type.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_intake_question_id', 'intake_question', ['id'], unique=False) - op.create_table('applicant', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant_type', sa.INTEGER(), nullable=False), - sa.Column('applicant_status', sa.INTEGER(), nullable=False), - sa.Column('user', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant_status'], ['applicant_status.id'], ), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.ForeignKeyConstraint(['user'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_applicant_id', 'applicant', ['id'], unique=False) - op.create_table('host_household_member', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('host_household', sa.INTEGER(), nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), - 
sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_host_household_member_id', 'host_household_member', ['id'], unique=False) - op.create_table('match_fail_condition', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('response_value_a', sa.INTEGER(), nullable=False), - sa.Column('response_value_b', sa.INTEGER(), nullable=False), - sa.Column('reason_text', sa.VARCHAR(), nullable=False), - sa.ForeignKeyConstraint(['response_value_a'], ['intake_response_value.id'], ), - sa.ForeignKeyConstraint(['response_value_b'], ['intake_response_value.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_match_fail_condition_id', 'match_fail_condition', ['id'], unique=False) - op.create_table('applicant_uploaded_image', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.Column('image_data', sa.BLOB(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_applicant_uploaded_image_id', 'applicant_uploaded_image', ['id'], unique=False) - op.create_table('image_tag_type', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('tag_text', sa.VARCHAR(), nullable=False), - sa.Column('tag_description', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_image_tag_type_id', 'image_tag_type', ['id'], unique=False) - op.create_table('case_status', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('status_description', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_case_status_id', 'case_status', ['id'], unique=False) - op.create_table('intake_response_value', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('intake_question', sa.INTEGER(), nullable=False), - sa.Column('response_text', sa.VARCHAR(), nullable=False), - sa.ForeignKeyConstraint(['intake_question'], ['intake_question.id'], ), - 
sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_intake_response_value_id', 'intake_response_value', ['id'], unique=False) - op.create_table('program_case_log', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('log_description', sa.VARCHAR(), nullable=False), - sa.Column('logtime', sa.DATETIME(), nullable=False), - sa.Column('program_case', sa.INTEGER(), nullable=False), - sa.Column('src_status', sa.INTEGER(), nullable=False), - sa.Column('dest_status', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['dest_status'], ['case_status.id'], ), - sa.ForeignKeyConstraint(['program_case'], ['program_case.id'], ), - sa.ForeignKeyConstraint(['src_status'], ['case_status.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_program_case_log_id', 'program_case_log', ['id'], unique=False) - op.create_table('match_failure', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('match_result', sa.INTEGER(), nullable=False), - sa.Column('failed_condition', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['failed_condition'], ['match_fail_condition.id'], ), - sa.ForeignKeyConstraint(['match_result'], ['match_result.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_match_failure_id', 'match_failure', ['id'], unique=False) - op.create_table('applicant_type', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant_type_description', sa.VARCHAR(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_applicant_type_id', 'applicant_type', ['id'], unique=False) - op.create_table('applicant_status', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant_type', sa.INTEGER(), nullable=False), - sa.Column('status_description', sa.VARCHAR(), nullable=False), - sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_applicant_status_id', 'applicant_status', ['id'], unique=False) - 
op.create_table('housing_program_pariticipant', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('applicant', sa.INTEGER(), nullable=False), - sa.Column('housing_program', sa.INTEGER(), nullable=False), - sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), - sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_housing_program_pariticipant_id', 'housing_program_pariticipant', ['id'], unique=False) - # ### end Alembic commands ### diff --git a/backend/app/core/config.py b/backend/app/core/config.py index aa251f6d..ce5a7cba 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -12,6 +12,7 @@ class Settings(BaseSettings): COGNITO_USER_POOL_ID: str COGNITO_ACCESS_ID: str COGNITO_ACCESS_KEY: str + COGNITO_ENDPOINT_URL: str | None = None HUU_ENVIRONMENT: str ROOT_URL: str DATABASE_URL: str diff --git a/backend/app/core/db.py b/backend/app/core/db.py index 4611012c..9839f75d 100644 --- a/backend/app/core/db.py +++ b/backend/app/core/db.py @@ -1,24 +1,28 @@ """Shared database components.""" from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, DeclarativeBase +from sqlalchemy.types import JSON + +from typing import Any _db_engine = None _DbSessionFactory = None class Base(DeclarativeBase): - pass + type_annotation_map = {dict[str, Any]: JSON} def init_db(engine): + if engine is None: + raise Exception("db engine does not exist") Base.metadata.create_all(bind=engine, checkfirst=True) def db_engine(settings): global _db_engine if _db_engine is None: - _db_engine = create_engine(settings.DATABASE_URL, - connect_args={"check_same_thread": False}) + _db_engine = create_engine(settings.DATABASE_URL) return _db_engine diff --git a/backend/app/modules/deps.py b/backend/app/modules/deps.py index b75f3ce2..a96116ca 100644 --- a/backend/app/modules/deps.py +++ b/backend/app/modules/deps.py @@ -65,6 +65,7 @@ def get_cognito_client(settings: 
SettingsDep): region_name=settings.COGNITO_REGION, aws_access_key_id=settings.COGNITO_ACCESS_ID, aws_secret_access_key=settings.COGNITO_ACCESS_KEY, + endpoint_url=settings.COGNITO_ENDPOINT_URL ) diff --git a/backend/app/modules/intake_profile/forms/model.py b/backend/app/modules/intake_profile/forms/models.py similarity index 88% rename from backend/app/modules/intake_profile/forms/model.py rename to backend/app/modules/intake_profile/forms/models.py index b269d0c6..620f7e04 100644 --- a/backend/app/modules/intake_profile/forms/model.py +++ b/backend/app/modules/intake_profile/forms/models.py @@ -1,6 +1,7 @@ -from typing import Annotated +from typing import Annotated, Any +import datetime -from sqlalchemy import ForeignKey, Text, DateTime, JSON +from sqlalchemy import ForeignKey, DateTime from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship @@ -15,9 +16,9 @@ class Form(Base): __tablename__ = 'forms' form_id: Mapped[intpk] title: Mapped[str] = mapped_column(nullable=False) - description: Mapped[Text] - created_at: Mapped[DateTime] = mapped_column( - default=func.current_timestamp()) + description: Mapped[str] + created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now()) def get_field_ids(self) -> list[int]: return [ @@ -29,9 +30,9 @@ def get_field_ids(self) -> list[int]: class FieldProperties(Base): __tablename__ = 'field_properties' properties_id: Mapped[intpk] - description: Mapped[Text] + description: Mapped[str] field_type: Mapped[str] = mapped_column(nullable=False) - choices: Mapped[JSON] + choices: Mapped[dict[str, Any]] __table_args__ = (CheckConstraint( "field_type IN ('date', 'dropdown', 'multiple_choice', 'email', 'file_upload', 'group', 'long_text', 'number', 'short_text', 'yes_no')", @@ -51,7 +52,7 @@ class FieldGroup(Base): form_id: Mapped[int] = mapped_column(ForeignKey('forms.form_id'), nullable=False) title: Mapped[str] = 
mapped_column(nullable=False) - description: Mapped[Text] + description: Mapped[str] form = relationship("Form", back_populates="field_groups") @@ -75,7 +76,7 @@ class Response(Base): user_id: Mapped[int] = mapped_column(ForeignKey('user.id'), nullable=False) field_id: Mapped[int] = mapped_column(ForeignKey('fields.field_id'), nullable=False) - answer_text: Mapped[Text] + answer_text: Mapped[str] user = relationship("User") field = relationship("Field") diff --git a/backend/app/modules/intake_profile/model.py b/backend/app/modules/intake_profile/models.py similarity index 100% rename from backend/app/modules/intake_profile/model.py rename to backend/app/modules/intake_profile/models.py diff --git a/backend/app/modules/matching/model.py b/backend/app/modules/matching/models.py similarity index 100% rename from backend/app/modules/matching/model.py rename to backend/app/modules/matching/models.py diff --git a/backend/app/modules/onboarding/model.py b/backend/app/modules/onboarding/models.py similarity index 100% rename from backend/app/modules/onboarding/model.py rename to backend/app/modules/onboarding/models.py diff --git a/backend/app/modules/relationship_management/model.py b/backend/app/modules/relationship_management/models.py similarity index 100% rename from backend/app/modules/relationship_management/model.py rename to backend/app/modules/relationship_management/models.py diff --git a/backend/startup_scripts/create_groups_users.py b/backend/startup_scripts/create_groups_users.py new file mode 100644 index 00000000..06c9b803 --- /dev/null +++ b/backend/startup_scripts/create_groups_users.py @@ -0,0 +1,84 @@ +import os +import boto3 +import psycopg2 +from urllib.parse import urlparse + + +def create_user(cognito_client, user_pool_id, email, group): + """Create users in moto server cognitoidp preventing duplicates.""" + try: + cognito_client.admin_get_user(UserPoolId=user_pool_id, Username=email) + except Exception: + # The exception means the user doesn't 
exist so it can now be created.
+        cognito_client.admin_create_user(UserPoolId=user_pool_id,
+                                         Username=email,
+                                         TemporaryPassword="Test123!",
+                                         MessageAction='SUPPRESS')
+
+    cognito_client.admin_confirm_sign_up(UserPoolId=user_pool_id,
+                                         Username=email)
+
+    cognito_client.admin_add_user_to_group(
+        UserPoolId=user_pool_id,
+        Username=email,
+        GroupName=group,
+    )
+
+
+def create_group(cognito_client, groups, group, user_pool_id):
+    """Create a group in moto server preventing duplicates."""
+    if not any(g['GroupName'] == group for g in groups['Groups']):
+        cognito_client.create_group(GroupName=group, UserPoolId=user_pool_id)
+
+
+if __name__ == '__main__':
+    cognito_client = boto3.client(
+        "cognito-idp",
+        region_name="us-east-1",
+        aws_access_key_id=os.environ['COGNITO_CLIENT_ID'],
+        aws_secret_access_key=os.environ['COGNITO_CLIENT_SECRET'],
+        endpoint_url=os.environ['COGNITO_ENDPOINT_URL'])
+
+    user_pool_id = os.environ['COGNITO_USER_POOL_ID']
+
+    # Get existing groups to prevent duplicates
+    groups = cognito_client.list_groups(
+        UserPoolId=user_pool_id,
+        Limit=10,
+    )
+    create_group(cognito_client, groups, 'Admins', user_pool_id)
+    create_group(cognito_client, groups, 'Hosts', user_pool_id)
+    create_group(cognito_client, groups, 'Guests', user_pool_id)
+    create_group(cognito_client, groups, 'Coordinators', user_pool_id)
+
+    rows = []
+    create_user(cognito_client, user_pool_id, 'admin@example.com', 'Admins')
+    print('admin@example.com/Test123! created.')
+    rows.append(('admin@example.com', 'admin', 'admin', 1))
+
+    for role, role_id, group in [
+        ('guest', 2, 'Guests'),
+        ('coordinator', 3, 'Coordinators'),
+        ('host', 4, 'Hosts'),
+    ]:
+        for x in 'abcdefghijklmnopqrstuvwxyz':
+            email = role + x + '@example.com'
+            rows.append((
+                email,
+                role,
+                x,
+                role_id,
+            ))
+            create_user(cognito_client, user_pool_id, email, group)
+            print(email + '/Test123!
created.')
+    sql = 'INSERT INTO public.user (email, "firstName", "lastName", "roleId") VALUES (%s, %s, %s, %s) ON CONFLICT(email) DO NOTHING'
+    url = urlparse(os.environ['DATABASE_URL'])
+    with psycopg2.connect(database=url.path[1:],
+                          user=url.username,
+                          password=url.password,
+                          host=url.hostname,
+                          port=url.port) as db_conn:
+        with db_conn.cursor() as cur:
+            cur.executemany(sql, rows)
+        db_conn.commit()
diff --git a/backend/startup_scripts/entrypoint.ps1 b/backend/startup_scripts/entrypoint.ps1
new file mode 100644
index 00000000..463f4517
--- /dev/null
+++ b/backend/startup_scripts/entrypoint.ps1
@@ -0,0 +1,44 @@
+# PowerShell script
+
+# Function to source environment variables from a file (since PowerShell doesn't have a direct equivalent)
+function Import-EnvFile {
+    param (
+        [string]$FilePath
+    )
+
+    if (Test-Path $FilePath) {
+        Get-Content $FilePath | ForEach-Object {
+            $parts = $_ -split '=', 2
+            if ($parts.Length -eq 2) {
+                $name = $parts[0].Trim() -replace '^export\s+', ''
+                $value = $parts[1].Trim() -replace "^['""]|['""]$", ''
+                Set-Item -Path Env:$name -Value $value
+            }
+        }
+    }
+}
+
+# Check if .env file exists and is readable
+if ((Test-Path .env) -and ((Get-Item .env).IsReadOnly -eq $false)) {
+    dotenv list --format export > envfile
+    Import-EnvFile "envfile"
+    Remove-Item -Force envfile
+}
+
+# Alembic migration
+alembic upgrade head
+
+# Check if the script was called with 'prod' as an argument
+if ($args[0] -eq "prod") {
+    fastapi run app/main.py --port 8000
+} else {
+    # Setup moto server and export Cognito environment variables
+    python startup_scripts/setup_moto_server.py > envfile
+    Import-EnvFile "envfile"
+    Remove-Item -Force envfile
+
+    # Create test users in moto server and postgres
+    python startup_scripts/create_groups_users.py
+
+    fastapi dev
+}
diff --git a/backend/startup_scripts/entrypoint.sh b/backend/startup_scripts/entrypoint.sh
new file mode 100755
index 00000000..083f6a99
--- /dev/null
+++ b/backend/startup_scripts/entrypoint.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+# Export environment
variables if .env file exists. +# This is only needed when running this script on a host. +if [ -r .env ]; then + dotenv list --format export > envfile + source envfile + rm envfile +fi + +# Alembic migration +alembic upgrade head + +if [ "$1" == "prod" ]; then + fastapi run app/main.py --port 8000 +else + # Setup moto server and export Cognito environment variables + python startup_scripts/setup_moto_server.py > envfile + source envfile + rm envfile + + # Create test users in moto server and postgres + python startup_scripts/create_groups_users.py + + fastapi dev +fi diff --git a/backend/startup_scripts/setup_moto_server.py b/backend/startup_scripts/setup_moto_server.py new file mode 100644 index 00000000..01db8ea4 --- /dev/null +++ b/backend/startup_scripts/setup_moto_server.py @@ -0,0 +1,71 @@ +import re +import uuid +import os +import boto3 + + +class AWSTemporaryUserPool(): + """Provide a temporary user pool for development and testing purposes.""" + + def __init__(self, cognito_client): + self.cognito_client = cognito_client + self.tmp_userpool_id = None + self.tmp_client_id = None + self.tmp_client_secret = None + + def create(self): + unique_poolname = f"TestUserPool{str(uuid.uuid4())}" + mock_pool_resp = self.cognito_client.create_user_pool( + PoolName=unique_poolname, UsernameAttributes=['email']) + mock_pool_id = mock_pool_resp['UserPool']['Id'] + + client_response = self.cognito_client.create_user_pool_client( + UserPoolId=mock_pool_id, + ClientName="MockUserPoolClient", + GenerateSecret=True, + ExplicitAuthFlows=[ + 'ALLOW_USER_PASSWORD_AUTH', # Enable USER_PASSWORD_AUTH flow + 'ALLOW_REFRESH_TOKEN_AUTH' # You can add other auth flows as needed + ]) + + self.tmp_userpool_id = mock_pool_id + self.tmp_client_id = client_response['UserPoolClient']['ClientId'] + self.tmp_client_secret = client_response['UserPoolClient'][ + 'ClientSecret'] + + +if __name__ == '__main__': + + cognito_client = boto3.client( + "cognito-idp", + region_name="us-east-1", + 
aws_access_key_id="testing", + aws_secret_access_key="testing", + endpoint_url=os.environ['COGNITO_ENDPOINT_URL']) + + # Only create a user pool and test data if one does not already exist + pools = cognito_client.list_user_pools(MaxResults=5) + if len(pools['UserPools']) > 0: + # It is assumed that if the user pool exists then it has test data for use already. + USER_POOL_ID = pools['UserPools'][0]['Id'] + + clients = cognito_client.list_user_pool_clients( + UserPoolId=USER_POOL_ID, MaxResults=5) + + CLIENT_ID = clients['UserPoolClients'][0]['ClientId'] + + user_pool_client = cognito_client.describe_user_pool_client( + UserPoolId=USER_POOL_ID, ClientId=CLIENT_ID) + CLIENT_SECRET = user_pool_client['UserPoolClient']['ClientSecret'] + else: + temp_pool = AWSTemporaryUserPool(cognito_client) + temp_pool.create() + + USER_POOL_ID = temp_pool.tmp_userpool_id + CLIENT_ID = temp_pool.tmp_client_id + CLIENT_SECRET = temp_pool.tmp_client_secret + + # Output user pool information that the API can use to connect to moto server + print("export COGNITO_USER_POOL_ID=" + USER_POOL_ID) + print("export COGNITO_CLIENT_ID=" + CLIENT_ID) + print("export COGNITO_CLIENT_SECRET=" + CLIENT_SECRET) diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 268f6133..27f81c7a 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -20,7 +20,7 @@ @pytest.fixture -def session_factory() -> Session: +def db_engine(): SQLALCHEMY_DATABASE_URL = "sqlite+pysqlite:///:memory:" engine = sa.create_engine( @@ -28,12 +28,18 @@ def session_factory() -> Session: connect_args={"check_same_thread": False}, poolclass=StaticPool, ) + + return engine + + +@pytest.fixture +def session_factory(db_engine) -> Session: TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, - bind=engine) + bind=db_engine) import app.seed - Base.metadata.create_all(bind=engine) + Base.metadata.create_all(bind=db_engine) return TestingSessionLocal diff --git 
a/backend/tests/test_alembic_migration.py b/backend/tests/test_alembic_migration.py deleted file mode 100644 index 12f1baff..00000000 --- a/backend/tests/test_alembic_migration.py +++ /dev/null @@ -1,35 +0,0 @@ -from app.user_roles import UserRole -from app.repositories.user_repo import UserRepository - -# Importing these tests will register them within our test project -# These tests do an excellent job of detecting errors in the alembic -# downgrade and upgrade scripts. -from pytest_alembic.tests import test_single_head_revision -from pytest_alembic.tests import test_upgrade -from pytest_alembic.tests import test_model_definitions_match_ddl -from pytest_alembic.tests import test_up_down_consistency - - -def test_db_session_version(empty_db_session): - """ - Test that the pytest in-memory database is at the most - up-to-date alembic migration version. This will ensure all - the require database objects and pre-populated fields will - be available. - """ - # Adding a new database revision will break this test case - - # Before updating to the new revision please add additional - # test cases below that check the integrity of your new migration - assert DataAccessLayer.revision_id() == 'cfc4e41b69d3' - - -def test_user_roles_available(empty_db_session): - """ - Test that all of the UserRole types are pre-populated within - the Role table after migrating the database to the HEAD revision. 
- """ - user_repo = UserRepository(empty_db_session) - for role in UserRole: - db_role = user_repo._get_role(role) - assert db_role.name == role.value diff --git a/docker-compose.yml b/docker-compose.yml index c4b603bb..199399d0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: "3.9" - services: db: image: postgres @@ -11,27 +9,75 @@ services: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres - POSTGRES_PORT=5432 - expose: - - 5432 - api: - build: - context: ./api + ports: + - "5432:5432" + pgadmin: + image: dpage/pgadmin4 volumes: - - ./api:/usr/src/app + - pgadmin-data:/var/lib/pgadmin + environment: + - PGADMIN_DEFAULT_EMAIL=pgadmin4@pgadmin.org + - PGADMIN_DEFAULT_PASSWORD=pgadmin + - PGADMIN_CONFIG_SERVER_MODE=False + - PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED=False + entrypoint: /bin/sh -c "chmod 600 /pgpass; /entrypoint.sh;" + user: root + configs: + - source: servers.json + target: /pgadmin4/servers.json + - source: pgpass + target: /pgpass + ports: + - "5050:80" + depends_on: + - db + motoserver: + image: motoserver/moto:latest ports: - - "8080:8080" + - "5000:5000" environment: - - DATABASE_URL=postgresql://postgres:postgres@db:5432/huu + - MOTO_PORT=5000 + backend: + build: + context: ./backend + ports: + - "8000:8000" + environment: + - COGNITO_CLIENT_ID=testing + - COGNITO_CLIENT_SECRET=testing + - COGNITO_REGION=us-east-1 + - COGNITO_REDIRECT_URI=http://localhost:4040/signin + - COGNITO_USER_POOL_ID=testing + - COGNITO_ACCESS_ID=testing + - COGNITO_ACCESS_KEY=testing + - COGNITO_ENDPOINT_URL=http://motoserver:5000 + - ROOT_URL=http://localhost:4040 + - DATABASE_URL=postgresql+psycopg2://postgres:postgres@db:5432/huu links: - db depends_on: - - db - app: + - pgadmin + - motoserver + frontend: build: - context: ./app + context: ./frontend ports: - "4040:80" - depends_on: - - api volumes: db-data: {} + pgadmin-data: {} +configs: + pgpass: + content: db:5432:*:postgres:postgres + servers.json: + content: | + {"Servers": 
{"1": { + "Group": "Servers", + "Name": "Home Unite Us", + "Host": "db", + "Port": 5432, + "MaintenanceDB": "postgres", + "Username": "postgres", + "PassFile": "/pgpass", + "SSLMode": "prefer" + }}}