diff --git a/.github/workflows/ci_api.yml b/.github/workflows/ci_api.yml
index 5f8a218a..1058dc41 100644
--- a/.github/workflows/ci_api.yml
+++ b/.github/workflows/ci_api.yml
@@ -1,8 +1,7 @@
name: CI (API)
env:
- VENV_PATH: "~/.venv-api"
- POETRY_VERSION: "1.2.2"
+ MIN_SUPPORTED_RUST_VERSION: "1.68.0"
on:
push:
@@ -20,165 +19,118 @@ defaults:
working-directory: api
jobs:
-
code_quality:
- name: Code Quality
+ name: Code quality
runs-on: ubuntu-22.04
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v3
-
- - name: Set up Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.10'
-
- - name: Cache poetry install
- uses: actions/cache@v3
- with:
- path: ~/.local
- key: poetry-1.2.2
-
- - name: Install Poetry
- uses: snok/install-poetry@v1
- with:
- version: ${{ env.POETRY_VERSION }}
- virtualenvs-create: true
- virtualenvs-in-project: false
- virtualenvs-path: ${{ env.VENV_PATH }}
-
- - name: Cache deps
- id: cache-deps
- uses: actions/cache@v3
- with:
- path: ${{ env.VENV_PATH }}
- key: venv-api-${{ hashFiles('**/poetry.lock') }}
-
- - name: Install confluent-kafka prerequisites
- run: |
- sudo apt-get install --yes librdkafka-dev python3-dev
-
- - name: Install psycopg2 prerequisites
- run: |
- sudo apt-get install --yes build-essential libpq-dev
-
- - name: Install dependencies
- run: |
- poetry install --no-interaction --no-root
-
- - name: Install latest just release
- uses: taiki-e/install-action@just
-
- - name: Install latest dprint release
- uses: taiki-e/install-action@dprint
-
- - name: Lint
- # currently cli is not-maintained
- run: |
- just fmt-check
- just lint
-
- test:
- name: Test
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- python-version: ['3.10']
- # container operations are only supported on Linux runners
- os: [ubuntu-22.04]
-
- services:
- # it is hard to setup locally installed postgres,
- # such as setting the password.
-
- # TravisCI and build.sr.ht use empty password as default
- # but that doesn't work with Github action.
- # you need to work with `pg_hba.conf` or using `sudo -u postgres`
- # but the latter force you to install every dependency in in root PATH.
- postgres:
- image: postgres:11
- env:
- POSTGRES_USER: postgres
- # set the password without messing with `pg_hba.conf`
- POSTGRES_PASSWORD: postgres
- # create default db
- POSTGRES_DB: knotdb
- ports:
- - 5432:5432
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
-
steps:
- - name: Checkout repository
+ - name: Checkout source code
uses: actions/checkout@v3
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ - name: Install rust toolchain
+ uses: dtolnay/rust-toolchain@stable
with:
- python-version: ${{ matrix.python-version }}
+ components: clippy, rustfmt
- - name: Cache poetry install
- uses: actions/cache@v3
- with:
- path: ~/.local
- key: poetry-1.2.2
+ - name: Install latest just release
+ uses: taiki-e/install-action@just
- - name: Install Poetry
- uses: snok/install-poetry@v1
- with:
- version: ${{ env.POETRY_VERSION }}
- virtualenvs-create: true
- virtualenvs-in-project: true
+ - name: Install latest dprint release
+ uses: taiki-e/install-action@dprint
- - name: Cache deps
- id: cache-deps
- uses: actions/cache@v3
- with:
- path: ${{ env.VENV_PATH }}
- key: venv-api-${{ hashFiles('**/poetry.lock') }}
+ - name: Ensure `fmt` has been run
+ run: just fmt-check
- - name: Install confluent-kafka prerequisites
- run: |
- sudo apt-get install --yes librdkafka-dev python3-dev
+ - name: Ensure MSRV is set in `clippy.toml`
+ run: grep "^msrv = \"${{ env.MIN_SUPPORTED_RUST_VERSION }}\"\$" clippy.toml
- - name: Install psycopg2 prerequisites
- run: |
- sudo apt-get install --yes build-essential libpq-dev
+ - name: Run clippy
+ env:
+ SQLX_OFFLINE: true
+ run: just lint
- - name: Install dependencies
- run: |
- poetry install --no-interaction --no-root
+ msrv:
+ name: Minimum supported rust version
+ runs-on: ubuntu-22.04
+ steps:
+ - name: Checkout source code
+ uses: actions/checkout@v3
+
+ - name: Install rust toolchain (v${{ env.MIN_SUPPORTED_RUST_VERSION }})
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: ${{ env.MIN_SUPPORTED_RUST_VERSION }}
+
+ - name: Install latest nextest release
+ uses: taiki-e/install-action@nextest
- name: Install latest just release
uses: taiki-e/install-action@just
- - name: Unit tests
+ - name: Prepare the environment variables
run: |
- just _test-unit
+ cp .example.env .env
- - name: Check DB version
- # pass `-h localhost` to use TCP connection. Otherwise, it will talk to locally installed postgres
- run: psql -h localhost -p ${{ job.services.postgres.ports[5432] }} -U postgres --version
+ - name: Prepare the database
env:
- # to avoid having password prompt
- PGPASSWORD: postgres
-
- - name: Create DB schema
+ PGPASSWORD: secret
run: |
- psql -h localhost -p ${{ job.services.postgres.ports[5432] }} -U postgres knotdb < tests/integration/test_schema.sql
+ # Start the database
+ docker-compose --file tests/docker-compose.yml run --detach --name db db
+ # Check DB version
+ docker exec db /cockroach/cockroach version
+ # Create a db
+ docker exec db /cockroach/cockroach sql --insecure --execute "create database knot;"
+ # Import a schema
+ docker exec db /cockroach/cockroach sql --insecure --database knot < db/schema.sql
+
+ - name: Run tests
env:
- PGPASSWORD: postgres
+ DATABASE_URL: postgres://root@localhost:26257/knot
+ run: just test
- - name: Prepare for Integration tests
- run: |
- # use default location, instead setting the `RESTKNOT_CONFIG_FILE`
- cp config.example.yml config.yml
+ build:
+ name: Build for ${{ matrix.build }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - { build: linux-gnu, os: ubuntu-22.04, target: x86_64-unknown-linux-gnu }
+ # - { build: win-gnu, os: windows-2022, target: x86_64-pc-windows-gnu }
+ # - { build: win-msvc, os: windows-2022, target: x86_64-pc-windows-msvc }
+ # - { build: win32-msvc, os: windows-2022, target: i686-pc-windows-msvc }
+ # - { build: macos, os: macos-12 , target: x86_64-apple-darwin }
- - name: Integration tests
+ steps:
+ - name: Checkout source code
+ uses: actions/checkout@v3
+
+ - name: Install Rust toolchain
+ uses: dtolnay/rust-toolchain@stable
+ with:
+ target: ${{ matrix.target }}
+
+ - name: Show version information
+ shell: bash
run: |
- just test
+ gcc --version || true
+ rustup -V
+ rustup toolchain list
+ rustup default
+ cargo -V
+ rustc -V
+
+ - name: Install musl-tools
+ if: matrix.target == 'x86_64-unknown-linux-musl'
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y --no-install-recommends \
+ --allow-unauthenticated musl-tools
+
+ - name: Rust cache
+ uses: Swatinem/rust-cache@v2
+ with:
+ key: ${{ matrix.os }}-${{ matrix.target }}
+
+ - name: Build
+ run: cargo build --release --locked --target ${{ matrix.target }}
diff --git a/.gitignore b/.gitignore
index c66391b1..53c2a2aa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,113 +1,10 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
+_build
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-temp/
-.pytest_cache/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# neo stuff:
-.deploy/
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# celery beat schedule file
-celerybeat-schedule
-
-# SageMath parsed files
-*.sage.py
-
-# dotenv
-.env
-
-# virtualenv
-.venv
-venv/
-ENV/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-
-.vscode
+# Rust
+target
# RESTKnot
+.env
**/integration/schema.sql
-config.yml
+config.toml
docker-compose.yml
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..6b5980f3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
+---
+
+Manage DNS records with asynchronous and simple APIs.
+
+RESTKnot provides a high-level asynchronous API to an existing Knot DNS
+server. This project consists of three applications: RESTKnot agent,
+RESTKnot API, and RESTKnot CLI. A user can create DNS records through the
+web API provided by RESTKnot API, or via the command line using RESTKnot
+CLI. Both of them send commands to the RESTKnot agent, which translates
+them into Knot DNS actions.
+
+## Features
+
+- Asynchronous operation
+- Creates default DNS records when adding a new zone.
+- Untangles all related records when deleting a zone with a single API call.
+- Prevents wrong RDATA formats with validation support.
+- Prevents record loss by checking RDATA contents before adding any
+  record.
+
+## Take the tour
+
+### Create New Zone
+
+``` bash
+curl -X POST \
+ http://localhost:5000/api/domain/add \
+ -H 'X-API-key: 123' \
+ -F user_id=001 \
+ -F zone=example.com
+```
+
+### Edit a Single Record
+
+``` bash
+curl -X PUT \
+ http://127.0.0.1:5000/api/record/edit/10 \
+ -H 'x-api-key: 123' \
+ -F zone=bar.com \
+ -F owner=@ \
+ -F rtype=NS \
+ -F rdata=one.exampledns.com. \
+ -F ttl=3600
+```
+
+### Delete a Zone
+
+``` bash
+curl -X DELETE \
+ http://localhost:5000/api/domain/delete \
+ -H 'X-API-Key: 123' \
+ -F zone=example.com
+```
+
+## Project information
+
+- [Documentation](https://restknot.readthedocs.io/en/stable/index.html)
+- [Contributing](https://restknot.readthedocs.io/en/stable/project/contributing.html)
+- [Changelog](https://restknot.readthedocs.io/en/stable/project/changelog.html)
+- [License](https://restknot.readthedocs.io/en/stable/project/license.html)
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 80e44d07..00000000
--- a/README.rst
+++ /dev/null
@@ -1,83 +0,0 @@
-.. raw:: html
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-========
-
-Manage DNS records with asynchronous and simple APIs.
-
-RESTKnot provide a high-level asynchronous API to existing Knot DNS server. This project consists of
-three applications: RESTKnot agent, RESTKnot API, and RESTKnot CLI. A user can
-create DNS record through web API provided by RESTKnot API, or as command line
-app using RESTKnot CLI. Both of them send command to RESTKnot agent which will
-be translated into Knot DNS action.
-
-.. end-of-readme-intro
-
-Features
---------
-
-* Asynchronous operation
-* Created default DNS records when adding new zone.
-* Untangle all related record when deleting zone with single API.
-* Prevent wrong RDATA format with validation support.
-* Prevent record lost by checking RDATA contents before adding any record.
-
-Take the tour
--------------
-
-Create New Zone
-^^^^^^^^^^^^^^^
-
-.. code-block:: bash
-
- curl -X POST \
- http://localhost:5000/api/domain/add \
- -H 'X-API-key: 123' \
- -F user_id=001 \
- -F zone=example.com
-
-Edit a Single Record
-^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: bash
-
- curl -X PUT \
- http://127.0.0.1:5000/api/record/edit/10 \
- -H 'x-api-key: 123' \
- -F zone=bar.com \
- -F owner=@ \
- -F rtype=NS \
- -F rdata=one.exampledns.com. \
- -F ttl=3600
-
-Delete a Zone
-^^^^^^^^^^^^^
-
-.. code-block:: bash
-
- curl -X DELETE \
- http://localhost:5000/api/domain/delete \
- -H 'X-API-Key: 123' \
- -F zone=example.com
-
-.. end-of-readme-usage
-
-Project information
--------------------
-
-* `Documentation `_
-* `Contributing `_
-* `Changelog `_
-* `License `_
diff --git a/api/.dockerignore b/api/.dockerignore
deleted file mode 100644
index 8c7b6b92..00000000
--- a/api/.dockerignore
+++ /dev/null
@@ -1,19 +0,0 @@
-# Git
-.git
-.gitignore
-
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*/__pycache__/
-*/*/__pycache__/
-*/*/*/__pycache__/
-*.py[cod]
-*/*.py[cod]
-*/*/*.py[cod]
-*/*/*/*.py[cod]
-
-# other
-.env
-docker-compose.yml
-.flake8
-tests/
diff --git a/api/.example.env b/api/.example.env
index 920119f5..68ff297e 100644
--- a/api/.example.env
+++ b/api/.example.env
@@ -1,62 +1,9 @@
-################################################################################
-# Environment Variables Configuration
-#
-# It will not override the system variables
-#
-# If you want to disable app debugging:
-# ```
-# FLASK_DEBUG=0
-# FLASK_ENV=production
-# ```
-#
-# `APP_HOST` and `APP_PORT` is the host address and the port location.
-#
-# `DB_HOST` is the database host address, and `DB_NAME` is the name of the
-# database. If you use CockroachDB the port will be `26257` with `root` as user.
-# For PostgreSQL the user is `postgres` and the port is `5432`.
-#
-# If you do not use SSL for your database connection, set `DB_SSL` to `disable`.
-#
-# In CI/CD system the `DB_PASSWORD` is empty. But you have to supply it for
-# testing locally.
-#
-# `KAFKA_HOST` is the host address for Kafka broker, and `KAFKA_PORT` is the
-# port value.
-#
-# `RESTKNOT_KAFKA_TOPIC` is the topic name that RESTKnot will be using.
-#
-# `DEFAULT_NS` is the default name server that will be set in domain creation.
-#
-# `RESKNOT_CLUSTER_FILE` is the config that contains a list of masters and
-# slave of your DNS. The default location is the root directory of the project.
-#
-# `RESKNOT_API_KEY` is the key used to authorize access to the REST API.
-#
-################################################################################
-
-export FLASK_APP=autoapp.py
-export FLASK_DEBUG=1
-export FLASK_ENV=development
-
-export APP_HOST=0.0.0.0
-export APP_PORT=5000
-
-# DB
-export DB_HOST=127.0.0.1
-export DB_NAME=knotdb
-
-# change `port` to 26257 and `user` to root
-# if you use cockroachdb
-export DB_PORT=5432
-export DB_USER=postgres
-export DB_SSL=disable
-
-export DB_PASSWORD=postgres
-# different from TravisCI and build.sr.ht,
-# postgres in Github actions need a non-empty password.
-
-# App
-export RESTKNOT_KAFKA_TOPIC=domaindata
-export DEFAULT_NS='one.dns.id. two.dns.id.'
-export DEFAULT_SOA_RDATA='one.dns.id. hostmaster.dns.id. 3600 1800 604800 86400'
-export RESTKNOT_API_KEY=123
+# loaded with `dotenv`
+
+APP_ENV='dev'
+APP_BASE_URL='127.0.0.1'
+PORT='5000'
+CONFIG_LOCATION='config.toml'
+DATABASE_URL='postgres://root@localhost:26257/knot'
+# Indonesia is UTC+7
+UTC_OFFSET_HOUR='7'
diff --git a/api/.gitignore b/api/.gitignore
index d6023e00..e9eec482 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -1,71 +1,5 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
+/target
+data
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-.hypothesis/
-.pytest_cache/
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
.env
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-
-# visual studio files
-.vscode
-
-config.yml
+!tests/docker-compose.yml
diff --git a/api/Cargo.lock b/api/Cargo.lock
new file mode 100644
index 00000000..f57c3a13
--- /dev/null
+++ b/api/Cargo.lock
@@ -0,0 +1,2427 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "ahash"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
+dependencies = [
+ "getrandom",
+ "once_cell",
+ "version_check",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.70"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
+
+[[package]]
+name = "api"
+version = "0.8.0"
+dependencies = [
+ "anyhow",
+ "axum",
+ "axum_typed_multipart",
+ "chrono",
+ "dotenv",
+ "hyper",
+ "serde",
+ "serde_json",
+ "sqlx",
+ "thiserror",
+ "time 0.3.20",
+ "tokio",
+ "toml",
+ "tower",
+ "tracing",
+ "tracing-subscriber",
+ "url",
+ "utoipa",
+ "utoipa-swagger-ui",
+]
+
+[[package]]
+name = "async-trait"
+version = "0.1.68"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "atoi"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "axum"
+version = "0.6.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f8ccfd9221ee7d1f3d4b33e1f8319b3a81ed8f61f2ea40b37b859794b4491"
+dependencies = [
+ "async-trait",
+ "axum-core",
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "hyper",
+ "itoa",
+ "matchit",
+ "memchr",
+ "mime",
+ "multer",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustversion",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tower",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2f958c80c248b34b9a877a643811be8dbca03ca5ba827f2b63baf3a81e5fc4e"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "mime",
+ "rustversion",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "axum_typed_multipart"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48941c54aecb9284bd2b3ce10a35c7eff8970c2a6bf64755452f35320e0bb848"
+dependencies = [
+ "anyhow",
+ "axum",
+ "axum_typed_multipart_macros",
+ "tempfile",
+ "thiserror",
+]
+
+[[package]]
+name = "axum_typed_multipart_macros"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ee22cbdb79169805d2c1902b132408093ec74ca1dda6c43ccd83c75752d078a"
+dependencies = [
+ "darling",
+ "proc-macro-error",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "base64"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "bytes"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
+
+[[package]]
+name = "cc"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chrono"
+version = "0.4.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b"
+dependencies = [
+ "iana-time-zone",
+ "js-sys",
+ "num-integer",
+ "num-traits",
+ "serde",
+ "time 0.1.45",
+ "wasm-bindgen",
+ "winapi",
+]
+
+[[package]]
+name = "codespan-reporting"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
+dependencies = [
+ "termcolor",
+ "unicode-width",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crc"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe"
+dependencies = [
+ "crc-catalog",
+]
+
+[[package]]
+name = "crc-catalog"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484"
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "cxx"
+version = "1.0.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93"
+dependencies = [
+ "cc",
+ "cxxbridge-flags",
+ "cxxbridge-macro",
+ "link-cplusplus",
+]
+
+[[package]]
+name = "cxx-build"
+version = "1.0.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b"
+dependencies = [
+ "cc",
+ "codespan-reporting",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "scratch",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "cxxbridge-flags"
+version = "1.0.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb"
+
+[[package]]
+name = "cxxbridge-macro"
+version = "1.0.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "darling"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
+dependencies = [
+ "darling_core",
+ "darling_macro",
+]
+
+[[package]]
+name = "darling_core"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0"
+dependencies = [
+ "fnv",
+ "ident_case",
+ "proc-macro2",
+ "quote",
+ "strsim",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "darling_macro"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
+dependencies = [
+ "darling_core",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+ "subtle",
+]
+
+[[package]]
+name = "dirs"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "dotenv"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
+
+[[package]]
+name = "dotenvy"
+version = "0.15.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
+
+[[package]]
+name = "either"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "errno"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0"
+dependencies = [
+ "errno-dragonfly",
+ "libc",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "errno-dragonfly"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "event-listener"
+version = "2.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
+
+[[package]]
+name = "fastrand"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
+
+[[package]]
+name = "futures-intrusive"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5"
+dependencies = [
+ "futures-core",
+ "lock_api",
+ "parking_lot",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e"
+
+[[package]]
+name = "futures-task"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
+
+[[package]]
+name = "futures-util"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "futures-task",
+ "pin-project-lite",
+ "pin-utils",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.11.0+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+dependencies = [
+ "ahash",
+]
+
+[[package]]
+name = "hashlink"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa"
+dependencies = [
+ "hashbrown",
+]
+
+[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "hkdf"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437"
+dependencies = [
+ "hmac",
+]
+
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest",
+]
+
+[[package]]
+name = "http"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
+dependencies = [
+ "bytes",
+ "http",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
+
+[[package]]
+name = "httpdate"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
+
+[[package]]
+name = "hyper"
+version = "0.14.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc5e554ff619822309ffd57d8734d77cd5ce6238bc956f037ea06c58238c9899"
+dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "want",
+]
+
+[[package]]
+name = "iana-time-zone"
+version = "0.1.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "wasm-bindgen",
+ "windows",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
+dependencies = [
+ "cxx",
+ "cxx-build",
+]
+
+[[package]]
+name = "ident_case"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
+
+[[package]]
+name = "idna"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+ "serde",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "io-lifetimes"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220"
+dependencies = [
+ "hermit-abi 0.3.1",
+ "libc",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
+
+[[package]]
+name = "js-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.141"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5"
+
+[[package]]
+name = "link-cplusplus"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f"
+
+[[package]]
+name = "lock_api"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "matchit"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40"
+
+[[package]]
+name = "md-5"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca"
+dependencies = [
+ "digest",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
+dependencies = [
+ "libc",
+ "log",
+ "wasi 0.11.0+wasi-snapshot-preview1",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "multer"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2"
+dependencies = [
+ "bytes",
+ "encoding_rs",
+ "futures-util",
+ "http",
+ "httparse",
+ "log",
+ "memchr",
+ "mime",
+ "spin",
+ "version_check",
+]
+
+[[package]]
+name = "native-tls"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+dependencies = [
+ "lazy_static",
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
+dependencies = [
+ "hermit-abi 0.2.6",
+ "libc",
+]
+
+[[package]]
+name = "num_threads"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.17.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+
+[[package]]
+name = "openssl"
+version = "0.10.49"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d2f106ab837a24e03672c59b1239669a0596406ff657c3c0835b6b7f0f35a33"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a20eace9dc2d82904039cb76dcf50fb1a0bba071cfd1629720b5d6f1ddba0fa"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
+name = "parking_lot"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall 0.2.16",
+ "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
+
+[[package]]
+name = "percent-encoding"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+
+[[package]]
+name = "pin-project"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom",
+ "redox_syscall 0.2.16",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
+[[package]]
+name = "rust-embed"
+version = "6.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b68543d5527e158213414a92832d2aab11a84d2571a5eb021ebe22c43aab066"
+dependencies = [
+ "rust-embed-impl",
+ "rust-embed-utils",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-impl"
+version = "6.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d4e0f0ced47ded9a68374ac145edd65a6c1fa13a96447b873660b2a568a0fd7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rust-embed-utils",
+ "shellexpand",
+ "syn 1.0.109",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-utils"
+version = "7.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "512b0ab6853f7e14e3c8754acb43d6f748bb9ced66aa5915a6553ac8213f7731"
+dependencies = [
+ "sha2",
+ "walkdir",
+]
+
+[[package]]
+name = "rustix"
+version = "0.37.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2aae838e49b3d63e9274e1c01833cc8139d3fec468c3b84688c628f44b1ae11d"
+dependencies = [
+ "bitflags",
+ "errno",
+ "io-lifetimes",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
+
+[[package]]
+name = "ryu"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3"
+dependencies = [
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "scratch"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
+
+[[package]]
+name = "security-framework"
+version = "2.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.159"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.159"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7f05c1d5476066defcdfacce1f52fc3cae3af1d3089727100c02ae92e5abbe0"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "shellexpand"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4"
+dependencies = [
+ "dirs",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+
+[[package]]
+name = "socket2"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "spin"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
+
+[[package]]
+name = "sqlformat"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e"
+dependencies = [
+ "itertools",
+ "nom",
+ "unicode_categories",
+]
+
+[[package]]
+name = "sqlx"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188"
+dependencies = [
+ "sqlx-core",
+ "sqlx-macros",
+]
+
+[[package]]
+name = "sqlx-core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029"
+dependencies = [
+ "ahash",
+ "atoi",
+ "base64",
+ "bitflags",
+ "byteorder",
+ "bytes",
+ "chrono",
+ "crc",
+ "crossbeam-queue",
+ "dirs",
+ "dotenvy",
+ "either",
+ "event-listener",
+ "futures-channel",
+ "futures-core",
+ "futures-intrusive",
+ "futures-util",
+ "hashlink",
+ "hex",
+ "hkdf",
+ "hmac",
+ "indexmap",
+ "itoa",
+ "libc",
+ "log",
+ "md-5",
+ "memchr",
+ "once_cell",
+ "paste",
+ "percent-encoding",
+ "rand",
+ "serde",
+ "serde_json",
+ "sha1",
+ "sha2",
+ "smallvec",
+ "sqlformat",
+ "sqlx-rt",
+ "stringprep",
+ "thiserror",
+ "tokio-stream",
+ "url",
+ "whoami",
+]
+
+[[package]]
+name = "sqlx-macros"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9"
+dependencies = [
+ "dotenvy",
+ "either",
+ "heck",
+ "hex",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "serde_json",
+ "sha2",
+ "sqlx-core",
+ "sqlx-rt",
+ "syn 1.0.109",
+ "url",
+]
+
+[[package]]
+name = "sqlx-rt"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024"
+dependencies = [
+ "native-tls",
+ "once_cell",
+ "tokio",
+ "tokio-native-tls",
+]
+
+[[package]]
+name = "stringprep"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "subtle"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
+
+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c9da457c5285ac1f936ebd076af6dac17a61cfe7826f2076b4d015cf47bc8ec"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
+
+[[package]]
+name = "tempfile"
+version = "3.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "redox_syscall 0.3.5",
+ "rustix",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
+[[package]]
+name = "time"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a"
+dependencies = [
+ "libc",
+ "wasi 0.10.0+wasi-snapshot-preview1",
+ "winapi",
+]
+
+[[package]]
+name = "time"
+version = "0.3.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
+dependencies = [
+ "itoa",
+ "libc",
+ "num_threads",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+
+[[package]]
+name = "time-macros"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36"
+dependencies = [
+ "time-core",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001"
+dependencies = [
+ "autocfg",
+ "bytes",
+ "libc",
+ "mio",
+ "num_cpus",
+ "pin-project-lite",
+ "socket2",
+ "tokio-macros",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "tokio-native-tls"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
+dependencies = [
+ "native-tls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "toml"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.19.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13"
+dependencies = [
+ "indexmap",
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "winnow",
+]
+
+[[package]]
+name = "tower"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project",
+ "pin-project-lite",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
+
+[[package]]
+name = "tower-service"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
+
+[[package]]
+name = "tracing"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+dependencies = [
+ "cfg-if",
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "time 0.3.20",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
+
+[[package]]
+name = "typenum"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
+
+[[package]]
+name = "unicode_categories"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
+
+[[package]]
+name = "url"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+]
+
+[[package]]
+name = "utoipa"
+version = "3.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24e7ee17c9ef094b86e1e04170d90765bd76cb381921dacb4d3e175a267bdae6"
+dependencies = [
+ "indexmap",
+ "serde",
+ "serde_json",
+ "utoipa-gen",
+]
+
+[[package]]
+name = "utoipa-gen"
+version = "3.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df6f458e5abc811d44aca28455efc4163fb7565a7af2aa32d17611f3d1d9794d"
+dependencies = [
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.13",
+]
+
+[[package]]
+name = "utoipa-swagger-ui"
+version = "3.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "062bba5a3568e126ac72049a63254f4cb1da2eb713db0c1ab2a4c76be191db8c"
+dependencies = [
+ "axum",
+ "mime_guess",
+ "regex",
+ "rust-embed",
+ "serde",
+ "serde_json",
+ "utoipa",
+ "zip",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "walkdir"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "want"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
+dependencies = [
+ "log",
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
+
+[[package]]
+name = "web-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "whoami"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c70234412ca409cc04e864e89523cb0fc37f5e1344ebed5a3ebf4192b6b9f68"
+dependencies = [
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
+dependencies = [
+ "windows-targets 0.48.0",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
+dependencies = [
+ "windows_aarch64_gnullvm 0.42.2",
+ "windows_aarch64_msvc 0.42.2",
+ "windows_i686_gnu 0.42.2",
+ "windows_i686_msvc 0.42.2",
+ "windows_x86_64_gnu 0.42.2",
+ "windows_x86_64_gnullvm 0.42.2",
+ "windows_x86_64_msvc 0.42.2",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
+dependencies = [
+ "windows-targets 0.42.2",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets 0.48.0",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
+dependencies = [
+ "windows_aarch64_gnullvm 0.42.2",
+ "windows_aarch64_msvc 0.42.2",
+ "windows_i686_gnu 0.42.2",
+ "windows_i686_msvc 0.42.2",
+ "windows_x86_64_gnu 0.42.2",
+ "windows_x86_64_gnullvm 0.42.2",
+ "windows_x86_64_msvc 0.42.2",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
+dependencies = [
+ "windows_aarch64_gnullvm 0.48.0",
+ "windows_aarch64_msvc 0.48.0",
+ "windows_i686_gnu 0.48.0",
+ "windows_i686_msvc 0.48.0",
+ "windows_x86_64_gnu 0.48.0",
+ "windows_x86_64_gnullvm 0.48.0",
+ "windows_x86_64_msvc 0.48.0",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
+name = "winnow"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "zip"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"
+dependencies = [
+ "byteorder",
+ "crc32fast",
+ "crossbeam-utils",
+ "flate2",
+]
diff --git a/api/Cargo.toml b/api/Cargo.toml
new file mode 100644
index 00000000..b97c551a
--- /dev/null
+++ b/api/Cargo.toml
@@ -0,0 +1,50 @@
+[package]
+name = "api"
+version = "0.8.0"
+edition = "2021"
+authors = ["azzamsa "]
+categories = ["web-programming"]
+exclude = ["/README.md"]
+keywords = ["knot", "dns"]
+license = "MIT"
+repository = "https://github.com/BiznetGIO/RESTKnot"
+description = "REST API for Knot Service"
+
+[dependencies]
+axum = { version = "0.6.12" }
+hyper = "0.14.25"
+tokio = { version = "1.27", features = ["macros", "rt-multi-thread"] }
+
+# Swagger Documentation
+utoipa = { version = "3.2", features = ["axum_extras"] }
+utoipa-swagger-ui = { version = "3.1", features = ["axum"] }
+
+# Database
+sqlx = { version = "0.6.3", features = ["runtime-tokio-native-tls", "postgres", "offline", "chrono", "migrate"] }
+
+# Configurations
+dotenv = "0.15.0"
+toml = "0.7.3"
+url = "2.3"
+
+# Error
+thiserror = "1.0"
+
+# Logging
+time = "0.3.20"
+tracing = "0.1.37"
+tracing-subscriber = { version = "0.3.16", features = ["env-filter", "time", "local-time"] }
+
+serde = { version = "1.0", features = ["derive"] }
+chrono = { version = "0.4.24", features = ["serde"] }
+axum_typed_multipart = "0.3.2"
+
+[dev-dependencies]
+anyhow = "1.0"
+serde_json = "1.0"
+tower = { version = "0.4", features = ["util"] }
+
+[package.metadata.binstall]
+pkg-url = "{ repo }/releases/download/v{ version }/{ name }-{ version}-{ target }.tar.gz"
+bin-dir = "{ bin }{ binary-ext }"
+pkg-fmt = "tgz"
diff --git a/api/Dockerfile b/api/Dockerfile
deleted file mode 100644
index c3b87d0e..00000000
--- a/api/Dockerfile
+++ /dev/null
@@ -1,37 +0,0 @@
-FROM python:3.7-slim-buster
-
-ENV PIP_DEFAULT_TIMEOUT=100 \
- POETRY_VERSION=1.2.2 \
- GUNICORN_VERSION=20.1.0
-
-RUN apt-get update
-# working with timezones
-RUN apt-get install --no-install-recommends --yes tzdata
-# confluent-kafka-python needs these
-RUN apt-get install --no-install-recommends --yes librdkafka-dev python3-dev
-# psycopg2 needs these
-RUN apt-get install --no-install-recommends --yes build-essential libpq-dev
-
-# upgrading pip solves many installation problems
-RUN pip3 install --upgrade pip
-RUN pip3 install "gunicorn==$GUNICORN_VERSION"
-RUN pip3 install "poetry==$POETRY_VERSION"
-
-WORKDIR /restknotapi
-
-# leverage Docker cache
-COPY poetry.lock pyproject.toml /restknotapi/
-
-COPY ./config.yml /restknotapi/config.yml
-
-RUN poetry config virtualenvs.create false && poetry install --no-dev --no-interaction --no-ansi
-
-ARG BUILD_VERSION
-RUN echo "$BUILD_VERSION" > build-version.txt
-
-COPY . /restknotapi
-# check build version
-RUN cat /restknotapi/build-version.txt
-
-EXPOSE 5000
-CMD ["gunicorn", "autoapp:app", "-b", "0.0.0.0:5000"]
diff --git a/api/app/__init__.py b/api/app/__init__.py
deleted file mode 100644
index 3a50e354..00000000
--- a/api/app/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import logging
-import sys
-
-from flask import Flask
-from flask_cors import CORS
-
-from app.controllers import api_blueprint
-
-
-def create_app():
- app = Flask(__name__)
- register_extensions(app)
- register_blueprints(app)
- configure_logger(app)
-
- return app
-
-
-def register_extensions(app):
- """Register Flask extensions."""
- CORS(app, resources={r"/api/*": {"origins": "*"}})
-
-
-def register_blueprints(app):
- """Register Flask blueprints."""
- app.register_blueprint(api_blueprint)
-
-
-def configure_logger(app):
- """Configure loggers."""
- stdout_handler = logging.StreamHandler(sys.stdout)
-
- stdout_format = logging.Formatter(
- "[%(asctime)s - %(levelname)s - %(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
- )
- stdout_handler.setFormatter(stdout_format)
- app.logger.addHandler(stdout_handler)
diff --git a/api/app/configs/__init__.py b/api/app/configs/__init__.py
deleted file mode 100644
index 81dfc368..00000000
--- a/api/app/configs/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Application configuration.
-
-Most configuration is set via environment variables.
-
-For local development, use a .env file to set
-environment variables.
-"""
-from environs import Env
-
-env = Env()
-env.read_env()
-
-ENV = env.str("FLASK_ENV", default="production")
-DEBUG = ENV == "development"
-CACHE_TYPE = "simple" # Can be "memcached", "redis", etc.
diff --git a/api/app/controllers/__init__.py b/api/app/controllers/__init__.py
deleted file mode 100644
index fc92f1c6..00000000
--- a/api/app/controllers/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .api import api_blueprint # noqa: F401
diff --git a/api/app/controllers/api/__init__.py b/api/app/controllers/api/__init__.py
deleted file mode 100644
index 4d767d72..00000000
--- a/api/app/controllers/api/__init__.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from flask import Blueprint
-from flask_restful import Api
-
-from .domain import (
- AddDomain,
- DeleteDomain,
- GetDomainByUser,
- GetDomainData,
- GetDomainDataId,
-)
-from .health import AllHealthCheck, HealthCheck
-from .meta import MetaConfig, MetaVersion
-from .record import GetRecordData, GetRecordDataId, RecordAdd, RecordDelete, RecordEdit
-from .ttl import GetTtlData, GetTtlDataId, TtlAdd, TtlDelete, TtlEdit
-from .type_ import GetTypeData, GetTypeDataId, TypeAdd, TypeDelete, TypeEdit
-from .user import GetUserData, GetUserDataId, UserDelete, UserSignUp, UserUpdate
-
-api_blueprint = Blueprint("api", __name__, url_prefix="/api")
-api = Api(api_blueprint)
-
-
-api.add_resource(HealthCheck, "/health")
-api.add_resource(AllHealthCheck, "/health/all")
-api.add_resource(MetaVersion, "/meta/version")
-api.add_resource(MetaConfig, "/meta/config")
-
-api.add_resource(GetRecordData, "/record/list")
-api.add_resource(GetRecordDataId, "/record/list/")
-api.add_resource(RecordAdd, "/record/add")
-api.add_resource(RecordEdit, "/record/edit/")
-api.add_resource(RecordDelete, "/record/delete/")
-
-api.add_resource(GetDomainData, "/domain/list")
-api.add_resource(GetDomainDataId, "/domain/list/zone/")
-api.add_resource(GetDomainByUser, "/domain/list/user/")
-api.add_resource(DeleteDomain, "/domain/delete")
-api.add_resource(AddDomain, "/domain/add")
-
-api.add_resource(UserSignUp, "/user/add")
-api.add_resource(GetUserData, "/user/list")
-api.add_resource(GetUserDataId, "/user/list/")
-api.add_resource(UserUpdate, "/user/edit/")
-api.add_resource(UserDelete, "/user/delete/")
-
-# internal usage (db only, didn't communicate with knot)
-
-api.add_resource(GetTtlData, "/ttl/list")
-api.add_resource(GetTtlDataId, "/ttl/list/")
-api.add_resource(TtlAdd, "/ttl/add")
-api.add_resource(TtlEdit, "/ttl/edit/")
-api.add_resource(TtlDelete, "/ttl/delete/")
-
-api.add_resource(GetTypeData, "/type/list")
-api.add_resource(GetTypeDataId, "/type/list/")
-api.add_resource(TypeAdd, "/type/add")
-api.add_resource(TypeEdit, "/type/edit/")
-api.add_resource(TypeDelete, "/type/delete/")
diff --git a/api/app/controllers/api/domain.py b/api/app/controllers/api/domain.py
deleted file mode 100644
index be0125bf..00000000
--- a/api/app/controllers/api/domain.py
+++ /dev/null
@@ -1,245 +0,0 @@
-import os
-
-from flask import current_app, request
-from flask_restful import Resource, reqparse
-
-from app.helpers import command, helpers, validator
-from app.middlewares import auth
-from app.models import domain as domain_model
-from app.models import model
-from app.models import record as record_model
-from app.models import zone as zone_model
-from app.vendors.rest import response
-
-
-def insert_zone(zone, user_id):
- data = {"zone": zone, "user_id": user_id}
- zone_id = model.insert(table="zone", data=data)
- return zone_id
-
-
-def insert_soa_record(zone_id):
- record_data = {"owner": "@", "zone_id": zone_id, "type_id": "1", "ttl_id": "6"}
- record_id = model.insert(table="record", data=record_data)
- return record_id
-
-
-def insert_soa_rdata(record_id):
- """Insert default SOA record.
-
- Notes:
-
- See: https://tools.ietf.org/html/rfc1035 (3.3.13. SOA RDATA format)
- """
- current_time = helpers.soa_time_set()
- serial = f"{str(current_time)}01"
- default_soa_content = os.environ.get("DEFAULT_SOA_RDATA")
- rdatas = default_soa_content.split(" ")
- # rdata doesn't contains serial
- mname_and_rname = " ".join(rdatas[0:2])
- ttls = " ".join(rdatas[2:])
-
- rdata = f"{mname_and_rname} {serial} {ttls}"
- content_data = {"rdata": rdata, "record_id": record_id}
-
- model.insert(table="rdata", data=content_data)
-
-
-def insert_soa_default(zone_id):
- """Create default SOA record"""
- record_id = insert_soa_record(zone_id)
- insert_soa_rdata(record_id)
- return record_id
-
-
-def insert_ns_record(zone_id):
- record_data = {"owner": "@", "zone_id": zone_id, "type_id": "4", "ttl_id": "6"}
- record_id = model.insert(table="record", data=record_data)
- return record_id
-
-
-def insert_ns_rdata(name, record_id):
- data = {"rdata": name, "record_id": record_id}
- model.insert(table="rdata", data=data)
-
-
-def insert_ns_default(zone_id):
- """Create default NS record"""
- default_ns = os.environ.get("DEFAULT_NS")
- nameserver = default_ns.split(" ")
- record_ids = []
-
- for name in nameserver:
- record_id = insert_ns_record(zone_id)
- insert_ns_rdata(name, record_id)
- record_ids.append(record_id)
-
- return record_ids
-
-
-def insert_cname_record(zone_id):
- record_data = {"owner": "www", "zone_id": zone_id, "type_id": "5", "ttl_id": "6"}
- record_id = model.insert(table="record", data=record_data)
- return record_id
-
-
-def insert_cname_rdata(zone, record_id):
- data = {"rdata": f"{zone}.", "record_id": record_id}
- model.insert(table="rdata", data=data)
-
-
-def insert_cname_default(zone_id, zone):
- """Create default CNAME record"""
- record_id = insert_cname_record(zone_id)
- insert_cname_rdata(zone, record_id)
- return record_id
-
-
-class GetDomainData(Resource):
- @auth.auth_required
- def get(self):
- try:
- zones = model.get_all("zone")
- if not zones:
- return response(404)
-
- domains_detail = []
- for zone in zones:
- detail = domain_model.get_other_data(zone)
- domains_detail.append(detail)
-
- return response(200, data=domains_detail)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class GetDomainDataId(Resource):
- @auth.auth_required
- def get(self):
- zone_id = request.args.get("id")
- zone_name = request.args.get("name")
-
- if not any((zone_id, zone_name)):
- return response(422, "Problems parsing parameters")
-
- try:
- if zone_id:
- zone = model.get_one(table="zone", field="id", value=zone_id)
-
- if zone_name:
- zone = model.get_one(table="zone", field="zone", value=zone_name)
-
- if not zone:
- return response(404)
-
- data = domain_model.get_other_data(zone)
- return response(200, data=data)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class GetDomainByUser(Resource):
- @auth.auth_required
- def get(self, user_id):
- try:
- zones = zone_model.get_zones_by_user(user_id)
- if not zones:
- return response(404)
-
- domains_detail = []
- for zone in zones:
- detail = domain_model.get_other_data(zone)
- domains_detail.append(detail)
-
- return response(200, data=domains_detail)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class AddDomain(Resource):
- @helpers.check_producer
- @auth.auth_required
- def post(self):
- """Add new domain (zone) with additional default record.
-
- note:
- SOA, NS, and CNAME records are added automatically when adding new domain
- """
- parser = reqparse.RequestParser()
- parser.add_argument("zone", type=str, required=True, location="form")
- parser.add_argument("user_id", type=int, required=True, location="form")
- args = parser.parse_args()
- zone = args["zone"]
- user_id = args["user_id"]
-
- # Validation
- if not model.is_unique(table="zone", field="zone", value=f"{zone}"):
- return response(409, message="Duplicate Zone")
-
- user = model.get_one(table="user", field="id", value=user_id)
- if not user:
- return response(404, message="User Not Found")
-
- try:
- validator.validate("ZONE", zone)
- except Exception as e:
- return response(422, message=f"{e}")
-
- try:
- zone_id = insert_zone(zone, user_id)
-
- # create zone config
- command.set_config(zone, zone_id, "conf-set")
-
- # create default records
- soa_record_id = insert_soa_default(zone_id)
- ns_record_ids = insert_ns_default(zone_id)
- cname_record_id = insert_cname_default(zone_id, zone)
- record_ids = [soa_record_id, *ns_record_ids, cname_record_id]
- command.set_default_zone(record_ids)
-
- command.delegate(zone, zone_id, "conf-set", "master")
- command.delegate(zone, zone_id, "conf-set", "slave")
-
- data_ = {"id": zone_id, "zone": zone}
- return response(201, data=data_)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class DeleteDomain(Resource):
- @helpers.check_producer
- @auth.auth_required
- def delete(self):
- """Remove domain (zone) and all its related records."""
- parser = reqparse.RequestParser()
- parser.add_argument("zone", type=str, required=True, location="form")
- args = parser.parse_args()
- zone = args["zone"]
-
- try:
- zone_id = zone_model.get_zone_id(zone)
- except Exception:
- return response(404, message="Zone Not Found")
-
- try:
- records = record_model.get_records_by_zone(zone)
- for record in records:
- # zone-purge didn't work
- # all the records must be unset one-by-one. otherwise old record
- # will appear again if the same zone name crated.
- command.set_zone(record["id"], "zone-unset")
- command.set_config(zone, zone_id, "conf-unset")
-
- # other data (e.g record) deleted automatically
- # by cockroach when no PK existed
- model.delete(table="zone", field="id", value=zone_id)
-
- return response(204, data=zone)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
diff --git a/api/app/controllers/api/health.py b/api/app/controllers/api/health.py
deleted file mode 100644
index e49c9fdb..00000000
--- a/api/app/controllers/api/health.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from confluent_kafka import KafkaError, KafkaException
-from flask_restful import Resource
-
-from app.helpers.producer import kafka_admin
-from app.vendors.rest import response
-
-
-class HealthCheck(Resource):
- def get(self):
- data = {"status": "running"}
- return response(200, data=data, message="OK")
-
-
-class AllHealthCheck(Resource):
- def get(self):
- broker_status = "unknown"
-
- admin_client = kafka_admin()
-
- try:
- topics = admin_client.list_topics(timeout=5).topics
- if topics:
- broker_status = "connected"
- except KafkaException as e:
- if e.args[0].code() in (KafkaError._TIMED_OUT, KafkaError._TRANSPORT):
- broker_status = "not connected"
-
- data = {"status": "running", "broker_status": broker_status}
- return response(200, data=data, message="OK")
diff --git a/api/app/controllers/api/meta.py b/api/app/controllers/api/meta.py
deleted file mode 100644
index 404f61c6..00000000
--- a/api/app/controllers/api/meta.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from flask_restful import Resource
-
-from app.helpers import helpers
-from app.middlewares import auth
-from app.vendors.rest import response
-
-
-class MetaVersion(Resource):
- def get(self):
- build = helpers.read_version("requirements.txt", "build-version.txt")
-
- data = {"build": build}
- return response(200, data=data, message="OK")
-
-
-class MetaConfig(Resource):
- @auth.auth_required
- def get(self):
- config = helpers.get_config()
- brokers = config["brokers"]
- clusters = config["knot_servers"]
-
- data = {"knot_servers": clusters, "brokers": brokers}
- return response(200, data=data, message="OK")
diff --git a/api/app/controllers/api/record.py b/api/app/controllers/api/record.py
deleted file mode 100644
index ff43ccfd..00000000
--- a/api/app/controllers/api/record.py
+++ /dev/null
@@ -1,291 +0,0 @@
-from flask import current_app
-from flask_restful import Resource, reqparse
-
-from app.helpers import command, helpers, rules, validator
-from app.middlewares import auth
-from app.models import model
-from app.models import record as record_model
-from app.models import ttl as ttl_model
-from app.models import type_ as type_model
-from app.models import zone as zone_model
-from app.vendors.rest import response
-
-
-def get_serial_resource(zone):
- soa_record = record_model.get_soa_record(zone)
- if not soa_record:
- raise ValueError("Zone has no SOA record. Any change to the zone is not doable")
-
- rdata_record = model.get_one(
- table="rdata", field="record_id", value=soa_record["id"]
- )
- rdatas = rdata_record["rdata"].split(" ")
- serial = rdatas[2]
- # `serial_counter` is the last two digit of serial value (YYYYMMDDnn)
- serial_counter = serial[-2:]
- serial_date = serial[:-2]
-
- return {
- "soa_record": soa_record,
- "rdata_record": rdata_record,
- "serial": serial,
- "serial_counter": serial_counter,
- "serial_date": serial_date,
- }
-
-
-def check_serial_limit(serial_resource):
- serial_counter = serial_resource["serial_counter"]
- serial_date = serial_resource["serial_date"]
- today_date = helpers.soa_time_set()
-
- if int(serial_counter) > 97 and serial_date == today_date:
- # knot maximum of nn is 99
- # 97 was chosen because serial
- # increment can be twice at time
- raise ValueError("Zone Change Limit Reached")
-
-
-def update_serial(serial_resource, increment="01"):
- serial = serial_resource["serial"]
- soa_record = serial_resource["soa_record"]
- rdata_record = serial_resource["rdata_record"]
-
- new_serial = helpers.increment_serial(serial, increment)
- new_rdata = helpers.replace_serial(rdata_record["rdata"], new_serial)
- content_data = {
- "where": {"record_id": soa_record["id"]},
- "data": {"rdata": new_rdata, "record_id": soa_record["id"]},
- }
- model.update("rdata", data=content_data)
-
-
-class GetRecordData(Resource):
- @auth.auth_required
- def get(self):
- try:
- records = model.get_all("record")
- if not records:
- return response(404)
-
- records_detail = []
- for record in records:
- detail = record_model.get_other_data(record)
- records_detail.append(detail)
-
- return response(200, data=records_detail)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class GetRecordDataId(Resource):
- @auth.auth_required
- def get(self, record_id):
- try:
- record = model.get_one(table="record", field="id", value=record_id)
- if not record:
- return response(404)
-
- data = record_model.get_other_data(record)
- return response(200, data=data)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class RecordAdd(Resource):
- @helpers.check_producer
- @auth.auth_required
- def post(self):
- """Add new record.
-
- note:
- Adding any record with other record is allowed. IETF best practice
- is not handled automatically. Knot didn't handle this too, and let the
- user know the standards themselves.
- See https://tools.ietf.org/html/rfc1912
- """
- parser = reqparse.RequestParser()
- parser.add_argument("zone", type=str, required=True, location="form")
- parser.add_argument("owner", type=str, required=True, location="form")
- parser.add_argument("rtype", type=str, required=True, location="form")
- parser.add_argument("rdata", type=str, required=True, location="form")
- parser.add_argument("ttl", type=str, required=True, location="form")
- args = parser.parse_args()
- owner = args["owner"].lower()
- rtype = args["rtype"].lower()
- rdata = args["rdata"]
- zone = args["zone"]
- ttl = args["ttl"]
-
- try:
- ttl_id = ttl_model.get_ttlid_by_ttl(ttl)
-
- type_id = type_model.get_typeid_by_rtype(rtype)
- zone_id = zone_model.get_zone_id(zone)
- except Exception as e:
- return response(404, message=f"{e}")
-
- try:
- rules.check_add(rtype, zone_id, type_id, owner, rdata, ttl_id)
- except Exception as e:
- return response(409, message=f"{e}")
-
- try:
- # rtype no need to be validated & no need to check its length
- # `get_typeid` will raise error for non existing rtype
- validator.validate(rtype, rdata)
- validator.validate("owner", owner)
- except Exception as e:
- return response(422, message=f"{e}")
-
- try:
- serial_resource = get_serial_resource(zone)
- check_serial_limit(serial_resource)
- except Exception as e:
- return response(429, message=f"{e}")
-
- try:
- data = {
- "owner": owner,
- "zone_id": zone_id,
- "type_id": type_id,
- "ttl_id": ttl_id,
- }
- record_id = model.insert(table="record", data=data)
-
- content_data = {"rdata": rdata, "record_id": record_id}
- model.insert(table="rdata", data=content_data)
-
- command.set_zone(record_id, "zone-set")
-
- # increment serial after adding new record
- rtype = type_model.get_type_by_recordid(record_id)
- if rtype != "SOA":
- update_serial(serial_resource)
-
- record = model.get_one(table="record", field="id", value=record_id)
- data = record_model.get_other_data(record)
- return response(201, data=data)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class RecordEdit(Resource):
- @helpers.check_producer
- @auth.auth_required
- def put(self, record_id):
- parser = reqparse.RequestParser()
- parser.add_argument("zone", type=str, required=True, location="form")
- parser.add_argument("owner", type=str, required=True, location="form")
- parser.add_argument("rtype", type=str, required=True, location="form")
- parser.add_argument("rdata", type=str, required=True, location="form")
- parser.add_argument("ttl", type=str, required=True, location="form")
- args = parser.parse_args()
- owner = args["owner"].lower()
- rtype = args["rtype"].lower()
- rdata = args["rdata"]
- zone = args["zone"]
- ttl = args["ttl"]
-
- try:
- ttl_id = ttl_model.get_ttlid_by_ttl(ttl)
- record_model.is_exists(record_id)
-
- type_id = type_model.get_typeid_by_rtype(rtype)
- zone_id = zone_model.get_zone_id(zone)
- except Exception as e:
- return response(404, message=f"{e}")
-
- try:
- rules.check_edit(rtype, zone_id, type_id, owner, rdata, ttl_id, record_id)
- except Exception as e:
- return response(409, message=f"{e}")
-
- try:
- validator.validate(rtype, rdata)
- validator.validate("owner", owner)
- except Exception as e:
- return response(422, message=f"{e}")
-
- try:
- serial_resource = get_serial_resource(zone)
- check_serial_limit(serial_resource)
- except Exception as e:
- return response(429, message=f"{e}")
-
- try:
- data = {
- "where": {"id": record_id},
- "data": {
- "owner": owner,
- "zone_id": zone_id,
- "type_id": type_id,
- "ttl_id": ttl_id,
- },
- }
- content_data = {
- "where": {"record_id": record_id},
- "data": {"rdata": rdata, "record_id": record_id},
- }
-
- command.set_zone(record_id, "zone-unset")
-
- model.update("rdata", data=content_data)
- model.update("record", data=data)
-
- command.set_zone(record_id, "zone-set")
-
- # increment serial after adding new record
- rtype = type_model.get_type_by_recordid(record_id)
- if rtype != "SOA":
- update_serial(serial_resource, "02")
-
- record = model.get_one(table="record", field="id", value=record_id)
- data_ = record_model.get_other_data(record)
- return response(200, data=data_)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class RecordDelete(Resource):
- @helpers.check_producer
- @auth.auth_required
- def delete(self, record_id):
- """Delete specific record.
-
- note:
- SOA record can't be deleted. One zone must have minimum one SOA record at time.
- But it can be edited, see`record edit`.
- """
- try:
- record_model.is_exists(record_id)
- except Exception:
- return response(404)
-
- zone = zone_model.get_zone_by_record(record_id)
- zone_name = zone["zone"]
-
- try:
- serial_resource = get_serial_resource(zone_name)
- check_serial_limit(serial_resource)
- except Exception as e:
- return response(429, message=f"{e}")
-
- try:
- rtype = type_model.get_type_by_recordid(record_id)
- if rtype == "SOA":
- return response(403, message="Can't Delete SOA Record")
- if rtype != "SOA":
- update_serial(serial_resource)
-
- command.set_zone(record_id, "zone-unset")
-
- model.delete(table="record", field="id", value=record_id)
- return response(204)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
diff --git a/api/app/controllers/api/ttl.py b/api/app/controllers/api/ttl.py
deleted file mode 100644
index bbabbe16..00000000
--- a/api/app/controllers/api/ttl.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from flask_restful import Resource, reqparse
-
-from app.middlewares import auth
-from app.models import model
-from app.vendors.rest import response
-
-
-class GetTtlData(Resource):
- @auth.auth_required
- def get(self):
- try:
- ttls = model.get_all("ttl")
- if not ttls:
- return response(404)
-
- return response(200, data=ttls)
- except Exception as e:
- return response(500, message=f"{e}")
-
-
-class GetTtlDataId(Resource):
- @auth.auth_required
- def get(self, ttl_id):
- try:
- ttl = model.get_one(table="ttl", field="id", value=ttl_id)
- if not ttl:
- return response(404)
-
- return response(200, data=ttl)
- except Exception as e:
- return response(500, message=f"{e}")
-
-
-class TtlAdd(Resource):
- @auth.auth_required
- def post(self):
- parser = reqparse.RequestParser()
- parser.add_argument("ttl", type=str, required=True, location="form")
- args = parser.parse_args()
- ttl = args["ttl"]
-
- data = {"ttl": ttl}
- if not ttl:
- return response(422)
-
- try:
- inserted_id = model.insert(table="ttl", data=data)
- data_ = {"id": inserted_id, **data}
-
- return response(201, data=data_)
- except Exception as e:
- return response(500, message=f"{e}")
-
-
-class TtlEdit(Resource):
- @auth.auth_required
- def put(self, ttl_id):
- parser = reqparse.RequestParser()
- parser.add_argument("ttl", type=str, required=True, location="form")
- args = parser.parse_args()
- ttl = args["ttl"]
-
- if not ttl:
- return response(422)
-
- try:
- data = {"where": {"id": ttl_id}, "data": {"ttl": ttl}}
- row_count = model.update("ttl", data=data)
- if not row_count:
- return response(404)
-
- return response(200, data=data.get("data"))
- except Exception as e:
- return response(500, message=f"{e}")
-
-
-class TtlDelete(Resource):
- @auth.auth_required
- def delete(self, ttl_id):
- try:
- row_count = model.delete(table="ttl", field="id", value=ttl_id)
- if not row_count:
- return response(404)
-
- return response(204)
- except Exception as e:
- return response(500, message=f"{e}")
diff --git a/api/app/controllers/api/type_.py b/api/app/controllers/api/type_.py
deleted file mode 100644
index 92f39207..00000000
--- a/api/app/controllers/api/type_.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from flask_restful import Resource, reqparse
-
-from app.middlewares import auth
-from app.models import model
-from app.vendors.rest import response
-
-
-class GetTypeData(Resource):
- @auth.auth_required
- def get(self):
- try:
- types = model.get_all("type")
- if not types:
- return response(404)
-
- return response(200, data=types)
- except Exception:
- return response(500)
-
-
-class GetTypeDataId(Resource):
- @auth.auth_required
- def get(self, type_id):
- try:
- type_ = model.get_one(table="type", field="id", value=type_id)
- if not type_:
- return response(404)
-
- return response(200, data=type_)
- except Exception:
- return response(500)
-
-
-class TypeAdd(Resource):
- @auth.auth_required
- def post(self):
- parser = reqparse.RequestParser()
- parser.add_argument("type", type=str, required=True, location="form")
- args = parser.parse_args()
- type_ = args["type"]
-
- data = {"type": type_}
-
- if not type_:
- return response(422)
-
- try:
- inserted_id = model.insert(table="type", data=data)
-
- data_ = {"id": inserted_id, **data}
- return response(201, data=data_)
- except Exception:
- return response(500)
-
-
-class TypeEdit(Resource):
- @auth.auth_required
- def put(self, type_id):
- parser = reqparse.RequestParser()
- parser.add_argument("type", type=str, required=True, location="form")
- args = parser.parse_args()
- type_ = args["type"]
-
- if not type_:
- return response(422)
-
- try:
- data = {"where": {"id": type_id}, "data": {"type": type_}}
- row_count = model.update("type", data=data)
- if not row_count:
- return response(404)
-
- return response(200, data=data.get("data"))
- except Exception:
- return response(500)
-
-
-class TypeDelete(Resource):
- @auth.auth_required
- def delete(self, type_id):
- try:
- row_count = model.delete(table="type", field="id", value=type_id)
- if not row_count:
- return response(404)
-
- return response(204)
- except Exception:
- return response(500)
diff --git a/api/app/controllers/api/user.py b/api/app/controllers/api/user.py
deleted file mode 100644
index 7d0d566c..00000000
--- a/api/app/controllers/api/user.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from flask import current_app, request
-from flask_restful import Resource, reqparse
-
-from app.helpers import helpers, validator
-from app.middlewares import auth
-from app.models import model
-from app.vendors.rest import response
-
-
-class GetUserData(Resource):
- @auth.auth_required
- def get(self):
- try:
- users = model.get_all("user")
- if not users:
- return response(404)
-
- return response(200, data=users)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class GetUserDataId(Resource):
- @auth.auth_required
- def get(self):
- user_id = request.args.get("id")
- email = request.args.get("email")
- try:
- if not any((user_id, email)):
- return response(422, "Problems parsing parameters")
-
- if user_id:
- user = model.get_one(table="user", field="id", value=user_id)
- if email:
- user = model.get_one(table="user", field="email", value=email)
- if not user:
- return response(404)
-
- return response(200, data=user)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class UserSignUp(Resource):
- @auth.auth_required
- def post(self):
- parser = reqparse.RequestParser()
- # import ipdb; ipdb.set_trace()
- parser.add_argument("email", type=str, required=True, location="form")
- args = parser.parse_args()
- # import ipdb; ipdb.set_trace()
- email = args["email"]
- # import ipdb; ipdb.set_trace()
-
- if not model.is_unique(table="user", field="email", value=f"{email}"):
- return response(409, message="Duplicate Email")
-
- try:
- validator.validate("EMAIL", email)
- except Exception as e:
- return response(422, message=f"{e}")
-
- try:
- data = {"email": email, "created_at": helpers.get_datetime()}
-
- inserted_id = model.insert(table="user", data=data)
- data_ = {"id": inserted_id, **data}
- return response(201, data=data_)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class UserUpdate(Resource):
- @auth.auth_required
- def put(self, user_id):
- parser = reqparse.RequestParser()
- parser.add_argument("email", type=str, required=True, location="form")
- args = parser.parse_args()
- email = args["email"]
- args = parser.parse_args()
-
- if not model.is_unique(table="user", field="email", value=f"{email}"):
- return response(409, message="Duplicate Email")
-
- try:
- validator.validate("EMAIL", email)
- except Exception as e:
- return response(422, message=f"{e}")
-
- try:
- data = {"where": {"id": user_id}, "data": {"email": email}}
- row_count = model.update("user", data=data)
- if not row_count:
- return response(404)
-
- return response(200, data=data.get("data"))
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
-
-
-class UserDelete(Resource):
- @auth.auth_required
- def delete(self, user_id):
- try:
- row_count = model.delete(table="user", field="id", value=user_id)
- if not row_count:
- return response(404)
-
- return response(204)
- except Exception as e:
- current_app.logger.error(f"{e}")
- return response(500)
diff --git a/api/app/database.py b/api/app/database.py
deleted file mode 100644
index 5d16f73b..00000000
--- a/api/app/database.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import os
-
-import psycopg2
-
-
-def connect():
- """Connect to database."""
- try:
- connection = psycopg2.connect(
- database=os.environ.get("DB_NAME"),
- user=os.environ.get("DB_USER"),
- password=os.environ.get("DB_PASSWORD"),
- sslmode=os.environ.get("DB_SSL"),
- host=os.environ.get("DB_HOST"),
- port=os.environ.get("DB_PORT"),
- )
- connection.autocommit = False
- return connection
- except Exception as exc:
- raise ValueError(f"{exc}")
diff --git a/api/app/helpers/command.py b/api/app/helpers/command.py
deleted file mode 100644
index 90fa8cb5..00000000
--- a/api/app/helpers/command.py
+++ /dev/null
@@ -1,178 +0,0 @@
-import json
-
-from app.helpers import helpers, producer
-from app.models import model
-
-
-def get_other_data(record_id):
- """Return other record data from given record id."""
- try:
- record = model.get_one(table="record", field="id", value=record_id)
-
- zone_id = record["zone_id"]
- type_id = record["type_id"]
- ttl_id = record["ttl_id"]
-
- zone = model.get_one(table="zone", field="id", value=zone_id)
- type_ = model.get_one(table="type", field="id", value=type_id)
- ttl = model.get_one(table="ttl", field="id", value=ttl_id)
- rdata = model.get_one(table="rdata", field="record_id", value=record_id)
- return (record, zone, type_, ttl, rdata)
- except Exception as error:
- raise ValueError(f"{error}")
-
-
-def generate_command(**kwargs):
- """Return dictionary of given keywords & values."""
- zone = kwargs.get("zone_name")
- owner = kwargs.get("owner")
- rtype = kwargs.get("rtype")
- ttl = kwargs.get("ttl")
- rdata = kwargs.get("rdata")
- command = kwargs.get("command")
-
- cmd = {
- "cmd": command,
- "zone": zone,
- "owner": owner,
- "rtype": rtype,
- "ttl": ttl,
- "data": rdata,
- }
-
- return cmd
-
-
-def set_config(zone, zone_id, command):
- """Send config command with JSON structure to broker."""
-
- # there are two option to put conf-begin and conf-commit
- # either here (api) or in (agent)
- # I'd rather choose to put it here for finer tuning
- conf_begin = {"cmd": "conf-begin", "zone": zone}
- conf_set = {"cmd": command, "section": "zone", "item": "domain", "data": zone}
- conf_commit = {"cmd": "conf-commit", "zone": zone}
-
- queries = []
- for query in [conf_begin, conf_set, conf_commit]:
- queries.append(query)
-
- # agent_type: master, slave
- # because config created both in master and slave
- message = {"agent": {"agent_type": ["master", "slave"]}, "knot": queries}
-
- producer.send(message)
-
-
-def set_zone(record_id, command):
- """Send zone command with JSON structure to broker."""
- record, zone, type_, ttl, rdata = get_other_data(record_id)
- zone_name = zone["zone"]
-
- # escape space and double quote in txt rdata
- rdata = rdata["rdata"]
- if type_["type"] == "TXT":
- rdata = json.dumps(rdata)
-
- zone_begin = {"cmd": "zone-begin", "zone": zone_name}
- zone_set = generate_command(
- zone=zone_name,
- owner=record["owner"],
- rtype=type_["type"],
- ttl=ttl["ttl"],
- rdata=rdata,
- command=command,
- )
- zone_commit = {"cmd": "zone-commit", "zone": zone_name}
-
- queries = []
- for query in [zone_begin, zone_set, zone_commit]:
- queries.append(query)
-
- # agent_type: master
- # because zone only created in master, slave will get zone via axfr
- message = {"agent": {"agent_type": ["master"]}, "knot": queries}
-
- producer.send(message)
-
-
-def set_default_zone(record_ids):
- """Send zone command with JSON structure to broker."""
- # We can use `send_zone` but it will be slow since each zone will be sent
- # separately
-
- zone_sets = []
- for record_id in record_ids:
- record, zone, type_, ttl, rdata = get_other_data(record_id)
-
- # escape space and double quote in txt rdata
- rdata = rdata["rdata"]
- if type_["type"] == "TXT":
- rdata = json.dumps(rdata)
-
- zone_name = zone["zone"]
- zone_set = generate_command(
- zone=zone_name,
- owner=record["owner"],
- rtype=type_["type"],
- ttl=ttl["ttl"],
- rdata=rdata,
- command="zone-set",
- )
- zone_sets.append(zone_set)
-
- zone_begin = {"cmd": "zone-begin", "zone": zone_name}
- zone_commit = {"cmd": "zone-commit", "zone": zone_name}
-
- queries = []
- for query in [zone_begin, *zone_sets, zone_commit]:
- queries.append(query)
-
- # agent_type: master
- # because zone only created in master, slave will get zone via axfr
- message = {"agent": {"agent_type": ["master"]}, "knot": queries}
-
- producer.send(message)
-
-
-def delegate(zone, zone_id, command, agent_type):
- """Send delegation config command with JSON structure to broker."""
- config = helpers.get_config()
- try:
- clusters = config["knot_servers"]
- except KeyError:
- raise ValueError("Can't Knot server list in config")
-
- cluster = clusters[agent_type]
-
- # default for master
- cluster_type = "notify"
- cluster_type_item = "notify"
- if agent_type == "slave":
- cluster_type = "master"
- cluster_type_item = "master"
-
- queries = [
- {"item": "file", "data": f"{zone}.zone", "identifier": zone},
- {"item": "serial-policy", "data": "dateserial"},
- {"item": "module", "data": "mod-stats/default"},
- ]
- queries.extend(
- [{"item": cluster_type_item, "data": item} for item in cluster[cluster_type]]
- )
-
- queries.extend([{"item": "acl", "data": acl} for acl in cluster["acl"]])
-
- queries_ = []
- for query in queries:
- query["cmd"] = command
- query["section"] = "zone"
- query["zone"] = zone
- queries_.append(query)
-
- message = {
- "agent": {"agent_type": [agent_type]},
- "knot": [{"cmd": "conf-begin"}, *queries_, {"cmd": "conf-commit"}],
- }
-
- producer.send(message)
diff --git a/api/app/helpers/helpers.py b/api/app/helpers/helpers.py
deleted file mode 100644
index 131e06d1..00000000
--- a/api/app/helpers/helpers.py
+++ /dev/null
@@ -1,123 +0,0 @@
-import datetime
-import os
-import pathlib
-from functools import wraps
-
-import yaml
-
-from app.helpers import producer
-from app.vendors.rest import response
-
-
-def soa_time_set():
- date = datetime.datetime.now().strftime("%Y%m%d")
- return date
-
-
-def replace_serial(rdata, serial):
- """Replace serial value in given rdata."""
- rdatas = rdata.split(" ")
- # `mname_and_rname` contains such 'one.dns.id. two.dns.id.'
- # `ttls` contains such '10800 3600 604800 38400'
- mname_and_rname = " ".join(rdatas[0:2])
- ttls = " ".join(rdatas[3:])
-
- return f"{mname_and_rname} {serial} {ttls}"
-
-
-def increment_serial(serial, increment="01"):
- """Increment serial value with given str value.
-
- Keyword arguments:
- increment -- the increment value (default "01")
- """
- today_date = soa_time_set()
- record_date = serial[:-2]
- # The 10-digit serial (YYYYMMDDnn) is incremented, the first
- # 8 digits match the current iso-date
- nn = serial[-2:]
- if record_date != today_date:
- # date changed, reset `nn`
- nn = "01"
-
- increment = add_str(nn, increment)
- return f"{today_date}{increment}"
-
-
-def get_datetime():
- now = datetime.datetime.now(datetime.timezone.utc)
- return f"{now:%Y-%m-%d %H:%M:%S %z}"
-
-
-def exclude_keys(dict_, keys):
- """Exclude specified key from dict."""
- return {item: dict_[item] for item in dict_ if item not in keys}
-
-
-def add_str(x, y):
- """Handle string addition
-
- :Example:
- add_str('11', '01') => '12'
- """
- return str(int(x) + int(y)).zfill(len(x))
-
-
-def check_producer(f):
- """Check producer availability"""
-
- @wraps(f)
- def decorated_function(*args, **kwargs):
- try:
- producer.kafka_producer()
- except Exception as e:
- return response(500, message=f"{e}")
- else:
- return f(*args, **kwargs)
-
- return decorated_function
-
-
-def read_file(other_file_name, filename):
- root_dir = pathlib.Path(other_file_name).resolve().parent
- path = root_dir.joinpath(filename)
-
- if path.is_file():
- with open(path, "rb") as f:
- content = f.read().decode("utf-8")
- return content
-
-
-def read_version(other_file_name, filename):
- """Read the the current version or build of the app"""
- version = ""
-
- version = read_file(other_file_name, filename)
- if version:
- version = version.rstrip()
-
- if not version:
- version = "__UNKNOWN__"
-
- return version
-
-
-def config_file():
- """Return config file path."""
- path = os.environ.get("RESTKNOT_CONFIG_FILE")
- if not path:
- current_path = pathlib.Path(__file__)
- path = current_path.parents[2].joinpath("config.yml")
-
- is_exists = os.path.exists(path)
- if is_exists:
- return path
- else:
- raise ValueError(f"Config File Not Found: {path}")
-
-
-def get_config():
- """Return config file content."""
- file_ = config_file()
- config = yaml.safe_load(open(file_))
- return config
diff --git a/api/app/helpers/producer.py b/api/app/helpers/producer.py
deleted file mode 100644
index d6d42cec..00000000
--- a/api/app/helpers/producer.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import json
-import os
-
-from confluent_kafka import Producer
-from confluent_kafka.admin import AdminClient
-from flask import current_app
-
-from app.helpers import helpers
-
-
-def kafka_admin():
- """Create Kafka admin."""
- config = helpers.get_config()
- try:
- brokers = config["brokers"]
- except KeyError:
- raise ValueError("Can't find brokers list in config")
-
- # `AdmintClient` can't recive list of brokers
- conf = {"bootstrap.servers": brokers[0]}
- admin_client = AdminClient(conf)
- return admin_client
-
-
-def kafka_producer():
- """Create Kafka producer."""
- config = helpers.get_config()
- try:
- brokers = config["brokers"]
- except KeyError:
- raise ValueError("Can't find brokers list in config")
-
- brokers = ",".join(brokers)
- conf = {"bootstrap.servers": brokers}
- producer = Producer(**conf)
- return producer
-
-
-def _delivery_report(err, msg):
- if err is not None:
- raise ValueError(f"Message delivery failed: {err}")
-
-
-def send(message):
- """Send given message to Kafka broker."""
- producer = None
- try:
- producer = kafka_producer()
- topic = os.environ.get("RESTKNOT_KAFKA_TOPIC")
- encoded_message = json.dumps(message).encode("utf-8")
- producer.produce(topic, encoded_message, callback=_delivery_report)
- except Exception as e:
- current_app.logger.error(f"{e}")
- raise ValueError(f"{e}")
-
- # Serve delivery callback queue.
- producer.poll(0)
- # Wait until all messages have been delivered
- producer.flush()
diff --git a/api/app/helpers/rules.py b/api/app/helpers/rules.py
deleted file mode 100644
index 89bc2fb6..00000000
--- a/api/app/helpers/rules.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# --------------------------------------------------------------------
-# DNS Rules
-#
-# The rules for DNS records:
-#
-# CNAME
-# 1. same owner NOT allowed
-# 2. owner CAN'T coexist with the same A owner
-#
-# A
-# 1. same owner allowed
-# 2. owner CAN'T coexist with the same CNAME owner
-# --------------------------------------------------------------------
-
-from app.models import rules as rules_model
-from app.models import type_ as type_model
-from app.models import zone as zone_model
-
-
-def is_allowed(zone_id, type_id, owner, rdata, ttl_id, record_id=None):
- """A Generic function to check is given record is allowed.
-
- 1. Check for duplicate record
- """
- # duplicate record NOT allowed
- rules = rules_model.Rules()
- rules.is_duplicate(zone_id, type_id, owner, rdata, ttl_id)
-
-
-def is_allowed_cname(zone_id, type_id, owner, rdata, ttl_id):
- """Check is given CNAME record is allowed.
-
- 1. Check for duplicate record
- 2. Check for the same owner
- 3. Check for the same A owner
- 4. Check if the owner is root
- """
- # duplicate record NOT allowed
- is_allowed(zone_id, type_id, owner, rdata, ttl_id)
-
- # 1. same owner NOT allowed
- query = '"type_id"=%(type_id)s AND "owner"=%(owner)s'
- value = {"zone_id": zone_id, "type_id": type_id, "owner": owner}
- rules = rules_model.Rules(query, value)
-
- is_unique = rules.is_unique()
- if not is_unique:
- raise ValueError("A CNAME record already exist with that owner")
-
- # 2. owner CAN'T coexist with the same A owner
- a_type_id = type_model.get_typeid_by_rtype("A")
- query = '"type_id" IN (%(type1)s,%(type2)s) AND "owner"=%(owner)s'
- value = {"zone_id": zone_id, "type1": type_id, "type2": a_type_id, "owner": owner}
- rules = rules_model.Rules(query, value)
-
- is_coexist = rules.is_coexist()
- if is_coexist:
- raise ValueError("An A record already exist with that owner")
-
- # 4. owner can't be root
- # can't be `domainname.com.` and `@`
- zone = zone_model.get_zone(zone_id)
- if owner == f"{zone}." or owner == "@":
- raise ValueError("A CNAME owner can't be root")
-
-
-def is_allowed_a(zone_id, type_id, owner, rdata, ttl_id, record_id=None):
- """Check is given A record is allowed.
-
- 1. Check for duplicate record
- 2. Check for the same CNAME owner
- """
- # duplicate record NOT allowed
- is_allowed(zone_id, type_id, owner, rdata, ttl_id)
-
- # 2. owner CAN'T coexist with the same CNAME owner
- cname_type_id = type_model.get_typeid_by_rtype("CNAME")
- query = '"type_id"=%(type_id)s AND "owner"=%(owner)s'
- value = {"zone_id": zone_id, "type_id": cname_type_id, "owner": owner}
- rules = rules_model.Rules(query, value)
-
- is_coexist = rules.is_coexist()
- if is_coexist:
- raise ValueError("A CNAME record already exist with that owner")
-
-
-def is_allowed_cname_edit(zone_id, type_id, owner, rdata, ttl_id, record_id=None):
- """Check is given CNAME record is allowed.
-
- This function separated from `cname_add` because it needs to exclude its id
- while searching for other records.
- """
- # duplicate record NOT allowed
- is_allowed(zone_id, type_id, owner, rdata, ttl_id)
-
- # 1. same owner NOT allowed
- query = '"type_id"=%(type_id)s AND "owner"=%(owner)s AND "id"<>%(record_id)s'
- value = {
- "zone_id": zone_id,
- "type_id": type_id,
- "owner": owner,
- "record_id": record_id,
- }
- rules = rules_model.Rules(query, value)
-
- is_unique = rules.is_unique()
- if not is_unique:
- raise ValueError("A CNAME record already exist with that owner")
-
- # 2. owner CAN'T coexist with the same A owner
- a_type_id = type_model.get_typeid_by_rtype("A")
- query = '"type_id" IN (%(type1)s,%(type2)s) AND "owner"=%(owner)s AND "id"<>%(record_id)s'
- value = {
- "zone_id": zone_id,
- "type1": type_id,
- "type2": a_type_id,
- "owner": owner,
- "record_id": record_id,
- }
- rules = rules_model.Rules(query, value)
-
- is_coexist = rules.is_coexist()
- if is_coexist:
- raise ValueError("An A record already exist with that owner")
-
- # 3. owner can't be root
- zone = zone_model.get_zone(zone_id)
- if owner == f"{zone}." or owner == "@":
- raise ValueError("A CNAME owner can't be root")
-
-
-# function based on rtype input when adding record
-functions_add = {
- "CNAME": is_allowed_cname,
- "A": is_allowed_a,
- "AAAA": is_allowed_a,
- "SOA": is_allowed,
- "NS": is_allowed,
- "MX": is_allowed,
- "TXT": is_allowed,
- "SRV": is_allowed,
-}
-functions_edit = {
- "CNAME": is_allowed_cname_edit,
- "A": is_allowed_a,
- "AAAA": is_allowed_a,
- "SOA": is_allowed,
- "NS": is_allowed,
- "MX": is_allowed,
- "TXT": is_allowed,
- "SRV": is_allowed,
-}
-
-
-def check_add(rtype, zone_id, type_id, owner, rdata, ttl_id):
- rtype = rtype.upper()
- if rtype in functions_add.keys():
- functions_add[rtype](zone_id, type_id, owner, rdata, ttl_id)
- else:
- raise ValueError("Unsupported Record Type")
-
-
-def check_edit(rtype, zone_id, type_id, owner, rdata, ttl_id, record_id=None):
- """Return function when user editing A record.
-
- Some function need dummy `record_id` parameters to match with other function
- parameter length
- """
- rtype = rtype.upper()
- if rtype in functions_edit.keys():
- functions_edit[rtype](zone_id, type_id, owner, rdata, ttl_id, record_id)
- else:
- raise ValueError("Unsupported Record Type")
diff --git a/api/app/helpers/validator.py b/api/app/helpers/validator.py
deleted file mode 100644
index 8dd40206..00000000
--- a/api/app/helpers/validator.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# --------------------------------------------------------------------
-# RDATA Rules
-#
-# The rules for DNS RDATA:
-#
-# IP
-# EMAIL
-# MX
-# CNAME
-# ZONE
-# SOA
-#
-#
-#
-# Credits:
-# RE Email Credit: https://emailregex.com/
-# RE ZONE Credit:
-# RE CNAME Credit: https://www.regextester.com/106386
-# --------------------------------------------------------------------
-
-import re
-import string
-from ipaddress import ip_address
-
-RE_EMAIL = "(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
-RE_ZONE = "^(?!(https:\/\/|http:\/\/|www\.|mailto:|smtp:|ftp:\/\/|ftps:\/\/))(((([a-zA-Z0-9])|([a-zA-Z0-9][a-zA-Z0-9\-]{0,86}[a-zA-Z0-9]))\.(([a-zA-Z0-9])|([a-zA-Z0-9][a-zA-Z0-9\-]{0,73}[a-zA-Z0-9]))\.(([a-zA-Z0-9]{2,12}\.[a-zA-Z0-9]{2,12})|([a-zA-Z0-9]{2,25})))|((([a-zA-Z0-9])|([a-zA-Z0-9][a-zA-Z0-9\-]{0,162}[a-zA-Z0-9]))\.(([a-zA-Z0-9]{2,12}\.[a-zA-Z0-9]{2,12})|([a-zA-Z0-9]{2,25}))))$"
-RE_CNAME = "^(([a-zA-Z0-9_]|[a-zA-Z0-9_][a-zA-Z0-9_\-]*[a-zA-Z0-9_])\.)*([A-Za-z0-9_]|[A-Za-z0-9_][A-Za-z0-9_\-]*[A-Za-z0-9_](\.?))$"
-
-
-def is_valid_ip(ip):
- """Check whether it's a valid IPv4 or IPv6."""
- try:
- ip_address(ip)
- except ValueError:
- raise ValueError("Bad IP Adrress")
-
-
-def is_valid_email(email):
- """Check if it's a valid email address."""
- match = re.match(RE_EMAIL, email)
- if match is None:
- raise ValueError("Bad Email Adrress")
-
-
-def is_valid_mx(mx_rdata):
- """Check if MX RDATA contents is valid."""
- msg = "Bad MX RDATA"
-
- preference = mx_rdata.split(" ")[0]
- hostname = mx_rdata.split(" ")[1]
-
- try:
- # we need to improve this validation.
- # this is a loose validation
- if (int(preference)).bit_length() <= 16 and len(mx_rdata.split(" ")) == 2:
- pass
- else:
- raise ValueError(msg)
- except Exception:
- raise ValueError(msg)
-
- try:
- is_valid_cname(hostname)
- except Exception:
- raise ValueError(msg)
-
-
-def is_valid_cname(cname_rdata):
- """Check if CNAME RDATA contents is valid."""
- msg = "Bad CNAME RDATA"
-
- if cname_rdata == "@":
- raise ValueError(msg)
-
- match = re.match(RE_CNAME, cname_rdata)
- if match is None:
- raise ValueError(msg)
-
-
-def is_valid_zone(domain_name):
- """Check if it's a valid domain name."""
- match = re.match(RE_ZONE, domain_name)
- if match is None:
- raise ValueError("Bad Domain Name")
-
-
-def is_valid_txt(txt_rdata):
- """Check if it's a valid TXT rdata."""
- for char in txt_rdata:
- if char not in string.printable:
- raise ValueError("Bad TXT RDATA")
-
-
-def is_valid_soa(soa_rdata):
- """Simple function to check SOA RDATA."""
- rdatas = soa_rdata.split(" ")
-
- try:
- is_valid_cname(rdatas[0])
- is_valid_cname(rdatas[1])
- except Exception:
- raise ValueError("Bad SOA RDATA")
-
- for number in rdatas[2:]:
- try:
- int(number)
- except ValueError:
- raise ValueError("Bad SOA RDATA")
-
-
-def is_valid_srv(rdata):
- """Simple function to check SRV RDATA."""
- rdatas = rdata.split(" ")
- if len(rdatas) != 4:
- raise ValueError("Bad SRV RDATA")
-
- try:
- is_valid_cname(rdatas[3])
- except Exception:
- raise ValueError("Bad SRV RDATA")
-
- for number in rdatas[:3]:
- try:
- int(number)
- except ValueError:
- raise ValueError("Bad SRV RDATA")
-
-
-def is_valid_owner(owner):
- """Check if it's a valid owner.
-
- Rules:
- - owner label can't end with dot (".")
- - owner label can't ends/starts with dash ("-")
- - owner can't exceed 255 characters
- - owner label can't exceed 63 characters
- - owner can't contains parens ("()")
- """
-
- def check_hypen(label):
- if any((label.endswith("."), label.endswith("-"), label.startswith("-"))):
- raise ValueError("Bad OWNER")
-
- check_hypen(owner)
-
- if "." in owner:
- for label in owner.split("."):
- check_hypen(label)
-
- if len(owner) > 255:
- raise ValueError("Bad OWNER")
-
- if "." in owner:
- for label in owner.split("."):
- if len(label) > 63:
- raise ValueError("Bad OWNER")
-
- if any(char in "()" for char in owner):
- raise ValueError("Bad OWNER")
-
-
-functions = {
- "A": is_valid_ip,
- "AAAA": is_valid_ip,
- "MX": is_valid_mx,
- "CNAME": is_valid_cname,
- "NS": is_valid_cname,
- "EMAIL": is_valid_email,
- "ZONE": is_valid_zone,
- "SOA": is_valid_soa,
- "TXT": is_valid_txt,
- "SRV": is_valid_srv,
- "OWNER": is_valid_owner,
-}
-
-
-def validate(rtype, rdata):
- if not rdata:
- raise ValueError("RDATA can't be empty")
-
- rtype = rtype.upper()
- if rtype in functions.keys():
- functions[rtype](rdata)
- else:
- raise ValueError("Unsupported Record Type")
diff --git a/api/app/middlewares/__init__.py b/api/app/middlewares/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/app/middlewares/auth.py b/api/app/middlewares/auth.py
deleted file mode 100644
index 388924db..00000000
--- a/api/app/middlewares/auth.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-from functools import wraps
-
-from flask import request
-
-from app.vendors.rest import response
-
-
-def auth_required(f):
- """Decorate given function with authentication check."""
-
- @wraps(f)
- def decorated_function(*args, **kwargs):
- user_key = request.headers.get("X-API-Key", None)
- app_key = os.environ.get("RESTKNOT_API_KEY")
-
- if user_key != app_key:
- return response(400, message="Access denied")
-
- return f(*args, **kwargs)
-
- return decorated_function
diff --git a/api/app/models/__init__.py b/api/app/models/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/app/models/domain.py b/api/app/models/domain.py
deleted file mode 100644
index a7214d2b..00000000
--- a/api/app/models/domain.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from app.helpers import helpers
-from app.models import model
-from app.models import record as record_model
-
-
-def get_other_data(zone):
- if zone is None:
- return
-
- user = model.get_one(table="user", field="id", value=zone["user_id"])
- user = helpers.exclude_keys(user, {"created_at"})
- records = record_model.get_records_by_zone(zone["zone"])
-
- records_detail = []
- for record in records:
- record_detail = record_model.get_other_data(record)
- record_detail = helpers.exclude_keys(record_detail, {"zone"})
- records_detail.append(record_detail)
-
- data = {
- "zone_id": zone["id"],
- "zone": zone["zone"],
- "user": user,
- "records": records_detail,
- }
-
- return data
diff --git a/api/app/models/model.py b/api/app/models/model.py
deleted file mode 100644
index 5d70104d..00000000
--- a/api/app/models/model.py
+++ /dev/null
@@ -1,175 +0,0 @@
-import psycopg2
-
-from app import database
-from app.vendors.prepare import PreparingCursor
-
-
-def get_db():
- try:
- connection = database.connect()
- cursor = connection.cursor(cursor_factory=PreparingCursor)
- return cursor, connection
- except Exception as exc:
- raise ValueError(f"{exc}")
-
-
-def zip_column_name(table, rows):
- results = []
- column = get_columns(table)
- for row in rows:
- results.append(dict(zip(column, row)))
- return results
-
-
-def get_columns(table):
- column = None
- cursor, _ = get_db()
- try:
- query = f"SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name='{table}'"
- cursor.execute(query)
- column = [row[0] for row in cursor.fetchall()]
- except (Exception, psycopg2.DatabaseError) as error:
- raise ValueError(f"{error}")
- return column
-
-
-def get_all(table):
- results = []
- cursor, connection = get_db()
- try:
- query = f'SELECT * FROM "{table}"'
- cursor.prepare(query)
- cursor.execute()
- rows = cursor.fetchall()
- results = zip_column_name(table, rows)
- except (psycopg2.DatabaseError, psycopg2.OperationalError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- return results
-
-
-def get_one(table, field=None, value=None):
- results = []
- cursor, connection = get_db()
- column = get_columns(table)
- try:
- query = f'SELECT * FROM "{table}" WHERE "{field}"=%(value)s'
- cursor.prepare(query)
- cursor.execute({"value": value})
- rows = cursor.fetchone()
- if not rows:
- return
- results = dict(zip(column, list(rows)))
- except (psycopg2.DatabaseError, psycopg2.OperationalError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- return results
-
-
-def insert(table, data=None):
- cursor, connection = get_db()
- rows = []
- rows_value = []
-
- # arrange row and values
- for row in data:
- rows.append(row)
- rows_value.append(str(data[row]))
-
- str_placeholer = ["%s"] * len(rows)
-
- try:
- rows = ",".join(rows)
- str_placeholer = ",".join(str_placeholer)
-
- query = f'INSERT INTO "{table}" ({rows}) VALUES ({str_placeholer}) RETURNING *'
- cursor.prepare(query)
- cursor.execute((tuple(rows_value)))
- except (Exception, psycopg2.DatabaseError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- inserted_data_id = cursor.fetchone()[0]
- return inserted_data_id
-
-
-def update(table, data=None):
- cursor, connection = get_db()
- data_ = data["data"]
- rows = []
- set_value = []
-
- for row in data_:
- rows.append(row)
- row_value = str(data_[row])
- set_value.append(f"{row}='{row_value}'")
-
- field = list(data["where"].keys())[0] # must be one
- field_data = data["where"][field]
-
- try:
- set_ = ",".join(set_value)
- query = f'UPDATE "{table}" SET {set_} WHERE {field}=%(field_data)s'
- cursor.prepare(query)
- cursor.execute({"field_data": field_data})
- except (Exception, psycopg2.DatabaseError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- rows_edited = cursor.rowcount
- return rows_edited
-
-
-def delete(table, field=None, value=None):
- cursor, connection = get_db()
- rows_deleted = 0
- try:
- query = f'DELETE FROM "{table}" WHERE {field}=%(value)s'
- cursor.prepare(query)
- cursor.execute({"value": value})
- except (Exception, psycopg2.DatabaseError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- rows_deleted = cursor.rowcount
- return rows_deleted
-
-
-def is_unique(table, field=None, value=None):
- """Check if data only appear once."""
- cursor, connection = get_db()
-
- query = f'SELECT * FROM "{table}" WHERE "{field}"=%(value)s'
- cursor.prepare(query)
- cursor.execute({"value": value})
- rows = cursor.fetchall()
-
- if rows: # initial database will return None
- if len(rows) != 0:
- return False
-
- return True
-
-
-def plain_get(table, query, value=None):
- """Accept plain SQL to be sent as prepared statement."""
- results = []
- cursor, connection = get_db()
- try:
- cursor.prepare(query)
- cursor.execute(value)
- rows = cursor.fetchall()
- results = zip_column_name(table, rows)
- except (psycopg2.DatabaseError, psycopg2.OperationalError) as error:
- connection.rollback()
- raise ValueError(f"{error}")
- else:
- connection.commit()
- return results
diff --git a/api/app/models/record.py b/api/app/models/record.py
deleted file mode 100644
index 33cb0b96..00000000
--- a/api/app/models/record.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from flask import current_app
-
-from app.helpers import helpers
-from app.models import model
-from app.models import type_ as type_model
-from app.models import zone as zone_model
-
-
-def get_other_data(record):
- try:
- rdata = model.get_one(table="rdata", field="record_id", value=record["id"])
- zone = model.get_one(table="zone", field="id", value=record["zone_id"])
- ttl = model.get_one(table="ttl", field="id", value=record["ttl_id"])
- type_ = model.get_one(table="type", field="id", value=record["type_id"])
-
- if rdata:
- rdata = helpers.exclude_keys(rdata, {"id", "record_id"})
- rdata = rdata.get("rdata")
-
- zone = helpers.exclude_keys(
- zone, {"id", "is_committed", "user_id", "record_id"}
- )
-
- data = {
- "id": record["id"],
- "owner": record["owner"],
- "rdata": rdata,
- "zone": zone["zone"],
- "type": type_["type"],
- "ttl": ttl["ttl"],
- }
-
- return data
- except Exception as e:
- current_app.logger.error(f"{e}")
-
-
-def is_exists(record_id):
- record = model.get_one(table="record", field="id", value=record_id)
- if not record:
- raise ValueError("Record Not Found")
-
-
-def get_records_by_zone(zone):
- zone_id = zone_model.get_zone_id(zone)
-
- query = 'SELECT * FROM "record" WHERE "zone_id"=%(zone_id)s'
- value = {"zone_id": zone_id}
- records = model.plain_get("record", query, value)
- return records
-
-
-def get_soa_record(zone):
- records = get_records_by_zone(zone)
-
- for record in records:
- rtype = type_model.get_type_by_recordid(record["id"])
- if rtype == "SOA":
- return record
diff --git a/api/app/models/rules.py b/api/app/models/rules.py
deleted file mode 100644
index be10c050..00000000
--- a/api/app/models/rules.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from app.models import model
-
-
-class Rules:
- def __init__(self, query=None, value=None):
- """Append default query to given query."""
- self.query = (
- f'SELECT * FROM "record" WHERE "zone_id"=%(zone_id)s AND {query or None}'
- )
- self.value = value
-
- def is_unique(self):
- """Check if no record exists."""
- records = model.plain_get("record", self.query, self.value)
-
- if records: # initial database will return None
- if len(records) == 0:
- return True
- return False
-
- return True # also if None
-
- def is_coexist(self):
- """Check if no record exists."""
- records = model.plain_get("record", self.query, self.value)
-
- if records:
- if len(records) > 0:
- return True
-
- return False
-
- def is_duplicate(self, zone_id, type_id, owner, rdata, ttl_id):
- """Check duplicate record exists."""
- base_query = 'SELECT * FROM "record" WHERE "zone_id"=%(zone_id)s AND'
- query = (
- base_query
- + '"type_id"=%(type_id)s AND "owner"=%(owner)s AND "ttl_id"=%(ttl_id)s'
- ) # noqa: W503
- value = {
- "zone_id": zone_id,
- "type_id": type_id,
- "owner": owner,
- "ttl_id": ttl_id,
- }
-
- records = model.plain_get("record", query, value)
- for record in records:
- rdata_record = model.get_one(
- table="rdata", field="record_id", value=record["id"]
- )
- if rdata == rdata_record["rdata"]:
- raise ValueError("The record already exists")
diff --git a/api/app/models/ttl.py b/api/app/models/ttl.py
deleted file mode 100644
index 8179ada2..00000000
--- a/api/app/models/ttl.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from app.models import model
-
-
-def is_exists(ttl_id):
- ttl_ = model.get_one(table="ttl", field="id", value=ttl_id)
- if not ttl_:
- raise ValueError("TTL Not Found")
-
-
-def get_ttlid_by_ttl(ttl):
- """Get type id by record record type."""
- ttl_ = model.get_one(table="ttl", field="ttl", value=ttl)
- if not ttl_:
- raise ValueError("TTL Not Found")
-
- ttl_id = ttl_["id"]
- return ttl_id
diff --git a/api/app/models/type_.py b/api/app/models/type_.py
deleted file mode 100644
index 3247fc0c..00000000
--- a/api/app/models/type_.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from app.models import model
-
-
-def get_typeid_by_rtype(rtype):
- """Get type id by record record type."""
- type_ = model.get_one(table="type", field="type", value=rtype.upper())
- if not type_:
- raise ValueError("Type Not Found")
-
- type_id = type_["id"]
- return type_id
-
-
-def get_type_by_recordid(record_id):
- """Get record type by record id."""
- try:
- record = model.get_one(table="record", field="id", value=record_id)
- type_id = record["type_id"]
-
- type_ = model.get_one(table="type", field="id", value=type_id)
- return type_["type"]
- except Exception:
- raise ValueError("Unrecognized Record Type")
-
-
-def is_exists(type_id):
- type_ = model.get_one(table="type", field="id", value=type_id)
- if not type_:
- raise ValueError("Type Not Found")
diff --git a/api/app/models/zone.py b/api/app/models/zone.py
deleted file mode 100644
index 87a2ab03..00000000
--- a/api/app/models/zone.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from app.models import model
-
-
-def get_zone(zone_id):
- """Get zone name by ID"""
- zone = model.get_one(table="zone", field="id", value=f"{zone_id}")
- if not zone:
- raise ValueError("Zone Not Found")
-
- zone = zone["zone"]
- return zone
-
-
-def get_zone_id(zone):
- zone = model.get_one(table="zone", field="zone", value=f"{zone}")
- if not zone:
- raise ValueError("Zone Not Found")
-
- zone_id = zone["id"]
- return zone_id
-
-
-def get_zone_by_record(record_id):
- record = model.get_one(table="record", field="id", value=f"{record_id}")
- zone_id = record["zone_id"]
- zone = model.get_one(table="zone", field="id", value=f"{zone_id}")
- return zone
-
-
-def get_zones_by_user(user_id):
- query = 'SELECT * FROM "zone" WHERE "user_id"=%(user_id)s'
- value = {"user_id": user_id}
- zones = model.plain_get("zone", query, value)
- return zones
diff --git a/api/app/vendors/__init__.py b/api/app/vendors/__init__.py
deleted file mode 100644
index 6e3e938e..00000000
--- a/api/app/vendors/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from flask import Flask
-from flask_cors import CORS
-
-
-def create_app():
- app = Flask(__name__)
- CORS(app, resources={r"/api/*": {"origins": "*"}})
-
- from .controllers import api_blueprint
-
- app.register_blueprint(api_blueprint)
-
- return app
diff --git a/api/app/vendors/prepare.py b/api/app/vendors/prepare.py
deleted file mode 100644
index 0f7d0ebf..00000000
--- a/api/app/vendors/prepare.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env python
-"""An example of cursor dealing with prepared statements.
-
-A cursor can be used as a regular one, but has also a prepare() statement. If
-prepare() is called, execute() and executemany() can be used without query: in
-this case the parameters are passed to the prepared statement. The functions
-also execute the prepared statement if the query is the same prepared before.
-
-Prepared statements aren't automatically deallocated when the cursor is
-deleted, but are when the cursor is closed. For long-running sessions creating
-an unbound number of cursors you should make sure to deallocate the prepared
-statements (calling close() or deallocate() on the cursor).
-
-"""
-
-# Copyright (C) 2012 Daniele Varrazzo
-
-import re
-from threading import Lock
-
-import psycopg2
-import psycopg2.extensions as ext
-
-
-class PreparingCursor(ext.cursor):
- _lock = Lock()
- _ncur = 0
-
- def __init__(self, *args, **kwargs):
- super(PreparingCursor, self).__init__(*args, **kwargs)
- # create a distinct name for the statements prepared by this cursor
- self._lock.acquire()
- self._prepname = "psyco_%x" % self._ncur
- PreparingCursor._ncur += 1
- self._lock.release()
-
- self._prepared = None
- self._execstmt = None
-
- _re_replargs = re.compile(r"(%s)|(%\([^)]+\)s)")
-
- def prepare(self, stmt):
- """Prepare a query for execution.
-
- TODO: handle literal %s and $s in the string.
- """
- # replace the python placeholders with postgres placeholders
- parlist = []
- parmap = {}
- parord = []
-
- def repl(m):
- par = m.group(1)
- if par is not None:
- parlist.append(par)
- return "$%d" % len(parlist)
- else:
- par = m.group(2)
- assert par
- idx = parmap.get(par)
- if idx is None:
- idx = parmap[par] = "$%d" % (len(parmap) + 1)
- parord.append(par)
-
- return idx
-
- pgstmt = self._re_replargs.sub(repl, stmt)
-
- if parlist and parmap:
- raise psycopg2.ProgrammingError(
- "you can't mix positional and named placeholders"
- )
-
- self.deallocate()
- self.execute("prepare %s as %s" % (self._prepname, pgstmt))
-
- if parlist:
- self._execstmt = "execute %s (%s)" % (self._prepname, ",".join(parlist))
- elif parmap:
- self._execstmt = "execute %s (%s)" % (self._prepname, ",".join(parord))
- else:
- self._execstmt = "execute %s" % (self._prepname)
-
- self._prepared = stmt
-
- @property
- def prepared(self):
- """The query currently prepared."""
- return self._prepared
-
- def deallocate(self):
- """Deallocate the currently prepared statement."""
- if self._prepared is not None:
- self.execute("deallocate " + self._prepname)
- self._prepared = None
- self._execstmt = None
-
- def execute(self, stmt=None, args=None):
- if stmt is None or stmt == self._prepared:
- stmt = self._execstmt
- elif not isinstance(stmt, str):
- args = stmt
- stmt = self._execstmt
-
- if stmt is None:
- raise psycopg2.ProgrammingError(
- "execute() with no query called without prepare"
- )
-
- return super(PreparingCursor, self).execute(stmt, args)
-
- def executemany(self, stmt, args=None):
- if args is None:
- args = stmt
- stmt = self._execstmt
-
- if stmt is None:
- raise psycopg2.ProgrammingError(
- "executemany() with no query called without prepare"
- )
- else:
- if stmt != self._prepared:
- self.prepare(stmt)
-
- return super(PreparingCursor, self).executemany(self._execstmt, args)
-
- def close(self):
- if not self.closed and not self.connection.closed and self._prepared:
- self.deallocate()
-
- return super(PreparingCursor, self).close()
diff --git a/api/app/vendors/rest.py b/api/app/vendors/rest.py
deleted file mode 100644
index 0026883e..00000000
--- a/api/app/vendors/rest.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import json
-from datetime import date, datetime
-
-from flask import Response
-
-
-def json_serial(obj):
- """JSON serializer for objects not serializable by default json code"""
-
- if isinstance(obj, (datetime, date)):
- return obj.isoformat()
-
- raise TypeError("Type %s not serializable" % type(obj))
-
-
-def response(status_code, message=None, data=None):
- """Response data helper
-
- Arguments:
- status_code {int} -- http status code
-
- Keyword Arguments:
- message {string} -- response message (default: {None})
- data {dict} -- data to be appended to response (default: {None})
-
- Returns:
- dict -- response data
- """
- success_status = {
- 200: "OK",
- 201: "Created",
- 202: "Accepted",
- 204: "No Content",
- 304: "Not modified",
- }
-
- failure_status = {
- 400: "Internal error occurred - unexpected error caused by request data",
- 401: "Unauthorized operation",
- 403: "Forbidden",
- 404: "Not Found",
- 405: "Method Not Allowed, for example, resource doesn't support DELETE method",
- 406: "Method Not Acceptable",
- 409: "Conflict",
- 422: "Unprocessable Entity",
- 423: "Locked",
- 426: "Upgrade Required",
- 500: "Internal Server Error",
- 501: "Not Implemented - functionality is not implemented on the server side",
- 503: "Service is unavailable",
- }
-
- status = {}
- status["code"] = status_code
-
- if status_code in success_status:
- count = 0
- if type(data) is list:
- count = len(data)
- if type(data) is dict:
- count = 1
- status["count"] = count
- status["data"] = data if data else None
- status["status"] = "success"
- status["message"] = message if message else success_status[status_code]
- elif status_code in failure_status:
- status["status"] = "error"
- status["message"] = message if message else failure_status[status_code]
- else:
- status["status"] = "error"
- status["message"] = message if message else failure_status[400]
-
- response = Response(
- response=json.dumps(status, default=json_serial),
- status=status_code,
- mimetype="application/json",
- )
-
- return response
diff --git a/api/autoapp.py b/api/autoapp.py
deleted file mode 100644
index cfff8c97..00000000
--- a/api/autoapp.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Create an application instance."""
-from app import create_app
-
-app = create_app()
diff --git a/api/clippy.toml b/api/clippy.toml
new file mode 100644
index 00000000..1877592e
--- /dev/null
+++ b/api/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.68.0"
diff --git a/api/config.example.toml b/api/config.example.toml
new file mode 100644
index 00000000..3a649fe1
--- /dev/null
+++ b/api/config.example.toml
@@ -0,0 +1,11 @@
+brokers = [ "10.0.0.1:9092", "10.0.0.2:9092", "10.0.0.3:9092" ]
+
+[servers]
+
+[servers.master]
+notify = [ "slave1", "slave2" ]
+acl = [ "slave1", "slave2" ]
+
+[servers.slave]
+master = [ "master1" ]
+acl = [ "master1" ]
diff --git a/api/config.example.yml b/api/config.example.yml
deleted file mode 100644
index e4f5b9d5..00000000
--- a/api/config.example.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# list of knot servers
-
-# The value here follows the knotc command, not knot.conf
-knot_servers:
- master:
- notify:
- - slave1
- - slave2
- acl:
- - slave1
- - slave2
- slave:
- master:
- - master1
- acl:
- - master1
-
-
-# list of kafka brokers
-brokers:
- - "10.0.0.1:9092"
- - "10.0.0.2:9092"
- - "10.0.0.3:9092"
diff --git a/api/db/schema.sql b/api/db/schema.sql
new file mode 100644
index 00000000..c3e9df43
--- /dev/null
+++ b/api/db/schema.sql
@@ -0,0 +1,59 @@
+-- This schema is currently used for CockroachDB v2.0
+CREATE TABLE "user" (
+ id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ email STRING NOT NULL,
+ created_at TIMESTAMP NULL DEFAULT current_timestamp():::TIMESTAMP
+);
+
+CREATE TABLE "type" (
+ id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ "type" STRING NULL
+);
+
+
+CREATE TABLE ttl (
+ id INT NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ ttl STRING NULL
+);
+
+CREATE TABLE zone (
+ id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ zone STRING NULL,
+ is_committed BOOL NULL,
+ user_id INT8 NOT NULL REFERENCES "user" (id) ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+CREATE TABLE record (
+ id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ owner STRING NULL,
+ zone_id INT8 NOT NULL REFERENCES zone (id) ON DELETE CASCADE ON UPDATE CASCADE,
+ type_id INT8 NOT NULL REFERENCES "type" (id) ON DELETE CASCADE ON UPDATE CASCADE,
+ ttl_id INT8 NOT NULL REFERENCES ttl (id) ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+CREATE TABLE rdata (
+ id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
+ rdata STRING NULL,
+ record_id INT8 NOT NULL REFERENCES record (id) ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+INSERT INTO "type" (id, "type") VALUES
+ (1, 'SOA'),
+ (2, 'SRV'),
+ (3, 'A'),
+ (4, 'NS'),
+ (5, 'CNAME'),
+ (6, 'MX'),
+ (7, 'AAAA'),
+ (8, 'TXT');
+
+INSERT INTO ttl (id, ttl) VALUES
+ (1, '86400'),
+ (2, '43200'),
+ (3, '28800'),
+ (4, '14400'),
+ (5, '7200'),
+ (6, '3600'),
+ (7, '1800'),
+ (8, '900'),
+ (9, '300');
diff --git a/api/docker-compose.example.yml b/api/docker-compose.example.yml
deleted file mode 100644
index b5deb4f3..00000000
--- a/api/docker-compose.example.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-version: '3'
-services:
- restknot-api:
- image: biznetgio/restknot-api:0.7.12
- # depends_on:
- # - kafka1
- ports:
- - '5000:5000'
- environment:
- - TZ=Asia/Jakarta
- - FLASK_APP=autoapp.py
- - FLASK_ENV=production
- - FLASK_DEBUG=0
-
- # server
- - APP_HOST=0.0.0.0 # default 0.0.0.0
- - APP_PORT=5000 # default 8000
-
- - DB_NAME=knotdb # don't add whitespace beetween =
- - DB_HOST=roach
- - DB_PORT=26257
- - DB_USER=root
- - DB_SSL=disable
-
- # topic to suscribe
- - RESTKNOT_KAFKA_TOPIC=domaindata
-
- # don't add quote, otherwise it will not work
- #
- - DEFAULT_SOA_RDATA=one.dns.id. hostmaster.dns.id. 3600 1800 604800 86400
- - DEFAULT_NS=one.dns.id. two.dns.id.
-
- - RESTKNOT_API_KEY=123
- # set config file location. `config.yml` is the default
- # - RESTKNOT_CONFIG_FILE=
-
- # default timeout:30
- command: gunicorn 'autoapp:app' -c 'gunicorn.conf.py' --timeout 60
diff --git a/api/gunicorn.conf.py b/api/gunicorn.conf.py
deleted file mode 100644
index 353e7e46..00000000
--- a/api/gunicorn.conf.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import multiprocessing
-import os
-
-
-def max_workers():
- return multiprocessing.cpu_count() * 2 + 1
-
-
-host = os.environ.get("APP_HOST", "0.0.0.0")
-port = os.environ.get("APP_PORT", "8000")
-
-bind = f"{host}:{port}"
-workers = max_workers()
diff --git a/api/justfile b/api/justfile
index 6b9631c8..188df07a 100644
--- a/api/justfile
+++ b/api/justfile
@@ -1,7 +1,10 @@
#!/usr/bin/env -S just --justfile
+shebang := if os() == 'windows' { 'powershell.exe' } else { '/usr/bin/sh' }
+
set dotenv-load := true
+alias d := dev
alias r := run
alias f := fmt
alias l := lint
@@ -11,42 +14,66 @@ alias t := test
_default:
just --list --unsorted
-# Setup the repository.
+# Setup the repository
setup:
- poetry shell && poetry install
+ sqlx --version || cargo install sqlx-cli --no-default-features --features postgres,native-tls
+ cargo watch --version || cargo install cargo-watch
+ cargo outdated --version || cargo install --locked cargo-outdated
+
+# Develop the app.
+dev:
+ cargo watch -x 'clippy --locked --all-targets --all-features'
# Develop the app.
run:
- flask run
+ cargo run
# Format the codebase.
fmt:
- poetry run black .
- poetry run isort .
- dprint fmt --config ../configs/dprint.json
+ cargo fmt --all
-# Check is the codebase properly formatted.
+# Check is the codebase is properly formatted.
fmt-check:
- poetry run black --check .
- dprint check --config ../configs/dprint.json
+ cargo fmt --all -- --check
# Lint the codebase.
lint:
- poetry run ruff .
+ cargo clippy --locked --all-targets --all-features
+
+# Check the documentation.
+_doc-check:
+ cargo doc --all-features --no-deps
-_test-unit:
- poetry run pytest -s tests/unit/
+# Run the unit tests.
+_unit-test:
+ cargo test --lib
# Test the codebase.
-test: _test-unit
- poetry run pytest -s tests/integration/
+test:
+ cargo test
+
+_update-sqlx-schema:
+ cargo sqlx prepare -- --lib
+
+_check-sqlx-schema:
+ cargo sqlx prepare --check -- --lib
+
+# reset the database schema.
+_reset-db:
+ sqlx database drop && sqlx database create
-# Tasks to make the code-base comply with the rules. Mostly used in git hooks.
-comply: fmt lint _test-unit
+# Tasks to make the code base comply with the rules. Mostly used in git hooks.
+comply: fmt lint test _doc-check _update-sqlx-schema
-# Check if the repository comply with the rules and ready to be pushed.
-check: fmt-check lint test
+# Check if the repository complies with the rules and is ready to be pushed.
+check: _check-sqlx-schema fmt-check lint test _doc-check
-# Check dependencies health.
-up:
- poetry show --outdated | grep --file=<(poetry show --tree | grep '^\w' | cut -d' ' -f1)
+# Check dependencies' health. Pass `--write` to upgrade dependencies.
+up arg="":
+ #!{{ shebang }}
+ if [ "{{ arg }}" = "--write" ]; then
+ cargo upgrade
+ cargo update
+ else
+ cargo outdated --root-deps-only
+ fi;
diff --git a/api/poetry.lock b/api/poetry.lock
deleted file mode 100644
index 0c05ac02..00000000
--- a/api/poetry.lock
+++ /dev/null
@@ -1,740 +0,0 @@
-[[package]]
-name = "aniso8601"
-version = "9.0.1"
-description = "A library for parsing ISO 8601 strings."
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.extras]
-dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"]
-
-[[package]]
-name = "attrs"
-version = "22.1.0"
-description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
-
-[[package]]
-name = "black"
-version = "22.10.0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "certifi"
-version = "2022.9.24"
-description = "Python package for providing Mozilla's CA Bundle."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "charset-normalizer"
-version = "2.1.1"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-category = "main"
-optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-unicode-backport = ["unicodedata2"]
-
-[[package]]
-name = "click"
-version = "8.1.3"
-description = "Composable command line interface toolkit"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-
-[[package]]
-name = "confluent-kafka"
-version = "1.7.0"
-description = "Confluent's Python client for Apache Kafka"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.extras]
-avro = ["avro (==1.10.0)", "avro-python3 (==1.10.0)", "fastavro (>=0.23.0,<1.0)", "fastavro (>=1.0)", "requests"]
-dev = ["avro (==1.10.0)", "avro-python3 (==1.10.0)", "fastavro (>=0.23.0,<1.0)", "fastavro (>=1.0)", "flake8", "pytest", "pytest (==4.6.4)", "pytest-timeout", "requests"]
-doc = ["avro (==1.10.0)", "avro-python3 (==1.10.0)", "fastavro (>=0.23.0,<1.0)", "fastavro (>=1.0)", "requests", "sphinx", "sphinx-rtd-theme"]
-json = ["jsonschema", "pyrsistent", "pyrsistent (==0.16.1)", "requests"]
-protobuf = ["protobuf", "requests"]
-schema-registry = ["requests"]
-
-[[package]]
-name = "environs"
-version = "9.5.0"
-description = "simplified environment variable parsing"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-marshmallow = ">=3.0.0"
-python-dotenv = "*"
-
-[package.extras]
-dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"]
-django = ["dj-database-url", "dj-email-url", "django-cache-url"]
-lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"]
-tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.0.0rc9"
-description = "Backport of PEP 654 (exception groups)"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "flask"
-version = "2.2.2"
-description = "A simple framework for building complex web applications."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-click = ">=8.0"
-itsdangerous = ">=2.0"
-Jinja2 = ">=3.0"
-Werkzeug = ">=2.2.2"
-
-[package.extras]
-async = ["asgiref (>=3.2)"]
-dotenv = ["python-dotenv"]
-
-[[package]]
-name = "flask-cors"
-version = "3.0.10"
-description = "A Flask extension adding a decorator for CORS support"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-Flask = ">=0.9"
-Six = "*"
-
-[[package]]
-name = "flask-restful"
-version = "0.3.9"
-description = "Simple framework for creating REST APIs"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-aniso8601 = ">=0.82"
-Flask = ">=0.8"
-pytz = "*"
-six = ">=1.3.0"
-
-[package.extras]
-docs = ["sphinx"]
-
-[[package]]
-name = "idna"
-version = "3.4"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "iniconfig"
-version = "1.1.1"
-description = "iniconfig: brain-dead simple config-ini parsing"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "itsdangerous"
-version = "2.1.2"
-description = "Safely pass data to untrusted environments and back."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "jinja2"
-version = "3.1.2"
-description = "A very fast and expressive template engine."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "libknot"
-version = "3.2.1"
-description = "Python bindings for libknot"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "markupsafe"
-version = "2.1.1"
-description = "Safely add untrusted strings to HTML/XML markup."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "marshmallow"
-version = "3.18.0"
-description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-packaging = ">=17.0"
-
-[package.extras]
-dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"]
-tests = ["pytest", "pytz", "simplejson"]
-
-[[package]]
-name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "packaging"
-version = "21.3"
-description = "Core utilities for Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
-
-[[package]]
-name = "pathspec"
-version = "0.10.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "platformdirs"
-version = "2.5.2"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
-test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
-
-[[package]]
-name = "pluggy"
-version = "1.0.0"
-description = "plugin and hook calling mechanisms for python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "psycopg2"
-version = "2.9.5"
-description = "psycopg2 - Python-PostgreSQL Database Adapter"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "pyparsing"
-version = "3.0.9"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-category = "main"
-optional = false
-python-versions = ">=3.6.8"
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
-[[package]]
-name = "pytest"
-version = "7.2.0"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-attrs = ">=19.2.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
-
-[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
-
-[[package]]
-name = "pytest-mock"
-version = "3.10.0"
-description = "Thin-wrapper around the mock package for easier use with pytest"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pytest = ">=5.0"
-
-[package.extras]
-dev = ["pre-commit", "pytest-asyncio", "tox"]
-
-[[package]]
-name = "python-dotenv"
-version = "0.21.0"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "pytz"
-version = "2022.5"
-description = "World timezone definitions, modern and historical"
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "pyyaml"
-version = "6.0"
-description = "YAML parser and emitter for Python"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "requests"
-version = "2.28.1"
-description = "Python HTTP for Humans."
-category = "main"
-optional = false
-python-versions = ">=3.7, <4"
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<1.27"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "ruff"
-version = "0.0.85"
-description = "An extremely fast Python linter, written in Rust."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "urllib3"
-version = "1.26.12"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
-socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
-
-[[package]]
-name = "werkzeug"
-version = "2.2.2"
-description = "The comprehensive WSGI web application library."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-MarkupSafe = ">=2.1.1"
-
-[package.extras]
-watchdog = ["watchdog"]
-
-[metadata]
-lock-version = "1.1"
-python-versions = "^3.10"
-content-hash = "da62de54d8f7b813b45ce036d0948e11b4f032262faaa540d08c5bc1404edb73"
-
-[metadata.files]
-aniso8601 = [
- {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
- {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
-]
-attrs = [
- {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
- {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
-]
-black = [
- {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
- {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
- {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
- {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
- {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
- {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
- {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
- {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
- {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
- {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
- {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
- {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
- {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
- {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
- {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
- {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
- {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
- {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
- {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
- {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
- {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
-]
-certifi = [
- {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
- {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
-]
-charset-normalizer = [
- {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
- {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
-]
-click = [
- {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
- {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
-]
-colorama = [
- {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
- {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-confluent-kafka = [
- {file = "confluent-kafka-1.7.0.tar.gz", hash = "sha256:80e01b4791513c27eded8517af847530dfdf04c43d99ff132ed9c3085933b75b"},
- {file = "confluent_kafka-1.7.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1246f3c674357630b078bbc76824eabea87ac5a9ca270886abca9c7f052381da"},
- {file = "confluent_kafka-1.7.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f98fa8982da1a960e6c1bfca49b235f8de45c8af83d6b741d78f96f346748488"},
- {file = "confluent_kafka-1.7.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:aa5f2905783b1a4e560e4172e228e2174a077090cbdf91a5448dd8deac02b2a9"},
- {file = "confluent_kafka-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2b4d8d53148a26f0cafcb42e9483f76473120bc091fa0ede497caf8cc8db6f88"},
- {file = "confluent_kafka-1.7.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:bbba1f144992fbd920cb10c7c2450e82fc8936e04272d36be3a3567bfbf768d4"},
- {file = "confluent_kafka-1.7.0-cp36-cp36m-win32.whl", hash = "sha256:f2628f3ebffe05d346f0456c566d5519a59bd0aa88179a9b7408c1808415c102"},
- {file = "confluent_kafka-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:8bb0d7e28deac58b234f7481184a60f743838c4e06309fbcca9484b93697c33b"},
- {file = "confluent_kafka-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aad712996e1465e806f7e027ad248b2474d2140a3985d5f7789a5ff68e5dba8a"},
- {file = "confluent_kafka-1.7.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:415c23e7ccf948e50de616191febd4ec299b1d748ae0abdab3888f0ec0915ea9"},
- {file = "confluent_kafka-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:bc2ad89e6cc4e05c5855dfbee2838a699861943ab3ea62ff2b914d72fcd1a6c6"},
- {file = "confluent_kafka-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:11173733e0540a98e493c91a05686ba4e777883c2cda756d47848fce84e06b30"},
- {file = "confluent_kafka-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5d9c75822c0b1cb7787fc60a78b3f249bfd56b3a692dd079d9d7510ffefe2c99"},
- {file = "confluent_kafka-1.7.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ee3f33077e3534b33cec9825843cd705ede458c585cfab2a052813391fb73291"},
- {file = "confluent_kafka-1.7.0-cp38-cp38-win32.whl", hash = "sha256:5e044e5c5fce78c87aedd56dbd7bd5c046dbf7a0bc9a0eff32229766be8808a5"},
- {file = "confluent_kafka-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:c0b3fc70c31f636562464e905c2b75a2705d3d53bb4687fd48b574dee2a7fa51"},
- {file = "confluent_kafka-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1e8e7770eaf2f6df0a3620f0bfc5dc2293e6ca3ac1e14c4babe6fefc03f50e18"},
- {file = "confluent_kafka-1.7.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33c32de2357ddcd3f8a98a96591c69c7ada76215e051ed5dbb17b763921f376a"},
- {file = "confluent_kafka-1.7.0-cp39-cp39-win32.whl", hash = "sha256:955de681f2bc7241d580ebb43d7516f825950518bfaf2c8e6bc3c88d22be4f08"},
- {file = "confluent_kafka-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:9f5ff838f2ca87e467aa992f9fcb8bbdd222097690fe6b15aa733025a1613532"},
-]
-environs = [
- {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"},
- {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"},
-]
-exceptiongroup = [
- {file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"},
- {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"},
-]
-flask = [
- {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},
- {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"},
-]
-flask-cors = [
- {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"},
- {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"},
-]
-flask-restful = [
- {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"},
- {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"},
-]
-idna = [
- {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
- {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
-]
-iniconfig = [
- {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
- {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
-]
-itsdangerous = [
- {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
- {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
-]
-jinja2 = [
- {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
- {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
-]
-libknot = [
- {file = "libknot-3.2.1.tar.gz", hash = "sha256:fe72f293df008dedd922dd2dfd530d9e2d7d8c7c7c2179d071d7e4c51ffaf0cc"},
-]
-markupsafe = [
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
- {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
-]
-marshmallow = [
- {file = "marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"},
- {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"},
-]
-mypy-extensions = [
- {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
- {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
-]
-packaging = [
- {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
- {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
-pathspec = [
- {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"},
- {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"},
-]
-platformdirs = [
- {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
- {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
-]
-pluggy = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
-]
-psycopg2 = [
- {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"},
- {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"},
- {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"},
- {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"},
- {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"},
- {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"},
- {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"},
- {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"},
- {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"},
- {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"},
- {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
-]
-pyparsing = [
- {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
- {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
-]
-pytest = [
- {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
- {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
-]
-pytest-mock = [
- {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"},
- {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"},
-]
-python-dotenv = [
- {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"},
- {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"},
-]
-pytz = [
- {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"},
- {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"},
-]
-pyyaml = [
- {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
- {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
- {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
- {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
- {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
- {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
- {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
- {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
- {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
- {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
- {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
- {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
- {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
- {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
- {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
- {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
- {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
- {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
-]
-requests = [
- {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
- {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
-]
-ruff = [
- {file = "ruff-0.0.85-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:136130136ccc7e3cd0d6c9cf1d4f32b30e10a672868e946aef0bb8573356cdd5"},
- {file = "ruff-0.0.85-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:aed9c50a5bc92d4053f7e78479d9c9fe9e2b143a40f145902d0f14453a98c7a4"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76deeb7ce5986a2ccc745a26085c2366b8641e4b305580a7139bf3d82d7b8e0c"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2ac1bfb375bed8452efa333ad32237ec6b8fea4f8ab4fa4a71716d6e7c9f7598"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7e39ffcfb1fa36e5a4307603b4c74898831e87f65dfb5884e61302477107990"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:706d77843603817bf26fb1a726c9d0919089fe8fa7062e6d321e2d06acf31788"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9148058c379ca654f54f5dd24fcb79a1a19bf2c8c4e01cd92a1766ae81a7be28"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4515727c8a2b7fc77a1c1e647ca29d51672af88ce0cdf4744322df826900a606"},
- {file = "ruff-0.0.85-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3be4e90c7513d3f0c7b2abd2321ce37eec6353bdde1cbc5d2b56dd8f6b542fd5"},
- {file = "ruff-0.0.85-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:47c3245b30e1cca4a3fbd6d51e44ff2b70199d4bcd852c983dfda670566c23f0"},
- {file = "ruff-0.0.85-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4cf537227636de45b451c998e7f6795f14dd5dfb9c5984a2c1ce0f4b96e3794f"},
- {file = "ruff-0.0.85-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d3cc360b15fd06e1e0c8ff943bd6daaee2ea21af7e42c4cbb6bcae1cb9996325"},
- {file = "ruff-0.0.85-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:37434afe1eb5db9cdccf16230a4f368da08a0b604cb9b2132262ad79b896309c"},
- {file = "ruff-0.0.85-py3-none-win32.whl", hash = "sha256:c86cbf37e8e81bf142a07aa1c4c9df09cfc724f4119ebe58908c1c329059ccc7"},
- {file = "ruff-0.0.85-py3-none-win_amd64.whl", hash = "sha256:0d7025ea7329d32b2c29cc97fce00c36be7e6bbb87359dab4e84c3d176328c7d"},
- {file = "ruff-0.0.85.tar.gz", hash = "sha256:83e28714091f463f582898bfd12ac2279c4b8481d7ca4518d5275b5dfe9ca86e"},
-]
-six = [
- {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
- {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-tomli = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
-urllib3 = [
- {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
- {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
-]
-werkzeug = [
- {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"},
- {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"},
-]
diff --git a/api/pyproject.toml b/api/pyproject.toml
deleted file mode 100644
index 30b18823..00000000
--- a/api/pyproject.toml
+++ /dev/null
@@ -1,39 +0,0 @@
-[tool.poetry]
-name = "api"
-version = "0.7.12"
-description = "RESTKnot API"
-authors = ["Azzam S.A "]
-license = "MIT"
-readme = "README.md"
-
-[tool.poetry.dependencies]
-python = "^3.10"
-confluent-kafka = "1.7.0"
-environs = "9.5.0"
-Flask = "2.2.2"
-Flask-Cors = "3.0.10"
-Flask-RESTful = "0.3.9"
-libknot = "3.2.1"
-psycopg2 = "2.9.5"
-python-dotenv = "0.21.0"
-PyYAML = "6.0"
-requests = "2.28.1"
-
-[tool.poetry.group.dev.dependencies]
-pytest = "^7.2.0"
-pytest-mock = "^3.10.0"
-black = "^22.10.0"
-ruff = "^0.0.85"
-
-[tool.isort]
-profile = "black"
-known_first_party = "app"
-
-[tool.ruff]
-ignore = [
- "E501",
-]
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
diff --git a/api/schema.sql b/api/schema.sql
deleted file mode 100644
index 6fa1b1ee..00000000
--- a/api/schema.sql
+++ /dev/null
@@ -1,58 +0,0 @@
-CREATE TABLE "user" (
- id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- email STRING NOT NULL,
- created_at TIMESTAMP NULL DEFAULT current_timestamp():::TIMESTAMP
-);
-
-CREATE TABLE "type" (
- id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- "type" STRING NULL
-);
-
-
-CREATE TABLE ttl (
- id INT NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- ttl STRING NULL
-);
-
-CREATE TABLE zone (
- id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- zone STRING NULL,
- is_committed BOOL NULL,
- user_id INT8 NOT NULL REFERENCES "user" (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE record (
- id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- owner STRING NULL,
- zone_id INT8 NOT NULL REFERENCES zone (id) ON DELETE CASCADE ON UPDATE CASCADE,
- type_id INT8 NOT NULL REFERENCES "type" (id) ON DELETE CASCADE ON UPDATE CASCADE,
- ttl_id INT8 NOT NULL REFERENCES ttl (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE rdata (
- id INT8 NOT NULL PRIMARY KEY DEFAULT unique_rowid(),
- rdata STRING NULL,
- record_id INT8 NOT NULL REFERENCES record (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-INSERT INTO "type" (id, "type") VALUES
- (1, 'SOA'),
- (2, 'SRV'),
- (3, 'A'),
- (4, 'NS'),
- (5, 'CNAME'),
- (6, 'MX'),
- (7, 'AAAA'),
- (8, 'TXT');
-
-INSERT INTO ttl (id, ttl) VALUES
- (1, '86400'),
- (2, '43200'),
- (3, '28800'),
- (4, '14400'),
- (5, '7200'),
- (6, '3600'),
- (7, '1800'),
- (8, '900'),
- (9, '300');
diff --git a/api/sqlx-data.json b/api/sqlx-data.json
new file mode 100644
index 00000000..95c8c858
--- /dev/null
+++ b/api/sqlx-data.json
@@ -0,0 +1,3 @@
+{
+ "db": "PostgreSQL"
+}
\ No newline at end of file
diff --git a/api/src/config.rs b/api/src/config.rs
new file mode 100644
index 00000000..14f9042b
--- /dev/null
+++ b/api/src/config.rs
@@ -0,0 +1,158 @@
+use std::{fmt, str::FromStr};
+
+use dotenv;
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use crate::Error;
+
+const ENV_APP_ENV: &str = "APP_ENV";
+const ENV_APP_BASE_URL: &str = "APP_BASE_URL";
+const ENV_HTTP_PORT: &str = "PORT";
+const ENV_DATABASE_URL: &str = "DATABASE_URL";
+const ENV_DATABASE_POOL_SIZE: &str = "DATABASE_POOL_SIZE";
+const ENV_CONFIG_LOCATION: &str = "CONFIG_LOCATION";
+const ENV_UTC_OFFSET_HOUR: &str = "UTC_OFFSET_HOUR";
+
+const POSTGRES_SCHEME: &str = "postgres";
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Config {
+ pub env: Env,
+ pub base_url: String,
+ pub config_location: String,
+ pub utc_offset_hour: i8,
+ pub http: Http,
+ pub database: Database,
+}
+
+const APP_ENV_DEV: &str = "dev";
+const APP_ENV_STAGING: &str = "staging";
+const APP_ENV_PRODUCTION: &str = "production";
+
+#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum Env {
+ Dev,
+ Staging,
+ Production,
+}
+
+impl FromStr for Env {
+ type Err = Error;
+
+ fn from_str(s: &str) -> Result {
+ match s {
+ APP_ENV_DEV => Ok(Env::Dev),
+ APP_ENV_STAGING => Ok(Env::Staging),
+ APP_ENV_PRODUCTION => Ok(Env::Production),
+ _ => Err(Error::InvalidArgument(format!(
+ "config: {} is not a valid env. Valid values are [{}, {}, {}]",
+ s,
+ Env::Dev,
+ Env::Staging,
+ Env::Production,
+ ))),
+ }
+ }
+}
+
+impl fmt::Display for Env {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Env::Dev => write!(f, "{APP_ENV_DEV}"),
+ Env::Staging => write!(f, "{APP_ENV_STAGING}"),
+ Env::Production => write!(f, "{APP_ENV_PRODUCTION}"),
+ }
+ }
+}
+
+/// Database contains the data necessary to connect to a database
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Database {
+ pub url: String,
+ pub pool_size: u32,
+}
+const DEFAULT_DATABASE_POOL_SIZE: u32 = 100;
+
+/// Http contains the data specific to the HTTP(s) server
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Http {
+ pub port: u16,
+ // pub https_domain: String,
+ // pub https_port: u16,
+}
+const DEFAULT_HTTP_PORT: u16 = 8000;
+// const ENV_HTTPS_DOMAIN: &str = "HTTPS_DOMAIN";
+// const ENV_HTTPS_PORT: &str = "HTTPS_PORT";
+// const DEFAULT_HTTPS_CERT_DIRECTORY: &str = "certs";
+
+impl Config {
+ /// Load and validate the configuration from the environment.
+ /// If an error is found while parsing the values, or validating the data, an error is returned.
+ pub fn load() -> Result {
+ dotenv::dotenv().ok();
+
+ // app
+ let env = std::env::var(ENV_APP_ENV)
+ .map_err(|_| env_not_found(ENV_APP_ENV))?
+ .parse::()?;
+ let base_url =
+ std::env::var(ENV_APP_BASE_URL).map_err(|_| env_not_found(ENV_APP_BASE_URL))?;
+ let utc_offset_hour =
+ std::env::var(ENV_UTC_OFFSET_HOUR).map_err(|_| env_not_found(ENV_UTC_OFFSET_HOUR))?;
+ let utc_offset_hour: i8 = utc_offset_hour.parse()?;
+ let config_location =
+ std::env::var(ENV_CONFIG_LOCATION).map_err(|_| env_not_found(ENV_CONFIG_LOCATION))?;
+
+ // http
+ let http_port = std::env::var(ENV_HTTP_PORT)
+ .ok()
+ .map_or(Ok(DEFAULT_HTTP_PORT), |env_val| env_val.parse::())?;
+
+ let http = Http { port: http_port };
+
+ // database
+ let database_url =
+ std::env::var(ENV_DATABASE_URL).map_err(|_| env_not_found(ENV_DATABASE_URL))?;
+ let database_pool_size = std::env::var(ENV_DATABASE_POOL_SIZE)
+ .ok()
+ .map_or(Ok(DEFAULT_DATABASE_POOL_SIZE), |pool_size_str| {
+ pool_size_str.parse::()
+ })?;
+
+ let database = Database {
+ url: database_url,
+ pool_size: database_pool_size,
+ };
+
+ let mut config = Self {
+ base_url,
+ config_location,
+ utc_offset_hour,
+ env,
+ http,
+ database,
+ };
+
+ config.clean_and_validate()?;
+
+ Ok(config)
+ }
+
+ fn clean_and_validate(&mut self) -> Result<(), Error> {
+ // Database
+ let database_url = Url::parse(&self.database.url)?;
+ if database_url.scheme() != POSTGRES_SCHEME {
+ return Err(Error::InvalidArgument(String::from(
+ "config: database_url is not a valid postgres URL",
+ )));
+ }
+
+ Ok(())
+ }
+}
+
+fn env_not_found(var: &str) -> Error {
+ Error::NotFound(format!("config: {var} env var not found"))
+}
diff --git a/api/src/context.rs b/api/src/context.rs
new file mode 100644
index 00000000..af6dfe35
--- /dev/null
+++ b/api/src/context.rs
@@ -0,0 +1,13 @@
+use std::sync::Arc;
+
+use crate::{health, meta, record, rtype, ttl, user};
+
+#[derive(Clone)]
+pub struct ServerContext {
+ pub health_service: Arc,
+ pub meta_service: Arc,
+ pub ttl_service: Arc,
+ pub rtype_service: Arc,
+ pub user_service: Arc,
+ pub record_service: Arc,
+}
diff --git a/api/src/db/mod.rs b/api/src/db/mod.rs
new file mode 100644
index 00000000..5dad93c2
--- /dev/null
+++ b/api/src/db/mod.rs
@@ -0,0 +1,29 @@
+use std::time::Duration;
+
+use sqlx::{self, postgres::PgPoolOptions, Executor, Pool, Postgres, Transaction};
+
+use crate::{config, Error};
+
+pub type DB = Pool;
+pub trait Queryer<'c>: Executor<'c, Database = sqlx::Postgres> {}
+
+impl<'c> Queryer<'c> for &Pool {}
+impl<'c> Queryer<'c> for &'c mut Transaction<'_, Postgres> {}
+
+pub async fn connect(database: &config::Database) -> Result {
+ // See https://www.alexedwards.net/blog/configuring-sqldb
+ // and https://making.pusher.com/production-ready-connection-pooling-in-go
+ // for the details
+ // ret.SetMaxOpenConns(int(poolSize))
+ // ret.SetMaxIdleConns(int(poolSize / 2))
+ // ret.SetConnMaxLifetime(30 * time.Minute)
+ PgPoolOptions::new()
+ .max_connections(database.pool_size)
+ .max_lifetime(Duration::from_secs(30 * 60)) // 30 mins
+ .connect(&database.url)
+ .await
+ .map_err(|err| {
+ tracing::error!("{}", err);
+ err.into()
+ })
+}
diff --git a/api/src/errors/app.rs b/api/src/errors/app.rs
new file mode 100644
index 00000000..490e32ae
--- /dev/null
+++ b/api/src/errors/app.rs
@@ -0,0 +1,81 @@
+#[derive(Debug)]
+pub enum Error {
+ // Other
+ Internal,
+
+ // Rtype
+ RtypeNotFound,
+ RtypeAlreadyExists,
+
+ // Ttl
+ TtlNotFound,
+ TtlAlreadyExists,
+
+ // User
+ UserNotFound,
+ UsernameAlreadyExists,
+
+ // Record
+ RecordNotFound,
+ RecordAlreadyExists,
+
+ // Rdata
+ RdataNotFound,
+ RdataAlreadyExists,
+
+ // Zone
+ ZoneNotFound,
+ ZoneAlreadyExists,
+}
+
+impl std::convert::From for crate::Error {
+ fn from(err: Error) -> Self {
+ match err {
+ // Ttl
+ Error::TtlNotFound => crate::Error::NotFound(String::from("Ttl not found")),
+ Error::TtlAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("Ttl time is already in use"))
+ }
+
+ // Rtype
+ Error::RtypeNotFound => crate::Error::NotFound(String::from("Record type not found")),
+ Error::RtypeAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("Record type time is already in use"))
+ }
+
+ // User
+ Error::UserNotFound => crate::Error::NotFound(String::from("user not found")),
+ Error::UsernameAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("username is already in use"))
+ }
+
+ // Record
+ Error::RecordNotFound => crate::Error::NotFound(String::from("record not found")),
+ Error::RecordAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("Recordname is already exists"))
+ }
+
+ // Rdata
+ Error::RdataNotFound => crate::Error::NotFound(String::from("rdata not found")),
+ Error::RdataAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("Recordname is already exists"))
+ }
+
+ // Zone
+ Error::ZoneNotFound => crate::Error::NotFound(String::from("Zone not found")),
+ Error::ZoneAlreadyExists => {
+ crate::Error::AlreadyExists(String::from("Zone is already exists"))
+ }
+
+ // Other
+ Error::Internal => crate::Error::Internal(String::new()),
+ }
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(_err: sqlx::Error) -> Self {
+ // Not found error should be catched manually
+ Error::Internal
+ }
+}
diff --git a/api/src/errors/mod.rs b/api/src/errors/mod.rs
new file mode 100644
index 00000000..9d7d13ec
--- /dev/null
+++ b/api/src/errors/mod.rs
@@ -0,0 +1,110 @@
+pub mod app;
+
+use axum::{
+ http::StatusCode,
+ response::{IntoResponse, Response},
+ Json,
+};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, thiserror::Error)]
+pub enum Error {
+ #[error("Internal error")]
+ Internal(String),
+
+ #[error("{0}")]
+ NotFound(String),
+
+ #[error("{0}")]
+ PermissionDenied(String),
+
+ #[error("{0}")]
+ InvalidArgument(String),
+
+ #[error("{0}")]
+ AlreadyExists(String),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+struct ErrorResponse {
+ message: String,
+}
+
+// Tell axum how to convert `AppError` into a response.
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let status_code = match self {
+ Error::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, // 500
+ Error::NotFound(_) => StatusCode::NOT_FOUND, // 404
+ Error::InvalidArgument(_) => StatusCode::BAD_REQUEST, // 400
+ Error::AlreadyExists(_) => StatusCode::CONFLICT, // 409
+ Error::PermissionDenied(_) => StatusCode::FORBIDDEN, // 403
+ };
+
+ let err_response = ErrorResponse {
+ message: format!("{}", &self),
+ };
+
+ (status_code, Json(err_response)).into_response()
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: sqlx::Error) -> Self {
+ match err {
+ sqlx::Error::RowNotFound => Error::NotFound("not found".into()),
+ _ => Error::Internal(err.to_string()),
+ }
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: std::net::AddrParseError) -> Self {
+ Error::Internal(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: std::env::VarError) -> Self {
+ match err {
+ std::env::VarError::NotPresent => Error::NotFound("env var not found".into()),
+ _ => Error::Internal(err.to_string()),
+ }
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: hyper::Error) -> Self {
+ Error::Internal(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: axum::http::Error) -> Self {
+ Error::Internal(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: tracing_subscriber::filter::ParseError) -> Self {
+ Error::Internal(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: tracing_subscriber::filter::FromEnvError) -> Self {
+ Error::Internal(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: std::num::ParseIntError) -> Self {
+ Error::InvalidArgument(err.to_string())
+ }
+}
+
+impl std::convert::From for Error {
+ fn from(err: url::ParseError) -> Self {
+ Error::InvalidArgument(format!("url is not valid: {err}"))
+ }
+}
diff --git a/api/src/health/entities.rs b/api/src/health/entities.rs
new file mode 100644
index 00000000..897f1036
--- /dev/null
+++ b/api/src/health/entities.rs
@@ -0,0 +1,4 @@
+#[derive(Debug)]
+pub struct Health {
+ pub status: String,
+}
diff --git a/api/src/health/mod.rs b/api/src/health/mod.rs
new file mode 100644
index 00000000..6a2b3e24
--- /dev/null
+++ b/api/src/health/mod.rs
@@ -0,0 +1,7 @@
+mod service;
+
+// public
+pub mod entities;
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/health/model/mod.rs b/api/src/health/model/mod.rs
new file mode 100644
index 00000000..511cdd22
--- /dev/null
+++ b/api/src/health/model/mod.rs
@@ -0,0 +1,22 @@
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::health::entities;
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Health {
+ pub status: String,
+}
+
+impl From for Health {
+ fn from(health: entities::Health) -> Self {
+ Self {
+ status: health.status,
+ }
+ }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct HealthResponse {
+ pub data: Health,
+}
diff --git a/api/src/health/query.rs b/api/src/health/query.rs
new file mode 100644
index 00000000..9d928e98
--- /dev/null
+++ b/api/src/health/query.rs
@@ -0,0 +1,27 @@
+use axum::{Extension, Json};
+use std::sync::Arc;
+
+use crate::context::ServerContext;
+use crate::health::model;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::health::model::HealthResponse;
+
+#[utoipa::path(
+ get,
+ path = "/health",
+ responses(
+ (status = 200, description = "Get health information", body = HealthResponse),
+ ),
+)]
+pub async fn health(
+ ctx: Extension>,
+) -> Result, crate::Error> {
+ let health = ctx.health_service.get_health().await?;
+
+ let response = model::HealthResponse {
+ data: health.into(),
+ };
+ Ok(Json(response))
+}
diff --git a/api/src/health/service/get_health.rs b/api/src/health/service/get_health.rs
new file mode 100644
index 00000000..482d031f
--- /dev/null
+++ b/api/src/health/service/get_health.rs
@@ -0,0 +1,11 @@
+use super::Service;
+use crate::health::entities;
+
+impl Service {
+ pub async fn get_health(&self) -> Result {
+ let health = entities::Health {
+ status: "running".to_string(),
+ };
+ Ok(health)
+ }
+}
diff --git a/api/src/health/service/mod.rs b/api/src/health/service/mod.rs
new file mode 100644
index 00000000..e43dc985
--- /dev/null
+++ b/api/src/health/service/mod.rs
@@ -0,0 +1,16 @@
+mod get_health;
+
+#[derive(Debug)]
+pub struct Service;
+
+impl Service {
+ pub fn new() -> Self {
+ Self {}
+ }
+}
+
+impl Default for Service {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/api/src/lib.rs b/api/src/lib.rs
new file mode 100644
index 00000000..024fd1c4
--- /dev/null
+++ b/api/src/lib.rs
@@ -0,0 +1,15 @@
+pub mod config;
+pub mod context;
+pub mod db;
+mod errors;
+pub mod health;
+pub mod logger;
+pub mod meta;
+pub mod record;
+pub mod routes;
+pub mod rtype;
+pub mod scalar;
+pub mod ttl;
+pub mod user;
+
+pub use errors::Error;
diff --git a/api/src/logger.rs b/api/src/logger.rs
new file mode 100644
index 00000000..e86a86bd
--- /dev/null
+++ b/api/src/logger.rs
@@ -0,0 +1,37 @@
+use time::format_description::well_known::Rfc3339;
+use tracing_subscriber::{
+ filter::{self},
+ fmt::{layer, time::OffsetTime},
+ prelude::*,
+ registry,
+};
+
+use crate::config::{Config, Env};
+
+pub fn init(config: &Config) -> Result<(), crate::Error> {
+ let log_level = if config.env == Env::Production {
+ filter::LevelFilter::INFO
+ } else {
+ filter::LevelFilter::DEBUG
+ };
+
+ let env_filter = filter::EnvFilter::builder()
+ .with_default_directive(log_level.into())
+ .from_env()?
+ .add_directive("sqlx::query=error".parse()?)
+ .add_directive("hyper=warn".parse()?)
+ .add_directive("reqwest=warn".parse()?);
+
+ let utc_offset_hour = config.utc_offset_hour;
+ let fmt_layer = layer()
+ .with_target(true)
+ .with_timer(OffsetTime::new(
+ time::UtcOffset::from_hms(utc_offset_hour, 0, 0).unwrap_or(time::UtcOffset::UTC),
+ Rfc3339,
+ ))
+ .with_filter(env_filter);
+
+ registry().with(fmt_layer).init();
+
+ Ok(())
+}
diff --git a/api/src/main.rs b/api/src/main.rs
new file mode 100644
index 00000000..3c2ade79
--- /dev/null
+++ b/api/src/main.rs
@@ -0,0 +1,24 @@
+use std::{
+ net::{IpAddr, SocketAddr},
+ sync::Arc,
+};
+
+use api::{config::Config, logger, routes::app};
+use axum::Server;
+
+#[tokio::main]
+async fn main() -> Result<(), api::Error> {
+ let config = Arc::new(Config::load()?);
+ logger::init(&config)?;
+
+ let app = app().await?;
+
+ let host: IpAddr = config.base_url.parse()?;
+ let port = config.http.port;
+ let address = &SocketAddr::new(host, port);
+
+ tracing::info!("App started at `{}`", address);
+ Server::bind(address).serve(app.into_make_service()).await?;
+
+ Ok(())
+}
diff --git a/api/src/meta/entities.rs b/api/src/meta/entities.rs
new file mode 100644
index 00000000..227eec6f
--- /dev/null
+++ b/api/src/meta/entities.rs
@@ -0,0 +1,34 @@
+use serde::Deserialize;
+
+#[derive(Debug)]
+pub struct Version {
+ pub build: String,
+ pub version: String,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct Config {
+ pub brokers: Vec,
+ pub servers: Servers,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Servers {
+ pub master: Master,
+ pub slave: Slave,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Master {
+ pub notify: Vec,
+ pub acl: Vec,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Slave {
+ pub master: Vec,
+ pub acl: Vec,
+}
diff --git a/api/src/meta/mod.rs b/api/src/meta/mod.rs
new file mode 100644
index 00000000..6a2b3e24
--- /dev/null
+++ b/api/src/meta/mod.rs
@@ -0,0 +1,7 @@
+mod service;
+
+// public
+pub mod entities;
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/meta/model/mod.rs b/api/src/meta/model/mod.rs
new file mode 100644
index 00000000..fcfda174
--- /dev/null
+++ b/api/src/meta/model/mod.rs
@@ -0,0 +1,89 @@
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::meta::entities;
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Version {
+ pub build: String,
+ pub version: String,
+}
+
+impl From for Version {
+ fn from(meta: entities::Version) -> Self {
+ Self {
+ build: meta.build,
+ version: meta.version,
+ }
+ }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct VersionResponse {
+ pub data: Version,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Config {
+ pub brokers: Vec,
+ pub servers: Servers,
+}
+
+impl From for Config {
+ fn from(c: entities::Config) -> Self {
+ Self {
+ brokers: c.brokers,
+ servers: c.servers.into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Servers {
+ pub master: Master,
+ pub slave: Slave,
+}
+
+impl From for Servers {
+ fn from(s: entities::Servers) -> Self {
+ Self {
+ master: s.master.into(),
+ slave: s.slave.into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Master {
+ pub notify: Vec,
+ pub acl: Vec,
+}
+
+impl From for Master {
+ fn from(m: entities::Master) -> Self {
+ Self {
+ acl: m.acl,
+ notify: m.notify,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Slave {
+ pub master: Vec,
+ pub acl: Vec,
+}
+
+impl From for Slave {
+ fn from(s: entities::Slave) -> Self {
+ Self {
+ master: s.master,
+ acl: s.acl,
+ }
+ }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct ConfigResponse {
+ pub data: Config,
+}
diff --git a/api/src/meta/query.rs b/api/src/meta/query.rs
new file mode 100644
index 00000000..f2171b7d
--- /dev/null
+++ b/api/src/meta/query.rs
@@ -0,0 +1,41 @@
+use axum::{Extension, Json};
+use std::sync::Arc;
+
+use crate::context::ServerContext;
+use crate::meta::model;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::meta::model::{Config, Version};
+
+#[utoipa::path(
+ get,
+ path = "/meta/version",
+ responses(
+ (status = 200, description = "Get meta information", body = VersionResponse),
+ ),
+)]
+pub async fn version(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::VersionResponse>, crate::Error> {
+ let meta = ctx.meta_service.get_version().await?;
+
+ let response = model::VersionResponse { data: meta.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ get,
+ path = "/meta/config",
+ responses(
+ (status = 200, description = "Get config information", body = ConfigResponse),
+ ),
+)]
+pub async fn config(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::ConfigResponse>, crate::Error> {
+ let meta = ctx.meta_service.get_config().await?;
+
+ let response = model::ConfigResponse { data: meta.into() };
+ Ok(Json(response))
+}
diff --git a/api/src/meta/service/get_config.rs b/api/src/meta/service/get_config.rs
new file mode 100644
index 00000000..552e112c
--- /dev/null
+++ b/api/src/meta/service/get_config.rs
@@ -0,0 +1,14 @@
+use std::fs;
+
+use super::Service;
+use crate::{meta::entities, Error};
+
+impl Service {
+ pub async fn get_config(&self) -> Result<entities::Config, Error> {
+ let content = fs::read_to_string(&self.config.config_location)
+ .map_err(|_| Error::NotFound("Configuration is not found".into()))?;
+ let config = toml::from_str(&content)
+ .map_err(|_| Error::InvalidArgument("Invalid configuration file".into()))?;
+ Ok(config)
+ }
+}
diff --git a/api/src/meta/service/get_version.rs b/api/src/meta/service/get_version.rs
new file mode 100644
index 00000000..f5f96816
--- /dev/null
+++ b/api/src/meta/service/get_version.rs
@@ -0,0 +1,12 @@
+use super::Service;
+use crate::meta::entities;
+
+impl Service {
+ pub async fn get_version(&self) -> Result<entities::Version, crate::Error> {
+ let meta = entities::Version {
+ build: option_env!("VCS_REVISION").unwrap_or("unknown").to_string(),
+ version: env!("CARGO_PKG_VERSION").to_string(),
+ };
+ Ok(meta)
+ }
+}
diff --git a/api/src/meta/service/mod.rs b/api/src/meta/service/mod.rs
new file mode 100644
index 00000000..01c5fc2e
--- /dev/null
+++ b/api/src/meta/service/mod.rs
@@ -0,0 +1,16 @@
+mod get_config;
+mod get_version;
+
+use std::sync::Arc;
+
+use crate::config::Config;
+
+pub struct Service {
+ config: Arc<Config>,
+}
+
+impl Service {
+ pub fn new(config: Arc<Config>) -> Self {
+ Self { config }
+ }
+}
diff --git a/api/src/record/entities.rs b/api/src/record/entities.rs
new file mode 100644
index 00000000..10501826
--- /dev/null
+++ b/api/src/record/entities.rs
@@ -0,0 +1,53 @@
+use sqlx;
+
+use crate::scalar::Id;
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Record {
+ /// The ID of the record.
+ pub id: Id,
+ /// The zone name
+ pub zone: Zone,
+ /// The record owner
+ pub owner: String,
+ /// The record type
+ pub rtype: String,
+ /// The record data
+ pub rdata: String,
+ /// The time to live
+ pub ttl: i32,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+// We have to adopt the legacy design
+pub struct RecordEntity {
+ pub id: Id,
+ pub owner: String,
+ pub zone_id: Id,
+ // Previous database already use `type` instead of `rtype`
+ pub type_id: Id,
+ pub ttl_id: Id,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Zone {
+ pub id: Id,
+ pub name: String,
+ pub is_committed: bool,
+ pub user_id: Id,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Rdata {
+ pub id: Id,
+ pub data: String,
+ pub record_id: Id,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct CreateRecord {
+ pub owner: String,
+ pub zone_id: Id,
+ pub rtype_id: Id,
+ pub ttl_id: Id,
+}
diff --git a/api/src/record/mod.rs b/api/src/record/mod.rs
new file mode 100644
index 00000000..a37ea9e7
--- /dev/null
+++ b/api/src/record/mod.rs
@@ -0,0 +1,8 @@
+mod entities;
+mod repository;
+mod service;
+
+// public
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/record/model/input.rs b/api/src/record/model/input.rs
new file mode 100644
index 00000000..b2d6665f
--- /dev/null
+++ b/api/src/record/model/input.rs
@@ -0,0 +1,37 @@
+use axum_typed_multipart::TryFromMultipart;
+
+use crate::scalar::Id;
+
+#[derive(TryFromMultipart)]
+pub struct CreateRecordInput {
+ /// The zone name
+ pub zone: String,
+ /// The record owner
+ pub owner: String,
+ /// The record type
+ pub rtype: String,
+ /// The record data
+ pub rdata: String,
+ /// The time to live
+ pub ttl: i32,
+}
+
+#[derive(TryFromMultipart)]
+pub struct UpdateRecordInput {
+ /// The zone name
+ pub zone: String,
+ /// The record owner
+ pub owner: String,
+ /// The record type
+ pub rtype: String,
+ /// The record data
+ pub rdata: String,
+ /// The time to live
+ pub ttl: i32,
+}
+
+#[derive(TryFromMultipart)]
+pub struct DeleteRecordInput {
+ /// The ID of the record to modify.
+ pub id: Id,
+}
diff --git a/api/src/record/model/mod.rs b/api/src/record/model/mod.rs
new file mode 100644
index 00000000..a8feb027
--- /dev/null
+++ b/api/src/record/model/mod.rs
@@ -0,0 +1,81 @@
+pub mod input;
+
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::{record::entities, scalar::Id};
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Record {
+ /// The ID of the record.
+ pub id: Id,
+ /// The zone name
+ pub zone: Zone,
+ /// The record owner
+ pub owner: String,
+ /// The record type
+ pub rtype: String,
+ /// The record data
+ pub rdata: String,
+ /// The time to live
+ pub ttl: i32,
+}
+
+impl From<entities::Record> for Record {
+ fn from(record: entities::Record) -> Self {
+ Self {
+ id: record.id,
+ zone: record.zone.into(),
+ owner: record.owner,
+ rtype: record.rtype,
+ rdata: record.rdata,
+ ttl: record.ttl,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Zone {
+ pub id: Id,
+ pub name: String,
+ pub is_committed: bool,
+ pub user_id: Id,
+}
+
+impl From<entities::Zone> for Zone {
+ fn from(zone: entities::Zone) -> Self {
+ Self {
+ id: zone.id,
+ name: zone.name,
+ is_committed: zone.is_committed,
+ user_id: zone.user_id,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Rdata {
+ pub id: Id,
+ pub rdata: String,
+ pub record_id: Id,
+}
+
+impl From<entities::Rdata> for Rdata {
+ fn from(rdata: entities::Rdata) -> Self {
+ Self {
+ id: rdata.id,
+ rdata: rdata.data,
+ record_id: rdata.record_id,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct RecordResponse {
+ pub data: Record,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct RecordsResponse {
+ pub data: Vec<Record>,
+}
diff --git a/api/src/record/query.rs b/api/src/record/query.rs
new file mode 100644
index 00000000..08c30247
--- /dev/null
+++ b/api/src/record/query.rs
@@ -0,0 +1,80 @@
+use std::sync::Arc;
+
+use axum::response::IntoResponse;
+use axum::{extract::Path, http::StatusCode, response::Response, Extension, Json};
+use axum_typed_multipart::TypedMultipart;
+
+use super::model::input;
+use super::{model, service};
+use crate::context::ServerContext;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::record::model::Record;
+
+#[utoipa::path(
+ get,
+ path = "/record/list",
+ responses(
+ (status = 200, description = "List all records", body = RecordsResponse),
+ ),
+)]
+pub async fn list(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::RecordsResponse>, crate::Error> {
+ let records = ctx.record_service.find_records().await?;
+
+ let records = records.into_iter().map(|t| t.into()).collect();
+ let response = model::RecordsResponse { data: records };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ get,
+ path = "/record/list/:id",
+ responses(
+ (status = 200, description = "Get a record by id", body = RecordResponse),
+ ),
+)]
+pub async fn get(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<crate::scalar::Id>,
+) -> Result<Json<model::RecordResponse>, crate::Error> {
+ let record = ctx.record_service.find_record(id).await?;
+
+ let response = model::RecordResponse {
+ data: record.into(),
+ };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ post,
+ path = "/record/add",
+ responses(
+ (status = 201, description = "Create new record", body = RecordResponse),
+ ),
+)]
+pub async fn post(
+ ctx: Extension<Arc<ServerContext>>,
+ TypedMultipart(input::CreateRecordInput {
+ zone,
+ owner,
+ rtype,
+ rdata,
+ ttl,
+ }): TypedMultipart<input::CreateRecordInput>,
+) -> Result<Response, crate::Error> {
+ let input = service::CreateRecordInput {
+ zone,
+ owner,
+ rtype,
+ rdata,
+ ttl,
+ };
+ let record = ctx.record_service.create_record(input).await?;
+ let response = model::RecordResponse {
+ data: record.into(),
+ };
+ Ok((StatusCode::CREATED, Json(response)).into_response())
+}
diff --git a/api/src/record/repository/create_record.rs b/api/src/record/repository/create_record.rs
new file mode 100644
index 00000000..639f368b
--- /dev/null
+++ b/api/src/record/repository/create_record.rs
@@ -0,0 +1,29 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities};
+
+impl Repository {
+ pub async fn create_record<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ record: &entities::CreateRecord,
+ ) -> Result<entities::RecordEntity, Error> {
+ const QUERY: &str = "insert into record (owner, zone_id, rtype_id, ttl_id) values ($1, $2, $3, $4) returning *";
+
+ match sqlx::query_as::<_, entities::RecordEntity>(QUERY)
+ .bind(&record.owner)
+ .bind(record.zone_id)
+ .bind(record.rtype_id)
+ .bind(record.ttl_id)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(record) => Ok(record),
+ }
+ }
+}
diff --git a/api/src/record/repository/find_all_records.rs b/api/src/record/repository/find_all_records.rs
new file mode 100644
index 00000000..8a442780
--- /dev/null
+++ b/api/src/record/repository/find_all_records.rs
@@ -0,0 +1,24 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities};
+
+impl Repository {
+ pub async fn find_all_records<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ ) -> Result<Vec<entities::Record>, Error> {
+ const QUERY: &str = "select * from record ORDER BY id";
+
+ match sqlx::query_as::<_, entities::Record>(QUERY)
+ .fetch_all(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(res) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/record/repository/find_rdata_by_id.rs b/api/src/record/repository/find_rdata_by_id.rs
new file mode 100644
index 00000000..6e161ee5
--- /dev/null
+++ b/api/src/record/repository/find_rdata_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities, scalar::Id};
+
+impl Repository {
+ pub async fn find_rdata_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::Rdata, Error> {
+ const QUERY: &str = "SELECT * FROM rdata WHERE id = $1";
+
+ match sqlx::query_as::<_, entities::Rdata>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::RdataNotFound),
+ Ok(Some(rdata)) => Ok(rdata),
+ }
+ }
+}
diff --git a/api/src/record/repository/find_record_by_id.rs b/api/src/record/repository/find_record_by_id.rs
new file mode 100644
index 00000000..40bc8a36
--- /dev/null
+++ b/api/src/record/repository/find_record_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities, scalar::Id};
+
+impl Repository {
+ pub async fn find_record_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::RecordEntity, Error> {
+ const QUERY: &str = "SELECT * FROM record WHERE id = $1";
+
+ match sqlx::query_as::<_, entities::RecordEntity>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::RecordNotFound),
+ Ok(Some(record)) => Ok(record),
+ }
+ }
+}
diff --git a/api/src/record/repository/find_zone_by_id.rs b/api/src/record/repository/find_zone_by_id.rs
new file mode 100644
index 00000000..25c7f6e7
--- /dev/null
+++ b/api/src/record/repository/find_zone_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities, scalar::Id};
+
+impl Repository {
+ pub async fn find_zone_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::Zone, Error> {
+ const QUERY: &str = "SELECT * FROM zone WHERE id = $1";
+
+ match sqlx::query_as::<_, entities::Zone>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::ZoneNotFound),
+ Ok(Some(zone)) => Ok(zone),
+ }
+ }
+}
diff --git a/api/src/record/repository/find_zone_by_name.rs b/api/src/record/repository/find_zone_by_name.rs
new file mode 100644
index 00000000..74d30256
--- /dev/null
+++ b/api/src/record/repository/find_zone_by_name.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, record::entities};
+
+impl Repository {
+ pub async fn find_zone_by_name<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ name: String,
+ ) -> Result<entities::Zone, Error> {
+ const QUERY: &str = "SELECT * FROM zone WHERE name = $1";
+
+ match sqlx::query_as::<_, entities::Zone>(QUERY)
+ .bind(name)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::ZoneNotFound),
+ Ok(Some(zone)) => Ok(zone),
+ }
+ }
+}
diff --git a/api/src/record/repository/mod.rs b/api/src/record/repository/mod.rs
new file mode 100644
index 00000000..d47f405b
--- /dev/null
+++ b/api/src/record/repository/mod.rs
@@ -0,0 +1,21 @@
+mod create_record;
+mod find_all_records;
+mod find_rdata_by_id;
+mod find_record_by_id;
+mod find_zone_by_id;
+mod find_zone_by_name;
+
+#[derive(Debug, Clone)]
+pub struct Repository {}
+
+impl Repository {
+ pub fn new() -> Repository {
+ Repository {}
+ }
+}
+
+impl Default for Repository {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/api/src/record/service/create_record.rs b/api/src/record/service/create_record.rs
new file mode 100644
index 00000000..b63ecbeb
--- /dev/null
+++ b/api/src/record/service/create_record.rs
@@ -0,0 +1,29 @@
+use super::{CreateRecordInput, Service};
+use crate::record::entities;
+
+impl Service {
+ pub async fn create_record(
+ &self,
+ input: CreateRecordInput,
+ ) -> Result<entities::Record, crate::Error> {
+ // let recordname_exists = self.check_record_exists(&self.db, &input.email).await?;
+ // if recordname_exists {
+ // return Err(Error::RecordAlreadyExists.into());
+ // }
+
+ let zone = self.repo.find_zone_by_name(&self.db, input.zone).await?;
+ let rtype = self.rtype_service.find_rtype_by_type(&input.rtype).await?;
+ let ttl = self.ttl_service.find_ttl_by_time(input.ttl).await?;
+
+ let record_input = entities::CreateRecord {
+ owner: input.owner,
+ zone_id: zone.id,
+ rtype_id: rtype.id,
+ ttl_id: ttl.id,
+ };
+
+ let record = self.repo.create_record(&self.db, &record_input).await?;
+ let record = self.find_record(record.id).await?;
+ Ok(record)
+ }
+}
diff --git a/api/src/record/service/find_record.rs b/api/src/record/service/find_record.rs
new file mode 100644
index 00000000..665919c8
--- /dev/null
+++ b/api/src/record/service/find_record.rs
@@ -0,0 +1,22 @@
+use super::Service;
+use crate::{record::entities, scalar::Id};
+
+impl Service {
+ pub async fn find_record(&self, id: Id) -> Result<entities::Record, crate::Error> {
+ let record = self.repo.find_record_by_id(&self.db, id).await?;
+ let rdata = self.repo.find_rdata_by_id(&self.db, record.id).await?;
+ let zone = self.repo.find_zone_by_id(&self.db, record.zone_id).await?;
+ let rtype = self.rtype_service.find_rtype(record.type_id).await?;
+ let ttl = self.ttl_service.find_ttl(record.ttl_id).await?;
+
+ let record = entities::Record {
+ id: record.id,
+ zone,
+ owner: record.owner,
+ rtype: rtype.rtype,
+ rdata: rdata.data,
+ ttl: ttl.time,
+ };
+ Ok(record)
+ }
+}
diff --git a/api/src/record/service/find_records.rs b/api/src/record/service/find_records.rs
new file mode 100644
index 00000000..c381c971
--- /dev/null
+++ b/api/src/record/service/find_records.rs
@@ -0,0 +1,19 @@
+use super::Service;
+use crate::record::entities;
+
+impl Service {
+ pub async fn find_records(&self) -> Result<Vec<entities::Record>, crate::Error> {
+ let records = self.repo.find_all_records(&self.db).await?;
+ let records: Result<Vec<entities::Record>, _> = records
+ .into_iter()
+ .map(entities::Record::try_from)
+ .collect();
+
+ match records.ok() {
+ None => Err(crate::Error::InvalidArgument(
+ "failed to convert record".into(),
+ )),
+ Some(records) => Ok(records),
+ }
+ }
+}
diff --git a/api/src/record/service/mod.rs b/api/src/record/service/mod.rs
new file mode 100644
index 00000000..a6ad172b
--- /dev/null
+++ b/api/src/record/service/mod.rs
@@ -0,0 +1,46 @@
+mod create_record;
+mod find_record;
+mod find_records;
+
+use std::sync::Arc;
+
+use crate::{db::DB, record::repository::Repository, rtype, scalar::Id, ttl};
+
+#[derive(Debug)]
+pub struct Service {
+ repo: Repository,
+ pub db: DB,
+ rtype_service: Arc<rtype::Service>,
+ ttl_service: Arc<ttl::Service>,
+}
+
+impl Service {
+ pub fn new(db: DB, rtype_service: Arc<rtype::Service>, ttl_service: Arc<ttl::Service>) -> Self {
+ let repo = Repository::new();
+ Self {
+ db,
+ repo,
+ rtype_service,
+ ttl_service,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct CreateRecordInput {
+ pub zone: String,
+ pub owner: String,
+ pub rtype: String,
+ pub rdata: String,
+ pub ttl: i32,
+}
+
+#[derive(Debug)]
+pub struct UpdateRecordInput {
+ pub id: Id,
+ pub zone: String,
+ pub owner: String,
+ pub rtype: String,
+ pub rdata: String,
+ pub ttl: i32,
+}
diff --git a/api/src/routes.rs b/api/src/routes.rs
new file mode 100644
index 00000000..7e0f5a15
--- /dev/null
+++ b/api/src/routes.rs
@@ -0,0 +1,96 @@
+use std::sync::Arc;
+
+use axum::{
+ routing::{delete, get, post, put},
+ Extension, Router,
+};
+use utoipa::OpenApi;
+use utoipa_swagger_ui::SwaggerUi;
+
+use crate::{config, context::ServerContext, db, health, meta, record, rtype, ttl, user};
+
+pub async fn app() -> Result<Router, crate::Error> {
+ let config = Arc::new(config::Config::load()?);
+ let db = db::connect(&config.database).await?;
+
+ let health_service = Arc::new(health::Service::new());
+ let meta_service = Arc::new(meta::Service::new(config.clone()));
+ let ttl_service = Arc::new(ttl::Service::new(db.clone()));
+ let rtype_service = Arc::new(rtype::Service::new(db.clone()));
+ let user_service = Arc::new(user::Service::new(db.clone()));
+ let record_service = Arc::new(record::Service::new(
+ db,
+ rtype_service.clone(),
+ ttl_service.clone(),
+ ));
+ let server_context = Arc::new(ServerContext {
+ health_service,
+ meta_service,
+ ttl_service,
+ rtype_service,
+ user_service,
+ record_service,
+ });
+
+ #[derive(OpenApi)]
+ #[openapi(
+ paths(
+ meta::query::version,
+ meta::query::config,
+ health::query::health,
+ ttl::query::list, ttl::query::get, ttl::query::post, ttl::query::put, ttl::query::delete,
+ rtype::query::list, rtype::query::get, rtype::query::post, rtype::query::put, rtype::query::delete,
+ user::query::list, user::query::get, user::query::post, user::query::put, user::query::delete,
+ record::query::list, record::query::get, record::query::post
+ ),
+ components(schemas(
+ meta::model::Version, meta::model::VersionResponse, meta::model::Config, meta::model::ConfigResponse,
+ health::model::Health, health::model::HealthResponse,
+ ttl::model::Ttl, ttl::model::TtlsResponse, ttl::model::TtlResponse,
+ rtype::model::Rtype, rtype::model::RtypesResponse, rtype::model::RtypeResponse,
+ user::model::User, user::model::UsersResponse, user::model::UserResponse,
+ record::model::Record, record::model::RecordsResponse, record::model::RecordResponse,
+ )),
+ tags(
+ (name = "RESTKnot", description = "RESTKnot REST API")
+ )
+ )]
+ struct ApiDoc;
+
+ let mut app = Router::new()
+ .route("/health", get(health::query::health))
+ // meta
+ .route("/meta/version", get(meta::query::version))
+ .route("/meta/config", get(meta::query::config))
+ // ttl
+ .route("/ttl/list", get(ttl::query::list))
+ .route("/ttl/list/:id", get(ttl::query::get))
+ .route("/ttl/add", post(ttl::query::post))
+ .route("/ttl/edit/:id", put(ttl::query::put))
+ .route("/ttl/delete/:id", delete(ttl::query::delete))
+ // rtype
+ .route("/type/list", get(rtype::query::list))
+ .route("/type/list/:id", get(rtype::query::get))
+ .route("/type/add", post(rtype::query::post))
+ .route("/type/edit/:id", put(rtype::query::put))
+ .route("/type/delete/:id", delete(rtype::query::delete))
+ // user
+ .route("/user/list", get(user::query::list))
+ .route("/user/list/:id", get(user::query::get))
+ .route("/user/add", post(user::query::post))
+ .route("/user/edit/:id", put(user::query::put))
+ .route("/user/delete/:id", delete(user::query::delete));
+ // record
+ // .route("/record/list", get(record::query::list))
+ // .route("/record/list/:id", get(record::query::get))
+ // .route("/record/add", post(record::query::post))
+ // .route("/record/edit/:id", put(record::query::put))
+ // .route("/record/delete/:id", delete(record::query::delete));
+
+ if config.env != config::Env::Production {
+ app = app.merge(SwaggerUi::new("/swagger").url("/api-doc/openapi.json", ApiDoc::openapi()));
+ }
+ let app = app.layer(Extension(server_context));
+
+ Ok(app)
+}
diff --git a/api/src/rtype/entities.rs b/api/src/rtype/entities.rs
new file mode 100644
index 00000000..49155040
--- /dev/null
+++ b/api/src/rtype/entities.rs
@@ -0,0 +1,14 @@
+use sqlx;
+
+use crate::scalar::Id;
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Rtype {
+ pub id: Id,
+ pub rtype: String,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct CreateRtype {
+ pub rtype: String,
+}
diff --git a/api/src/rtype/mod.rs b/api/src/rtype/mod.rs
new file mode 100644
index 00000000..a37ea9e7
--- /dev/null
+++ b/api/src/rtype/mod.rs
@@ -0,0 +1,8 @@
+mod entities;
+mod repository;
+mod service;
+
+// public
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/rtype/model/input.rs b/api/src/rtype/model/input.rs
new file mode 100644
index 00000000..598c45a7
--- /dev/null
+++ b/api/src/rtype/model/input.rs
@@ -0,0 +1,21 @@
+use axum_typed_multipart::TryFromMultipart;
+
+use crate::scalar::Id;
+
+#[derive(TryFromMultipart)]
+pub struct CreateRtypeInput {
+ /// The record type
+ pub rtype: String,
+}
+
+#[derive(TryFromMultipart)]
+pub struct UpdateRtypeInput {
+ /// The record type
+ pub rtype: String,
+}
+
+#[derive(TryFromMultipart)]
+pub struct DeleteRtypeInput {
+ /// The ID of the Trtype to modify.
+ pub id: Id,
+}
diff --git a/api/src/rtype/model/mod.rs b/api/src/rtype/model/mod.rs
new file mode 100644
index 00000000..11288a9e
--- /dev/null
+++ b/api/src/rtype/model/mod.rs
@@ -0,0 +1,34 @@
+pub mod input;
+
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::rtype::entities;
+use crate::scalar::Id;
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Rtype {
+ /// The ID of the rtype.
+ pub id: Id,
+ /// The time for the rtype.
+ pub rtype: String,
+}
+
+impl From<entities::Rtype> for Rtype {
+ fn from(t: entities::Rtype) -> Self {
+ Self {
+ id: t.id,
+ rtype: t.rtype,
+ }
+ }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct RtypeResponse {
+ pub data: Rtype,
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct RtypesResponse {
+ pub data: Vec<Rtype>,
+}
diff --git a/api/src/rtype/query.rs b/api/src/rtype/query.rs
new file mode 100644
index 00000000..8f44e6b4
--- /dev/null
+++ b/api/src/rtype/query.rs
@@ -0,0 +1,97 @@
+use std::sync::Arc;
+
+use axum::response::IntoResponse;
+use axum::{extract::Path, http::StatusCode, response::Response, Extension, Json};
+use axum_typed_multipart::TypedMultipart;
+
+use super::model::input;
+use super::{model, service};
+use crate::context::ServerContext;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::rtype::model::Rtype;
+
+#[utoipa::path(
+ get,
+ path = "/rtype/list",
+ responses(
+ (status = 200, description = "List all rtypes", body = RtypesResponse),
+ ),
+)]
+pub async fn list(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::RtypesResponse>, crate::Error> {
+ let rtypes = ctx.rtype_service.find_rtypes().await?;
+
+ let rtypes = rtypes.into_iter().map(|t| t.into()).collect();
+ let response = model::RtypesResponse { data: rtypes };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ get,
+ path = "/rtype/list/:id",
+ responses(
+ (status = 200, description = "Get a rtype by id", body = RtypeResponse),
+ ),
+)]
+pub async fn get(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<crate::scalar::Id>,
+) -> Result<Json<model::RtypeResponse>, crate::Error> {
+ let rtype = ctx.rtype_service.find_rtype(id).await?;
+
+ let response = model::RtypeResponse { data: rtype.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ post,
+ path = "/rtype/add",
+ responses(
+ (status = 201, description = "Create new rtype", body = RtypeResponse),
+ ),
+)]
+pub async fn post(
+ ctx: Extension<Arc<ServerContext>>,
+ TypedMultipart(input::CreateRtypeInput { rtype }): TypedMultipart<input::CreateRtypeInput>,
+) -> Result<Response, crate::Error> {
+ let input = service::CreateRtypeInput { rtype };
+ let rtype = ctx.rtype_service.create_rtype(input).await?;
+ let response = model::RtypeResponse { data: rtype.into() };
+ Ok((StatusCode::CREATED, Json(response)).into_response())
+}
+
+#[utoipa::path(
+ put,
+ path = "/rtype/edit/:id",
+ responses(
+ (status = 200, description = "Update a rtype", body = RtypeResponse),
+ ),
+)]
+pub async fn put(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<crate::scalar::Id>,
+ TypedMultipart(input::UpdateRtypeInput { rtype }): TypedMultipart<input::UpdateRtypeInput>,
+) -> Result<Json<model::RtypeResponse>, crate::Error> {
+ let input = service::UpdateRtypeInput { id, rtype };
+ let rtype = ctx.rtype_service.update_rtype(input).await?;
+ let response = model::RtypeResponse { data: rtype.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ delete,
+ path = "/rtype/delete/:id",
+ responses(
+ (status = 204, description = "Delete a rtype", body = RtypeResponse),
+ ),
+)]
+pub async fn delete(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<crate::scalar::Id>,
+) -> Result<StatusCode, crate::Error> {
+ ctx.rtype_service.delete_rtype(id).await?;
+ Ok(StatusCode::NO_CONTENT)
+}
diff --git a/api/src/rtype/repository/create_rtype.rs b/api/src/rtype/repository/create_rtype.rs
new file mode 100644
index 00000000..32643f8a
--- /dev/null
+++ b/api/src/rtype/repository/create_rtype.rs
@@ -0,0 +1,26 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, rtype::entities};
+
+impl Repository {
+ pub async fn create_rtype<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ rtype: &entities::CreateRtype,
+ ) -> Result<entities::Rtype, Error> {
+ const QUERY: &str = "insert into type (type) values ($1) returning *";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .bind(&rtype.rtype)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(rtype) => Ok(rtype),
+ }
+ }
+}
diff --git a/api/src/rtype/repository/delete_rtype.rs b/api/src/rtype/repository/delete_rtype.rs
new file mode 100644
index 00000000..cc8ff8a5
--- /dev/null
+++ b/api/src/rtype/repository/delete_rtype.rs
@@ -0,0 +1,26 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, rtype::entities, scalar::Id};
+
+impl Repository {
+ pub async fn delete_rtype<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::Rtype, Error> {
+ const QUERY: &str = "delete from type where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .bind(id)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(rtype) => Ok(rtype),
+ }
+ }
+}
diff --git a/api/src/rtype/repository/find_all_rtypes.rs b/api/src/rtype/repository/find_all_rtypes.rs
new file mode 100644
index 00000000..07e6de29
--- /dev/null
+++ b/api/src/rtype/repository/find_all_rtypes.rs
@@ -0,0 +1,24 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, rtype::entities};
+
+impl Repository {
+ pub async fn find_all_rtypes<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ ) -> Result<Vec<entities::Rtype>, Error> {
+ const QUERY: &str = "select * from type ORDER BY id";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .fetch_all(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(res) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/rtype/repository/find_rtype_by_id.rs b/api/src/rtype/repository/find_rtype_by_id.rs
new file mode 100644
index 00000000..6c6ef9b5
--- /dev/null
+++ b/api/src/rtype/repository/find_rtype_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, rtype::entities, scalar::Id};
+
+impl Repository {
+ pub async fn find_rtype_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::Rtype, Error> {
+ const QUERY: &str = "select * from type where id = $1";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::RtypeNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/rtype/repository/find_rtype_by_type.rs b/api/src/rtype/repository/find_rtype_by_type.rs
new file mode 100644
index 00000000..47924942
--- /dev/null
+++ b/api/src/rtype/repository/find_rtype_by_type.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, rtype::entities};
+
+impl Repository {
+ pub async fn find_rtype_by_type<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ rtype: &str,
+ ) -> Result<entities::Rtype, Error> {
+ const QUERY: &str = "select * from type where type = $1";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .bind(rtype)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::RtypeNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/rtype/repository/mod.rs b/api/src/rtype/repository/mod.rs
new file mode 100644
index 00000000..2cefde34
--- /dev/null
+++ b/api/src/rtype/repository/mod.rs
@@ -0,0 +1,21 @@
+mod create_rtype;
+mod delete_rtype;
+mod find_all_rtypes;
+mod find_rtype_by_id;
+mod find_rtype_by_type;
+mod update_rtype;
+
+#[derive(Debug, Clone)]
+pub struct Repository {}
+
+impl Repository {
+ pub fn new() -> Repository {
+ Repository {}
+ }
+}
+
+impl Default for Repository {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/api/src/rtype/repository/update_rtype.rs b/api/src/rtype/repository/update_rtype.rs
new file mode 100644
index 00000000..3f9455c3
--- /dev/null
+++ b/api/src/rtype/repository/update_rtype.rs
@@ -0,0 +1,29 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db, errors::app::Error, rtype::entities};
+
+impl Repository {
+ pub async fn update_rtype<'c, C: db::Queryer<'c>>(
+ &self,
+ db: C,
+ rtype: &entities::Rtype,
+ ) -> Result<entities::Rtype, Error> {
+ const QUERY: &str = "update type set
+ type = $2
+ where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::Rtype>(QUERY)
+ .bind(rtype.id)
+ .bind(&rtype.rtype)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(rtype) => Ok(rtype),
+ }
+ }
+}
diff --git a/api/src/rtype/service/check_rtype_exists.rs b/api/src/rtype/service/check_rtype_exists.rs
new file mode 100644
index 00000000..0683a2bc
--- /dev/null
+++ b/api/src/rtype/service/check_rtype_exists.rs
@@ -0,0 +1,18 @@
+use super::Service;
+use crate::{db::Queryer, errors::app::Error};
+
+impl Service {
+ /// returns true if a rtypename exists. false otherwise
+ pub async fn check_rtype_exists<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ rtype: &str,
+ ) -> Result<bool, crate::Error> {
+ let find_existing_rtype = self.repo.find_rtype_by_type(db, rtype).await;
+ match find_existing_rtype {
+ Ok(_) => Ok(true),
+ Err(Error::RtypeNotFound) => Ok(false),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
diff --git a/api/src/rtype/service/create_rtype.rs b/api/src/rtype/service/create_rtype.rs
new file mode 100644
index 00000000..18d8b79b
--- /dev/null
+++ b/api/src/rtype/service/create_rtype.rs
@@ -0,0 +1,20 @@
+use super::{CreateRtypeInput, Service};
+use crate::{errors::app::Error, rtype::entities};
+
+impl Service {
+ pub async fn create_rtype(
+ &self,
+ input: CreateRtypeInput,
+ ) -> Result<entities::Rtype, crate::Error> {
+ let rtype_exists = self.check_rtype_exists(&self.db, &input.rtype).await?;
+ if rtype_exists {
+ return Err(Error::RtypeAlreadyExists.into());
+ }
+
+ let rtype_input = entities::CreateRtype { rtype: input.rtype };
+
+ let rtype = self.repo.create_rtype(&self.db, &rtype_input).await?;
+
+ Ok(rtype)
+ }
+}
diff --git a/api/src/rtype/service/delete_rtype.rs b/api/src/rtype/service/delete_rtype.rs
new file mode 100644
index 00000000..c2718df5
--- /dev/null
+++ b/api/src/rtype/service/delete_rtype.rs
@@ -0,0 +1,11 @@
+use super::Service;
+
+use crate::{rtype::entities, scalar::Id};
+
+impl Service {
+ pub async fn delete_rtype(&self, id: Id) -> Result<entities::Rtype, crate::Error> {
+ let rtype = self.repo.delete_rtype(&self.db, id).await?;
+
+ Ok(rtype)
+ }
+}
diff --git a/api/src/rtype/service/find_rtype.rs b/api/src/rtype/service/find_rtype.rs
new file mode 100644
index 00000000..3bda0ec2
--- /dev/null
+++ b/api/src/rtype/service/find_rtype.rs
@@ -0,0 +1,10 @@
+use super::Service;
+use crate::{rtype::entities, scalar::Id};
+
+impl Service {
+ pub async fn find_rtype(&self, id: Id) -> Result<entities::Rtype, crate::Error> {
+ let rtype = self.repo.find_rtype_by_id(&self.db, id).await?;
+
+ Ok(rtype)
+ }
+}
diff --git a/api/src/rtype/service/find_rtype_by_type.rs b/api/src/rtype/service/find_rtype_by_type.rs
new file mode 100644
index 00000000..21a195b5
--- /dev/null
+++ b/api/src/rtype/service/find_rtype_by_type.rs
@@ -0,0 +1,10 @@
+use super::Service;
+use crate::rtype::entities;
+
+impl Service {
+ pub async fn find_rtype_by_type(&self, rtype: &str) -> Result<entities::Rtype, crate::Error> {
+ let rtype = self.repo.find_rtype_by_type(&self.db, rtype).await?;
+
+ Ok(rtype)
+ }
+}
diff --git a/api/src/rtype/service/find_rtypes.rs b/api/src/rtype/service/find_rtypes.rs
new file mode 100644
index 00000000..7bef7fe3
--- /dev/null
+++ b/api/src/rtype/service/find_rtypes.rs
@@ -0,0 +1,17 @@
+use super::Service;
+use crate::rtype::entities;
+
+impl Service {
+ pub async fn find_rtypes(&self) -> Result<Vec<entities::Rtype>, crate::Error> {
+ let rtypes = self.repo.find_all_rtypes(&self.db).await?;
+ let rtypes: Result<Vec<entities::Rtype>, _> =
+ rtypes.into_iter().map(entities::Rtype::try_from).collect();
+
+ match rtypes.ok() {
+ None => Err(crate::Error::InvalidArgument(
+ "failed to convert rtype".into(),
+ )),
+ Some(rtypes) => Ok(rtypes),
+ }
+ }
+}
diff --git a/api/src/rtype/service/mod.rs b/api/src/rtype/service/mod.rs
new file mode 100644
index 00000000..ae09cea9
--- /dev/null
+++ b/api/src/rtype/service/mod.rs
@@ -0,0 +1,33 @@
+mod check_rtype_exists;
+mod create_rtype;
+mod delete_rtype;
+mod find_rtype;
+mod find_rtype_by_type;
+mod find_rtypes;
+mod update_rtype;
+
+use crate::{db::DB, rtype::repository::Repository, scalar::Id};
+
+#[derive(Debug)]
+pub struct Service {
+ repo: Repository,
+ pub db: DB,
+}
+
+impl Service {
+ pub fn new(db: DB) -> Self {
+ let repo = Repository::new();
+ Self { db, repo }
+ }
+}
+
+#[derive(Debug)]
+pub struct CreateRtypeInput {
+ pub rtype: String,
+}
+
+#[derive(Debug)]
+pub struct UpdateRtypeInput {
+ pub id: Id,
+ pub rtype: String,
+}
diff --git a/api/src/rtype/service/update_rtype.rs b/api/src/rtype/service/update_rtype.rs
new file mode 100644
index 00000000..ea3d629a
--- /dev/null
+++ b/api/src/rtype/service/update_rtype.rs
@@ -0,0 +1,23 @@
+use super::{Service, UpdateRtypeInput};
+use crate::{errors::app::Error, rtype::entities};
+
+impl Service {
+ pub async fn update_rtype(
+ &self,
+ input: UpdateRtypeInput,
+ ) -> Result<entities::Rtype, crate::Error> {
+ let rtypename_exists = self.check_rtype_exists(&self.db, &input.rtype).await?;
+ if rtypename_exists {
+ return Err(Error::RtypeAlreadyExists.into());
+ }
+
+ let rtype_input = entities::Rtype {
+ id: input.id,
+ rtype: input.rtype,
+ };
+
+ let rtype = self.repo.update_rtype(&self.db, &rtype_input).await?;
+
+ Ok(rtype)
+ }
+}
diff --git a/api/src/scalar.rs b/api/src/scalar.rs
new file mode 100644
index 00000000..e9063436
--- /dev/null
+++ b/api/src/scalar.rs
@@ -0,0 +1,4 @@
+use chrono::{self, NaiveDateTime};
+
+pub type Time = NaiveDateTime;
+pub type Id = i64;
diff --git a/api/src/ttl/entities.rs b/api/src/ttl/entities.rs
new file mode 100644
index 00000000..ccc600b7
--- /dev/null
+++ b/api/src/ttl/entities.rs
@@ -0,0 +1,31 @@
+use sqlx;
+
+use crate::scalar::Id;
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Ttl {
+ pub id: Id,
+ pub time: i32,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct CreateTtl {
+ pub time: i32,
+}
+
+/// A struct for a bridge to legacy database
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct TtlLegacy {
+ pub id: Id,
+ // we used `ttl` instead of `time` in the legacy database
+ pub ttl: String,
+}
+
+impl TryFrom<TtlLegacy> for Ttl {
+ type Error = crate::Error;
+
+ fn try_from(t: TtlLegacy) -> Result<Self, Self::Error> {
+ let time: i32 = t.ttl.parse()?;
+ Ok(Self { id: t.id, time })
+ }
+}
diff --git a/api/src/ttl/mod.rs b/api/src/ttl/mod.rs
new file mode 100644
index 00000000..a37ea9e7
--- /dev/null
+++ b/api/src/ttl/mod.rs
@@ -0,0 +1,8 @@
+mod entities;
+mod repository;
+mod service;
+
+// public
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/ttl/model/input.rs b/api/src/ttl/model/input.rs
new file mode 100644
index 00000000..279459a5
--- /dev/null
+++ b/api/src/ttl/model/input.rs
@@ -0,0 +1,20 @@
+use axum_typed_multipart::TryFromMultipart;
+
+use crate::scalar::Id;
+
+#[derive(TryFromMultipart)]
+pub struct CreateTtlInput {
+ /// The time for the ttl.
+ pub ttl: i32,
+}
+
+#[derive(TryFromMultipart)]
+pub struct UpdateTtlInput {
+ /// The time for the ttl.
+ pub ttl: i32,
+}
+
+#[derive(TryFromMultipart)]
+pub struct DeleteTtlInput {
+ pub id: Id,
+}
diff --git a/api/src/ttl/model/mod.rs b/api/src/ttl/model/mod.rs
new file mode 100644
index 00000000..39accd29
--- /dev/null
+++ b/api/src/ttl/model/mod.rs
@@ -0,0 +1,34 @@
+pub mod input;
+
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::scalar::Id;
+use crate::ttl::entities;
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct Ttl {
+ /// The ID of the ttl.
+ pub id: Id,
+ /// The time for the ttl.
+ pub time: i32,
+}
+
+impl From<entities::Ttl> for Ttl {
+ fn from(t: entities::Ttl) -> Self {
+ Self {
+ id: t.id,
+ time: t.time,
+ }
+ }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct TtlResponse {
+ pub data: Ttl,
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+pub struct TtlsResponse {
+ pub data: Vec<Ttl>,
+}
diff --git a/api/src/ttl/query.rs b/api/src/ttl/query.rs
new file mode 100644
index 00000000..edd34525
--- /dev/null
+++ b/api/src/ttl/query.rs
@@ -0,0 +1,97 @@
+use std::sync::Arc;
+
+use axum::response::IntoResponse;
+use axum::{extract::Path, http::StatusCode, response::Response, Extension, Json};
+use axum_typed_multipart::TypedMultipart;
+
+use super::model::input;
+use super::{model, service};
+use crate::context::ServerContext;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::ttl::model::Ttl;
+
+#[utoipa::path(
+ get,
+ path = "/ttl/list",
+ responses(
+ (status = 200, description = "List all ttls", body = TtlsResponse),
+ ),
+)]
+pub async fn list(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::TtlsResponse>, crate::Error> {
+ let ttls = ctx.ttl_service.find_ttls().await?;
+
+ let ttls = ttls.into_iter().map(|t| t.into()).collect();
+ let response = model::TtlsResponse { data: ttls };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ get,
+ path = "/ttl/list/:id",
+ responses(
+ (status = 200, description = "Get a ttl by id", body = TtlResponse),
+ ),
+)]
+pub async fn get(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+) -> Result<Json<model::TtlResponse>, crate::Error> {
+ let ttl = ctx.ttl_service.find_ttl(id).await?;
+
+ let response = model::TtlResponse { data: ttl.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ post,
+ path = "/ttl/add",
+ responses(
+ (status = 201, description = "Create new ttl", body = TtlResponse),
+ ),
+)]
+pub async fn post(
+ ctx: Extension<Arc<ServerContext>>,
+ TypedMultipart(input::CreateTtlInput { ttl }): TypedMultipart<input::CreateTtlInput>,
+) -> Result<Response, crate::Error> {
+ let input = service::CreateTtlInput { time: ttl };
+ let ttl = ctx.ttl_service.create_ttl(input).await?;
+ let response = model::TtlResponse { data: ttl.into() };
+ Ok((StatusCode::CREATED, Json(response)).into_response())
+}
+
+#[utoipa::path(
+ put,
+ path = "/ttl/edit/:id",
+ responses(
+ (status = 200, description = "Update a ttl", body = TtlResponse),
+ ),
+)]
+pub async fn put(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+ TypedMultipart(input::UpdateTtlInput { ttl }): TypedMultipart<input::UpdateTtlInput>,
+) -> Result<Json<model::TtlResponse>, crate::Error> {
+ let input = service::UpdateTtlInput { id, time: ttl };
+ let ttl = ctx.ttl_service.update_ttl(input).await?;
+ let response = model::TtlResponse { data: ttl.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ delete,
+ path = "/ttl/delete/:id",
+ responses(
+ (status = 204, description = "Delete a ttl", body = TtlResponse),
+ ),
+)]
+pub async fn delete(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+) -> Result<StatusCode, crate::Error> {
+ ctx.ttl_service.delete_ttl(id).await?;
+ Ok(StatusCode::NO_CONTENT)
+}
diff --git a/api/src/ttl/repository/create_ttl.rs b/api/src/ttl/repository/create_ttl.rs
new file mode 100644
index 00000000..0cb92dd9
--- /dev/null
+++ b/api/src/ttl/repository/create_ttl.rs
@@ -0,0 +1,26 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, ttl::entities};
+
+impl Repository {
+ pub async fn create_ttl<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ ttl: &entities::CreateTtl,
+ ) -> Result<entities::TtlLegacy, Error> {
+ const QUERY: &str = "insert into ttl (ttl) values ($1) returning *";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .bind(ttl.time)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(ttl) => Ok(ttl),
+ }
+ }
+}
diff --git a/api/src/ttl/repository/delete_ttl.rs b/api/src/ttl/repository/delete_ttl.rs
new file mode 100644
index 00000000..6891b55d
--- /dev/null
+++ b/api/src/ttl/repository/delete_ttl.rs
@@ -0,0 +1,26 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, scalar::Id, ttl::entities};
+
+impl Repository {
+ pub async fn delete_ttl<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::TtlLegacy, Error> {
+ const QUERY: &str = "delete from ttl where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .bind(id)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(ttl) => Ok(ttl),
+ }
+ }
+}
diff --git a/api/src/ttl/repository/find_all_ttls.rs b/api/src/ttl/repository/find_all_ttls.rs
new file mode 100644
index 00000000..1f25be98
--- /dev/null
+++ b/api/src/ttl/repository/find_all_ttls.rs
@@ -0,0 +1,24 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, ttl::entities};
+
+impl Repository {
+ pub async fn find_all_ttls<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ ) -> Result<Vec<entities::TtlLegacy>, Error> {
+ const QUERY: &str = "select * from ttl ORDER BY id";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .fetch_all(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(res) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/ttl/repository/find_ttl_by_id.rs b/api/src/ttl/repository/find_ttl_by_id.rs
new file mode 100644
index 00000000..47e28b96
--- /dev/null
+++ b/api/src/ttl/repository/find_ttl_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, scalar::Id, ttl::entities};
+
+impl Repository {
+ pub async fn find_ttl_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::TtlLegacy, Error> {
+ const QUERY: &str = "select * from ttl where id = $1";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::TtlNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/ttl/repository/find_ttl_by_time.rs b/api/src/ttl/repository/find_ttl_by_time.rs
new file mode 100644
index 00000000..71252894
--- /dev/null
+++ b/api/src/ttl/repository/find_ttl_by_time.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, ttl::entities};
+
+impl Repository {
+ pub async fn find_ttl_by_time<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ time: i32,
+ ) -> Result<entities::TtlLegacy, Error> {
+ const QUERY: &str = "select * from ttl where ttl = $1";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .bind(time)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::TtlNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/ttl/repository/mod.rs b/api/src/ttl/repository/mod.rs
new file mode 100644
index 00000000..a2a21e17
--- /dev/null
+++ b/api/src/ttl/repository/mod.rs
@@ -0,0 +1,21 @@
+mod create_ttl;
+mod delete_ttl;
+mod find_all_ttls;
+mod find_ttl_by_id;
+mod find_ttl_by_time;
+mod update_ttl;
+
+#[derive(Debug, Clone)]
+pub struct Repository {}
+
+impl Repository {
+ pub fn new() -> Repository {
+ Repository {}
+ }
+}
+
+impl Default for Repository {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/api/src/ttl/repository/update_ttl.rs b/api/src/ttl/repository/update_ttl.rs
new file mode 100644
index 00000000..32c37f95
--- /dev/null
+++ b/api/src/ttl/repository/update_ttl.rs
@@ -0,0 +1,29 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db, errors::app::Error, ttl::entities};
+
+impl Repository {
+ pub async fn update_ttl<'c, C: db::Queryer<'c>>(
+ &self,
+ db: C,
+ ttl: &entities::Ttl,
+ ) -> Result<entities::TtlLegacy, Error> {
+ const QUERY: &str = "update ttl set
+ ttl = $2
+ where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::TtlLegacy>(QUERY)
+ .bind(ttl.id)
+ .bind(ttl.time)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(ttl) => Ok(ttl),
+ }
+ }
+}
diff --git a/api/src/ttl/service/check_ttl_exists.rs b/api/src/ttl/service/check_ttl_exists.rs
new file mode 100644
index 00000000..4d3dc7c6
--- /dev/null
+++ b/api/src/ttl/service/check_ttl_exists.rs
@@ -0,0 +1,18 @@
+use super::Service;
+use crate::{db::Queryer, errors::app::Error};
+
+impl Service {
+ /// returns true if a ttlname exists. false otherwise
+ pub async fn check_ttl_exists<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ time: i32,
+ ) -> Result<bool, crate::Error> {
+ let find_existing_ttl = self.repo.find_ttl_by_time(db, time).await;
+ match find_existing_ttl {
+ Ok(_) => Ok(true),
+ Err(Error::TtlNotFound) => Ok(false),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
diff --git a/api/src/ttl/service/create_ttl.rs b/api/src/ttl/service/create_ttl.rs
new file mode 100644
index 00000000..b8827a91
--- /dev/null
+++ b/api/src/ttl/service/create_ttl.rs
@@ -0,0 +1,16 @@
+use super::{CreateTtlInput, Service};
+use crate::{errors::app::Error, ttl::entities};
+
+impl Service {
+ pub async fn create_ttl(&self, input: CreateTtlInput) -> Result<entities::Ttl, crate::Error> {
+ // guard
+ let ttl_exists = self.check_ttl_exists(&self.db, input.time).await?;
+ if ttl_exists {
+ return Err(Error::TtlAlreadyExists.into());
+ }
+
+ let ttl_input = entities::CreateTtl { time: input.time };
+ let ttl = self.repo.create_ttl(&self.db, &ttl_input).await?;
+ entities::Ttl::try_from(ttl)
+ }
+}
diff --git a/api/src/ttl/service/delete_ttl.rs b/api/src/ttl/service/delete_ttl.rs
new file mode 100644
index 00000000..f057a680
--- /dev/null
+++ b/api/src/ttl/service/delete_ttl.rs
@@ -0,0 +1,12 @@
+use super::Service;
+use crate::{scalar::Id, ttl::entities};
+
+impl Service {
+ pub async fn delete_ttl(&self, id: Id) -> Result<entities::Ttl, crate::Error> {
+ // guard
+ self.find_ttl(id).await?;
+
+ let ttl = self.repo.delete_ttl(&self.db, id).await?;
+ entities::Ttl::try_from(ttl)
+ }
+}
diff --git a/api/src/ttl/service/find_ttl.rs b/api/src/ttl/service/find_ttl.rs
new file mode 100644
index 00000000..20d38c77
--- /dev/null
+++ b/api/src/ttl/service/find_ttl.rs
@@ -0,0 +1,9 @@
+use super::Service;
+use crate::{scalar::Id, ttl::entities};
+
+impl Service {
+ pub async fn find_ttl(&self, id: Id) -> Result<entities::Ttl, crate::Error> {
+ let ttl = self.repo.find_ttl_by_id(&self.db, id).await?;
+ entities::Ttl::try_from(ttl)
+ }
+}
diff --git a/api/src/ttl/service/find_ttl_by_time.rs b/api/src/ttl/service/find_ttl_by_time.rs
new file mode 100644
index 00000000..3c8749aa
--- /dev/null
+++ b/api/src/ttl/service/find_ttl_by_time.rs
@@ -0,0 +1,9 @@
+use super::Service;
+use crate::ttl::entities;
+
+impl Service {
+ pub async fn find_ttl_by_time(&self, time: i32) -> Result<entities::Ttl, crate::Error> {
+ let ttl = self.repo.find_ttl_by_time(&self.db, time).await?;
+ entities::Ttl::try_from(ttl)
+ }
+}
diff --git a/api/src/ttl/service/find_ttls.rs b/api/src/ttl/service/find_ttls.rs
new file mode 100644
index 00000000..15c9bfec
--- /dev/null
+++ b/api/src/ttl/service/find_ttls.rs
@@ -0,0 +1,17 @@
+use super::Service;
+use crate::ttl::entities;
+
+impl Service {
+ pub async fn find_ttls(&self) -> Result<Vec<entities::Ttl>, crate::Error> {
+ let ttls = self.repo.find_all_ttls(&self.db).await?;
+ let ttls: Result<Vec<entities::Ttl>, _> =
+ ttls.into_iter().map(entities::Ttl::try_from).collect();
+
+ match ttls.ok() {
+ None => Err(crate::Error::InvalidArgument(
+ "failed to convert ttl".into(),
+ )),
+ Some(ttls) => Ok(ttls),
+ }
+ }
+}
diff --git a/api/src/ttl/service/mod.rs b/api/src/ttl/service/mod.rs
new file mode 100644
index 00000000..877da93b
--- /dev/null
+++ b/api/src/ttl/service/mod.rs
@@ -0,0 +1,33 @@
+mod check_ttl_exists;
+mod create_ttl;
+mod delete_ttl;
+mod find_ttl;
+mod find_ttl_by_time;
+mod find_ttls;
+mod update_ttl;
+
+use crate::{db::DB, scalar::Id, ttl::repository::Repository};
+
+#[derive(Debug)]
+pub struct Service {
+ repo: Repository,
+ pub db: DB,
+}
+
+impl Service {
+ pub fn new(db: DB) -> Self {
+ let repo = Repository::new();
+ Self { db, repo }
+ }
+}
+
+#[derive(Debug)]
+pub struct CreateTtlInput {
+ pub time: i32,
+}
+
+#[derive(Debug)]
+pub struct UpdateTtlInput {
+ pub id: Id,
+ pub time: i32,
+}
diff --git a/api/src/ttl/service/update_ttl.rs b/api/src/ttl/service/update_ttl.rs
new file mode 100644
index 00000000..844ef932
--- /dev/null
+++ b/api/src/ttl/service/update_ttl.rs
@@ -0,0 +1,21 @@
+use super::{Service, UpdateTtlInput};
+use crate::{errors::app::Error, ttl::entities};
+
+impl Service {
+ pub async fn update_ttl(&self, input: UpdateTtlInput) -> Result<entities::Ttl, crate::Error> {
+ // guards
+ self.find_ttl(input.id).await?;
+
+ let ttlname_exists = self.check_ttl_exists(&self.db, input.time).await?;
+ if ttlname_exists {
+ return Err(Error::TtlAlreadyExists.into());
+ }
+
+ let ttl_input = entities::Ttl {
+ id: input.id,
+ time: input.time,
+ };
+ let ttl = self.repo.update_ttl(&self.db, &ttl_input).await?;
+ entities::Ttl::try_from(ttl)
+ }
+}
diff --git a/api/src/user/entities.rs b/api/src/user/entities.rs
new file mode 100644
index 00000000..7d562c44
--- /dev/null
+++ b/api/src/user/entities.rs
@@ -0,0 +1,23 @@
+use chrono::NaiveDateTime;
+use sqlx;
+
+use crate::scalar::Id;
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct User {
+ pub id: Id,
+ pub email: String,
+ pub created_at: NaiveDateTime,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct CreateUser {
+ pub email: String,
+ pub created_at: NaiveDateTime,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct UpdateUser {
+ pub id: Id,
+ pub email: String,
+}
diff --git a/api/src/user/mod.rs b/api/src/user/mod.rs
new file mode 100644
index 00000000..a37ea9e7
--- /dev/null
+++ b/api/src/user/mod.rs
@@ -0,0 +1,8 @@
+mod entities;
+mod repository;
+mod service;
+
+// public
+pub mod model;
+pub mod query;
+pub use service::Service;
diff --git a/api/src/user/model/input.rs b/api/src/user/model/input.rs
new file mode 100644
index 00000000..cae4e603
--- /dev/null
+++ b/api/src/user/model/input.rs
@@ -0,0 +1,21 @@
+use axum_typed_multipart::TryFromMultipart;
+
+use crate::scalar::Id;
+
+#[derive(TryFromMultipart)]
+pub struct CreateUserInput {
+ /// The email for the User.
+ pub email: String,
+}
+
+#[derive(TryFromMultipart)]
+pub struct UpdateUserInput {
+ /// The email for the User.
+ pub email: String,
+}
+
+#[derive(TryFromMultipart)]
+pub struct DeleteUserInput {
+ /// The ID of the User to modify.
+ pub id: Id,
+}
diff --git a/api/src/user/model/mod.rs b/api/src/user/model/mod.rs
new file mode 100644
index 00000000..bd14d1f7
--- /dev/null
+++ b/api/src/user/model/mod.rs
@@ -0,0 +1,38 @@
+pub mod input;
+
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use crate::{
+ scalar::{Id, Time},
+ user::entities,
+};
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct User {
+ /// The ID of the User.
+ pub id: Id,
+ /// The email for the User.
+ pub email: String,
+ pub created_at: Time,
+}
+
+impl From<entities::User> for User {
+ fn from(user: entities::User) -> Self {
+ Self {
+ id: user.id,
+ email: user.email,
+ created_at: user.created_at,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct UserResponse {
+ pub data: User,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct UsersResponse {
+ pub data: Vec<User>,
+}
diff --git a/api/src/user/query.rs b/api/src/user/query.rs
new file mode 100644
index 00000000..29a8ca9a
--- /dev/null
+++ b/api/src/user/query.rs
@@ -0,0 +1,97 @@
+use std::sync::Arc;
+
+use axum::response::IntoResponse;
+use axum::{extract::Path, http::StatusCode, response::Response, Extension, Json};
+use axum_typed_multipart::TypedMultipart;
+
+use super::model::input;
+use super::{model, service};
+use crate::context::ServerContext;
+
+// utoipa can't find `model::`
+#[allow(unused_imports)]
+use crate::user::model::User;
+
+#[utoipa::path(
+ get,
+ path = "/user/list",
+ responses(
+ (status = 200, description = "List all users", body = UsersResponse),
+ ),
+)]
+pub async fn list(
+ ctx: Extension<Arc<ServerContext>>,
+) -> Result<Json<model::UsersResponse>, crate::Error> {
+ let users = ctx.user_service.find_users().await?;
+
+ let users = users.into_iter().map(|t| t.into()).collect();
+ let response = model::UsersResponse { data: users };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ get,
+ path = "/user/list/:id",
+ responses(
+ (status = 200, description = "Get a user by id", body = UserResponse),
+ ),
+)]
+pub async fn get(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+) -> Result<Json<model::UserResponse>, crate::Error> {
+ let user = ctx.user_service.find_user(id).await?;
+
+ let response = model::UserResponse { data: user.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ post,
+ path = "/user/add",
+ responses(
+ (status = 201, description = "Create new user", body = UserResponse),
+ ),
+)]
+pub async fn post(
+ ctx: Extension<Arc<ServerContext>>,
+ TypedMultipart(input::CreateUserInput { email }): TypedMultipart<input::CreateUserInput>,
+) -> Result<Response, crate::Error> {
+ let input = service::CreateUserInput { email };
+ let user = ctx.user_service.create_user(input).await?;
+ let response = model::UserResponse { data: user.into() };
+ Ok((StatusCode::CREATED, Json(response)).into_response())
+}
+
+#[utoipa::path(
+ put,
+ path = "/user/edit/:id",
+ responses(
+ (status = 200, description = "Update a user", body = UserResponse),
+ ),
+)]
+pub async fn put(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+ TypedMultipart(input::UpdateUserInput { email }): TypedMultipart<input::UpdateUserInput>,
+) -> Result<Json<model::UserResponse>, crate::Error> {
+ let input = service::UpdateUserInput { id, email };
+ let user = ctx.user_service.update_user(input).await?;
+ let response = model::UserResponse { data: user.into() };
+ Ok(Json(response))
+}
+
+#[utoipa::path(
+ delete,
+ path = "/user/delete/:id",
+ responses(
+ (status = 204, description = "Delete a user", body = UserResponse),
+ ),
+)]
+pub async fn delete(
+ ctx: Extension<Arc<ServerContext>>,
+ Path(id): Path<i64>,
+) -> Result<StatusCode, crate::Error> {
+ ctx.user_service.delete_user(id).await?;
+ Ok(StatusCode::NO_CONTENT)
+}
diff --git a/api/src/user/repository/create_user.rs b/api/src/user/repository/create_user.rs
new file mode 100644
index 00000000..a36abd30
--- /dev/null
+++ b/api/src/user/repository/create_user.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, user::entities};
+
+impl Repository {
+ pub async fn create_user<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ user: &entities::CreateUser,
+ ) -> Result<entities::User, Error> {
+ const QUERY: &str = "insert into \"user\" (email, created_at) values ($1, $2) returning *";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .bind(&user.email)
+ .bind(user.created_at)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(user) => Ok(user),
+ }
+ }
+}
diff --git a/api/src/user/repository/delete_user.rs b/api/src/user/repository/delete_user.rs
new file mode 100644
index 00000000..646de4ec
--- /dev/null
+++ b/api/src/user/repository/delete_user.rs
@@ -0,0 +1,26 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, scalar::Id, user::entities};
+
+impl Repository {
+ pub async fn delete_user<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::User, Error> {
+ const QUERY: &str = "delete from \"user\" where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .bind(id)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(user) => Ok(user),
+ }
+ }
+}
diff --git a/api/src/user/repository/find_all_users.rs b/api/src/user/repository/find_all_users.rs
new file mode 100644
index 00000000..a0e68073
--- /dev/null
+++ b/api/src/user/repository/find_all_users.rs
@@ -0,0 +1,24 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, user::entities};
+
+impl Repository {
+ pub async fn find_all_users<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ ) -> Result<Vec<entities::User>, Error> {
+ const QUERY: &str = "select * from \"user\" ORDER BY id";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .fetch_all(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(res) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/user/repository/find_user_by_email.rs b/api/src/user/repository/find_user_by_email.rs
new file mode 100644
index 00000000..8775ce38
--- /dev/null
+++ b/api/src/user/repository/find_user_by_email.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, user::entities};
+
+impl Repository {
+ pub async fn find_user_by_email<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ email: &str,
+ ) -> Result<entities::User, Error> {
+ const QUERY: &str = "select * from \"user\" where email = $1";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .bind(email)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::UserNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/user/repository/find_user_by_id.rs b/api/src/user/repository/find_user_by_id.rs
new file mode 100644
index 00000000..0c5479ca
--- /dev/null
+++ b/api/src/user/repository/find_user_by_id.rs
@@ -0,0 +1,27 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db::Queryer, errors::app::Error, scalar::Id, user::entities};
+
+impl Repository {
+ pub async fn find_user_by_id<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ id: Id,
+ ) -> Result<entities::User, Error> {
+ const QUERY: &str = "select * from \"user\" where id = $1";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .bind(id)
+ .fetch_optional(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(None) => Err(Error::UserNotFound),
+ Ok(Some(res)) => Ok(res),
+ }
+ }
+}
diff --git a/api/src/user/repository/mod.rs b/api/src/user/repository/mod.rs
new file mode 100644
index 00000000..f5b59edb
--- /dev/null
+++ b/api/src/user/repository/mod.rs
@@ -0,0 +1,21 @@
+mod create_user;
+mod delete_user;
+mod find_all_users;
+mod find_user_by_email;
+mod find_user_by_id;
+mod update_user;
+
+#[derive(Debug, Clone)]
+pub struct Repository {}
+
+impl Repository {
+ pub fn new() -> Repository {
+ Repository {}
+ }
+}
+
+impl Default for Repository {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/api/src/user/repository/update_user.rs b/api/src/user/repository/update_user.rs
new file mode 100644
index 00000000..f4adc838
--- /dev/null
+++ b/api/src/user/repository/update_user.rs
@@ -0,0 +1,29 @@
+use sqlx;
+
+use super::Repository;
+use crate::{db, errors::app::Error, user::entities};
+
+impl Repository {
+ pub async fn update_user<'c, C: db::Queryer<'c>>(
+ &self,
+ db: C,
+ user: &entities::UpdateUser,
+ ) -> Result<entities::User, Error> {
+ const QUERY: &str = "update \"user\" set
+ email = $2
+ where id = $1 returning *";
+
+ match sqlx::query_as::<_, entities::User>(QUERY)
+ .bind(user.id)
+ .bind(&user.email)
+ .fetch_one(db)
+ .await
+ {
+ Err(err) => {
+ tracing::error!("{}", &err);
+ Err(err.into())
+ }
+ Ok(user) => Ok(user),
+ }
+ }
+}
diff --git a/api/src/user/service/check_username_exists.rs b/api/src/user/service/check_username_exists.rs
new file mode 100644
index 00000000..2616f7e6
--- /dev/null
+++ b/api/src/user/service/check_username_exists.rs
@@ -0,0 +1,18 @@
+use super::Service;
+use crate::{db::Queryer, errors::app::Error};
+
+impl Service {
+ /// returns true if a username exists. false otherwise
+ pub async fn check_user_exists<'c, C: Queryer<'c>>(
+ &self,
+ db: C,
+ name: &str,
+ ) -> Result<bool, crate::Error> {
+ let find_existing_user = self.repo.find_user_by_email(db, name).await;
+ match find_existing_user {
+ Ok(_) => Ok(true),
+ Err(Error::UserNotFound) => Ok(false),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
diff --git a/api/src/user/service/create_user.rs b/api/src/user/service/create_user.rs
new file mode 100644
index 00000000..3152cd4a
--- /dev/null
+++ b/api/src/user/service/create_user.rs
@@ -0,0 +1,24 @@
+use chrono::Utc;
+
+use super::{CreateUserInput, Service};
+use crate::{errors::app::Error, user::entities};
+
+impl Service {
+ pub async fn create_user(
+ &self,
+ input: CreateUserInput,
+ ) -> Result<entities::User, crate::Error> {
+ // guard
+ let username_exists = self.check_user_exists(&self.db, &input.email).await?;
+ if username_exists {
+ return Err(Error::UsernameAlreadyExists.into());
+ }
+
+ let user_input = entities::CreateUser {
+ email: input.email,
+ created_at: Utc::now().naive_local(),
+ };
+ let user = self.repo.create_user(&self.db, &user_input).await?;
+ Ok(user)
+ }
+}
diff --git a/api/src/user/service/delete_user.rs b/api/src/user/service/delete_user.rs
new file mode 100644
index 00000000..6ac3020a
--- /dev/null
+++ b/api/src/user/service/delete_user.rs
@@ -0,0 +1,12 @@
+use super::Service;
+use crate::{scalar::Id, user::entities};
+
+impl Service {
+ pub async fn delete_user(&self, id: Id) -> Result<entities::User, crate::Error> {
+ // guard
+ self.find_user(id).await?;
+
+ let user = self.repo.delete_user(&self.db, id).await?;
+ Ok(user)
+ }
+}
diff --git a/api/src/user/service/find_user.rs b/api/src/user/service/find_user.rs
new file mode 100644
index 00000000..0211432a
--- /dev/null
+++ b/api/src/user/service/find_user.rs
@@ -0,0 +1,10 @@
+use super::Service;
+use crate::{scalar::Id, user::entities};
+
+impl Service {
+ pub async fn find_user(&self, id: Id) -> Result<entities::User, crate::Error> {
+ let user = self.repo.find_user_by_id(&self.db, id).await?;
+
+ Ok(user)
+ }
+}
diff --git a/api/src/user/service/find_users.rs b/api/src/user/service/find_users.rs
new file mode 100644
index 00000000..26fdb215
--- /dev/null
+++ b/api/src/user/service/find_users.rs
@@ -0,0 +1,17 @@
+use super::Service;
+use crate::user::entities;
+
+impl Service {
+ pub async fn find_users(&self) -> Result<Vec<entities::User>, crate::Error> {
+ let users = self.repo.find_all_users(&self.db).await?;
+ let users: Result<Vec<entities::User>, _> =
+ users.into_iter().map(entities::User::try_from).collect();
+
+ match users.ok() {
+ None => Err(crate::Error::InvalidArgument(
+ "failed to convert user".into(),
+ )),
+ Some(users) => Ok(users),
+ }
+ }
+}
diff --git a/api/src/user/service/mod.rs b/api/src/user/service/mod.rs
new file mode 100644
index 00000000..67b1bfe7
--- /dev/null
+++ b/api/src/user/service/mod.rs
@@ -0,0 +1,32 @@
+mod check_username_exists;
+mod create_user;
+mod delete_user;
+mod find_user;
+mod find_users;
+mod update_user;
+
+use crate::{db::DB, scalar::Id, user::repository::Repository};
+
+#[derive(Debug)]
+pub struct Service {
+ repo: Repository,
+ pub db: DB,
+}
+
+impl Service {
+ pub fn new(db: DB) -> Self {
+ let repo = Repository::new();
+ Self { db, repo }
+ }
+}
+
+#[derive(Debug)]
+pub struct CreateUserInput {
+ pub email: String,
+}
+
+#[derive(Debug)]
+pub struct UpdateUserInput {
+ pub id: Id,
+ pub email: String,
+}
diff --git a/api/src/user/service/update_user.rs b/api/src/user/service/update_user.rs
new file mode 100644
index 00000000..823cc486
--- /dev/null
+++ b/api/src/user/service/update_user.rs
@@ -0,0 +1,24 @@
+use super::{Service, UpdateUserInput};
+use crate::{errors::app::Error, user::entities};
+
+impl Service {
+ pub async fn update_user(
+ &self,
+ input: UpdateUserInput,
+ ) -> Result<entities::User, crate::Error> {
+ // guards
+ self.find_user(input.id).await?;
+
+ let username_exists = self.check_user_exists(&self.db, &input.email).await?;
+ if username_exists {
+ return Err(Error::UsernameAlreadyExists.into());
+ }
+
+ let user_input = entities::UpdateUser {
+ id: input.id,
+ email: input.email,
+ };
+ let user = self.repo.update_user(&self.db, &user_input).await?;
+ Ok(user)
+ }
+}
diff --git a/api/tests/__init__.py b/api/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/tests/docker-compose.yml b/api/tests/docker-compose.yml
new file mode 100644
index 00000000..b1bda1db
--- /dev/null
+++ b/api/tests/docker-compose.yml
@@ -0,0 +1,11 @@
+version: "3"
+services:
+
+ db:
+ image: cockroachdb/cockroach:v2.0.7
+ hostname: roach
+ restart: always
+ command: start --insecure
+ ports:
+ - "26257:26257" # sql port
+ - "8080:8080" # web port
diff --git a/api/tests/fixtures/messages.py b/api/tests/fixtures/messages.py
deleted file mode 100644
index 3a7ac475..00000000
--- a/api/tests/fixtures/messages.py
+++ /dev/null
@@ -1,23 +0,0 @@
-knot_conf_set = {
- "cmd": "conf-set",
- "section": "zone",
- "item": "domain",
- "data": "company.com",
-}
-knot_zone_set_ns = {
- "cmd": "zone-set",
- "zone": None,
- "owner": "@",
- "rtype": "NS",
- "ttl": "3600",
- "data": "one.dns.id.",
-}
-
-knot_delegate_file = {
- "item": "file",
- "data": "company.com.zone",
- "identifier": "company.com",
- "cmd": "conf-set",
- "section": "zone",
- "zone": "company.com",
-}
diff --git a/api/tests/functional/__init__.py b/api/tests/functional/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/tests/functional/conftest.py b/api/tests/functional/conftest.py
deleted file mode 100644
index b370afad..00000000
--- a/api/tests/functional/conftest.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import pathlib
-
-import pytest
-from dotenv import load_dotenv
-
-from app import create_app
-from app.models import model
-
-
-def clean_users():
- # removing users will remove everything
- # since all data linked into it
- users = model.get_all("user")
- for user in users:
- user_id = user["id"]
- model.delete(table="user", field="id", value=user_id)
-
-
-@pytest.fixture
-def client():
- current_path = pathlib.Path(__file__)
- dotenv_path = current_path.parents[2].joinpath(".env.example")
- load_dotenv(dotenv_path)
-
- app = create_app()
- client = app.test_client()
-
- yield client
-
- # teardown
- clean_users()
diff --git a/api/tests/functional/docker-compose-kafka.yml b/api/tests/functional/docker-compose-kafka.yml
deleted file mode 100644
index 5e43e5b3..00000000
--- a/api/tests/functional/docker-compose-kafka.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-version: '3'
-services:
- zookeeper:
- image: wurstmeister/zookeeper
- kafka:
- image: wurstmeister/kafka
- ports:
- - "9092:9092"
- environment:
- KAFKA_ADVERTISED_HOST_NAME: localhost
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
diff --git a/api/tests/functional/knot.conf b/api/tests/functional/knot.conf
deleted file mode 100644
index cfd425b8..00000000
--- a/api/tests/functional/knot.conf
+++ /dev/null
@@ -1,59 +0,0 @@
-server:
- rundir: "/run/knot"
- user: knot:knot
- listen: [ 127.0.0.1@53, ::1@53 ]
-
-log:
- - target: syslog
- any: info
-
-remote:
- - id: slave1
- address: 192.168.1.3@53
-
- - id: slave2
- address: 192.168.2.3@53
-
- - id: master1
- address: 192.168.2.1@53
-
- - id: master2
- address: 192.168.2.2@53
-
-acl:
- - id: slave1_acl
- address: 192.168.1.3
- action: notify
-
- - id: slave2_acl
- address: 192.168.2.4
- action: notify
-
- - id: master2_acl
- address: 192.168.1.1
- action: transfer
-
- - id: master1_acl
- address: 192.168.2.2
- action: transfer
-
-template:
- - id: default
- storage: "/var/lib/knot"
- file: "%s.zone"
-
-mod-stats:
- - id: "default"
- edns-presence: "on"
- query-type: "on"
-
-zone:
-# # Master zone
-# - domain: example.com
-# notify: slave
-# acl: acl_slave
-
-# # Slave zone
-# - domain: example.net
-# master: master
-# acl: acl_master
diff --git a/api/tests/functional/servers.yaml b/api/tests/functional/servers.yaml
deleted file mode 100644
index bd5f5495..00000000
--- a/api/tests/functional/servers.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-# The value here follows the knotc command, not knot.conf
-master:
- notify:
- - slave1
- - slave2
-
- acl:
- - slave1_acl
- - slave2_acl
-
-slave:
- master:
- - master1
- - master2
-
- acl:
- - master1_acl
- - master2_acl
diff --git a/api/tests/functional/test_functional.py b/api/tests/functional/test_functional.py
deleted file mode 100644
index 03bca325..00000000
--- a/api/tests/functional/test_functional.py
+++ /dev/null
@@ -1,24 +0,0 @@
-class TestFunctional:
- def test_create_domain(self, client):
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- res = client.post("/api/domain/add", data=data, headers=headers)
- create_domain_data = res.get_json()
- # list domain
- res = client.get("/api/domain/list", headers=headers)
- list_domain_data = res.get_json()
-
- assert create_domain_data["code"] == 201
- assert create_domain_data["data"]["zone"] == "company.com"
- assert list_domain_data["code"] == 200
- assert list_domain_data["data"][0]["zone"] == "company.com"
- # 4: SOA, NS, NS, CNAME
- assert len(list_domain_data["data"][0]["records"]) == 4
diff --git a/api/tests/functional/test_health.py b/api/tests/functional/test_health.py
deleted file mode 100644
index 559615ed..00000000
--- a/api/tests/functional/test_health.py
+++ /dev/null
@@ -1,6 +0,0 @@
-class TestHealth:
- def test_health(self, client):
- res = client.get("/api/health")
- data = res.get_json()
-
- assert "100" in data["data"]["check"]
diff --git a/api/tests/health/mod.rs b/api/tests/health/mod.rs
new file mode 100644
index 00000000..4a5870fb
--- /dev/null
+++ b/api/tests/health/mod.rs
@@ -0,0 +1,2 @@
+mod schema;
+mod tests;
diff --git a/api/tests/health/schema.rs b/api/tests/health/schema.rs
new file mode 100644
index 00000000..9aab58a2
--- /dev/null
+++ b/api/tests/health/schema.rs
@@ -0,0 +1,11 @@
+use serde::Deserialize;
+
+#[derive(Debug, Deserialize)]
+pub struct HealthResponse {
+ pub data: Health,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct Health {
+ pub status: String,
+}
diff --git a/api/tests/health/tests.rs b/api/tests/health/tests.rs
new file mode 100644
index 00000000..b82eefff
--- /dev/null
+++ b/api/tests/health/tests.rs
@@ -0,0 +1,24 @@
+use anyhow::Result;
+use api::routes::app;
+use axum::{
+ body::Body,
+ http::{Request, StatusCode},
+};
+use tower::util::ServiceExt;
+
+use super::schema::HealthResponse;
+
+#[tokio::test]
+async fn health() -> Result<()> {
+ let app = app().await?;
+
+ let request = Request::builder().uri("/health").body(Body::empty())?;
+
+ let response = app.oneshot(request).await?;
+ assert_eq!(response.status(), StatusCode::OK);
+
+ let body = hyper::body::to_bytes(response.into_body()).await?;
+ let body: HealthResponse = serde_json::from_slice(&body)?;
+ assert_eq!(body.data.status, "running");
+ Ok(())
+}
diff --git a/api/tests/integration/__init__.py b/api/tests/integration/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/tests/integration/conftest.py b/api/tests/integration/conftest.py
deleted file mode 100644
index 6c2f56b5..00000000
--- a/api/tests/integration/conftest.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import pathlib
-
-import pytest
-from dotenv import load_dotenv
-
-from app import create_app
-from app.models import model
-
-
-def clean_users():
- # removing users will remove everything
- # since all data linked into it
- users = model.get_all("user")
- for user in users:
- user_id = user["id"]
- model.delete(table="user", field="id", value=user_id)
-
-
-@pytest.fixture
-def client():
- current_path = pathlib.Path(__file__)
- dotenv_path = current_path.parents[2].joinpath(".example.env")
- load_dotenv(dotenv_path)
-
- app = create_app()
- client = app.test_client()
-
- yield client
-
- # teardown
- clean_users()
diff --git a/api/tests/integration/test_domain.py b/api/tests/integration/test_domain.py
deleted file mode 100644
index 945e4aef..00000000
--- a/api/tests/integration/test_domain.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import app.helpers.producer
-
-
-class TestDomain:
- def test_list_no_domain(self, client):
- """Test if db contains no domain."""
- headers = {"X-Api-Key": "123"}
- res = client.get("/api/domain/list", headers=headers)
- json_data = res.get_json()
-
- assert json_data["code"] == 404
-
- def test_domain(self, client, mocker):
- """Test domain happy path.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - List the domain
- - Delete the domain
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- res = client.post("/api/domain/add", data=data, headers=headers)
- create_domain_data = res.get_json()
- # list domain
- res = client.get("/api/domain/list", headers=headers)
- list_domain_data = res.get_json()
- # delete domain
- data = {"zone": "company.com"}
- delete_domain_data = client.delete(
- "/api/domain/delete", data=data, headers=headers
- )
-
- assert create_domain_data["code"] == 201
- assert create_domain_data["data"]["zone"] == "company.com"
- assert list_domain_data["code"] == 200
- assert list_domain_data["data"][0]["zone"] == "company.com"
- # 4: SOA, NS, NS, CNAME
- assert len(list_domain_data["data"][0]["records"]) == 4
- # TODO can we assert the result of `call_args` since using `call_with`
- # is not feasible way knowing the very long arguments
-
- # 4: set_config, set_zone, delegate, delegate (creation)
- # 5: unset_config, unset_zone (SOA, NS, NS, CNAME)
- assert app.helpers.producer.send.call_count == 9
- assert delete_domain_data.status_code == 204
diff --git a/api/tests/integration/test_health.py b/api/tests/integration/test_health.py
deleted file mode 100644
index b8ad462e..00000000
--- a/api/tests/integration/test_health.py
+++ /dev/null
@@ -1,6 +0,0 @@
-class TestHealth:
- def test_health(self, client):
- res = client.get("/api/health")
- data = res.get_json()
-
- assert "running" in data["data"]["status"]
diff --git a/api/tests/integration/test_messages.py b/api/tests/integration/test_messages.py
deleted file mode 100644
index 526b5bf1..00000000
--- a/api/tests/integration/test_messages.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import app.helpers.producer
-import tests.fixtures.messages as message_fx
-
-
-class TestMessages:
- _messages = []
-
- def fake_send(self, messages):
- self._messages.append(messages)
-
- def test_messages(self, client, monkeypatch, mocker):
- """Test if the command sent to broker created appropriately.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Assert the sent command
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- monkeypatch.setattr(app.helpers.producer, "send", self.fake_send)
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
-
- assert len(self._messages) == 4
- # they should be ordered like this, otherwise knot will fail to create
- # configs or zones
- assert self._messages[0]["knot"][1] == message_fx.knot_conf_set
- assert self._messages[1]["knot"][2] == message_fx.knot_zone_set_ns
- assert self._messages[2]["knot"][1] == message_fx.knot_delegate_file
diff --git a/api/tests/integration/test_record.py b/api/tests/integration/test_record.py
deleted file mode 100644
index 99774baa..00000000
--- a/api/tests/integration/test_record.py
+++ /dev/null
@@ -1,452 +0,0 @@
-import datetime
-
-import app.helpers.helpers
-from app.controllers.api import record as record_api
-from app.helpers import helpers
-
-
-class TestRecord:
- def get_record(self, records, type_):
- for record in records:
- if record["type"] == type_:
- return record
-
- def test_list_no_Record(self, client):
- """Test if db contains no record."""
- headers = {"X-Api-Key": "123"}
- res = client.get("/api/domain/list", headers=headers)
- json_data = res.get_json()
-
- assert json_data["code"] == 404
-
- def test_add_record(self, client, mocker):
- """Test adding record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add a record
- - Query the db to assure it's created
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- res = client.post("/api/domain/add", data=data, headers=headers)
- create_domain_data = res.get_json()
- # add record
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
-
- assert create_domain_data["code"] == 201
- assert create_domain_data["data"]["zone"] == "company.com"
-
- assert add_record_data["code"] == 201
- assert add_record_data["data"]["owner"] == "host"
- assert add_record_data["data"]["rdata"] == "1.1.1.1"
-
- assert list_record_data["code"] == 200
- assert list_record_data["data"][0]["zone"] == "company.com"
- assert list_record_data["data"][0]["user"]["email"] == "first@company.com"
-
- def test_edit_record(self, client, mocker):
- """Test editing record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add a record
- - Edit a record
- - Query the db to assure it's edited
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- # edit record
- records = list_record_data["data"][0]["records"]
- cname_record = self.get_record(records, "CNAME")
- cname_record_id = cname_record["id"]
- data = {
- "zone": "company.com",
- "owner": "www_edit",
- "rtype": "CNAME",
- "rdata": "company_edited.com",
- "ttl": 3600,
- }
- res = client.put(
- f"/api/record/edit/{cname_record_id}", data=data, headers=headers
- )
- edit_record_data = res.get_json()
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- records = list_record_data["data"][0]["records"]
- edited_record_data = self.get_record(records, "CNAME")
-
- assert edit_record_data["code"] == 200
- assert edit_record_data["data"]["owner"] == "www_edit"
-
- assert list_record_data["code"] == 200
- assert edited_record_data["rdata"] == "company_edited.com"
-
- def test_edit_record_no_ttl_change(self, client, mocker):
- """Test editing record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Edit a record with the same TTL
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- # edit record
- records = list_record_data["data"][0]["records"]
- cname_record = self.get_record(records, "CNAME")
- cname_record_id = cname_record["id"]
- data = {
- "zone": "company.com",
- "owner": "www",
- "rtype": "CNAME",
- "rdata": "company.com.",
- "ttl": "3600",
- }
- res = client.put(
- f"/api/record/edit/{cname_record_id}", data=data, headers=headers
- )
- edit_record_data = res.get_json()
-
- assert edit_record_data["code"] == 409
- assert edit_record_data["message"] == "The record already exists"
-
- def test_edit_record_with_ttl_change(self, client, mocker):
- """Test editing record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Edit a record with the different TTL
- - Query the db to assure it's edited
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- # edit record
- records = list_record_data["data"][0]["records"]
- cname_record = self.get_record(records, "CNAME")
- cname_record_id = cname_record["id"]
- data = {
- "zone": "company.com",
- "owner": "www",
- "rtype": "CNAME",
- "rdata": "company.com.",
- "ttl": "300",
- }
- res = client.put(
- f"/api/record/edit/{cname_record_id}", data=data, headers=headers
- )
- edit_record_data = res.get_json()
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- records = list_record_data["data"][0]["records"]
- edited_record_data = self.get_record(records, "CNAME")
-
- assert edit_record_data["code"] == 200
- assert edit_record_data["data"]["ttl"] == "300"
-
- assert list_record_data["code"] == 200
- assert edited_record_data["ttl"] == "300"
-
- def test_delete_record(self, client, mocker):
- """Test deleting record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - List the default records
- - Delete one of the record
- - Query the db to assure it's deleted
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- # edit record
- records = list_record_data["data"][0]["records"]
- cname_record = self.get_record(records, "CNAME")
- cname_record_id = cname_record["id"]
- delete_res = client.delete(
- f"/api/record/delete/{cname_record_id}", headers=headers
- )
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- records = list_record_data["data"][0]["records"]
-
- assert delete_res.status_code == 204
- # it must be 3 after deletion
- assert len(records) == 3
-
- def test_edit_record_no_ttl_change_MX(self, client, mocker):
- """Test editing record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add MX record
- - Edit a record with the same TTL
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
-
- # add record
- data = {
- "zone": "company.com",
- "owner": "mx1",
- "rtype": "MX",
- "rdata": "10 mail.example.com.",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- json_data = res.get_json()
- record_id = json_data["data"]["id"]
-
- # edit record
- data = {
- "zone": "company.com",
- "owner": "mx1",
- "rtype": "MX",
- "rdata": "10 mail.example.com.",
- "ttl": 7200,
- }
-
- res = client.put(f"/api/record/edit/{record_id}", data=data, headers=headers)
- edit_record_data = res.get_json()
-
- assert edit_record_data["code"] == 409
- assert edit_record_data["message"] == "The record already exists"
-
- def test_edit_record_with_ttl_change_MX(self, client, mocker):
- """Test editing record from its endpoint.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add MX record
- - Edit a record with the different TTL
- - Query the db to assure it's edited
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
-
- # add record
- data = {
- "zone": "company.com",
- "owner": "mx1",
- "rtype": "MX",
- "rdata": "10 mail.example.com.",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- json_data = res.get_json()
- record_id = json_data["data"]["id"]
-
- # edit record
- data = {
- "zone": "company.com",
- "owner": "mx1",
- "rtype": "MX",
- "rdata": "10 mail.example.com.",
- "ttl": 14400,
- }
- res = client.put(f"/api/record/edit/{record_id}", data=data, headers=headers)
- edit_record_data = res.get_json()
-
- # list record
- res = client.get("/api/domain/list", headers=headers)
- list_record_data = res.get_json()
- records = list_record_data["data"][0]["records"]
- edited_record_data = self.get_record(records, "MX")
-
- assert edit_record_data["code"] == 200
- assert edit_record_data["data"]["ttl"] == "14400"
-
- assert list_record_data["code"] == 200
- assert edited_record_data["ttl"] == "14400"
-
- def test_edit_record_respect_zone_limit(self, client, monkeypatch, mocker):
- """Test edit record respecting zone limit of 99
-
- - Create a User
- - Create a domain (with default SOA, NS, CNAME created)
- - Add TXT record
- - Edit a record with the different TXT value until it reaches a limit
- - Edit a record with tomorrows date
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
-
- # add record
- data = {
- "zone": "company.com",
- "owner": "txt1",
- "rtype": "TXT",
- "rdata": "0",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- json_data = res.get_json()
- record_id = json_data["data"]["id"]
-
- increment_serial = 0
- # 50 times for edit record is enough to make serial > 99
- # record edit increment serial twice at time
- while increment_serial < 50:
- data = {
- "zone": "company.com",
- "owner": "txt1",
- "rtype": "TXT",
- "rdata": f"{increment_serial}",
- "ttl": 7200,
- }
- res = client.put(
- f"/api/record/edit/{record_id}", data=data, headers=headers
- )
- edit_record_data = res.get_json()
-
- increment_serial += 1
-
- assert edit_record_data["code"] == 429
- assert edit_record_data["message"] == "Zone Change Limit Reached"
-
- # ensure correct serial
- serial_resource = record_api.get_serial_resource("company.com")
- today_date = helpers.soa_time_set()
-
- assert serial_resource["serial_counter"] == "98"
- assert serial_resource["serial_date"] == today_date
- assert serial_resource["serial"] == f"{today_date}98"
-
- #
- # if user waits until tomorrow
- #
- def fake_soa_time_set():
- tomorrow_date = datetime.datetime.now() + datetime.timedelta(days=1)
- return tomorrow_date.strftime("%Y%m%d")
-
- monkeypatch.setattr(app.helpers.helpers, "soa_time_set", fake_soa_time_set)
- data = {
- "zone": "company.com",
- "owner": "txt1",
- "rtype": "TXT",
- "rdata": "random text",
- "ttl": 7200,
- }
- res = client.put(f"/api/record/edit/{record_id}", data=data, headers=headers)
- edit_record_data = res.get_json()
-
- assert edit_record_data["code"] == 200
-
- # ensure correct serial
- serial_resource = record_api.get_serial_resource("company.com")
- today_date = helpers.soa_time_set()
-
- assert serial_resource["serial_counter"] == "03"
- assert serial_resource["serial_date"] == today_date
- assert serial_resource["serial"] == f"{today_date}03"
diff --git a/api/tests/integration/test_rules.py b/api/tests/integration/test_rules.py
deleted file mode 100644
index fa8cb9be..00000000
--- a/api/tests/integration/test_rules.py
+++ /dev/null
@@ -1,402 +0,0 @@
-class TestCNAMERules:
- def test_duplicate_record(self, client, mocker):
- """Create multiple CNAME record with same owner and rdata.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - # default CNAME owner is `www`
- - Add CNAME record with `www` as owner -> must be FAIL (duplicate record)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
-
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "www",
- "rtype": "CNAME",
- "rdata": "company.com.",
- "ttl": 3600,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 409
- assert add_record_data["message"] == "The record already exists"
-
- def test_possible_duplicate_record(self, client, mocker):
- """Edit CNAME record that possible same with other.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - # default CNAME owner is `www`
- - Add CNAME record with `www1` as owner.
- - Edit CNAME record with `wwww` as owner and `company.com.` as rdata -> must be FAIL (duplicate record)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
-
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
-
- # add record
- data = {
- "zone": "company.com",
- "owner": "www1",
- "rtype": "CNAME",
- "rdata": "company.com.",
- "ttl": 3600,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- json_data = res.get_json()
- record_id = json_data["data"]["id"]
-
- # edit possible duplicate record
- data = {
- "zone": "company.com",
- "owner": "www",
- "rtype": "CNAME",
- "rdata": "company.com.",
- "ttl": 3600,
- }
- res = client.put(f"/api/record/edit/{record_id}", data=data, headers=headers)
- edit_record_data = res.get_json()
-
- assert edit_record_data["code"] == 409
- assert edit_record_data["message"] == "The record already exists"
-
- def test_unique_host(self, client, mocker):
- """Create multiple CNAME record with different owner/host.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - # default CNAME owner is `www`
- - Add CNAME record with `www1` as owner -> must be SUCCESS (unique allowed)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
-
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "www1",
- "rtype": "CNAME",
- "rdata": "company.com",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 201
- assert add_record_data["data"]["type"] == "CNAME"
- assert add_record_data["data"]["owner"] == "www1"
-
- def test_not_unique_host(self, client, mocker):
- """Create multiple CNAME record with same owner/host.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - # default CNAME owner is `www`
- - Add CNAME record with `www` as owner -> must be FAIL (duplicate owner)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "www",
- "rtype": "CNAME",
- "rdata": "company.com",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 409
- assert (
- add_record_data["message"] == "A CNAME record already exist with that owner"
- )
-
- def test_clash_with_A_owner(self, client, mocker):
- """Create CNAME record with same A owner.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add A record with `host` as owner
- - Add CNAME record with `host` as owner -> must be FAIL (clash with A owner)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record A
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- client.post("/api/record/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "CNAME",
- "rdata": "company.com",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 409
- assert add_record_data["message"] == "An A record already exist with that owner"
-
-
-class TestARules:
- def test_duplicate_record(self, client, mocker):
- """Create multiple A record with same owner and rdata.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add A record with `a1` as owner and `1.1.1.1` as rdata
- - Add A record with `a1` as owner and `1.1.1.1` as rdata -> must be FAIL (duplicate record)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
-
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "a1",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- client.post("/api/record/add", data=data, headers=headers)
-
- # add duplicate record
- data = {
- "zone": "company.com",
- "owner": "a1",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 409
- assert add_record_data["message"] == "The record already exists"
-
- def test_possible_duplicate_record(self, client, mocker):
- """Edit A record that possible same with other.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add A record with `a1` as owner and `1.1.1.1` as rdata
- - Add A record with `a1` as owner and `2.2.2.2` as rdata
- - Edit A record with `a1` as owner and `1.1.1.1` as rdata -> must be FAIL (duplicate record)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
-
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record
- data = {
- "zone": "company.com",
- "owner": "a1",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- client.post("/api/record/add", data=data, headers=headers)
-
- # add record
- data = {
- "zone": "company.com",
- "owner": "a1",
- "rtype": "A",
- "rdata": "2.2.2.2",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- json_data = res.get_json()
- record_id = json_data["data"]["id"]
-
- # edit possible duplicate record
- data = {
- "zone": "company.com",
- "owner": "a1",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- res = client.put(f"/api/record/edit/{record_id}", data=data, headers=headers)
- edit_record_data = res.get_json()
-
- assert edit_record_data["code"] == 409
- assert edit_record_data["message"] == "The record already exists"
-
- def test_not_unique_owner(self, client, mocker):
- """Create A record with same owner.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add A record with `host` as owner
- - Add A record with `host` as owner -> must be SUCCESS (same owner allowed)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record A
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- # add record A
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "A",
- "rdata": "2.2.2.2",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 201
- assert add_record_data["data"]["type"] == "A"
- assert add_record_data["data"]["owner"] == "host"
-
- def test_clash_with_cname_owner(self, client, mocker):
- """Create A record with same CNAME owner.
-
- - Create a User
- - Create a domain (with default SOA,NS,CNAME created)
- - Add CNAME record with `host` as owner
- - Add A record with `host` as owner -> must be FAIL (clash with CNAME owner)
- """
- mocker.patch("app.helpers.producer.kafka_producer")
- mocker.patch("app.helpers.producer.send")
- headers = {"X-Api-Key": "123"}
-
- # create user
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- # add domain
- data = {"zone": "company.com", "user_id": user_id}
- client.post("/api/domain/add", data=data, headers=headers)
- # add record CNAME
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "CNAME",
- "rdata": "company.com",
- "ttl": 7200,
- }
- client.post("/api/record/add", data=data, headers=headers)
- # add record A
- data = {
- "zone": "company.com",
- "owner": "host",
- "rtype": "A",
- "rdata": "1.1.1.1",
- "ttl": 7200,
- }
- res = client.post("/api/record/add", data=data, headers=headers)
- add_record_data = res.get_json()
-
- assert add_record_data["code"] == 409
- assert (
- add_record_data["message"] == "A CNAME record already exist with that owner"
- )
diff --git a/api/tests/integration/test_schema.sql b/api/tests/integration/test_schema.sql
deleted file mode 100644
index 029b4869..00000000
--- a/api/tests/integration/test_schema.sql
+++ /dev/null
@@ -1,59 +0,0 @@
--- This schema ported from schema.sql for PostgreSQL compatibility
-CREATE TABLE "user" (
- id SERIAL UNIQUE,
- email VARCHAR NOT NULL,
- created_at TIMESTAMP NULL
-);
-
-CREATE TABLE "type" (
- id SERIAL UNIQUE,
- "type" VARCHAR NULL
-);
-
-
-CREATE TABLE ttl (
- id SERIAL UNIQUE,
- ttl VARCHAR NULL
-);
-
-CREATE TABLE zone (
- id SERIAL UNIQUE,
- zone VARCHAR NULL,
- is_committed BOOL NULL,
- user_id SERIAL REFERENCES "user" (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE record (
- id SERIAL UNIQUE,
- owner VARCHAR NULL,
- zone_id INT8 NOT NULL REFERENCES zone (id) ON DELETE CASCADE ON UPDATE CASCADE,
- type_id INT8 NOT NULL REFERENCES "type" (id) ON DELETE CASCADE ON UPDATE CASCADE,
- ttl_id SERIAL REFERENCES ttl (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE rdata (
- id SERIAL UNIQUE,
- rdata VARCHAR NULL,
- record_id SERIAL REFERENCES record (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-INSERT INTO "type" (id, "type") VALUES
- (1, 'SOA'),
- (2, 'SRV'),
- (3, 'A'),
- (4, 'NS'),
- (5, 'CNAME'),
- (6, 'MX'),
- (7, 'AAAA'),
- (8, 'TXT');
-
-INSERT INTO ttl (id, ttl) VALUES
- (1, '86400'),
- (2, '43200'),
- (3, '28800'),
- (4, '14400'),
- (5, '7200'),
- (6, '3600'),
- (7, '1800'),
- (8, '900'),
- (9, '300');
diff --git a/api/tests/integration/test_user.py b/api/tests/integration/test_user.py
deleted file mode 100644
index 72f985b2..00000000
--- a/api/tests/integration/test_user.py
+++ /dev/null
@@ -1,94 +0,0 @@
-class TestUser:
- def test_list_no_user(self, client):
- """Test if db contains no user."""
- headers = {"X-Api-Key": "123"}
- res = client.get("/api/user/list", headers=headers)
- json_data = res.get_json()
-
- assert json_data["code"] == 404
-
- def test_crate_user(self, client):
- """Create user from its endpoint.
-
- Then:
- - Check if the response appropriate
- - Query the db to assure it's created
- """
- headers = {"X-Api-Key": "123"}
-
- data = {"email": "first@company.com"}
- res = client.post("/api/user/add", data=data, headers=headers)
- response_data = res.get_json()
-
- res = client.get("/api/user/list", headers=headers)
- db_data = res.get_json()
-
- # assert response
- assert response_data["code"] == 201
- assert response_data["data"]["email"] == "first@company.com"
- # assert db value
- assert "first@company.com" in db_data["data"][0].values()
-
- def test_edit_user(self, client):
- """Edit user from its endpoint.
-
- Then:
- - Check if the response appropriate
- - Query the db to assure it's edited
- """
- headers = {"X-Api-Key": "123"}
-
- data = {"email": "first@company.com"}
- res = client.post("/api/user/add", data=data, headers=headers)
- json_data = res.get_json()
- user_id = json_data["data"]["id"]
-
- data = {"email": "first_edited@company.com"}
- res = client.put(f"/api/user/edit/{user_id}", data=data, headers=headers)
- res_data = res.get_json()
-
- res = client.get("/api/user/list", headers=headers)
- db_data = res.get_json()
-
- assert res_data["code"] == 200
- assert res_data["data"]["email"] == "first_edited@company.com"
- assert "first_edited@company.com" in db_data["data"][0].values()
-
- def test_delete_user(self, client):
- """Delete user from its endpoint.
-
- Then:
- - Check if the response appropriate
- - Query the db to assure it's deleted
- """
- headers = {"X-Api-Key": "123"}
-
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
- user_id = json_data["data"]["id"]
-
- delete_res = client.delete(f"/api/user/delete/{user_id}", headers=headers)
-
- res = client.get("/api/user/list", headers=headers)
- db_data = res.get_json()
-
- assert delete_res.status_code == 204
- assert db_data["code"] == 404
-
- def test_duplicate_email(self, client):
- """Create multiple user with the same email.
-
- Must be failed.
- """
- headers = {"X-Api-Key": "123"}
-
- data = {"email": "first@company.com"}
- client.post("/api/user/add", data=data, headers=headers)
-
- data = {"email": "first@company.com"}
- post_res = client.post("/api/user/add", data=data, headers=headers)
- json_data = post_res.get_json()
-
- assert post_res.status_code == 409
- assert json_data["message"] == "Duplicate Email"
diff --git a/api/tests/load/README.md b/api/tests/load/README.md
deleted file mode 100644
index ed12e760..00000000
--- a/api/tests/load/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Guide
-
-```shell
-$ pip install -r requirements.txt
-
-$ locust --locustfile create_user.py --users 10 --spawn-rate 1 --host http://127.0.0.1:5000/api
-$ # Go to http://0.0.0.0:8089, press start, and see the `chart` section.
-$ # Export the chart to png using the `export` button.
-```
diff --git a/api/tests/load/check_health.py b/api/tests/load/check_health.py
deleted file mode 100644
index 68f43f78..00000000
--- a/api/tests/load/check_health.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from locust import HttpUser, between, task
-
-
-class ApiUser(HttpUser):
- wait_time = between(5, 15)
-
- def on_start(self):
- pass
-
- @task
- def check_health(self):
- """Check server health."""
- headers = {"X-API-Key": "123"}
- self.client.get("/health", headers=headers, name="Check health")
diff --git a/api/tests/load/create_domain.py b/api/tests/load/create_domain.py
deleted file mode 100644
index 0d7f5832..00000000
--- a/api/tests/load/create_domain.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import random
-
-from locust import HttpUser, between, task
-
-
-class ApiUser(HttpUser):
- wait_time = between(5, 15)
-
- def on_start(self):
- pass
-
- @task
- def create_domain(self):
- """Create multiple users with different email address and project id."""
- random_num = int("".join([f"{random.randint(0, 9)}" for num in range(0, 4)]))
-
- headers = {"X-API-Key": "123"}
- data = {"email": f"test-{random_num}@gmail.com"}
- self.client.post(
- "/user/add", data=data, headers=headers, name="Create new user"
- )
-
- data = {"user_id": "", "zone": f"test-{random_num}.com"}
- self.client.post(
- "/domain/add", data=data, headers=headers, name="Create new domain"
- )
diff --git a/api/tests/load/create_user.py b/api/tests/load/create_user.py
deleted file mode 100644
index 8e108e5a..00000000
--- a/api/tests/load/create_user.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import random
-
-from locust import HttpUser, between, task
-
-
-class ApiUser(HttpUser):
- wait_time = between(5, 15)
-
- def on_start(self):
- pass
-
- @task
- def create_user(self):
- """Create multiple users with different email address and project id."""
- random_num = int("".join([f"{random.randint(0, 9)}" for num in range(0, 4)]))
-
- headers = {"X-API-Key": "123"}
- data = {"email": f"test-{random_num}@gmail.com"}
- self.client.post(
- "/user/add", data=data, headers=headers, name="Create new user"
- )
diff --git a/api/tests/load/requirements.txt b/api/tests/load/requirements.txt
deleted file mode 100644
index 595bd63b..00000000
--- a/api/tests/load/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-locust==2.8.6
diff --git a/api/tests/meta/mod.rs b/api/tests/meta/mod.rs
new file mode 100644
index 00000000..4a5870fb
--- /dev/null
+++ b/api/tests/meta/mod.rs
@@ -0,0 +1,2 @@
+mod schema;
+mod tests;
diff --git a/api/tests/meta/schema.rs b/api/tests/meta/schema.rs
new file mode 100644
index 00000000..a3e0b710
--- /dev/null
+++ b/api/tests/meta/schema.rs
@@ -0,0 +1,12 @@
+use serde::Deserialize;
+
+#[derive(Debug, Deserialize)]
+pub struct VersionResponse {
+ pub data: Version,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct Version {
+ pub build: String,
+ pub version: String,
+}
diff --git a/api/tests/meta/tests.rs b/api/tests/meta/tests.rs
new file mode 100644
index 00000000..6a6597f2
--- /dev/null
+++ b/api/tests/meta/tests.rs
@@ -0,0 +1,26 @@
+use anyhow::Result;
+use api::routes::app;
+use axum::{
+ body::Body,
+ http::{Request, StatusCode},
+};
+use tower::util::ServiceExt;
+
+use super::schema::VersionResponse;
+
+#[tokio::test]
+async fn version() -> Result<()> {
+ let app = app().await?;
+
+ let request = Request::builder()
+ .uri("/meta/version")
+ .body(Body::empty())?;
+
+ let response = app.oneshot(request).await?;
+ assert_eq!(response.status(), StatusCode::OK);
+
+ let body = hyper::body::to_bytes(response.into_body()).await?;
+ let body: VersionResponse = serde_json::from_slice(&body)?;
+ assert_eq!(body.data.build, "unknown");
+ Ok(())
+}
diff --git a/api/tests/tests.hurl b/api/tests/tests.hurl
new file mode 100644
index 00000000..1fb487c9
--- /dev/null
+++ b/api/tests/tests.hurl
@@ -0,0 +1,17 @@
+# REST API health
+# hurl --test tests/tests.hurl
+
+GET http://127.0.0.1:5000/health
+
+HTTP/1.1 200
+[Asserts]
+status == 200
+jsonpath "$.data.status" == "running"
+
+# meta
+GET http://127.0.0.1:5000/meta
+
+HTTP/1.1 200
+[Asserts]
+status == 200
+jsonpath "$.data.build" == "unknown"
diff --git a/api/tests/tests.rs b/api/tests/tests.rs
new file mode 100644
index 00000000..e51ff5ac
--- /dev/null
+++ b/api/tests/tests.rs
@@ -0,0 +1,2 @@
+mod health;
+mod meta;
diff --git a/api/tests/unit/__init__.py b/api/tests/unit/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/api/tests/unit/test_helpers.py b/api/tests/unit/test_helpers.py
deleted file mode 100644
index 8e45c397..00000000
--- a/api/tests/unit/test_helpers.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import app
-from app.helpers import helpers
-
-
-def test_replace_serial():
- rdata = "ns.example.com. hostmaster.example.com. 2018070410 3600 3600 3600 3600"
- replaced_serial = helpers.replace_serial(rdata, "2020010101")
- assert (
- replaced_serial
- == "ns.example.com. hostmaster.example.com. 2020010101 3600 3600 3600 3600"
- )
-
-
-def test_increment_serial(monkeypatch):
- monkeypatch.setattr(app.helpers.helpers, "soa_time_set", lambda: "20180704")
-
- incremented_serial1 = helpers.increment_serial("2018070401")
- incremented_serial2 = helpers.increment_serial("2018070401", "02")
- assert incremented_serial1 == "2018070402"
- assert incremented_serial2 == "2018070403"
-
-
-def test_exclude_keys():
- my_dict = {"a": 1, "b": 2, "c": 3}
- new_dict = helpers.exclude_keys(my_dict, "a")
- assert new_dict == {"b": 2, "c": 3}
diff --git a/api/tests/unit/test_validator.py b/api/tests/unit/test_validator.py
deleted file mode 100644
index 7b6e1cda..00000000
--- a/api/tests/unit/test_validator.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import pytest
-
-from app.helpers import validator
-
-
-def test_valid_ip():
- """This test the valid IP in A and AAAA record"""
- validator.is_valid_ip("2001:db8:10::2")
- validator.is_valid_ip("192.0.2.1")
-
- with pytest.raises(Exception):
- validator.is_valid_ip("2001:db8:10::2::")
- with pytest.raises(Exception):
- validator.is_valid_ip("192.0.2")
- with pytest.raises(Exception):
- validator.is_valid_ip("270.0.0.2")
- with pytest.raises(Exception):
- validator.is_valid_ip("localhost")
-
-
-def test_valid_mx():
- validator.is_valid_mx("10 mail.example.com.")
- validator.is_valid_mx("20 mail2.example.com")
-
- with pytest.raises(Exception):
- validator.is_valid_mx("mail.example.com.")
- with pytest.raises(Exception):
- validator.is_valid_mx("mail2.example.com")
-
-
-def test_valid_cname():
- validator.is_valid_cname("example")
- validator.is_valid_cname("example.com")
- validator.is_valid_cname("example.com.")
- validator.is_valid_cname("example-one.com")
-
- with pytest.raises(Exception):
- validator.is_valid_cname("-example")
- with pytest.raises(Exception):
- validator.is_valid_cname("example-")
- with pytest.raises(Exception):
- validator.is_valid_cname("example.-one")
- with pytest.raises(Exception):
- validator.is_valid_cname("example-.one")
- with pytest.raises(Exception):
- validator.is_valid_cname("--example.com.")
- with pytest.raises(Exception):
- validator.is_valid_cname(".example.")
- with pytest.raises(Exception):
- validator.is_valid_cname("*")
- with pytest.raises(Exception):
- validator.is_valid_cname("*.abc")
-
-
-def test_valid_zone():
- validator.is_valid_zone("example.com")
- validator.is_valid_zone("example-one.com")
- validator.is_valid_zone("example-one-two.com")
- validator.is_valid_zone("mail.example-one.com")
- validator.is_valid_zone("mail.example-one-1.com")
- validator.is_valid_zone("mail.example.com")
-
- with pytest.raises(Exception):
- validator.is_valid_zone("--example.com.")
- with pytest.raises(Exception):
- validator.is_valid_zone("foo.example.")
-
-
-def test_valid_soa():
- validator.is_valid_soa(
- "ns.example.com. hostmaster.example.com. 2018070410 3600 3600 3600 3600"
- )
-
- with pytest.raises(Exception):
- validator.is_valid_soa(
- "ns.example.co hostmaster.example.com. 2018070410 imnotint 3600 3600 3600"
- )
-
-
-def test_valid_txt():
- validator.is_valid_txt("this is sample text")
-
- with pytest.raises(Exception):
- validator.is_valid_txt("®€")
- with pytest.raises(Exception):
- validator.is_valid_txt("€")
-
-
-def test_valid_srv():
- validator.is_valid_srv("0 5 5060 one.example.com.")
-
- with pytest.raises(Exception):
- validator.is_valid_srv("0 5 one.example.com.")
- with pytest.raises(Exception):
- validator.is_valid_srv("0 one.example.com.")
- with pytest.raises(Exception):
- validator.is_valid_srv("0 5 notanumber one.example.com.")
- with pytest.raises(Exception):
- validator.is_valid_srv("0 5 one.example.com.")
-
-
-def test_valid_owner():
- validator.is_valid_owner("@")
- validator.is_valid_owner("*")
- validator.is_valid_owner("n")
- validator.is_valid_owner("ns")
- validator.is_valid_owner("a.b.c")
- validator.is_valid_owner("ns-ns-1")
- validator.is_valid_owner("ns1_")
- validator.is_valid_owner("_ns1_")
- validator.is_valid_owner(f"ns.ns.ns.{'a' * 63}")
- validator.is_valid_owner("a" * 255)
-
- with pytest.raises(Exception):
- validator.is_valid_owner("ns.")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns-")
- with pytest.raises(Exception):
- validator.is_valid_owner("-ns")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns.-ns.ns")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns.ns-.ns")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns()ns")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns(ns")
- with pytest.raises(Exception):
- validator.is_valid_owner("ns)ns")
- with pytest.raises(Exception):
- # label more than 64
- validator.is_valid_owner(f"ns.ns.ns.{'a' * 64}")
- with pytest.raises(Exception):
- # owner more than 255
- validator.is_valid_owner("a" * 256)
-
-
-def test_validate_func():
- # validator exists
- validator.validate("A", "192.0.2.1")
-
- with pytest.raises(Exception):
- # empty rdata
- validator.validate("TXT", "")
diff --git a/api/app/helpers/__init__.py b/config.example.toml
similarity index 100%
rename from api/app/helpers/__init__.py
rename to config.example.toml
diff --git a/configs/dprint.json b/configs/dprint.json
index 92f768ce..81e8bd9f 100644
--- a/configs/dprint.json
+++ b/configs/dprint.json
@@ -7,9 +7,10 @@
},
"includes": ["**/*.{json,md,toml,dockerfile}"],
"excludes": [
- "*_cache",
- ".venv"
- "cli"
+ "**/*-lock.json",
+ "**/*-lock.toml",
+ "./target",
+ "sqlx-data.json"
],
"plugins": [
"https://plugins.dprint.dev/json-0.15.3.wasm",
diff --git a/ttl/entities.rs b/ttl/entities.rs
new file mode 100644
index 00000000..e340e3bd
--- /dev/null
+++ b/ttl/entities.rs
@@ -0,0 +1,43 @@
+use sqlx;
+
+use crate::{relay::Base64Cursor, scalar::Id};
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct Ttl {
+ pub id: Id,
+ pub time: i32,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct TtlEntity {
+ pub id: Id,
+ // We used `ttl` instead of `time` in the legacy database
+ pub ttl: String,
+}
+
+impl TryFrom<TtlEntity> for Ttl {
+ type Error = crate::Error;
+
+ fn try_from(t: TtlEntity) -> Result<Self, Self::Error> {
+ let time: i32 = t.ttl.parse()?;
+ Ok(Self { id: t.id, time })
+ }
+}
+
+#[derive(sqlx::FromRow, Debug, Clone)]
+pub struct CreateTtl {
+ pub time: String,
+}
+
+#[derive(Debug)]
+pub struct TtlEdge {
+ pub node: Ttl,
+ pub cursor: String,
+}
+
+impl From<Ttl> for TtlEdge {
+ fn from(t: Ttl) -> Self {
+ let cursor = Base64Cursor::new(t.id).encode();
+ Self { node: t, cursor }
+ }
+}
diff --git a/ttl/mod.rs b/ttl/mod.rs
new file mode 100644
index 00000000..2dd8a424
--- /dev/null
+++ b/ttl/mod.rs
@@ -0,0 +1,8 @@
+mod entities;
+mod model;
+mod repository;
+mod service;
+
+// public
+pub mod resolver;
+pub use service::Service;
diff --git a/ttl/model/input.rs b/ttl/model/input.rs
new file mode 100644
index 00000000..611973f7
--- /dev/null
+++ b/ttl/model/input.rs
@@ -0,0 +1,22 @@
+use async_graphql::InputObject;
+
+use crate::scalar::Id;
+
+#[derive(InputObject)]
+pub struct CreateTtlInput {
+ /// The time for the ttl.
+ pub time: i32,
+}
+
+#[derive(InputObject)]
+pub struct UpdateTtlInput {
+ /// The ID of the Ttl to modify.
+ pub id: Id,
+ /// The time for the ttl.
+ pub time: i32,
+}
+
+#[derive(InputObject)]
+pub struct DeleteTtlInput {
+ pub id: Id,
+}
diff --git a/ttl/model/mod.rs b/ttl/model/mod.rs
new file mode 100644
index 00000000..ef1938ea
--- /dev/null
+++ b/ttl/model/mod.rs
@@ -0,0 +1,97 @@
+pub mod input;
+
+use std::sync::Arc;
+
+use async_graphql::{ComplexObject, Context, Result, SimpleObject};
+
+use crate::{context::ServerContext, relay, scalar::Id, ttl::entities};
+
+#[derive(Debug, SimpleObject)]
+pub struct Ttl {
+ /// The ID of the ttl.
+ pub id: Id,
+ /// The time for the ttl.
+ pub time: i32,
+}
+
+impl From<entities::Ttl> for Ttl {
+ fn from(t: entities::Ttl) -> Self {
+ Self {
+ id: t.id,
+ time: t.time,
+ }
+ }
+}
+
+#[derive(Debug, SimpleObject)]
+#[graphql(complex)]
+/// The connection type for ttl.
+pub struct TtlConnection {
+ /// A list of edges.
+ pub edges: Vec<TtlEdge>,
+ //
+ // helper
+ //
+ #[graphql(skip)]
+ /// Returns the elements in the list that come after the specified cursor.
+ pub after: Option<String>,
+ #[graphql(skip)]
+ pub before: Option<String>,
+ #[graphql(skip)]
+ pub first: Option<i64>,
+ #[graphql(skip)]
+ pub last: Option<i64>,
+}
+
+#[derive(Debug, SimpleObject)]
+pub struct TtlEdge {
+ /// The item at the end of the edge.
+ pub node: Ttl,
+ /// A cursor for use in pagination.
+ pub cursor: String,
+}
+
+#[ComplexObject]
+impl TtlConnection {
+ /// Information to aid in pagination.
+ async fn page_info(&self, ctx: &Context<'_>) -> Result<relay::PageInfo> {
+ let ctx = ctx.data::<Arc<ServerContext>>()?;
+ let conn = relay::Connection::new(ctx.clone());
+
+ let page_info = conn
+ .page_info(
+ self.first,
+ self.after.as_deref(),
+ self.last,
+ self.before.as_deref(),
+ )
+ .await?;
+ Ok(page_info)
+ }
+ /// Identifies the total count of items in the connection.
+ async fn total_count(&self, ctx: &Context<'_>) -> Result<i64> {
+ let ctx = ctx.data::<Arc<ServerContext>>()?;
+ let conn = relay::Connection::new(ctx.clone());
+ Ok(conn.total_count("ttl").await?)
+ }
+}
+
+impl From<entities::TtlEdge> for TtlEdge {
+ fn from(t: entities::TtlEdge) -> Self {
+ Self {
+ node: t.node.into(),
+ cursor: t.cursor,
+ }
+ }
+}
+
+impl From<entities::Ttl> for TtlEdge {
+ fn from(t: entities::Ttl) -> Self {
+ let cursor = relay::Base64Cursor::new(t.id).encode();
+ let ttl_model = t.into();
+ Self {
+ node: ttl_model,
+ cursor,
+ }
+ }
+}
diff --git a/ttl/resolver.rs b/ttl/resolver.rs
new file mode 100644
index 00000000..511b6d22
--- /dev/null
+++ b/ttl/resolver.rs
@@ -0,0 +1,95 @@
+use std::sync::Arc;
+
+use async_graphql::{Context, Error, FieldResult, Object};
+
+use super::{model, service};
+use crate::{context::ServerContext, scalar::Id};
+
+/// Root GraphQL query object for ttl operations.
+#[derive(Default)]
+pub struct TtlQuery;
+
+#[Object]
+impl TtlQuery {
+    /// Relay-style paginated list of ttls.
+    ///
+    /// `first`/`after` page forward, `last`/`before` page backward. The raw
+    /// arguments are carried on the returned connection so its `page_info`
+    /// resolver can recompute pagination.
+    // NOTE(review): all generic parameters were stripped in the extracted
+    // source; `Option<i32>`/`Option<String>` and the `FieldResult<...>`
+    // return types are reconstructed from `.as_deref()` usage and the model
+    // definitions — confirm against the service signatures.
+    pub async fn ttls(
+        &self,
+        ctx: &Context<'_>,
+        first: Option<i32>,
+        after: Option<String>,
+        last: Option<i32>,
+        before: Option<String>,
+    ) -> FieldResult<model::TtlConnection> {
+        let server_ctx = ctx.data::<Arc<ServerContext>>()?;
+        let ttl_edges = server_ctx
+            .ttl_service
+            .find_ttls(first, after.as_deref(), last, before.as_deref())
+            .await?;
+        let edges: Vec<model::TtlEdge> =
+            ttl_edges.into_iter().map(|ttl| ttl.into()).collect();
+
+        let ttl_connection = model::TtlConnection {
+            edges,
+            // pagination arguments carried through for `page_info`
+            after,
+            before,
+            first,
+            last,
+        };
+
+        Ok(ttl_connection)
+    }
+
+    /// Fetches a single ttl by its ID; service errors surface as field errors.
+    pub async fn ttl(&self, ctx: &Context<'_>, id: Id) -> FieldResult<model::Ttl> {
+        let server_ctx = ctx.data::<Arc<ServerContext>>()?;
+
+        let result = server_ctx.ttl_service.find_ttl(id).await;
+        match result {
+            Ok(res) => Ok(res.into()),
+            Err(err) => Err(Error::new(err.to_string())),
+        }
+    }
+}
+
+/// Root GraphQL mutation object for ttl operations.
+#[derive(Default)]
+pub struct TtlMutation;
+
+#[Object]
+impl TtlMutation {
+    /// Creates a ttl from the input and returns the created object.
+    // NOTE(review): `FieldResult<model::Ttl>` return types reconstructed —
+    // each arm converts a service entity via `.into()`; confirm against the
+    // service method signatures.
+    pub async fn create_ttl(
+        &self,
+        ctx: &Context<'_>,
+        input: model::input::CreateTtlInput,
+    ) -> FieldResult<model::Ttl> {
+        let server_ctx = ctx.data::<Arc<ServerContext>>()?;
+
+        // Map the GraphQL input onto the service-layer input type.
+        let service_input = service::CreateTtlInput { time: input.time };
+        let result = server_ctx.ttl_service.create_ttl(service_input).await;
+        match result {
+            Ok(res) => Ok(res.into()),
+            Err(err) => Err(Error::new(err.to_string())),
+        }
+    }
+
+    /// Updates an existing ttl and returns the updated object.
+    pub async fn update_ttl(
+        &self,
+        ctx: &Context<'_>,
+        input: model::input::UpdateTtlInput,
+    ) -> FieldResult<model::Ttl> {
+        let server_ctx = ctx.data::<Arc<ServerContext>>()?;
+
+        let service_input = service::UpdateTtlInput {
+            id: input.id,
+            time: input.time,
+        };
+        let result = server_ctx.ttl_service.update_ttl(service_input).await;
+        match result {
+            Ok(res) => Ok(res.into()),
+            Err(err) => Err(Error::new(err.to_string())),
+        }
+    }
+
+    /// Deletes a ttl by ID, returning the deleted object.
+    pub async fn delete_ttl(&self, ctx: &Context<'_>, id: Id) -> FieldResult<model::Ttl> {
+        let server_ctx = ctx.data::<Arc<ServerContext>>()?;
+
+        let result = server_ctx.ttl_service.delete_ttl(id).await;
+        match result {
+            Ok(res) => Ok(res.into()),
+            Err(err) => Err(Error::new(err.to_string())),
+        }
+    }
+}
diff --git a/ttl/service/find_ttls.rs b/ttl/service/find_ttls.rs
new file mode 100644
index 00000000..b500bfe3
--- /dev/null
+++ b/ttl/service/find_ttls.rs
@@ -0,0 +1,57 @@
+use super::Service;
+use crate::{
+ relay,
+ relay::validation::{convert_params, validate_params},
+ ttl::entities,
+};
+
+impl Service {
+    /// Returns the page of ttl edges selected by the relay pagination
+    /// arguments, validating and decoding the cursors first.
+    // NOTE(review): generic parameters were stripped in the extracted source;
+    // `Option<i32>`, `Vec<entities::TtlEdge>` and the `Result<..., crate::Error>`
+    // returns are reconstructed from caller usage in the resolver — confirm
+    // against the repository signatures.
+    pub async fn find_ttls(
+        &self,
+        first: Option<i32>,
+        after: Option<&str>,
+        last: Option<i32>,
+        before: Option<&str>,
+    ) -> Result<Vec<entities::TtlEdge>, crate::Error> {
+        validate_params(first, last)?;
+        // Decode the opaque cursors into repository IDs.
+        let (after_id, before_id) = convert_params(after, before)?;
+
+        let ttls = self
+            .repo
+            .find_all_ttls(&self.db, first, after_id, last, before_id)
+            .await?;
+        // Row -> entity conversion is fallible (integer columns may be
+        // malformed); collect into a single Result.
+        let ttls: Result<Vec<entities::Ttl>, _> =
+            ttls.into_iter().map(entities::Ttl::try_from).collect();
+
+        match ttls.ok() {
+            None => Err(crate::Error::InvalidArgument(
+                "failed to parse the integer value".into(),
+            )),
+            Some(ttls) => {
+                let ttl_edges: Vec<entities::TtlEdge> =
+                    ttls.into_iter().map(|ttl| ttl.into()).collect();
+                Ok(ttl_edges)
+            }
+        }
+    }
+
+    /// Computes the relay `PageInfo` for the same page selection, re-running
+    /// the query so the repository can inspect the surrounding rows.
+    pub async fn find_page_info(
+        &self,
+        first: Option<i32>,
+        after: Option<&str>,
+        last: Option<i32>,
+        before: Option<&str>,
+    ) -> Result<relay::PageInfo, crate::Error> {
+        let (after_id, before_id) = convert_params(after, before)?;
+
+        let ttls = self
+            .repo
+            .find_all_ttls(&self.db, first, after_id, last, before_id)
+            .await?;
+
+        let page_info = self
+            .repo
+            .find_page_info(&self.db, &ttls, first, after_id, last, before_id)
+            .await?;
+        Ok(page_info)
+    }
+}
diff --git a/ttl/service/mod.rs b/ttl/service/mod.rs
new file mode 100644
index 00000000..877da93b
--- /dev/null
+++ b/ttl/service/mod.rs
@@ -0,0 +1,33 @@
+mod check_ttl_exists;
+mod create_ttl;
+mod delete_ttl;
+mod find_ttl;
+mod find_ttl_by_time;
+mod find_ttls;
+mod update_ttl;
+
+use crate::{db::DB, scalar::Id, ttl::repository::Repository};
+
+/// ttl domain service: owns the repository and a handle to the database.
+#[derive(Debug)]
+pub struct Service {
+    // repository of ttl persistence operations
+    repo: Repository,
+    // database handle shared with callers (public for sibling modules)
+    pub db: DB,
+}
+
+impl Service {
+    /// Builds a ttl service over `db` with a freshly constructed repository.
+    pub fn new(db: DB) -> Self {
+        Self {
+            repo: Repository::new(),
+            db,
+        }
+    }
+}
+
+/// Service-layer input for creating a ttl.
+#[derive(Debug)]
+pub struct CreateTtlInput {
+    // the time value for the new ttl
+    pub time: i32,
+}
+
+/// Service-layer input for updating an existing ttl.
+#[derive(Debug)]
+pub struct UpdateTtlInput {
+    // ID of the ttl to update
+    pub id: Id,
+    // replacement time value
+    pub time: i32,
+}