diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..72dd181
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM python:3.7.4
+
+COPY . /app
+WORKDIR "/app"
+
+# Development version:
+
+RUN pip install tox
+
+RUN python3.7 setup.py develop
diff --git a/README.md b/README.md
index 205c7e0..85bfb76 100644
--- a/README.md
+++ b/README.md
@@ -37,14 +37,23 @@ $ export BABBAGE_TEST_DB=postgresql://postgres@localhost:5432/postgres
 $ make test
 ```
 
+### Docker install
+
+The development version of Babbage can be installed via docker.
+Here's how to build and run the tests:
+
+    docker-compose build
+    docker-compose run db psql -h db -U postgres -c "CREATE DATABASE babbage_test"
+    docker-compose run babbage tox
+
 ## Usage
 
-``babbage`` is used to query a set of existing database tables, using an 
+``babbage`` is used to query a set of existing database tables, using an
 abstract, logical model to query them. A sample of a logical model can be
 found in ``tests/fixtures/models/cra.json``, and a JSON schema specifying the
 model is available in ``babbage/schema/model.json``.
 
-The central unit of ``babbage`` is a ``Cube``, i.e. a [OLAP cube](https://en.wikipedia.org/wiki/OLAP_cube) that uses the provided model metadata to construct queries 
+The central unit of ``babbage`` is a ``Cube``, i.e. a [OLAP cube](https://en.wikipedia.org/wiki/OLAP_cube) that uses the provided model metadata to construct queries
 against a database table. Additionally, the application supports
 managing multiple cubes at the same time via a ``CubeManager``, which can be
 subclassed to enable application-specific ways of defining cubes and where
@@ -99,7 +108,7 @@ aggregate = cube.aggregate(aggregates='total_value.sum',
                            drilldowns='supplier|authority',
                            cut='year:2015|authority.country:GB',
                            page_size=500)
-# This translates to: 
+# This translates to:
 # Aggregate the procurement data by summing up the 'total_value'
 # for each unique pair of values in the 'supplier' and 'authority'
 # dimensions, and filter for only those entries where the 'year'
@@ -123,7 +132,7 @@ assert 'authority.label' in aggregate_0
 The HTTP API for ``babbage`` is a simple Flask [Blueprint](http://flask.pocoo.org/docs/latest/blueprints/)
 used to expose a small set of calls that correspond to the cube functions listed above. To include it
 into an existing Flask application, you would need to create a ``CubeManager`` and then
-configure the API like this: 
+configure the API like this:
 
 ```python
 from flask import Flask
@@ -132,7 +141,7 @@ from babbage.manager import JSONCubeManager
 from babbage.api import configure_api
 
 app = Flask('demo')
-engine = 
+engine =
 models_directory = 'models/'
 manager = JSONCubeManager(engine, models_directory)
 blueprint = configure_api(app, manager)
@@ -149,19 +158,18 @@ relative to the given ``url_prefix``:
 
 * ``/``, returns the system status and version.
 * ``/cubes``, returns a list of the available cubes (name only).
-* ``/cubes/<name>/model``, returns full metadata for a given 
+* ``/cubes/<name>/model``, returns full metadata for a given
 cube (i.e. measures, dimensions, aggregates etc.)
 * ``/cubes/<name>/facts`` is used to return individual entries from the cube
 in a non-aggregated form. Supports filters (``cut``), a set of ``fields`` to
 return and a ``sort`` (``field_name:direction``), as well as ``page`` and
 ``page_size``.
-* ``/cubes/<name>/members`` is used to return the distinct set of 
+* ``/cubes/<name>/members`` is used to return the distinct set of
 values for a given dimension, e.g. all the suppliers mentioned in a procurement dataset.
 Supports filters (``cut``), and a ``sort`` (``field_name:direction``), as well
 as ``page`` and ``page_size``.
-* ``/cubes/<name>/aggregate`` is the main endpoint for generating 
+* ``/cubes/<name>/aggregate`` is the main endpoint for generating
 aggregate views of the data. Supports specifying the ``aggregates`` to
 include, the ``drilldowns`` to aggregate by, a set of filters (``cut``),
 and a ``sort`` (``field_name:direction``), as well as ``page`` and
 ``page_size``.
-
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..6d364c5
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,34 @@
+version: "3"
+
+services:
+  # Note for connecting: host is 'db', default user is 'postgres' no password
+  db:
+    image: postgres:11
+    environment:
+      - POSTGRES_DB=data_sync
+    volumes:
+      - postgres_babbage:/var/lib/postgresql/data/
+      # mount the current directory in case we want to restore postgres dumps
+      - .:/app
+    # Open up a local port to Postgres, not in conflict with
+    # any other Postgres you may already be running
+    ports:
+      - "6645:5432"
+
+  babbage:
+    build: .
+    depends_on: ["db"]
+    environment:
+      - PYTHONDONTWRITEBYTECODE=true
+      - PYTHONPATH=.
+      - BABBAGE_TEST_DB=postgresql://postgres@db:5432/babbage_test
+    volumes:
+      # mount current directory so that changes in files are reflected
+      # in the running environment
+      - .:/app
+    # Open up 5005, hopefully not in conflict with any other you may be running
+    ports:
+      - "5005:5000"
+
+volumes:
+  postgres_babbage:
diff --git a/tox.ini b/tox.ini
index d56c0f9..05a31aa 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,10 +1,7 @@
 [tox]
 package = babbage
 envlist =
-    py27
-    py34
-    py35
-    py36
+    py37
     lint
 skip_missing_interpreters = true
 
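
Not part of the patch itself, but a minimal usage sketch of the new setup. The service names (`db`, `babbage`), the `babbage_test` database, and host port `6645` come from the docker-compose.yaml added above; the explicit `docker-compose up -d db` step and the host-side `psql` check are illustrative assumptions, not part of the documented workflow, and they require Docker Compose and a local psql client on the host.

```bash
# Build the babbage image defined by the new Dockerfile (python:3.7.4, tox, setup.py develop).
docker-compose build

# Start Postgres and create the database that BABBAGE_TEST_DB points at.
docker-compose up -d db
docker-compose run db psql -h db -U postgres -c "CREATE DATABASE babbage_test"

# Run the test suite (py37 + lint, per the updated tox.ini) inside the babbage container.
docker-compose run babbage tox

# Optional: the db service publishes container port 5432 on host port 6645,
# so a local client can connect as the default 'postgres' user (no password,
# per the comment in docker-compose.yaml).
psql -h localhost -p 6645 -U postgres -c '\l'
```

The non-standard host port is deliberate: as the compose file comments note, it avoids clashing with any Postgres instance already running on the host.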