Skip to content

Commit

Permalink
Make database init always explicit
Browse files Browse the repository at this point in the history
For consistency with the datacube-core and datacube-ows codebases.

And include a friendly message when the schema doesn't exist.
  • Loading branch information
jeremyh authored and Alex Leith committed Mar 19, 2020
1 parent b18ec51 commit 00a5668
Show file tree
Hide file tree
Showing 5 changed files with 46 additions and 23 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ Now install the explorer dependencies:

### Summary generation

Cache some product summaries:
Initialise and create product summaries:

nohup cubedash-gen --all &>> summary-gen.log &
nohup cubedash-gen --init --all &>> summary-gen.log &

(This can take a while the first time, depending on your datacube size.
We're using `nohup .. &` to run in the background.)
Expand Down
22 changes: 15 additions & 7 deletions cubedash/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,8 @@

_LOG = structlog.get_logger()


# pylint: disable=broad-except
def generate_report(item):
config: LocalConfig
product_name: str
force_refresh: Optional(bool) = False
def generate_report(item: Tuple[LocalConfig, str, bool]):
config, product_name, force_refresh = item
log = _LOG.bind(product=product_name)

Expand Down Expand Up @@ -142,7 +138,8 @@ def _load_products(index: Index, product_names) -> List[DatasetType]:
@click.option("--force-concurrently", is_flag=True, default=False)
@click.option(
"--init-database/--no-init-database",
default=True,
"--init",
default=False,
help="Prepare the database for use by datacube explorer",
)
@click.argument("product_names", nargs=-1)
Expand All @@ -164,7 +161,18 @@ def cli(
init_logging(open(event_log_file, "a") if event_log_file else None, verbose=verbose)

index = _get_index(config, "setup")
store = SummaryStore.create(index, init_schema=init_database)
store = SummaryStore.create(index)

if init_database:
echo("Initialising schema", err=True)
store.init()
elif not store.is_initialised():
echo(
style("No cubedash schema exists. ", fg="red")
+ "Please rerun with --init to create one",
err=True,
)
sys.exit(-1)

if generate_all_products:
products = sorted(store.all_dataset_types(), key=lambda p: p.name)
Expand Down
10 changes: 10 additions & 0 deletions cubedash/summary/_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,17 @@
)


def has_schema(engine: Engine) -> bool:
    """
    Check whether the cubedash schema has already been created
    in the connected database.
    """
    dialect = engine.dialect
    return dialect.has_schema(engine, CUBEDASH_SCHEMA)


def create_schema(engine: Engine):
    """
    Create any missing parts of the cubedash schema.

    Idempotent: uses "if not exists" for both the schema and the
    postgis extension, so it is safe to run repeatedly.
    (Requires create/extension privileges on the target database.)
    """
    engine.execute(DDL(f"create schema if not exists {CUBEDASH_SCHEMA}"))
    # Fixed: this literal had an unnecessary f-string prefix (no placeholders).
    engine.execute(DDL("create extension if not exists postgis"))

Expand Down
28 changes: 16 additions & 12 deletions cubedash/summary/_stores.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,27 +88,31 @@ def as_geojson(self):


class SummaryStore:
def __init__(
self, index: Index, summariser: Summariser, init_schema=False, log=_LOG
) -> None:
def __init__(self, index: Index, summariser: Summariser, log=_LOG) -> None:
self.index = index
self.log = log
self._update_listeners = []

self._engine: Engine = _utils.alchemy_engine(index)
self._summariser = summariser

if init_schema:
_schema.create_schema(self._engine)
def is_initialised(self) -> bool:
    """
    True if the cubedash database schema already exists.
    """
    engine = self._engine
    return _schema.has_schema(engine)

def init(self):
    """
    Create any schema elements that are missing.

    (Needs `create` permissions on the database.)
    """
    _schema.create_schema(self._engine)

@classmethod
def create(cls, index: Index, init_schema=False, log=_LOG) -> "SummaryStore":
return cls(
index,
Summariser(_utils.alchemy_engine(index)),
init_schema=init_schema,
log=log,
)
def create(cls, index: Index, log=_LOG) -> "SummaryStore":
    """Build a SummaryStore (with its Summariser) for the given datacube index."""
    engine = _utils.alchemy_engine(index)
    summariser = Summariser(engine)
    return cls(index, summariser, log=log)

def close(self):
"""Close any pooled/open connections. Necessary before forking."""
Expand Down
5 changes: 3 additions & 2 deletions integration_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,10 @@

@pytest.fixture(scope="function")
def summary_store(module_dea_index: Index) -> SummaryStore:
SummaryStore.create(module_dea_index, init_schema=False).drop_all()
store = SummaryStore.create(module_dea_index)
store.drop_all()
module_dea_index.close()
store = SummaryStore.create(module_dea_index, init_schema=True)
store.init()
return store


Expand Down

0 comments on commit 00a5668

Please sign in to comment.