Commit

Merge pull request #77 from dataforgoodfr/rs/add-amp-table-in-alembic
Add amp table in alembic
RonanMorgan authored Feb 13, 2024
2 parents e2d40d4 + 52526b1 commit 9842b6d
Showing 7 changed files with 1,365 additions and 1,459 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -28,6 +28,8 @@ share/python-wheels/
.installed.cfg
*.egg
MANIFEST
zones_subset_02022024.csv
spire_positions_subset_02022024.csv

# PyInstaller
# Usually these files are written by a python script from a template
8 changes: 7 additions & 1 deletion Makefile
@@ -8,11 +8,17 @@ build:
@docker tag d4g/bloom:${VERSION} d4g/bloom:latest

launch-dev-db:
@docker-compose -f docker-env/docker-compose-db.yaml up -d
@docker compose -f docker-env/docker-compose-db.yaml up -d
@sleep 10
$(BLOOM_DEV_DOCKER) --rm d4g/bloom:${VERSION} alembic upgrade head
$(BLOOM_DEV_DOCKER) --rm d4g/bloom:${VERSION} /venv/bin/python3 alembic/init_script/load_vessels_data.py

load-amp-data:
$(BLOOM_DEV_DOCKER) --rm d4g/bloom:${VERSION} /venv/bin/python3 alembic/init_script/load_amp_data.py

load-test-positions-data:
$(BLOOM_DEV_DOCKER) --rm d4g/bloom:${VERSION} /venv/bin/python3 alembic/init_script/load_positions_data.py

launch-dev-container:
$(BLOOM_DEV_DOCKER) -dti d4g/bloom:${VERSION} /bin/bash

alembic/init_script/load_amp_data.py
@@ -1,21 +1,9 @@
"""
This script presents a method to load geometry data in a local DB.
First, you will need 4 shape files present in the data directory:
data/Nonterrestrial_WDPA_Jan2023.dbf
data/Nonterrestrial_WDPA_Jan2023.prj
data/Nonterrestrial_WDPA_Jan2023.shp
data/Nonterrestrial_WDPA_Jan2023.shx
Then, you will have to spawn database and pgadmin containers locally,
using the db.yaml docker compose file.
#! docker compose up -d postgres pgadmin
Once images are built and running, you can run the following
python script from the root of the bloom project.
"""
import logging
import os

import geopandas as gpd
import pandas as pd
from shapely import wkb
from sqlalchemy import create_engine

logging.basicConfig()
@@ -42,6 +30,20 @@
+ postgres_db
)
engine = create_engine(db_url, echo=False)
gdf = gpd.read_file("data/Nonterrestrial_WDPA_Jan2023.shp")

gdf.to_postgis("mpa", con=engine, if_exists="append", index=False)
df = pd.read_csv("zones_subset_02022024.csv")
df = df.rename(columns={"Geometry": "geometry",
"Index": "index", "Wdpaid": "WDPAID",
"Name": "name",
"Desig Eng": "DESIG_ENG",
"Desig Type": "DESIG_TYPE",
"Iucn Cat": "IUCN_CAT",
"Parent Iso": "PARENT_ISO",
"Iso3": "ISO3",
"Beneficiaries": "BENEFICIARIES"})

df['geometry'] = df['geometry'].apply(wkb.loads)
gdf = gpd.GeoDataFrame(df, crs='epsg:4326')
gdf.head()

gdf.to_postgis("mpa_fr_with_mn", con=engine, if_exists="replace", index=False)
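
For reference, a minimal sketch of the WKB round-trip the new loading code relies on, assuming shapely 2.x (where wkb.loads accepts both raw bytes and hex-encoded strings, the representation the CSV geometry column is assumed to use):

import geopandas as gpd
from shapely import wkb
from shapely.geometry import Point

point = Point(2.35, 48.85)
hex_wkb = point.wkb_hex      # hex-encoded WKB, the form assumed for the CSV geometry column
geom = wkb.loads(hex_wkb)    # shapely 2.x parses the hex string back into a geometry object
gdf = gpd.GeoDataFrame({"name": ["example"]}, geometry=[geom], crs="epsg:4326")
print(gdf.geometry.iloc[0])  # POINT (2.35 48.85)
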
36 changes: 36 additions & 0 deletions alembic/init_script/load_positions_data.py
@@ -0,0 +1,36 @@
import logging
import os
from pathlib import Path

import pandas as pd
from sqlalchemy import create_engine

logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

postgres_user = os.environ.get("POSTGRES_USER")
postgres_password = os.environ.get("POSTGRES_PASSWORD")
postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
postgres_db = os.environ.get("POSTGRES_DB")
postgres_port = os.environ.get("POSTGRES_PORT")

# The db url is configured with the db connection variables declared in the db.yaml file.
db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
)
engine = create_engine(db_url)
df = pd.read_csv(
Path.joinpath(Path.cwd(), "spire_positions_subset_02022024.csv"),
sep=","
)

df.to_sql("spire_vessel_positions", engine, if_exists="append", index=False)
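
A quick way to sanity-check the load, as a sketch assuming the same POSTGRES_* environment variables are set; the table name spire_vessel_positions comes from the script above:

import os
import pandas as pd
from sqlalchemy import create_engine

db_url = (
    f"postgresql://{os.environ['POSTGRES_USER']}:{os.environ['POSTGRES_PASSWORD']}"
    f"@{os.environ['POSTGRES_HOSTNAME']}:{os.environ['POSTGRES_PORT']}/{os.environ['POSTGRES_DB']}"
)
engine = create_engine(db_url)
row_count = pd.read_sql("SELECT COUNT(*) AS n FROM spire_vessel_positions", engine)
print(row_count["n"].iloc[0])  # number of position rows loaded from the CSV
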
43 changes: 43 additions & 0 deletions alembic/versions/961cee5426d6_create_amp_table.py
@@ -0,0 +1,43 @@
"""create amp table
Revision ID: 961cee5426d6
Revises: 1fd83d22bd1e
Create Date: 2024-02-11 22:10:19.010986
"""
from alembic import op
import sqlalchemy as sa
import geoalchemy2

# revision identifiers, used by Alembic.
revision = '961cee5426d6'
down_revision = '1fd83d22bd1e'
branch_labels = None
depends_on = None


def upgrade() -> None:
op.create_table("mpa_fr_with_mn",
sa.Column("index", sa.Integer, primary_key=True),
sa.Column("wdpaid", sa.Integer),
sa.Column("name", sa.String, nullable=False),
sa.Column("desig_eng", sa.String),
sa.Column("desig_type", sa.String),
sa.Column("iucn_cat", sa.String),
sa.Column("parent_iso", sa.String),
sa.Column("iso3", sa.String),
sa.Column("geometry", geoalchemy2.types.Geometry(geometry_type="GEOMETRY", srid=4326)),
sa.Column("benificiaries", sa.String)
)


def downgrade() -> None:
conn = op.get_bind()
inspector = sa.inspect(conn)
sql_tables = inspector.get_table_names()
tables = [
"mpa_fr_with_mn",
]
for t in tables:
if t in sql_tables:
op.drop_table(t)
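
Once the migration and the load-amp-data target have run, the new table can be read back as a GeoDataFrame, for example (a sketch; the connection string below uses placeholder credentials, not values from this repository):

import geopandas as gpd
from sqlalchemy import create_engine

# placeholder credentials for illustration only
engine = create_engine("postgresql://user:password@localhost:5432/bloom_db")
amp_zones = gpd.read_postgis("SELECT * FROM mpa_fr_with_mn", con=engine, geom_col="geometry")
print(amp_zones.shape)
print(amp_zones.head())
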
