# models.py — SQLAlchemy / GeoAlchemy2 model definitions
import datetime
import json
from sqlalchemy import Column, BigInteger, Boolean, DateTime, Index, Integer, String
from sqlalchemy.sql import expression, func
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.inspection import inspect
from geoalchemy2 import Geometry
from geoalchemy2.shape import to_shape
from shapely.geometry import mapping
# Shared declarative base; all ORM models in this module inherit from it.
Base = declarative_base()
# Here we store the current and past import runs
class Analysis(Base):
    """One analysis run for a city: selected datasets, parameters and status."""
    __tablename__ = 'analyses'
    id = Column(Integer, primary_key=True)
    slug = Column(String, unique=True, nullable=False)
    # By default, populate the slug from the city name. Still, future proof
    # the DB just in case we start having multiple analyses with multiple
    # slugs for the same city, or analyses/cities with identical names.
    name = Column(String, nullable=False)
    bbox = Column(Geometry(geometry_type='POLYGON'))
    # Future-proof the datasets field too. We might get more datasets, we
    # don't want to have to do migrations to the analysis table in that case.
    datasets = Column(JSONB) # mark datasets like {selected: ['osm', 'flickr'], imported: ['osm']}
    parameters = Column(JSONB) # mark params like {gtfs: {url: http://example.com}}
    # BUG FIX: pass the *callable* datetime.datetime.now, not its result.
    # Calling now() here evaluates once at import time, stamping every row
    # created by this process with the same import-time timestamp.
    start_time = Column(DateTime, nullable=False, default=datetime.datetime.now, server_default=func.now())
    finish_time = Column(DateTime)
    # set this field True once the user has seen the result
    viewed = Column(Boolean, nullable=False, default=False, server_default=expression.false())

    def serialize(self):
        """Return a dict of this row's column values, JSON-friendly bbox included.

        The raw geometry value (WKBElement) doesn't jsonify, so bbox is
        converted to a GeoJSON-style mapping via shapely. A NULL bbox is
        passed through as None instead of crashing in to_shape().
        """
        attrs = {c: getattr(self, c) for c in inspect(self).attrs.keys()}
        # WKBelement doesn't jsonify; guard against a nullable, unset bbox
        if attrs['bbox'] is not None:
            attrs['bbox'] = mapping(to_shape(attrs['bbox']))
        return attrs
# This is for data in slug-specific schemas
class SchemaBase(Base):
    """Abstract base for per-analysis tables that live in slug-named schemas."""
    __abstract__ = True

    @declared_attr
    def __table_args__(cls):
        # geoalchemy doesn't apply schema_translate_map when it autocreates
        # the spatial index (https://github.com/geoalchemy/geoalchemy2/issues/137),
        # so the GiST index on geom must be declared by hand here.
        geom_index = Index(
            f'idx_{cls.__tablename__}_geom', 'geom', postgresql_using='gist'
        )
        # The placeholder schema name gets rewritten at run time through
        # schema_translate_map.
        return (geom_index, {'schema': 'schema'})
class OSMPoint(SchemaBase):
    """OSM point features for one analysis, stored in its slug schema."""
    __tablename__ = 'osmpoints'
    node_id = Column(BigInteger, primary_key=True)  # presumably the OSM node id — confirm against importer
    tags = Column(JSONB)  # key/value tags kept schemaless in JSONB
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase
# Use JSONB field for all datasets so we won't need migrations in the future
class FlickrPoint(SchemaBase):
    """Flickr point data for one analysis, stored in its slug schema."""
    __tablename__ = 'flickrpoints'
    point_id = Column(BigInteger, primary_key=True)
    properties = Column(JSONB)  # schemaless per-point attributes
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase
class GTFSStop(SchemaBase):
    """GTFS transit stops for one analysis, stored in its slug schema."""
    __tablename__ = 'gtfsstops'
    stop_id = Column(String, primary_key=True)  # GTFS stop ids are strings, not ints
    properties = Column(JSONB)  # schemaless per-stop attributes
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase
# We usually have a completely different (denser) dataset of nodes for local
# accessibility. Makes no sense to save them in the same table as the OSM POIs.
class OSMAccessNode(SchemaBase):
    """Dense OSM nodes carrying local-accessibility values, per slug schema."""
    __tablename__ = 'osmaccessnodes'
    node_id = Column(BigInteger, primary_key=True)  # presumably the OSM node id — confirm against importer
    accessibilities = Column(JSONB)  # accessibility metrics per node, kept schemaless
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase
class OoklaPoint(SchemaBase):
    """Ookla (internet speed) tile points for one analysis, per slug schema."""
    __tablename__ = 'ooklapoints'
    quadkey_id = Column(BigInteger, primary_key=True)  # tile identified by its quadkey
    properties = Column(JSONB)  # schemaless per-tile attributes
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase
class KonturPoint(SchemaBase):
    """Kontur (population hexagon) points for one analysis, per slug schema."""
    __tablename__ = 'konturpoints'
    hex_id = Column(BigInteger, primary_key=True)  # presumably an H3-style hex id — confirm against importer
    properties = Column(JSONB)  # schemaless per-hex attributes
    geom = Column(Geometry(geometry_type='POINT', spatial_index=False))  # GiST index declared in SchemaBase